content stringlengths 5 1.05M |
|---|
# tag::oneplane_imports[]
import numpy as np
from dlgo.encoders.base import Encoder
from dlgo.goboard import Point
# end::oneplane_imports[]
# tag::oneplane_encoder[]
class OnePlaneEncoder(Encoder):
    """Simplest Go board encoder: a single feature plane.

    Each point is 1 for the next player's stones, -1 for the opponent's
    stones and 0 for an empty point.
    """

    def __init__(self, board_size):
        # board_size is a (width, height) pair.
        self.board_width, self.board_height = board_size
        self.num_planes = 1

    def name(self):
        """Name under which this encoder is registered."""
        return 'oneplane'

    def encode(self, game_state):
        """Return a (1, height, width) array encoding *game_state*."""
        board_matrix = np.zeros(self.shape())
        next_player = game_state.next_player
        for row in range(self.board_height):
            for col in range(self.board_width):
                point = Point(row=row + 1, col=col + 1)
                go_string = game_state.board.get_go_string(point)
                if go_string is None:
                    continue
                board_matrix[0, row, col] = 1 if go_string.color == next_player else -1
        return board_matrix
# end::oneplane_encoder[]

# tag::oneplane_encoder_2[]
    def encode_point(self, point):
        """Turn a board point into a flat integer index."""
        return self.board_width * (point.row - 1) + (point.col - 1)

    def decode_point_index(self, index):
        """Turn a flat integer index back into a board point."""
        row, col = divmod(index, self.board_width)
        return Point(row=row + 1, col=col + 1)

    def num_points(self):
        """Total number of points on the board."""
        return self.board_width * self.board_height

    def shape(self):
        """Shape of an encoded board: (planes, height, width)."""
        return self.num_planes, self.board_height, self.board_width
# end::oneplane_encoder_2[]
# tag::oneplane_create[]
def create(board_size):
    # Factory hook used by the encoder-loading machinery to build this encoder by name.
    return OnePlaneEncoder(board_size)
# end::oneplane_create[]
|
""" Settings submodule """
# Forward or reverse modes, options are "reverse" or "forward"
__DEFAULT_AD_MODE__ = "forward"
class Settings():
def __init__(self):
self.mode = __DEFAULT_AD_MODE__
def set_mode(self, mode):
if mode not in ["reverse", "forward"]:
raise ValueError("Mode must be either \"forward\" or \"reverse\"")
self.mode = mode
def current_mode(self):
return self.mode
# Global settings
settings = Settings()
|
import lacosmic
import matplotlib.colors as colors
import matplotlib.pyplot as plt
import numpy as np
from os import path
import autocti as ac
"""
__Path__
Grab the relative path to this file for loading the charge injection imaging which includes cosmic rays.
"""
dir_path = path.dirname(path.realpath(__file__))
"""
__Dataset Paths__
The 'dataset_type' describes the type of data being simulated (in this case, imaging data). They define the folder
the dataset is output to on your hard-disk:
- The image will be output to '/autocti_workspace/dataset/dataset_type/image.fits'.
- The noise-map will be output to '/autocti_workspace/dataset/dataset_type/noise_map.fits'.
- The pre_cti_data will be output to '/autocti_workspace/dataset/dataset_type/pre_cti_data.fits'.
"""
# dataset_type = "imaging_noise"
# dataset_type = "imaging_ci"
dataset_type = "imaging_ci_non_uniform"
"""
Returns the path where the dataset will be output, which in this case is
'/autocti_workspace/dataset/imaging_ci/uniform_cosmic_rays/parallel_x1'
"""
dataset_path = path.join(dir_path, "dataset", dataset_type)
"""
__Layout__
The 2D shape of the image.
"""
shape_native = (500, 500)
"""
The locations (using NumPy array indexes) of the parallel overscan, serial prescan and serial overscan on the image.
"""
parallel_overscan = ac.Region2D((1980, 2000, 5, 95))
serial_prescan = ac.Region2D((0, 2000, 0, 5))
serial_overscan = ac.Region2D((0, 1980, 95, 100))
"""
Specify the charge injection regions on the CCD, which in this case is 5 equally spaced rectangular blocks.
"""
regions_list = [
# (0, 200, serial_prescan[3], serial_overscan[2]),
# (400, 600, serial_prescan[3], serial_overscan[2]),
# (800, 1000, serial_prescan[3], serial_overscan[2]),
# (1200, 1400, serial_prescan[3], serial_overscan[2]),
# (1600, 1800, serial_prescan[3], serial_overscan[2]),
]
"""
The normalization of every charge injection image, which determines how many images are simulated.
"""
normalization_list = [100, 5000, 25000, 200000]
"""
Create the layout of the charge injection pattern for every charge injection normalization.
"""
layout_list = [
ac.Layout2DCI(
shape_2d=shape_native,
region_list=regions_list,
normalization=normalization,
parallel_overscan=parallel_overscan,
serial_prescan=serial_prescan,
serial_overscan=serial_overscan,
)
for normalization in normalization_list
]
"""
We can now load every image, noise-map and pre-CTI charge injection image as instances of the `ImagingCI` object.
"""
imaging_ci_list = [
ac.ImagingCI.from_fits(
image_path=path.join(dataset_path, f"image_{int(layout.normalization)}.fits"),
noise_map_path=path.join(
dataset_path, f"noise_map_{int(layout.normalization)}.fits"
),
pre_cti_data_path=path.join(
dataset_path, f"pre_cti_data_{int(layout.normalization)}.fits"
),
cosmic_ray_map_path=path.join(
dataset_path, f"cosmic_ray_map_{int(layout.normalization)}.fits"
),
layout=layout,
pixel_scales=0.1,
)
for layout in layout_list
]
"""
__LACosmic Cosmic Ray Flagging__
Use the LACosmic algorithm to flag cosmic rays in the data.
"""
# Flag cosmic rays in every dataset with LACosmic and compare the flags
# against the simulated ground-truth cosmic-ray map.
for imaging_ci in imaging_ci_list:
    clean_data, cr_flag_mask = lacosmic.lacosmic(
        data=imaging_ci.data.native,
        contrast=1.0,
        cr_threshold=4.0,
        neighbor_threshold=4.0,
        error=imaging_ci.noise_map.native,
        maxiter=8,
    )
    # BUG FIX: `normalization` was previously the list-comprehension variable
    # used to build `layout_list`, which does NOT leak into module scope in
    # Python 3 (NameError here). Take it from this dataset's layout instead.
    normalization = imaging_ci.layout.normalization
    figsize = (50, 40)
    norm = colors.Normalize(vmin=0.0, vmax=normalization)
    # Raw data.
    plt.figure(figsize=figsize)
    plt.imshow(imaging_ci.data.native, norm=norm)
    plt.colorbar()
    plt.show()
    # LACosmic-cleaned data.
    plt.figure(figsize=figsize)
    plt.imshow(clean_data, norm=norm)
    plt.colorbar()
    plt.show()
    # Mask of pixels LACosmic flagged as cosmic rays.
    plt.figure(figsize=figsize)
    plt.imshow(cr_flag_mask)
    plt.colorbar()
    plt.show()
    # Ground-truth mask from the simulated cosmic-ray map.
    cr_mask_true = imaging_ci.cosmic_ray_map.native > 0.0
    plt.figure(figsize=figsize)
    plt.imshow(cr_mask_true)
    plt.colorbar()
    plt.show()
    total_cr_true = np.sum(cr_mask_true)
    total_cr_flagged = np.sum(cr_flag_mask)
    # True positives: flagged AND truly cosmic rays.
    cr_flagged_correctly_map = cr_mask_true * cr_flag_mask
    total_cr_flagged_correctly = np.sum(cr_flagged_correctly_map)
    # False positives: flagged but not truly cosmic rays.
    cr_flagged_incorrectly_map = np.logical_and(cr_flag_mask, np.invert(cr_mask_true))
    total_cr_flagged_incorrectly = np.sum(cr_flagged_incorrectly_map)
    # False negatives: truly cosmic rays that were not flagged.
    cr_unflagged_map = np.logical_and(np.invert(cr_flag_mask), cr_mask_true)
    total_cr_unflagged = np.sum(cr_unflagged_map)
    max_unflagged_signal_to_noise = np.max(
        imaging_ci.absolute_signal_to_noise_map.native[cr_unflagged_map == True]
    )
    print(f"NORMALIZATION {normalization}\n")
    print(f"CR True = {total_cr_true}")
    print(f"CR Flagged = {total_cr_flagged}")
    print("\nFlagging Values: \n")
    print(f"CR Correct Flags = {total_cr_flagged_correctly}")
    print(f"CR Incorrect Flags = {total_cr_flagged_incorrectly}")
    print(f"CR Unflags = {total_cr_unflagged}")
    print("\nFlagging Percentages: \n")
    print(f"CR Correct Flags = {100.0*total_cr_flagged_correctly / total_cr_true}")
    print(f"CR Incorrect Flags = {100.0*total_cr_flagged_incorrectly / total_cr_true}")
    print(f"CR Unflags = {100.0*total_cr_unflagged / total_cr_true}")
    print("\n Other Stats: \n")
    print(f"Max Unflagged S/N = {max_unflagged_signal_to_noise}")
    print()
|
from selenium.webdriver.common.by import By
from selenium.webdriver.remote.webdriver import WebDriver
from selenium.webdriver.support.select import Select
from page_objects.base_page import BasePage
class EmailDetailsPage(BasePage):
    """Page object for the email-report details page.

    Exposes the report fields as read-only properties and the three
    prediction dropdowns as selenium Select wrappers. `details_data`
    snapshots everything into one dict for test assertions.
    """

    FROM_EMAIL_LOCATOR = (By.CSS_SELECTOR, "#report_from_email")
    FROM_NAME_LOCATOR = (By.CSS_SELECTOR, "#report_from_name")
    SUBJECT_LOCATOR = (By.CSS_SELECTOR, "#report_subject")
    BODY_TEXT_LOCATOR = (By.CSS_SELECTOR, "#report_body_text")
    IS_REPLY_LOCATOR = (By.CSS_SELECTOR, "#report_is_reply")
    HAS_ATTACHMENT_LOCATOR = (By.CSS_SELECTOR, "#report_has_attachment")
    ATTACHMENT_NAMES_LOCATOR = (By.CSS_SELECTOR, "#report_attachment_names")
    APP_RATING_LOCATOR = (By.CSS_SELECTOR, "#report_app_rating")
    APP_VERSION_LOCATOR = (By.CSS_SELECTOR, "#report_app_version")
    SENT_REPLY_LOCATOR = (By.CSS_SELECTOR, "#report_sent_message")
    LANGUAGE_LOCATOR = (By.CSS_SELECTOR, "#report_language")
    SELECT_CATEGORY_PREDICTION_LOCATOR = (By.CSS_SELECTOR, "#id_rule_category")
    SELECT_TOPIC_PREDICTION = (By.CSS_SELECTOR, "#id_topic_prediction")
    SELECT_PREDICTIONS_RATING_LOCATOR = (By.CSS_SELECTOR, "#id_rating_status")

    def __init__(self, driver):
        super().__init__(driver)
        self.driver: WebDriver = driver

    def category_prediction(self):
        """Select wrapper for the category-prediction dropdown."""
        return Select(self.get_element(by_locator=self.SELECT_CATEGORY_PREDICTION_LOCATOR))

    def topic_prediction(self):
        """Select wrapper for the topic-prediction dropdown."""
        return Select(self.get_element(by_locator=self.SELECT_TOPIC_PREDICTION))

    def prediction_rating(self):
        """Select wrapper for the prediction-rating dropdown."""
        return Select(self.get_element(by_locator=self.SELECT_PREDICTIONS_RATING_LOCATOR))

    @property
    def from_email_text(self) -> str:
        return self.get_element_text(by_locator=self.FROM_EMAIL_LOCATOR)

    @property
    def form_name_text(self) -> str:
        # Kept under this name for backward compatibility ('form' is a typo of 'from').
        return self.get_element_text(by_locator=self.FROM_NAME_LOCATOR)

    @property
    def from_name_text(self) -> str:
        """Correctly spelled alias for `form_name_text`."""
        return self.form_name_text

    @property
    def subject_text(self) -> str:
        return self.get_element_text(by_locator=self.SUBJECT_LOCATOR)

    @property
    def body_text(self) -> str:
        return self.get_element_text(by_locator=self.BODY_TEXT_LOCATOR)

    @property
    def is_reply(self) -> bool:
        # The flag is rendered as a green/other icon; inspect its src attribute.
        src_icon = self.get_src_attribute(by_locator=self.IS_REPLY_LOCATOR)
        return self.icon_is_green(string=src_icon)

    @property
    def has_attachment(self) -> bool:
        src_icon = self.get_src_attribute(by_locator=self.HAS_ATTACHMENT_LOCATOR)
        return self.icon_is_green(string=src_icon)

    @property
    def attachment_names_text(self) -> str:
        return self.get_element_text(by_locator=self.ATTACHMENT_NAMES_LOCATOR)

    @property
    def app_ratting_text(self) -> str:
        # Kept under this name for backward compatibility ('ratting' is a typo of 'rating').
        return self.get_element_text(by_locator=self.APP_RATING_LOCATOR)

    @property
    def app_rating_text(self) -> str:
        """Correctly spelled alias for `app_ratting_text`."""
        return self.app_ratting_text

    @property
    def app_version_text(self) -> str:
        return self.get_element_text(by_locator=self.APP_VERSION_LOCATOR)

    @property
    def sent_reply_text(self) -> str:
        return self.get_element_text(by_locator=self.SENT_REPLY_LOCATOR)

    @property
    def language_text(self) -> str:
        return self.get_element_text(by_locator=self.LANGUAGE_LOCATOR)

    @property
    def details_data(self) -> dict:
        """Snapshot of every visible report field (keys are stable API)."""
        return {
            "from_email": self.from_email_text,
            "from_name": self.form_name_text,
            "subject": self.subject_text,
            "body_text": self.body_text,
            "is_reply": self.is_reply,
            "has_attachment": self.has_attachment,
            "attachment_names": self.attachment_names_text,
            "app_rating": self.app_ratting_text,
            "app_version": self.app_version_text,
            "sent_reply": self.sent_reply_text,
            "language": self.language_text,
            "category_prediction": self.category_prediction().first_selected_option.text,
            "category_prediction_is_clickable": self.is_clickable(
                by_locator=self.SELECT_CATEGORY_PREDICTION_LOCATOR
            ),
            "topic_prediction": self.topic_prediction().first_selected_option.text,
            "topic_prediction_is_clickable": self.is_clickable(
                by_locator=self.SELECT_TOPIC_PREDICTION
            ),
            "prediction_rating": self.prediction_rating().first_selected_option.text,
            "prediction_rating_is_clickable": self.is_clickable(
                by_locator=self.SELECT_PREDICTIONS_RATING_LOCATOR
            ),
        }
|
#!/usr/bin/env python
import os
import sys
import re
import json
from pandocfilters import toJSONFilter, walk, Str, Plain, Para, RawInline
from subprocess import Popen, PIPE, call
def include2json(file, format):
    """Render markdown file *file* to *format* with pandoc and wrap the
    result as a RawInline element.

    Pandoc is run with this script as its own --filter, so nested
    "<<[...]" includes are expanded recursively.
    """
    # FIX: the include file handle was opened and never closed; use a
    # context manager so it is released even if pandoc fails.
    with open(file) as inc:
        p = Popen(
            ["pandoc", "-f", "markdown", "-t", format, "--filter", os.path.abspath(__file__)],
            stdin=inc,
            stdout=PIPE,
        )
        output, _ = p.communicate()
    return RawInline(format, output)
# Matches whole-string include directives of the form "<<[path/to/file.md]".
pattern = re.compile(r"<<\[(.*)\]$")


def include(key, value, format, meta):
    """Pandoc filter action: replace "<<[file]" string tokens with the
    rendered contents of *file*; every other node is left untouched."""
    if key != 'Str':
        return None
    match = pattern.match(value)
    if match is None:
        return None
    return include2json(match.group(1), format)


if __name__ == "__main__":
    toJSONFilter(include)
|
#!usr/bin/python
#
# Data structure to model Table
#
from common import log_msg
import sys
DEFAULT_NA_VALUE = "N/A"
class Table:
#
# self._columns = ["col1", "col2", ... , "coln"]
# self.__column_defaults = {"col1": "val1", "col2": "val2", ... , "coln" : "valn"}
# self._rows = {"rowkey1": {row1}, "rowkey2": {row2}, ... , "rowkeym" : {rowm}}
# rowi = {"col1" : "val1i", "col2" : "val2i", ... , "coln" : "valni"}
#
#
# type of col,rowkey should be string
# type of val can be anything
#
def __init__(self, columns = None, rows = None, column_defaults = None):
self._columns = [] if columns is None else columns[:]
self._column_defaults = dict([(col, DEFAULT_NA_VALUE) for col in self._columns]) if column_defaults is None else column_defaults
self._rows = {} if rows is None else rows.copy()
# verify state after init
self._verify_state("init")
def _verify_state(self, state_msg):
# type invariants
assert type(self._columns) == list, "after %s: columns should be a list" % state_msg
assert all((type(col) == str for col in self._columns)), "after %s: each column should be string" % state_msg
assert type(self._column_defaults) == dict, "after %s: column defaults should be a dictionary" % state_msg
assert type(self._rows) == dict, "after %s: rows should be a dictionary" % state_msg
assert all((type(row) == dict for row in self._rows.values())), "after %s: each row should be a dictionary" % state_msg
assert all((type(rowkey) == str for rowkey in self._rows.keys())), "after %s: each row should be a dictionary" % state_msg
# dimension invariants
assert len(self._columns) == len(self._column_defaults), "after %s: too less column defaults!" % state_msg
assert all((len(self._columns) == len(row) for row in self._rows.values())), "after %s: missing values for some columns in one or more rows!" % state_msg
cols_set = set(self._columns)
assert all((cols_set == set(row.keys()) for row in self._rows.values())), "after %s: invalid columns in some rows" % state_msg
def insert_column(self, index, column_name, row_vals = None, column_default_val = DEFAULT_NA_VALUE):
# sanitize row_vals
row_vals = {} if row_vals is None else row_vals
# insert new column
self._columns.insert(index, column_name)
# add default value for the same
self._column_defaults[column_name] = column_default_val
# update for all other columns
[row.update({column_name : row_vals.get(rowkey, column_default_val)}) for (rowkey, row) in self._rows.iteritems()]
# verify the state
self._verify_state("insert_column")
def append_column(self, column_name, row_vals = None, column_default_val = DEFAULT_NA_VALUE):
self.insert_column(len(self._columns), column_name, row_vals, column_default_val)
def set_row(self, rowkey, row = None):
row = {} if row is None else row
# check for nay new columns and append them to the table
[self.append_column(new_col) for new_col in set(row.keys()) - set(self._columns)]
# add a new row
self._rows[rowkey] = dict(((column_name, row.get(column_name, self._column_defaults.get(column_name))) for column_name in self._columns))
# verify state
self._verify_state("set_row with row_key: " + str(rowkey))
def delete_column(self, column_name):
if column_name in self._columns:
# remove from column
self._columns.remove(column_name)
# remove the defualt entry
del self._column_defaults[column_name]
# pop all entries from all rows
[row.pop(column_name) for row in self._rows.values()]
self._verify_state("delete column => " + str(column_name))
def delete_row(self, row_key):
if self._rows.has_key(row_key):
self._rows.pop(row_key)
self._verify_state("delete row => " + str(row_key))
def column_keys(self):
return self._columns[:]
def row_keys(self):
return self._rows.keys()[:]
def row(self, key):
return self._rows[key].copy()
# iter methods of the table
def iter_row(self, row_key, action = lambda r_key, col, val : sys.stderr.write(str(val) + "\t")):
if self._rows.has_key(row_key):
return [action(row_key, col, val) for (col, val) in ((c, self._rows[row_key][c]) for c in self._columns)]
def iter_column(self, column_name, action = lambda r_key, col, val : sys.stderr.write(str(val) + "\n")):
if column_name in self._columns:
return [action(row_key, column_name, row[column_name]) for (row_key, row) in self._rows.iteritems()]
def iter_row_x_column(self, action = lambda r_key, col, val : sys.stderr.write(str(val) + "\t"), next_iter_action = lambda r_key: sys.stderr.write('\n')):
for rowkey in self._rows.keys():
next_iter_action(rowkey)
self.iter_row(rowkey, action)
def iter_column_x_row(self, action = lambda r_key, col, val : sys.stderr.write(str(val) + "\n"), next_iter_action = lambda col: sys.stderr.write('\n\n')):
for column in self._columns:
next_iter_action(column)
self.iter_column(column, action)
def __str__(self):
return "COLUMNS: %s\nCOL_DEFAULTS: %s\nROWS: %s\n" % (str(self._columns), str(self._column_defaults), str(self._rows))
|
from .practices import PracticesAirtableAdapter
|
from math import sqrt,acos,pi,inf,isinf,fabs
def savePPM(w,h,mxCol,name,imgDat): # Code for saving rendered image in displayable format
    """Write imgDat (rows of "r g b" pixel strings) as an ASCII PPM (P3) file.

    NOTE(review): the output directory is a hard-coded absolute user path,
    so this only works on the original author's machine — consider
    parameterizing the directory.
    """
    print("saving ppm...")
    filename = "/Users/willsumner/Desktop/Coding/My_Raytracer/Image_Test_Container/"+name+".ppm"
    with open(filename, 'w') as outFile:
        # PPM header: magic number, dimensions, max color value.
        outFile.write("P3\n")
        outFile.write(str(w) + " " + str(h) + "\n"+ str(mxCol)+"\n")
        for line in imgDat:
            outFile.write(" ".join(line))
            outFile.write("\n")
def solveQuad(a, b, c):
    """Return the smallest positive root of a*x^2 + b*x + c = 0, or False.

    Used for ray-sphere intersection: negative roots are behind the ray
    origin and the tangent (det == 0) case is ignored, as before.
    Callers test the result with `> 0`, so False and "no positive root"
    are interchangeable. Assumes a != 0 (for sphere tracing a = |d|^2 > 0).
    """
    det = b * b - 4 * a * c
    if det > 0:
        inv = 1 / (2 * a)
        near = (-1 * b - sqrt(det)) * inv   # smaller root
        if near > 0:
            return near
        far = (-1 * b + sqrt(det)) * inv    # larger root (origin inside sphere)
        if far > 0:
            return far
    return False
class Vec: # Vector class
    """3D vector with basic arithmetic, dot/cross products and in-place
    normalization. str(Vec(1,2,3)) -> "1 2 3" (used when writing PPM data)."""
    x = 0
    y = 0
    z = 0

    def __init__(self, x, y, z):
        self.x = x
        self.y = y
        self.z = z

    def __str__(self):
        return "{} {} {}".format(self.x, self.y, self.z)

    def __mul__(self, val):
        """Scalar multiplication."""
        return Vec(self.x * val, self.y * val, self.z * val)

    def __add__(self, other):
        return Vec(self.x + other.x, self.y + other.y, self.z + other.z)

    def __sub__(self, other):
        return Vec(self.x - other.x, self.y - other.y, self.z - other.z)

    def __div__(self, val):
        # Python-2 division hook; kept for backward compatibility.
        return Vec(self.x / val, self.y / val, self.z / val)

    # PY3 FIX: '/' dispatches to __truediv__ on Python 3, which was missing.
    __truediv__ = __div__

    def dot(self, other):
        """Dot product."""
        return (self.x * other.x) + (self.y * other.y) + (self.z * other.z)

    def ang(self, other):
        """Angle in radians between two vectors (undefined for zero-length input)."""
        return acos(self.dot(other) / (self.length() * other.length()))

    def length(self):
        """Euclidean length."""
        return sqrt((self.x ** 2) + (self.y ** 2) + (self.z ** 2))

    def square(self):
        """Squared length (dot with self)."""
        return self.dot(self)

    def norm(self):
        """Normalize in place; the zero vector stays (0, 0, 0)."""
        l = self.length()
        if l > 0:
            self.x = self.x / l
            self.y = self.y / l
            self.z = self.z / l
        else:
            self.x = self.y = self.z = 0

    def cross(self, other):
        """Right-handed cross product.

        BUG FIX: the j component was (x*oz - z*ox), the negation of the
        correct term, so the old result was not a proper right-handed
        cross product. Callers that depended on the flipped axis (e.g.
        the camera basis in render()) should be re-checked.
        """
        i = self.y * other.z - self.z * other.y
        j = self.z * other.x - self.x * other.z
        k = self.x * other.y - self.y * other.x
        return Vec(i, j, k)
class Col: # Color Class
    """RGB color triple; addition saturates each channel at 255."""
    r = 0
    g = 0
    b = 0

    def __init__(self, r, g, b):
        self.r = r
        self.g = g
        self.b = b

    def __str__(self):
        # str(Col(1,2,3)) -> "1 2 3", the PPM pixel format.
        return "{} {} {}".format(self.r, self.g, self.b)

    def __add__(self, nCol):
        # Channel-wise sum, clamped to the 8-bit maximum.
        return Col(min(self.r + nCol.r, 255),
                   min(self.g + nCol.g, 255),
                   min(self.b + nCol.b, 255))

    def scale(self):
        """Clamp each channel into [0, 255] in place (pseudo tone mapping)."""
        self.r = min(255, max(0, self.r))
        self.g = min(255, max(0, self.g))
        self.b = min(255, max(0, self.b))
class Sphere: # Sphere Class
    """Sphere primitive: center c, radius r, diffuse col, specular scol,
    shininess shine (0 = Lambertian only)."""
    c = Vec(0, 0, 0)
    r = 1
    col = Col(.5, .5, .5)
    scol = Col(.5, .5, .5)
    shine = 0

    def __init__(self, origin, radius, col, scol, shine):
        self.c = origin
        self.r = radius
        self.col = col
        self.scol = scol
        self.shine = shine

    def checkHit(self, ray):
        """Return the ray parameter of the intersection, or False for a miss."""
        offset = ray.o - self.c
        quad_a = ray.d.square()
        quad_b = 2 * ray.d.dot(offset)
        quad_c = offset.dot(offset) - (self.r ** 2)
        hit = solveQuad(quad_a, quad_b, quad_c)
        return hit if hit else False

    def normal(self, point):
        # NOTE(review): this computes c - point, i.e. the INWARD-pointing
        # normal; the shading code uses similarly flipped light vectors, so
        # the signs appear to cancel — confirm before "fixing" either side.
        n = self.c - point
        n.norm()
        return n
class Ray: # Ray Class
    """Ray with origin o and unit direction d (normalized on construction)."""
    o = Vec(0, 0, 0)
    t = 0
    d = Vec(1, 1, 1)

    def __init__(self, o, t, d):
        self.o = o
        self.t = t
        self.d = d
        # Direction is normalized in place so parameter t measures distance.
        self.d.norm()

    def hitPoint(self, t):
        """Point at parameter t along the ray: o + d*t."""
        return self.o + self.d * t
class Camera: # Camera Class
    """Pinhole camera: origin o, view direction d, and ang (used as the
    focal-distance scale when building primary rays in render())."""
    o = Vec(0, 0, 0)
    d = Vec(1, 0, 0)
    ang = 0

    def __init__(self, o, d, ang):
        self.o = o
        self.d = d
        self.ang = ang
class Light: # Light Class
    """Point light at position o with a scalar brightness multiplier."""
    o = Vec(0, 0, 0)
    brightness = 1.0

    def __init__(self, o, brightness):
        self.o = o
        self.brightness = brightness
class Scene:
    """Container for renderable objects, lights and the camera."""
    # Class-level defaults kept for backward compatibility with any code
    # reading Scene.objects / Scene.lights / Scene.camera directly.
    objects = []
    lights = []
    camera = 0

    def __init__(self):
        # BUG FIX: the original assigned to local variables here, so every
        # Scene instance silently shared the class-level mutable lists.
        self.objects = []
        self.lights = []
        self.camera = 0

    def addObject(self, obj):
        self.objects.append(obj)

    def removeObject(self, obj):
        self.objects.remove(obj)

    def addLight(self, light):
        self.lights.append(light)

    def removeLight(self, light):
        self.lights.remove(light)

    def addCamera(self, camera):
        self.camera = camera
def trace(ray, objects, lights): # Trace a ray until it hits an object
    """Find the nearest object hit by *ray* and return its shaded color.

    Returns Vec(30, 30, 30) as the background when nothing is hit.
    (NOTE(review): background is a Vec while shading returns a Col; both
    stringify to "r g b", which is what savePPM consumes.)
    """
    t = inf
    obj = objects[0]
    for candidate in objects:
        hit = candidate.checkHit(ray)
        if hit > 0 and hit < t:
            t = hit
            obj = candidate
    if not isinf(t):
        hitPoint = ray.hitPoint(t)
        # BUG FIX: the original called thing.normal(...), i.e. the LAST
        # object iterated, not the nearest hit object.
        normal = obj.normal(hitPoint)
        return phongshade(ray, normal, hitPoint, obj.col, obj.scol, obj.shine, lights)
    return Vec(30, 30, 30) # Background Color
def lamshade(N, P, material, lights): # Shade a pixel based on lighting
    """Lambertian diffuse shading of point P with surface normal N.

    Accumulates each light's contribution, then adds the module-level
    `ambient` color (clamped at 255 by Col.__add__).
    """
    total = Col(0, 0, 0)
    for light in lights:
        to_light = P - light.o
        to_light.norm()
        lambert = max(0, N.dot(to_light))
        total.r += int(lambert * material.r * light.brightness)
        total.g += int(lambert * material.g * light.brightness)
        total.b += int(lambert * material.b * light.brightness)
    return total + ambient
def phongshade(ray, N, hitpoint, dmaterial, smaterial, shine, lights): # More Advanced Shading
    """Phong shading: diffuse + specular terms per light, plus ambient.

    Falls back to pure Lambertian shading when shine == 0; otherwise the
    exponent is shine * 1000, as in the original tuning.
    """
    if shine == 0:
        return lamshade(N, hitpoint, dmaterial, lights)
    result = Col(0, 0, 0)
    result.scale()  # no-op on zeros; retained from the original
    shine *= 1000
    for light in lights:
        L = hitpoint - light.o
        L.norm()
        diffuse = max(0, N.dot(L))
        H = L + ray.d
        H.norm()
        specular = max(0, H.dot(N)) ** shine
        result.r += min(int(dmaterial.r * diffuse * light.brightness + smaterial.r * specular * light.brightness), 255)
        result.g += min(int(dmaterial.g * diffuse * light.brightness + smaterial.g * specular * light.brightness), 255)
        result.b += min(int(dmaterial.b * diffuse * light.brightness + smaterial.b * specular * light.brightness), 255)
    return result + ambient
def render(pixels, objects, width, height, camera, lights, filename): # Main Render Function
    """Render the scene into *pixels* (height x width grid of pixel strings)
    and save it as "<filename>.ppm" via savePPM."""
    # Build an orthonormal camera basis from the view direction and world up.
    w_axis = camera.d * -1
    u_axis = w_axis.cross(Vec(0, 1, 0))
    u_axis.norm()
    v_axis = u_axis.cross(w_axis) * -1
    v_axis.norm()
    for row in range(height):
        for col in range(width):
            # Map the pixel to [-1, 1] screen coordinates and build a primary ray.
            direction = camera.d * camera.ang + u_axis * (2 * (col / width) - 1) + v_axis * (1 - 2 * (row / height))
            direction.norm()
            pixels[row][col] = str(trace(Ray(camera.o, 2, direction), objects, lights))
    savePPM(width, height, 255, filename, pixels)
width,height=150,150 # Image Dimensions
ambient = Col(10,10,10) # Ambient Color (read as a module global by the shaders)
image = [ [ 0 for x in range(width)] for y in range(height)] # Image Container
red = Col(180,0,0) # Setting up colors
blue = Col(40,93,226)
green = Col(39,196,39)
white = Col(255,255,255)
cyan = Col(55,221,216)
mscene = Scene() # Default Scene Setup
SphereUno = Sphere(Vec(-1,0,3),1,red,white,0)
SphereDos = Sphere(Vec(1,0,3.5),1,blue,white,1)
LightUno = Light(Vec(-1,2,0),1)
mscene.addObject(SphereUno)
mscene.addObject(SphereDos)
mscene.addLight(LightUno)
mscene.addCamera(Camera(Vec(0,0,0),Vec(0,0,1),1))
print("Rendering...")
imgName = "image"
# Render the whole scene and write "<imgName>.ppm" (see savePPM for the output path).
render(image,mscene.objects,width,height,mscene.camera,mscene.lights,imgName)
|
# 128x32 display with hardware I2C:
import Adafruit_SSD1306
from PIL import Image, ImageDraw, ImageFont
from jetbot.utils.utils import get_ip_address
# Initialize the 128x32 SSD1306 OLED over hardware I2C bus 1 (no reset pin).
disp = Adafruit_SSD1306.SSD1306_128_32(rst=None, i2c_bus=1, gpio=1)
disp.begin()
print("ready")
# Blank the panel before drawing.
disp.clear()
disp.display()
print("cleared")
width = disp.width
height = disp.height
# Mode '1' = 1-bit monochrome canvas matching the display size.
image = Image.new('1', (width, height))
draw = ImageDraw.Draw(image)
# Clear the canvas to black.
draw.rectangle((0,0,width,height), outline=0, fill=0)
font = ImageFont.load_default()
#draw.text((0,0), "eth0: " + str(get_ip_address('eth0')), font=font, fill=255)
# Show the wlan0 IP address and a fixed test string.
draw.text((0,8), "wlan0: " + str(get_ip_address('wlan0')), font=font, fill=255)
draw.text((0,16), "hallo", font=font, fill=255)
# Push the finished canvas to the display.
disp.image(image)
disp.display()
print("used")
|
# coding=utf-8
#
# BSD 3-Clause License
#
# Copyright (c) 2016-21, University of Liverpool
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Python wrappers for common command line applications"""
__author__ = "Felix Simkovic"
__date__ = "20 Oct 2016"
__version__ = "0.13.3"
# Each wrapper below lazily imports its heavyweight application module only
# when the command line is actually constructed, keeping package import cheap.
def BbcontactsCommandline(*args, **kwargs):
    """Lazily import and construct the bbcontacts command line."""
    from conkit.applications.bbcontacts import BbcontactsCommandline
    return BbcontactsCommandline(*args, **kwargs)


def CCMpredCommandline(*args, **kwargs):
    """Lazily import and construct the CCMpred command line."""
    from conkit.applications.ccmpred import CCMpredCommandline
    return CCMpredCommandline(*args, **kwargs)


def CdhitCommandline(*args, **kwargs):
    """Lazily import and construct the CD-HIT command line."""
    from conkit.applications.cdhit import CdhitCommandline
    return CdhitCommandline(*args, **kwargs)


def HHblitsCommandline(*args, **kwargs):
    """Lazily import and construct the HHblits command line."""
    from conkit.applications.hhblits import HHblitsCommandline
    return HHblitsCommandline(*args, **kwargs)


def HHfilterCommandline(*args, **kwargs):
    """Lazily import and construct the HHfilter command line."""
    from conkit.applications.hhfilter import HHfilterCommandline
    return HHfilterCommandline(*args, **kwargs)


def JackhmmerCommandline(*args, **kwargs):
    """Lazily import and construct the jackhmmer command line."""
    from conkit.applications.jackhmmer import JackhmmerCommandline
    return JackhmmerCommandline(*args, **kwargs)


def PsicovCommandline(*args, **kwargs):
    """Lazily import and construct the PSICOV command line."""
    from conkit.applications.psicov import PsicovCommandline
    return PsicovCommandline(*args, **kwargs)


def MapAlignCommandline(*args, **kwargs):
    """Lazily import and construct the map_align command line."""
    from conkit.applications.map_align import MapAlignCommandline
    return MapAlignCommandline(*args, **kwargs)
|
from flask import Flask, render_template, request
app = Flask(__name__)
@app.route('/')
def upload():
    # Serve the upload form page.
    return render_template('upload.html')
@app.route('/uploader', methods = ['GET', 'POST'])
def upload_file():
    """Handle the upload form: POST echoes the uploaded file's contents back.

    BUG FIX: a GET request previously fell through and returned None, which
    Flask turns into a 500 error; serve the upload form instead.
    """
    if request.method == 'POST':
        f = request.files['file']
        # NOTE(review): the filename is untrusted — run it through
        # werkzeug.utils.secure_filename before ever saving to disk.
        #f.save(secure_filename(f.filename))
        return f.read()
    return render_template('upload.html')
if __name__ == '__main__':
    # Threaded option to enable multiple instances for multiple user access support
    app.run(threaded=True, port=5000)
|
# --- coding:utf-8 ---
# author: Cyberfish time:2021/7/22
import paddle
from paddle import set_device, get_device
from ner_code.ner_utils import model, utils_fn
from paddlenlp.transformers import NeZhaTokenizer
# Build the label <-> id lookup tables from the slots definition file
# (shared by the NerPredict class below).
ner_label2id, ner_id2label, bi_label2id, id2bi_label, slots2id, id2slots, slots = utils_fn.label_process('data/slots.txt')
class NerPredict():
    """Character-level NER predictor built on a pretrained NeZha tokenizer
    and a saved NerModel checkpoint."""

    def __init__(self):
        self.ner_model = model.NerModel()
        self.ner_model.to(set_device(get_device()))
        # Restore the trained weights.
        ner_state_dict = paddle.load('weight/ner_model.state')
        self.ner_model.set_state_dict(ner_state_dict)
        self.tokenizer = NeZhaTokenizer.from_pretrained('nezha-base-wwm-chinese')

    def ner_predict(self, content):
        """Run NER on *content* (a string) and return the extracted entities
        as "label\\tentity_text" strings.

        Label scheme (from ner_id2label): 0 = outside; odd ids begin an
        entity (B- tags); even non-zero ids continue one (I- tags).
        """
        chars = [c for c in content]
        input_ids = paddle.to_tensor([self.tokenizer(chars, is_split_into_words=True)['input_ids']])
        lens = paddle.to_tensor(len(chars))
        _, pred = self.ner_model(input_ids, lens)
        entities = []
        entity = ''
        # FIX: the loop variable previously shadowed the `content` list.
        for ch, label in zip(chars, pred[0]):
            label = int(label)
            if label == 0:
                # Outside tag: close any entity in progress.
                if entity:
                    entities.append(entity)
                    entity = ''
            elif label % 2 == 1:
                # B- tag: flush the previous entity and start a new one.
                if entity:
                    entities.append(entity)
                entity = ner_id2label[label].split('_')[1] + '\t' + ch
            else:
                # I- tag: extend the current entity (ignored with no open entity).
                if entity:
                    entity += ch
        # BUG FIX: an entity still open at end-of-sequence was silently dropped.
        if entity:
            entities.append(entity)
        return entities
|
def handle_error(response):
codes = {-1: APIError, 400: ParseError, 404: ObjectNotFoundError}
try:
error = response.json().get("errors", {})
message = response.json().get("message")
except:
message = "Unknown API error."
error = "Unknown API error."
code = response.status_code
data = error
raise codes.get(code, APIError)(
message=message, code=code, data=data, response=response
)
class APIError(Exception):
response = None
data = {}
message = "An error with the API occurred."
code = -1
def __init__(self, message=None, code=None, data={}, response=None):
self.response = response
if message:
self.message = message
if code:
self.code = code
if data:
self.data = data
def __str__(self):
if self.code:
ret = {"Error code": self.code, "Message": self.message, "Data": self.data}
return str(ret)
return self.message
class CandidError(Exception):
response = None
data = {}
message = "Object has no stamps."
code = -1
class ParseError(APIError):
pass
class FormatValidationError(ParseError):
pass
class ObjectNotFoundError(APIError):
## TODO add logic for including oid in error message
pass
|
# -*- coding: utf-8 -*-
from django.shortcuts import render
def handle_404_view(request):
    """Render the custom "page not found" template with a 404 status."""
    return render(request, "error/404.html", status=404)


def handle_500_view(request):
    """Render the custom "server error" template with a 500 status."""
    return render(request, "error/500.html", status=500)


def handle_503_view(request):
    """Render the maintenance-mode template with a 503 status."""
    return render(request, "error/503.html", status=503)


def handle_csrf_view(request, reason):
    """Render the CSRF-failure template, exposing the failure *reason*."""
    context = {"reason": reason}
    return render(request, "error/csrf.html", context, status=403)
|
"""データの読み込み"""
from typing import TextIO, Optional
import numpy as np
from asva.Types import WaveType, CASESType
#import pandas as pd
def read_wave(file_path: TextIO, col: int, delimiter: Optional[str], skiprows: int):
    """Load one column of wave data from an already-opened text file.

    Any loading failure is re-raised as ValueError with the original
    exception chained for debugging.
    """
    try:
        wave = np.loadtxt(file_path, usecols=[col], delimiter=delimiter, skiprows=skiprows)
    except Exception as error:
        # FIX: chain the cause instead of discarding it.
        raise ValueError(f"check wave setting: {error}") from error
    return wave


def read_case_wave(wave: WaveType, case_conf: CASESType):
    """Read the earthquake wave file for a case and return a scaled array
    (amplitude factor AMP times unit conversion TO_METER)."""
    open_file = wave['INPUT_FILE']
    # NOTE: the config key is spelled 'ENCORDING' (sic) — it is external
    # data, so it must not be renamed here.
    encording = wave['ENCORDING'] if 'ENCORDING' in wave else 'shift-jis'
    try:
        opened_file = open(open_file, 'r', encoding=encording)
    except OSError as error:
        raise ValueError(f"cannot open {open_file}") from error
    # FIX: the file handle was never closed; close it once reading is done.
    with opened_file:
        data = read_wave(opened_file, wave['COL'], wave['DELIMITER'], wave['SKIPROWS'])
    return data * case_conf['AMP'] * wave['TO_METER']
def add_wave_required_zero(wave: list, required_length: int):
    """Zero-pad *wave* up to *required_length* samples.

    Returns a new float array of length max(len(wave), required_length)
    whose leading entries are the original samples — the input is never
    truncated.
    """
    n_wave = len(wave)
    new_wave = np.zeros(max(n_wave, required_length))
    # Vectorized copy replaces the original element-by-element loop.
    new_wave[:n_wave] = wave
    return new_wave
def divide_wave(wave: list, n_div: int):
    """Resample *wave* onto a grid n_div times finer by accumulating equal
    linear-interpolation increments between consecutive samples (the value
    before the first sample is taken as 0)."""
    total = len(wave) * n_div
    refined = np.zeros(total)
    level = 0
    for idx in range(total):
        step = idx // n_div
        previous = wave[step - 1] if step >= 1 else 0
        target = wave[step]
        level = level + (target - previous) / n_div
        refined[idx] = level
    return refined
#
# MythBox for XBMC - http://mythbox.googlecode.com
# Copyright (C) 2011 analogue@yahoo.com
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
import logging
log = logging.getLogger('mythbox.event')
class Event(object):
    """Symbolic names for events published on the EventBus.

    The trailing comment on each constant lists the payload keys a
    publisher is expected to include in the event dict.
    """
    RECORDING_DELETED = 'RECORDING_DELETED' # keys: id, program
    RECORDING_STARTED = 'RECORDING_STARTED' # keys: id
    RECORDING_ENDED = 'RECORDING_ENDED' # keys: id
    SETTING_CHANGED = 'SETTING_CHANGED' # keys: id, tag, old, new
    SHUTDOWN = 'SHUTDOWN' # keys: None
    SCHEDULER_RAN = 'SCHEDULER_RAN' # keys: None
    # NOTE(review): the value 'SCHEDULE CHANGED' (with a space) differs from
    # the attribute name -- confirm subscribers match on the value, not the name.
    SCHEDULE_CHANGED = 'SCHEDULE CHANGED' # keys: None recording schedule added/deleted/updated
    COMMFLAG_START = 'COMMFLAG_START' # [u'BACKEND_MESSAGE', u'COMMFLAG_START 4276 2011-06-02T17:00:00', u'empty']
    FANART_REFRESHED = 'FANART_REFRESHED' # keys: id, program
class EventBus(object):
    """Minimal synchronous publish/subscribe bus for MythBox events."""

    def __init__(self):
        self.listeners = []

    def register(self, listener, firstDibs=False):
        """Add a listener; firstDibs=True puts it at the front of the delivery order."""
        if firstDibs:
            self.listeners.insert(0, listener)
        else:
            self.listeners.append(listener)

    def deregister(self, listener):
        """Remove a listener; removing one that was never registered is logged, not raised."""
        try:
            self.listeners.remove(listener)
        except ValueError as ve:  # 'except X, e' replaced with 'as': valid on Python 2.6+ and 3
            log.error(ve)

    def publish(self, event):
        """
        @type event: dict
        @param event: Put in whatever you like. The only mandatory key is 'id'
        """
        log.debug('Publishing event %s to %d listeners' % (event, len(self.listeners)))
        # Iterate over a copy so listeners may (de)register during delivery.
        for listener in self.listeners[:]:
            try:
                listener.onEvent(event)
            except Exception:  # was a bare except; don't swallow SystemExit/KeyboardInterrupt
                log.exception('Error publishing event %s to %s' % (event, listener))
|
#!/usr/bin/env python
"""Calculates average given waveforms"""
import sys
import os
import numpy as np
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
def average(experimentDir, outputFile):
for dirpath, _, filenames in os.walk(experimentDir):
sensorList = []
for filename in filenames:
filepath = os.path.join(dirpath, filename)
sensor = open(filepath).read().splitlines()
sensorList.append(sensor)
label = os.path.splitext(filename)[0]
plt.plot(sensor, label=label)
# calculate mean
mean = np.array(sensorList).astype(float).mean(axis=0)
np.savetxt(outputFile, mean, newline='\n')
print "Saving average =", outputFile
# graph
plt.plot(mean, label="average")
plt.legend()
plt.title("Average")
plt.savefig(outputFile + ".png")
def main(inputDir, outputDir):
    """Average every experiment directory found under inputDir.

    Each subdirectory is averaged into an identically named output file
    under outputDir. Always returns 0 (shell-style success).
    NOTE: os.walk recurses, so nested subdirectories are processed too.
    """
    for dirpath, dirnames, _ in os.walk(inputDir):
        for dirname in dirnames:
            average(os.path.join(dirpath, dirname), os.path.join(outputDir, dirname))
    return 0
# Script entry point: expects <inputDir> <outputDir> on the command line.
if __name__ == '__main__':
    print "Args =", sys.argv  # Python 2 print statement
    main(*sys.argv[1:])
|
import requests
from lxml import etree
# Scrape second-hand-housing listing titles from 58.com (Hangzhou) and
# save them one per line.
if __name__ == "__main__":
    baseurl = "https://hz.58.com/ershoufang/"
    headers = {
        "User-Agent":'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.90 Safari/537.36 Edg/89.0.774.63'
    }
    page_text = requests.get(baseurl,headers=headers).text
    parser = etree.HTMLParser(encoding='utf-8')
    tree = etree.HTML(page_text,parser=parser)
    title_list = tree.xpath('//div[@class="property"]//h3[@class="property-content-title-name"]/text()')
    # Write as UTF-8 explicitly: the default locale encoding (e.g. cp936 or
    # cp1252 on Windows) can fail or mangle the Chinese titles.
    with open('06._58ershou.txt', 'w', encoding='utf-8') as fp:
        for title in title_list:
            fp.write(title + "\n")
import sqlite3
from flask_login import UserMixin
#Turn the results from the database into a dictionary
def dict_factory(cursor, row):
    """sqlite3 row factory: map each column name to its value in *row*."""
    column_names = (description[0] for description in cursor.description)
    return dict(zip(column_names, row))
#inherit basic flask_login user properties
def getUserDataL(arg1, arg2, arg3, query):
    """Run one of the user/league lookup queries against leaguemate.db.

    arg3 selects how *query* is bound: 'max' runs it with no parameters,
    'lane' binds arg1 only, and anything else binds (arg1, arg2, arg3).
    When all three args are 'Any', the unfiltered getAllUsers() query is
    used instead. Returns the rows as dicts, or None on a sqlite error.
    """
    conn = None  # pre-bind so the finally block is safe if connect() raises
    try:
        conn = sqlite3.connect('leaguemate.db')
        conn.row_factory = dict_factory
        c = conn.cursor()
        # Original condition was `(arg3 and arg2 and arg3) == 'Any'`, which
        # effectively tested only arg3; the intent is "no filters at all".
        if arg1 == 'Any' and arg2 == 'Any' and arg3 == 'Any':
            query = getAllUsers()
            c.execute(query)
        elif arg3 == 'max':
            c.execute(query)
        elif arg3 == 'lane':
            # (arg1,) is a 1-tuple; the original `(arg1)` passed a bare
            # string, which sqlite3 rejects as a parameter sequence.
            c.execute(query, (arg1,))
        else:
            c.execute(query, (arg1, arg2, arg3))
        result = c.fetchall()
        for row in result:
            print(row)
        return result
    except sqlite3.Error as error:
        print("failed to read from table", error)
    finally:
        if conn:
            conn.close()
            print("the sqlite connection is closed")
def getAllUsers():
    """Return the SQL joining every user to their league and account rows."""
    query = "select * from UserLeague as C left outer join LeagueAccount as L on L.LeagueID = C.LeagueID left outer join UserAccount as U on C.Username = U.Username"
    return query
def getAllChampions():
    """Return (champion, count) usage rows, most-played first.

    Returns None if the query fails (e.g. the table does not exist).
    """
    conn = None  # pre-bind so the finally block is safe if connect() raises
    try:
        conn = sqlite3.connect('leaguemate.db')
        conn.row_factory = dict_factory
        c = conn.cursor()
        c.execute("SELECT GoodAtChamp, count(*) as num FROM usergoodat GROUP BY GoodAtChamp ORDER BY num DESC")
        result = c.fetchall()
        return result
    except sqlite3.Error as error:
        print("failed to read from table", error)
    finally:
        if conn:
            conn.close()
            print("the sqlite connection is closed")
def getMaxLevel(arg1, arg2, arg3):
    """Look up the user(s) holding the highest-GameLevel league account."""
    sql = " select Username,GameServer,LeagueID,GameLevel from (select GameServer, LeagueID as MaxLevelID, Gamelevel from leagueaccount where GameLevel = (select max(GameLevel) from leagueaccount)) R1 join (select Username,LeagueID from userleague)R2 on R1.MAXLevelID = R2.LeagueID"
    # 'max' tells getUserDataL to run the query without bound parameters.
    return getUserDataL(arg1, arg2, 'max', sql)
def sortUsers(arg1, arg2, arg3):
    """Filter users by position/rank/queue and order them by level, descending."""
    rank_code = switch(arg2)  # map the rank name to its numeric tier
    sql = "select * from UserLeague as C left outer join LeagueAccount as L on L.LeagueID = C.LeagueID left outer join UserAccount as U on C.Username = U.Username WHERE L.Position = (?) AND L.AccountRank = (?) AND l.QueueType =(?) ORDER BY L.GameLevel desc"
    return getUserDataL(arg1, rank_code, arg3, sql)
def searchLane(arg1):
    """Find league accounts of users whose preferred position differs from *arg1*."""
    sql = "select GameServer, LeagueID, GameLevel, AccountRank, QueueType, TFTRank from leagueaccount L1 where exists (select LeagueID, GameServer from (select leagueID, GameServer from userleague U1 where exists (select Username from usergoodat where U1.Username = usergoodat.Username and usergoodat.GoodAtPosition <> (?)))R1 where R1.LeagueID = L1.LeagueID)"
    # 'lane' tells getUserDataL to bind only the first argument.
    return getUserDataL(arg1, 0, 'lane', sql)
def switch(argument):
    """Map a rank name to its numeric tier; return 'nothing' for unknown names.

    (Leftover debug prints from development were removed.)
    """
    switcher = {
        'N/A': 0,
        'Iron': 1,
        'Bronze': 2,
        'Silver': 3,
        'Gold': 4,
        'Platinum': 5,
        'Diamond': 6,
        'Master': 7,
        'Grandmaster': 8,
        'Challenger': 9
    }
    return switcher.get(argument, "nothing")
|
import torch
import torch.optim as optim
from enchanter.addons import TransformerOptimizer
def test_transformer_optim():
    """TransformerOptimizer must drive a simple quadratic objective without raising."""
    def objective(x):
        return 3 * x ** 2

    param = torch.tensor(10.0, requires_grad=True).float()
    wrapped = TransformerOptimizer(optim.Adam([param]), warm_up=10, d_model=100)
    succeeded = False
    try:
        for _ in range(100):
            wrapped.zero_grad()
            loss = objective(param)
            loss.backward()
            wrapped.step()
    except Exception as err:
        print(err)
        succeeded = False
    else:
        succeeded = True
    assert succeeded
|
import pytest
import unittest
import numpy as np
from .. import dataloaders
# class ClassTest(unittest.TestCase):
# @pytest.mark.xfail
# def test_feature_a(self):
# self.assertEqual(2, 3)
# def test_feature_b(self):
# self.assertTrue(True)
# @pytest.mark.parametrize(
# "test_input,expected",
# [("3+5", 8), ("2+4", 6), pytest.param("6*9", 42, marks=pytest.mark.xfail)],
# )
# def test_eval(test_input, expected):
# assert eval(test_input) == expected
@pytest.mark.parametrize(
    "augPolicy, dataset, data_dir, expected",
    [(0, "cifar10", "/data/cifar10", 10), (1, "cifar100", "data/cifar100", 100), (2, "cifar100", "data/cifar100", 100), pytest.param(-1, "cifar10", "/data/cifar10_v1", 10, marks=pytest.mark.xfail)],
)
def test_datasubset(augPolicy, dataset, data_dir, expected):
    """load_data_subsets must report the class count matching the dataset."""
    n_classes, _train, _train_noT, _test = dataloaders.load_data_subsets(augPolicy, dataset, data_dir)
    assert n_classes == expected
@pytest.mark.parametrize(
    "augPolicy, dataset, data_dir, expected",
    [(2, "cifar10", "/data/cifar10", 10)],
)
def test_train_dataloaders_10(augPolicy, dataset, data_dir, expected):
    """get_train_dataloaders on cifar10 must leave the class count at 10."""
    num_classes, train_data, train_data_noT, test_data = dataloaders.load_data_subsets(augPolicy, dataset, data_dir)
    class_ids = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
    (trainloader, validloader, unlabelledloader,
     train_sampler, unlabelled_sampler,
     indices_train, indices_unlabelled,
     trainIndexOrder, unlabeledIndexOrder) = dataloaders.get_train_dataloaders(
        dataset, train_data, train_data_noT, 100, 12, 1000, 1000, 1,
        class_ids, class_ids[:], ordered=False)
    assert num_classes == expected
@pytest.mark.parametrize(
    "augPolicy, dataset, data_dir, expected",
    [(1, "cifar100", "data/cifar100", 100)],
)
def test_train_dataloaders_100(augPolicy, dataset, data_dir, expected):
    """get_train_dataloaders on cifar100 must leave the class count at 100."""
    num_classes, train_data, train_data_noT, test_data = dataloaders.load_data_subsets(augPolicy, dataset, data_dir)
    (trainloader, validloader, unlabelledloader,
     train_sampler, unlabelled_sampler,
     indices_train, indices_unlabelled,
     trainIndexOrder, unlabeledIndexOrder) = dataloaders.get_train_dataloaders(
        dataset, train_data, train_data_noT, 100, 12, 1000, 1000, 1,
        np.arange(100), np.arange(100), ordered=False)
    assert num_classes == expected
import string
infile = open('./conservation_analysis.out', 'r')
lines = infile.readlines()
types = ['normalized_0D_conservation', 'normalized_noactsit_0D_conservation',
'normalized_1D_conservation', 'normalized_3D_conservation',
'normalized_ms3D_conservation', 'normalized_noactsit_ms3D_conservation',
'normalized_nobadloop_ms3D_conservation']
data = {}
for type in types:
data[type] = []
for line in lines:
if type in line:
ind = int(string.find(line, ' percent of the time'))
pwin = float(string.strip(line[ind-4: ind]))
data[type].append(pwin)
easy_indices, easy_thresh = [], 100.0
hard_indices, hard_thresh = [], 30.0
for i in range(len(data['normalized_0D_conservation'])):
sum = 0.0
for type in types:
sum += data[type][i]
avg = sum/float(len(types))
if avg >= easy_thresh:
easy_indices.append(i)
elif avg <= hard_thresh:
hard_indices.append(i)
print '%s easy indices, %s hard'%(len(easy_indices), len(hard_indices))
for type in types:
sum = 0.0
count = 0.0
i = 0
for datum in data[type]:
if i not in easy_indices and i not in hard_indices:
sum += datum
count += 1
i += 1
print 'average %s, count %s - %s '%(sum/float(count), count, type)
infile.close()
print '>>>'
|
import numpy as np
import features
import go
from test_utils import load_board, GoPositionTestCase
go.set_board_size(9)  # every fixture below assumes a 9x9 board
EMPTY_ROW = '.' * go.N + '\n'
# Simple position: a black wall along row 2, single black stones at (0,1) and
# (1,0), and a two-stone white group in the top-right corner.
TEST_BOARD = load_board('''
.X.....OO
X........
XXXXXXXXX
''' + EMPTY_ROW * 6)
TEST_POSITION = go.Position(
    board=TEST_BOARD,
    n=0,
    komi=6.5,
    caps=(1,2),
    ko=None,
    # Move history, oldest first: B(0,1), W(0,8), B(1,0) is most recent.
    recent=(go.PlayerMove(go.BLACK, (0, 1)),
            go.PlayerMove(go.WHITE, (0, 8)),
            go.PlayerMove(go.BLACK, (1, 0))),
    to_play=go.BLACK,
)
# Tangled corner position with capturable groups; used by the
# would-capture feature test.
TEST_BOARD2 = load_board('''
.XOXXOO..
XO.OXOX..
XXO..X...
''' + EMPTY_ROW * 6)
TEST_POSITION2 = go.Position(
    board=TEST_BOARD2,
    n=0,
    komi=6.5,
    caps=(0, 0),
    ko=None,
    recent=tuple(),  # no move history
    to_play=go.BLACK,
)
class TestFeatureExtraction(GoPositionTestCase):
    """Checks each feature plane extractor against the hand-built 9x9 fixtures.

    Feature arrays are indexed (row, col, plane); expected values below are
    read directly off TEST_BOARD / TEST_BOARD2.
    """
    def test_stone_color_feature(self):
        """Planes 0/1/2 are one-hot for black / white / empty."""
        f = features.stone_color_feature(TEST_POSITION)
        self.assertEqual(f.shape, (9, 9, 3))
        # plane 0 is B
        self.assertEqual(f[0, 1, 0], 1)
        self.assertEqual(f[0, 1, 1], 0)
        # plane 1 is W
        self.assertEqual(f[0, 8, 1], 1)
        self.assertEqual(f[0, 8, 0], 0)
        # plane 2 is empty
        self.assertEqual(f[0, 5, 2], 1)
        self.assertEqual(f[0, 5, 1], 0)
    def test_liberty_feature(self):
        """Plane k is set where the stone's group has k+1 liberties (last plane saturates)."""
        f = features.liberty_feature(TEST_POSITION)
        self.assertEqual(f.shape, (9, 9, features.liberty_feature.planes))
        self.assertEqual(f[0, 0, 0], 0)  # empty point: no liberty plane set
        # the stone at 0, 1 has 3 liberties.
        self.assertEqual(f[0, 1, 2], 1)
        self.assertEqual(f[0, 1, 4], 0)
        # the group at 0, 7 has 3 liberties
        self.assertEqual(f[0, 7, 2], 1)
        self.assertEqual(f[0, 8, 2], 1)
        # the group at 1, 0 has 18 liberties
        self.assertEqual(f[1, 0, 7], 1)
    def test_recent_moves_feature(self):
        """Plane k marks the k-th most recent move; unused planes stay zero."""
        f = features.recent_move_feature(TEST_POSITION)
        self.assertEqual(f.shape, (9, 9, features.recent_move_feature.planes))
        # most recent move at (1, 0)
        self.assertEqual(f[1, 0, 0], 1)
        self.assertEqual(f[1, 0, 3], 0)
        # second most recent move at (0, 8)
        self.assertEqual(f[0, 8, 1], 1)
        self.assertEqual(f[0, 8, 0], 0)
        # third most recent move at (0, 1)
        self.assertEqual(f[0, 1, 2], 1)
        # no more older moves
        self.assertEqualNPArray(f[:, :, 3], np.zeros([9, 9]))
        self.assertEqualNPArray(f[:, :, features.recent_move_feature.planes - 1], np.zeros([9, 9]))
    def test_would_capture_feature(self):
        """Plane k marks moves by to_play that would capture k+1 stones."""
        f = features.would_capture_feature(TEST_POSITION2)
        self.assertEqual(f.shape, (9, 9, features.would_capture_feature.planes))
        # move at (1, 2) would capture 2 stones
        self.assertEqual(f[1, 2, 1], 1)
        # move at (0, 0) should not capture stones because it's B's move.
        self.assertEqual(f[0, 0, 0], 0)
        # move at (0, 7) would capture 3 stones
        self.assertEqual(f[0, 7, 2], 1)
        self.assertEqual(f[0, 7, 1], 0)
|
# -*- coding: utf-8 -*-
"""
Created on Mon Nov 16 15:24:48 2015
@author: Hanna
"""
import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from matplotlib import cm
def warmUpExercise():
    """Print and return the 5x5 identity matrix (course warm-up exercise).

    Returning the matrix (instead of only printing it) is backward
    compatible and lets callers/tests use the result.
    """
    A = np.eye(5)
    print(A)
    return A
def plotData(X,y):
    """Scatter-plot the training data (population vs. profit) to data.pdf."""
    plt.figure(1)
    plt.plot(X, y, 'rx', markersize=5)
    # Fixed axis limits so all plots in this exercise are comparable.
    plt.xlim([4, 24])
    plt.ylim([-5, 25])
    plt.ylabel('Profit in $10,000s')
    plt.xlabel('Population of City in 10,000s')
    plt.savefig("data.pdf")
def plotLinReg(X,y,theta):
    """Plot the training data together with the fitted line to linRegFit.pdf.

    Expects X to already carry the bias column of ones in column 0.
    """
    plt.figure(3)
    plt.plot(X[:,1], y, 'rx', markersize=5) # do not plot added column of ones
    plt.plot(X[:,1], hypothesis(theta,X), 'b-')
    plt.xlim([4, 24])
    plt.ylim([-5, 25])
    plt.legend(['Training data', 'Linear regression'], loc='lower right', numpoints=1)
    plt.ylabel('Profit in $10,000s')
    plt.xlabel('Population of City in 10,000s')
    plt.savefig("linRegFit.pdf")
def hypothesis(theta, X):
    """Linear-model prediction: the matrix product X @ theta."""
    return np.matmul(X, theta)
def computeCost(X, y, theta):  # J(theta)
    """Least-squares cost: (1 / 2m) * sum((h(theta, x) - y)^2)."""
    m = len(y)
    residual = hypothesis(theta, X) - y
    return np.dot(residual.T, residual) / (2.0 * m)
def gradientDescent(X,y,theta,alpha,iterations):
    """Batch gradient descent for linear regression.

    Returns the optimized theta. As a convergence sanity check it also
    records J(theta) per iteration and saves cost_vs_iter.pdf (the curve
    should decrease monotonically).
    """
    m = len(y)
    costHistory = np.zeros((iterations+1,1)) # cost per iteration, for debugging
    costHistory[0,0] = computeCost(X,y,theta)
    for step in range(iterations):  # renamed from 'iter': don't shadow the builtin
        theta = theta - np.dot(X.T,hypothesis(theta,X)-y)*alpha/m
        costHistory[step+1,0] = computeCost(X,y,theta)
    # Plot J(theta) as a function of the iteration count.
    ind = np.arange(iterations + 1)  # replaces the manual index-fill loop
    plt.figure(2)
    plt.plot(ind, costHistory[:,0], 'ro')
    plt.ylabel('cost function')
    plt.xlabel('# of iterations')
    plt.savefig("cost_vs_iter.pdf")
    return theta
def visualCost(X,y,theta):
    """Save surface (cost3D.pdf) and contour (costContour.pdf) plots of J.

    The cost is evaluated on a 500x500 grid of (theta_0, theta_1) values;
    the converged *theta* is marked on the contour plot.
    """
    theta0_vals = np.linspace(-10, 10, 500)
    theta1_vals = np.linspace(-1, 4, 500)
    J_vals = np.zeros((len(theta0_vals),len(theta1_vals)))
    for i, elem0 in enumerate(theta0_vals):
        for j, elem1 in enumerate(theta1_vals):
            theta_vals = np.array([[elem0], [elem1]])
            J_vals[i, j] = computeCost(X, y, theta_vals)
    theta0_vals, theta1_vals = np.meshgrid(theta0_vals,theta1_vals)
    J_vals = J_vals.T  # align with meshgrid's (row=theta1, col=theta0) layout
    #surface plot
    fig4 = plt.figure()
    # Figure.gca(projection='3d') was deprecated in Matplotlib 3.4 and removed
    # in 3.6; add_subplot is the supported way to create a 3D axes.
    ax = fig4.add_subplot(projection='3d')
    ax.plot_surface(theta0_vals, theta1_vals, J_vals, cmap=cm.jet)
    ax.view_init(azim = 180+40,elev = 25)
    plt.xlabel("theta_0")
    plt.ylabel("theta_1")
    plt.savefig("cost3D.pdf")
    #contour plot
    plt.figure(5)
    plt.contour(theta0_vals, theta1_vals, J_vals, levels=np.logspace(-2, 3, 20))
    plt.scatter(theta[0,0], theta[1,0], c='r', marker='x', s=20, linewidths=1) # mark converged minimum
    plt.xlim([-10, 10])
    plt.ylim([-1, 4])
    plt.xlabel("theta_0")
    plt.ylabel("theta_1")
    plt.savefig("costContour.pdf")
# End-to-end driver for the exercise: load data, fit by gradient descent,
# plot the fit, predict two sample cities, and visualize the cost surface.
if __name__ == '__main__':
    warmUpExercise()
    #load the data: column 0 is population (feature), column 1 is profit (target)
    data = np.genfromtxt("ex1data1.txt", delimiter=',')
    X, y = data[:,:1], data[:,1:]
    m = len(y) # number of examples in training set
    #see what the data looks like
    plotData(X,y)
    #linear regression: prepend the bias column of ones
    X = np.hstack((np.ones((m,1)),X))
    theta = np.zeros((2, 1)) # initial guess for theta
    iterations = 1500
    alpha = 0.01  # learning rate
    cost = computeCost(X,y,theta) # debug J(theta)
    print("cost for initial theta =",cost,"should be 32.07 for this dataset") # get [[ 32.07273388]]
    theta = gradientDescent(X,y,theta,alpha,iterations) # contains plot of J(theta) vs #iterations for debugging
    cost = computeCost(X, y, theta) # minimized J(theta)
    print("optimal theta =",theta) # get [[-3.63029144] [ 1.16636235]]
    print("optimal cost =",cost) # get [[ 4.48338826]]
    #plot the resulting model
    plotLinReg(X,y,theta)
    #use model to make predictions (inputs are [1, population/10,000])
    predict1 = np.dot(np.array([[1,3.5]]),theta)
    predict2 = np.dot(np.array([[1,7]]),theta)
    print("prediction1 =",predict1) # get [[ 0.45197679]]
    print("prediction2 =",predict2) # get [[ 4.53424501]]
    #visualize cost function
    visualCost(X,y,theta)
    plt.show()
|
# Read an n-row matrix of comma-separated integers from stdin and print it
# flattened row by row.
n = int(input())
matrix = []
for _ in range(n):
    matrix.append([int(x) for x in input().split(', ')])
flattened = [value for row in matrix for value in row]
print(flattened)
|
""" CarList class. Responsible for sorting and keeping up with cars and results """
import json
import os
from pathlib import Path
from car import Car
CAR_PATH = f"{Path.home()}/.config/car_rater"
class CarList:
    """CarList class: keeps one category's cars sorted by score (highest
    first) and persisted as JSON under the car_rater config directory."""
    def __init__(self, category):
        self._category = category
        self._cars = []
        self._path = f"{CAR_PATH}/{category}.json"
        # First run for this category gets a fresh empty file; otherwise
        # load the previously saved cars.
        if not os.path.isfile(self._path):
            self.create_car_file()
        else:
            self.load()
        super().__init__()
    def __str__(self):
        """Return text box friendly list of cars"""
        return_str = "\n"
        return_str += f"{self._category}".center(58)
        return_str += "\n\n"
        return_str += 58 * "="
        return_str += "\n"
        return_str += "Owner | Year | Kind of Car | Score |\n"
        return_str += f"{58 * '-'}\n"
        for car in self._cars:
            return_str += (
                f"{car.owner:18} | {car.year:4} | {car.kind:20} | {car.score:5} |\n"
            )
        return return_str
    @property
    def category(self):
        """Make category read only"""
        return self._category
    @property
    def cars(self):
        """Return the list of cars"""
        return self._cars
    def create_car_file(self):
        """Create a brand new (empty) car file"""
        os.makedirs(CAR_PATH, exist_ok=True)
        with open(self._path, "w", encoding="utf-8"):
            pass
    def add_car(self, car):
        """Add a car to the list, keep it sorted, and persist"""
        self._cars.append(car)
        self.sort()
        self.write()
        print(f'Added car "{car}"')
    def delete_car(self, car):
        """Delete a car from the list and persist"""
        self._cars.remove(car)
        self.write()
        print(f'Deleted car "{car}"')
    def load(self):
        """Load the current data from disk"""
        try:
            with open(self._path, encoding="utf-8") as car_file:
                self._cars = [Car().from_dict(car) for car in json.load(car_file)]
        except json.decoder.JSONDecodeError:
            print("Car file is corrupted! Creating a new one")
            self.create_car_file()
    def sort(self):
        """Sort the cars by score, highest to lowest.

        Replaces the original O(n^2) selection sort with the built-in
        stable sort; ties keep their existing relative order, matching
        the old behavior exactly.
        """
        self._cars.sort(key=lambda car: car.score, reverse=True)
    def write(self):
        """Write the current cars to disk"""
        cars = [car.to_dict() for car in self._cars]
        with open(self._path, "w", encoding="utf-8") as car_file:
            car_file.write(json.dumps(cars))
|
from typing import List, Tuple, Dict
from math import ceil, inf
from heapq import heapify, heappop, heappushpop
def solve(q: List[int], u: int) -> Tuple[List[Dict[int, int]], List[int]]:
    """The general case: cover q with successive one-form segments.

    Repeatedly applies oneForm to the next window of at most u items,
    recording each segment's {item-index: slot-count} map and its period.
    """
    # preconditions: q is monotonically nonincreasing; all(qv > 0 for qv in q); u > 0
    forms = []  # type: List[Dict[int, int]]
    periods = []  # type: List[int]
    start = 0
    while start < len(q):
        f, pf = oneForm(q[start : start + min(len(q), u)], u)
        forms.append({start + i: fv for i, fv in enumerate(f)})
        periods.append(pf)
        start += len(f)
    return forms, periods
def oneForm(q: List[int], u: int):
    """Given q and U, return (f, p): a slot allocation over u slots and its period."""
    # precondition: len(q) <= u and all(qv > 0 for qv in q)
    def pVal(qv: int, fv: int) -> float:
        # Period implied by spreading qv units over fv slots; inf when the
        # item currently has no slots.
        return inf if fv == 0 else ceil(qv / fv)
    while True:
        # Fractional period if all u slots were perfectly utilized.
        idealP = sum(q) / u
        p = int(ceil(idealP))
        # Base allocation: number of whole period-p slots each item fills.
        f = [float(qv // p) for qv in q]
        if p == idealP and all(qv % p == 0 for qv in q):
            break # perfect; we're done
        uncommitted = [] # type: List[int]
        # Max-heap (values negated) of per-item periods, paired with indices.
        pvHeap = list(zip((-pVal(qv, fv) for qv, fv in zip(q, f)), range(len(q))))
        heapify(pvHeap)
        p, v = heappop(pvHeap)
        # Hand the remaining slots one at a time to the current worst item.
        for _ in range(u - int(sum(f))):
            uncommitted.append(v)
            f[v] += 1
            pNew, v = heappushpop(pvHeap, (-pVal(q[v], f[v]), v))
            if pNew > p:
                # Overall period improved (values are negated), so the slots
                # handed out so far become committed.
                uncommitted = []
                p = pNew
        # Discard uncommitted leftovers.
        for v in uncommitted:
            f[v] -= 1
        if p != -inf:
            p = int(-p)  # un-negate back to a real period
            break # for now, we'll call it good if it's feasible
    # Discard zeroes in f and corresponding q
    # TODO: add code to test our belief that the zeroes, if present, are
    # all at the end of f. Monotonicity of q *should* imply that, but...
    q, f = zip(*[x for x in zip(q, f) if x[1] != 0])
    return ([int(fv) for fv in f], p)
|
# Copyright (c) 2019 NVIDIA Corporation
"""Core PyTorch-base Neural Modules"""
__all__ = [
'SequenceEmbedding',
'ZerosLikeNM',
]
from typing import Dict, Iterable, Mapping, Optional, Set
import torch
import torch.nn as nn
from nemo.backends.pytorch.nm import NonTrainableNM, TrainableNM
from nemo.core.neural_types import *
class SequenceEmbedding(TrainableNM):
    """Trainable token-embedding module with optional dropout."""
    @property
    def input_ports(self):
        """Returns definitions of module input ports.
        """
        # return {"input_seq": NeuralType({0: AxisType(TimeTag), 1: AxisType(BatchTag)})}
        return {"input_seq": NeuralType(('B', 'T'))}
    @property
    def output_ports(self):
        """Returns definitions of module output ports.
        """
        # return {"outputs": NeuralType({0: AxisType(TimeTag), 1: AxisType(BatchTag), 2: AxisType(ChannelTag),})}
        return {"outputs": NeuralType(('B', 'T', 'C'), ChannelType())}
    def __init__(self, voc_size, hidden_size, dropout=0.0):
        """
        Args:
            voc_size: vocabulary size (number of embedding rows).
            hidden_size: embedding dimension.
            dropout: dropout probability applied to the embeddings;
                0.0 disables the dropout layer entirely.
        """
        super().__init__()
        self.voc_size = voc_size
        self.hidden_size = hidden_size
        self.dropout = dropout
        self.embedding = nn.Embedding(self.voc_size, self.hidden_size)
        # Only instantiate the dropout layer when it would have an effect.
        if self.dropout != 0.0:
            self.embedding_dropout = nn.Dropout(self.dropout)
        self.to(self._device)
    def forward(self, input_seq):
        """Look up embeddings for input_seq, applying dropout if configured."""
        embedded = self.embedding(input_seq)
        if self.dropout != 0.0:
            embedded = self.embedding_dropout(embedded)
        return embedded
class ZerosLikeNM(NonTrainableNM):
    """Non-trainable module producing a LongTensor of zeros shaped like its input."""
    @property
    def input_ports(self):
        """Returns definitions of module input ports.
        """
        # return {"input_type_ids": NeuralType({0: AxisType(BatchTag), 1: AxisType(TimeTag),})}
        return {"input_type_ids": NeuralType(('B', 'T'), VoidType())}
    @property
    def output_ports(self):
        """Returns definitions of module output ports.
        """
        # return {"input_type_ids": NeuralType({0: AxisType(BatchTag), 1: AxisType(TimeTag),})}
        return {"input_type_ids": NeuralType(('B', 'T'), ChannelType())}
    def __init__(self):
        super().__init__()
    def forward(self, input_type_ids):
        """Return zeros with the same shape as input_type_ids, cast to long."""
        return torch.zeros_like(input_type_ids).long()
|
import pymem
import pymem.process
import pymem.exception
import psutil
from editor import Player, Stat
from tkinter import Tk, messagebox, ttk, Button, Label, Menu, Spinbox, filedialog, IntVar
from pathlib import Path
from .config import Config
class Gui(Tk):
filename = ""
appname='PES/WE/JL PS2 Face/Hair assigner'
def __init__(self):
super().__init__()
#self.master = master
self.title(self.appname)
w = 350 # width for the Tk root
h = 250 # height for the Tk root
# get screen width and height
ws = self.winfo_screenwidth() # width of the screen
hs = self.winfo_screenheight() # height of the screen
# calculate x and y coordinates for the Tk root window
x = (ws/2) - (w/2)
y = (hs/2) - (h/2)
# set the dimensions of the screen
# and where it is placed
self.geometry('%dx%d+%d+%d' % (w, h, x, y))
try:
self.create_config()
except FileNotFoundError as e:
messagebox.showerror(title=self.appname, message=f"No config files found code error {e}")
self.destroy()
self.my_menu=Menu(self)
self.config(menu=self.my_menu)
self.file_menu = Menu(self.my_menu, tearoff=0)
self.edit_menu = Menu(self.my_menu, tearoff=0)
self.help_menu = Menu(self.my_menu, tearoff=0)
self.my_menu.add_cascade(label="File", menu=self.file_menu)
self.file_menu.add_command(label="Open", command=self.search_exe)
self.file_menu.add_command(label="Search process", command=self.get_by_process_name)
self.file_menu.add_command(label="Exit", command=self.quit)
self.my_menu.add_cascade(label="Edit", menu=self.edit_menu)
self.edit_submenu = Menu(self.my_menu, tearoff=0)
# Dinamically loading game versions as sub menu
for i in range(len(self.my_config.games_config)):
self.edit_submenu.add_command(label=self.my_config.games_config[i],command= lambda i=i: self.change_config(self.my_config.filelist[i]))
self.edit_menu.add_cascade(label="Game Version", menu=self.edit_submenu)
self.my_menu.add_cascade(label="Help", menu=self.help_menu)
self.help_menu.add_command(label="Manual", command=self.show_help)
self.help_menu.add_command(label="About", command=self.show_thanks)
self.game_ver_lbl = Label(self, text=f"Game version: {self.my_config.game_name}")
self.game_ver_lbl.pack()
self.player_lbl = Label(self, text="Player Name: ")
self.player_lbl.pack()
self.face_type_lbl = Label(self, text="Face Type").pack()
self.face_type_dropdown = ttk.Combobox(self,values=self.my_config.gui["face_type"],state="readonly")
self.face_type_dropdown.current(0)
self.face_type_dropdown.bind('<<ComboboxSelected>>', lambda event: self.set_param())
self.face_type_dropdown.pack()
self.skin_lbl = Label(self,text="Skin Colour").pack()
self.skin_spb_var = IntVar()
self.skin_spb_var.set(1)
self.skin_spb = Spinbox(self, textvariable=self.skin_spb_var, from_=1, to=self.my_config.gui["skin_colour_max"],command = self.set_param)
self.skin_spb.bind('<Return>', lambda event: self.set_param())
self.skin_spb.pack()
self.face_id_lbl = Label(self,text="Face ID").pack()
self.face_spb_var = IntVar()
self.face_spb_var.set(1)
self.face_id_spb = Spinbox(self, textvariable=self.face_spb_var, from_=1, to=self.my_config.gui["face_id_max"],command = self.set_param)
self.face_id_spb.bind('<Return>', lambda event: self.set_param())
self.face_id_spb.pack()
self.sh2_var = IntVar()
self.sh2_var.set(0)
self.sh2_cb = ttk.Checkbutton(self,text = "Special Hairstyles 2", variable = self.sh2_var, command=self.set_param, state='disabled')
self.sh2_cb.pack()
try:
if self.my_config.gui["pes2014_sh_2"]:
self.sh2_cb.config(state='active')
except KeyError:
pass
self.hair_id_lbl = Label(self,text="Special Hairstyle ID").pack()
self.hair_spb_var = IntVar()
self.hair_spb_var.set(0)
self.hair_id_spb = Spinbox(self, textvariable=self.hair_spb_var, from_=0, to=self.my_config.gui["hair_id_max"],command = self.set_param)
self.hair_id_spb.bind('<Return>', lambda event: self.set_param())
self.hair_id_spb.pack()
self.read_values = Button(self,text="Read data", command=self.read_player).pack()
def create_config(self):
self.my_config = Config()
def change_config(self, file):
self.my_config = Config(file)
self.refresh_gui()
def refresh_gui(self):
self.game_ver_lbl.config(text=f"Game version: {self.my_config.game_name}")
self.face_type_dropdown.config(values=self.my_config.gui["face_type"])
self.skin_spb.config(to=self.my_config.gui["skin_colour_max"])
self.face_id_spb.config(to=self.my_config.gui["face_id_max"])
self.hair_id_spb.config(to=self.my_config.gui["hair_id_max"])
self.player_edit_mode = self.my_config.player_edit_mode
try:
if self.my_config.gui["pes2014_sh_2"]:
self.sh2_cb.config(state='active')
except KeyError:
self.sh2_cb.config(state='disabled')
def search_exe(self):
self.filename = filedialog.askopenfilename(initialdir=".",title=self.appname, filetypes=([("PCSX2 Executable", ".exe"),]))
if self.filename!="":
self.load_data()
self.read_player()
def get_by_process_name(self):
PROCNAME = "pcsx2.exe"
for proc in psutil.process_iter():
if proc.name() == PROCNAME:
self.filename = proc.name()
self.load_data()
self.read_player()
def show_help(self):
messagebox.showinfo(title=self.appname,message=
"""
Work in progress...
""".replace(' ', ''))
def show_thanks(self):
messagebox.showinfo(title=self.appname,message="Developed by PES Indie")
def check_version(self):
if Path(self.filename).name == "pcsx2.exe":
"""
If we lay here it is pcsx2 emulator
"""
return True
else:
"""
We shouldn't be here
"""
messagebox.showerror(title=self.appname,message="Emulator Version")
return 0
def load_data(self):
if self.check_version()==0:
return 0
self.pes_we_exe = Path(self.filename).name
self.player_bytes_size = 124
try:
self.pm = pymem.Pymem(self.pes_we_exe)
self.client = pymem.process.module_from_name(self.pm.process_handle, self.pes_we_exe).lpBaseOfDll
self.player_edit_mode = self.my_config.player_edit_mode - self.client
except pymem.exception.ProcessNotFound as e:
messagebox.showerror(title=self.appname, message=f"pymem error code {e}")
return 0
def read_player(self):
if self.filename=="":
messagebox.showerror(title=self.appname, message="You must select your exe file first or run your game\nbefore trying to read or set any data")
return 0
try:
self.player = Player(bytearray(self.pm.read_bytes(self.client + self.player_edit_mode, self.player_bytes_size)), self.my_config)
except pymem.exception.MemoryReadError as e:
messagebox.showerror(title=self.appname, message=f"pymem error code {e}")
return 0
except pymem.exception.ProcessError as e:
messagebox.showerror(title=self.appname, message=f"pymem error code {e}")
return 0
self.player_lbl.config(text=f"Player Name: {self.player.name}")
self.face_type_dropdown.current(self.player.face_type.get_value())
self.skin_spb_var.set(self.player.skin_colour.get_value() + 1)
self.hair_spb_var.set(self.player.hair_id.get_value())
try:
if self.my_config.gui["pes2014_sh_2"]:
self.sh2_var.set(self.player.sh_2.get_value())
self.face_spb_var.set(self.player.face_id.get_value())
except KeyError:
self.face_spb_var.set(self.player.face_id.get_value() + 1)
#self.test()
def test(self):
#218911CD
pm = pymem.Pymem(self.pes_we_exe)
client = pymem.process.module_from_name(self.pm.process_handle, self.pes_we_exe).lpBaseOfDll
test_pl = [1, 2, 3, 4, 5, 6]
pl_list = []
for i in test_pl:
pl_list.append(bytearray(self.pm.read_bytes(0x21891168 + (self.player_bytes_size * i), self.player_bytes_size)))
print(pl_list)
#data = self.player.data
#validate=[*range(0, 8, 1)]#+[*range(0, 6, 1)]
#validate = [0,1,2,3,4,5,6]
#validate = [6,5,4,3,2,1,0]
#validate = [122,933,309,97,145,2]
validate = [121,932,308,141,144,1]
#print(validate)
test=[]
print(Stat(self.player.data,101, 5, 511, "face").get_value())
print(Stat(self.player.data,102, 0, 511, "face").get_value())
"""
for shift in range(0,65536):
#print (f"the mask is {mask}")
for mask in range(0,65536):
#if mask==2047:
# print("llegamos al punto conocido")
#mask=4095
offset = 101
stat_name = ""
test.append(Stat(pl_list[0],offset, shift, mask, stat_name).get_value())
test.append(Stat(pl_list[1],offset, shift, mask, stat_name).get_value())
test.append(Stat(pl_list[2],offset, shift, mask, stat_name).get_value())
test.append(Stat(pl_list[3],offset, shift, mask, stat_name).get_value())
test.append(Stat(pl_list[4],offset, shift, mask, stat_name).get_value())
test.append(Stat(pl_list[5],offset, shift, mask, stat_name).get_value())
#test.append((get_value(of,690,offset, shift, mask, stat_name) ))
#test.append((get_value(of,4473,offset, shift, mask, stat_name) ))
#test.append((get_value(of,1485,offset, shift, mask, stat_name) ))
#print (test)
#test.append((get_value(of,4521,offset, shift, mask, stat_name) ))
#test.append((get_value(of,1229,offset, shift, mask, stat_name) ))
#test.append((get_value(of,690,offset, shift, mask, stat_name) ))
#test.append((get_value(of,4029,offset, shift, mask, stat_name) ))
if test == validate:
print(shift, mask)
test=[]
"""
print("END")
def set_param(self):
if self.filename=="":
messagebox.showerror(title=self.appname, message="You must select your exe file first or run your game\nbefore trying to read or set any data")
return 0
if self.check_val(self.face_type_dropdown.current(),0,len(self.my_config.gui["face_type"])-1):
self.player.face_type.set_value(self.face_type_dropdown.current())
else:
messagebox.showerror(title=self.appname, message=f"Value for {self.player.face_type.name} out of range, check Help-> Manual")
if self.check_val(self.skin_spb_var.get()-1, 0, self.my_config.gui["skin_colour_max"]):
self.player.skin_colour.set_value(self.skin_spb_var.get()-1)
else:
messagebox.showerror(title=self.appname, message=f"Value for {self.player.skin_colour.name} out of range, check Help-> Manual")
if self.check_val(self.hair_spb_var.get(), 0, self.my_config.gui["hair_id_max"]):
self.player.hair_id.set_value(self.hair_spb_var.get())
else:
messagebox.showerror(title=self.appname, message=f"Value for {self.player.hair_id.name} out of range, check Help-> Manual")
try:
if self.my_config.gui["pes2014_sh_2"]:
self.player.sh_2.set_value(self.sh2_var.get())
if self.check_val(self.face_spb_var.get(), 1, self.my_config.gui["face_id_max"]):
self.player.face_id.set_value(self.face_spb_var.get())
else:
messagebox.showerror(title=self.appname, message=f"Value for {self.player.face_id.name} out of range, check Help-> Manual")
except KeyError:
if self.check_val(self.face_spb_var.get()-1, 0, self.my_config.gui["face_id_max"]):
self.player.face_id.set_value(self.face_spb_var.get()-1)
else:
messagebox.showerror(title=self.appname, message=f"Value for {self.player.face_id.name} out of range, check Help-> Manual")
# Here we set the values to memory
try:
self.pm.write_bytes(self.client + self.player_edit_mode,bytes(self.player.data),self.player_bytes_size)
except pymem.exception.MemoryWriteError as e:
messagebox.showerror(title=self.appname, message=f"pymem error code {e}")
except pymem.exception.ProcessError as e:
messagebox.showerror(title=self.appname, message=f"pymem error code {e}")
except pymem.exception.TypeError as e:
messagebox.showerror(title=self.appname, message=f"pymem error code {e}")
def check_val(self, val, min, max):
return min<=val<=max
    def start(self):
        """Lock the window size and enter the Tk main event loop (blocks)."""
        self.resizable(False, False)
        self.mainloop()
|
"""
Baseline training file used in app production
----------------OBSOLETE----------------
import os
import sqlite3
from sklearn.linear_model import LogisticRegression
from basilica import Connection
import psycopg2
# Load in basilica api key
API_KEY = "d3c5e936-18b0-3aac-8a2c-bf95511eaaa5"
# Filepath for database
DATABASE_URL = "postgres://khqrpuidioocyy:28306f6ac214b8c5ff675ab1e38ed6007d0b810d0a538df3e0db3da0f3cde717@ec2-18-210-214-86.compute-1.amazonaws.com:5432/d1jb037n8m5r20"
# Heroku postgresql credentials
DB_NAME = "d1jb037n8m5r20"
DB_USER = "khqrpuidioocyy"
DB_PASSWORD = "28306f6ac214b8c5ff675ab1e38ed6007d0b810d0a538df3e0db3da0f3cde717"
DB_HOST = "ec2-18-210-214-86.compute-1.amazonaws.com"
# Connect to basilica for embedding text
basilica_connection = Connection(API_KEY)
sql_connection = psycopg2.connect(dbname=DB_NAME, user=DB_USER, password=DB_PASSWORD, host=DB_HOST)
sql_cursor = sql_connection.cursor()
def train_model():
# SQL commands to select all and delete null
select_data =
SELECT
*
FROM
"postgresql-shallow-75985";
# breakpoint()
# print(data)
# Execute select all commands
sql_cursor.execute(select_data)
data = sql_cursor.fetchall()
# Ensure the select command is working
# for row in data:
# print(f"\nSubreddits: {row['subreddit']}")
# print(f"\nTest: {row['Text']}")
print("TRAINING THE MODEL...")
subreddits = []
text_embeddings = []
# breakpoint()
for row in data:
subreddits.append(row[1])
embedding = basilica_connection.embed_sentence(row[2], model="reddit")
text_embeddings.append(embedding)
# breakpoint()
# print(subreddits, text_embeddings)
classifier = LogisticRegression()
classifier.fit(text_embeddings, subreddits)
return classifier
if __name__ == "__main__":
classifier = train_model()
# breakpoint()
""" |
import phonenumbers
from authy.api import AuthyApiClient
from django import forms
from django.conf import settings
from phonenumbers.phonenumberutil import NumberParseException
from .models import TwoFAUser
authy_api = AuthyApiClient(settings.ACCOUNT_SECURITY_API_KEY)
# Create Sign Up form and SMS Token Verification Form
class BootstrapInput(forms.TextInput):
    """TextInput rendered inside Bootstrap column/form-group markup.

    ``size`` is the Bootstrap grid width used for the wrapping columns
    (col-xs/sm/md-{size}).
    """

    def __init__(self, placeholder, size=12, *args, **kwargs):
        self.size = size
        # Bug fix: the original accepted *args/**kwargs but silently dropped
        # them (including any caller-supplied attrs). Merge instead, while
        # keeping the historical defaults.
        attrs = kwargs.pop('attrs', None) or {}
        attrs.setdefault('class', 'form-control input-sm')
        attrs['placeholder'] = placeholder
        super(BootstrapInput, self).__init__(attrs=attrs, *args, **kwargs)

    def bootwrap_input(self, input_tag):
        """Wrap the rendered <input> HTML in Bootstrap grid markup."""
        classes = 'col-xs-{n} col-sm-{n} col-md-{n}'.format(n=self.size)
        return '''<div class="{classes}">
        <div class="form-group">{input_tag}</div>
        </div>
        '''.format(classes=classes, input_tag=input_tag)

    def render(self, *args, **kwargs):
        """Render the underlying widget, then apply the Bootstrap wrapper."""
        input_tag = super(BootstrapInput, self).render(*args, **kwargs)
        return self.bootwrap_input(input_tag)
class BootstrapPasswordInput(BootstrapInput):
    """Bootstrap-wrapped password input (same markup as BootstrapInput)."""
    input_type = 'password'
    template_name = 'django/forms/widgets/password.html'
class RegistrationForm(forms.ModelForm):
    """Sign-up form: credentials plus a phone number for 2FA enrollment.

    Field-level ``clean_*`` hooks check username/email uniqueness and
    normalize the country code; ``clean`` cross-validates the password
    confirmation and the full phone number via ``phonenumbers``.
    """

    class Meta:
        model = TwoFAUser
        fields = ('username', 'email', 'password')
        widgets = {
            'username': BootstrapInput('User Name'),
            'email': BootstrapInput('Email Address'),
            'password': BootstrapPasswordInput('Password', size=6),
        }

    country_code = forms.CharField(
        widget=BootstrapInput('Country Code', size=6))
    phone_number = forms.CharField(
        widget=BootstrapInput('Phone Number', size=6))
    confirm_password = forms.CharField(
        widget=BootstrapPasswordInput('Confirm Password', size=6))

    def clean_username(self):
        """Reject usernames that are already registered."""
        username = self.cleaned_data['username']
        if TwoFAUser.objects.filter(username=username).exists():
            self.add_error('username', 'Username is already taken')
        return username

    def clean_email(self):
        """Reject email addresses that are already registered."""
        email = self.cleaned_data['email']
        if TwoFAUser.objects.filter(email=email).exists():
            self.add_error('email', 'Email is already registered')
        return email

    def clean_country_code(self):
        """Ensure the country code carries a leading '+'."""
        country_code = self.cleaned_data['country_code']
        if not country_code.startswith('+'):
            country_code = '+' + country_code
        return country_code

    def clean(self):
        """Cross-field validation: password confirmation and phone number.

        Bug fix: fields that failed their own validation are absent from
        ``cleaned_data``, so the original direct indexing raised ``KeyError``
        instead of reporting form errors; use ``.get`` and skip checks whose
        inputs are missing.
        """
        data = super(RegistrationForm, self).clean()
        password = data.get('password')
        confirm_password = data.get('confirm_password')
        if password and confirm_password and password != confirm_password:
            self.add_error(
                'password',
                'Password and confirmation did not match'
            )
        country_code = data.get('country_code')
        number = data.get('phone_number')
        if country_code and number:
            phone_number = country_code + number
            try:
                phone_number = phonenumbers.parse(phone_number, None)
                if not phonenumbers.is_valid_number(phone_number):
                    self.add_error('phone_number', 'Invalid phone number')
            except NumberParseException as e:
                self.add_error('phone_number', e)
        return data
class TokenVerificationForm(forms.Form):
    """Form collecting the 7 one-time-token digits sent via Authy.

    ``is_valid`` takes the user's authy_id so that ``clean`` can verify the
    assembled token against the Authy API.
    """
    token1 = forms.CharField(widget=forms.TextInput(attrs={'size': '1','min': '0', 'max': '9','pattern' : '[0-9]{1}'}))
    token2 = forms.CharField(widget=forms.TextInput(attrs={'size': '1','min': '0', 'max': '9','pattern' : '[0-9]{1}'}))
    token3 = forms.CharField(widget=forms.TextInput(attrs={'size': '1','min': '0', 'max': '9','pattern' : '[0-9]{1}'}))
    token4 = forms.CharField(widget=forms.TextInput(attrs={'size': '1','min': '0', 'max': '9','pattern' : '[0-9]{1}'}))
    token5 = forms.CharField(widget=forms.TextInput(attrs={'size': '1','min': '0', 'max': '9','pattern' : '[0-9]{1}'}))
    token6 = forms.CharField(widget=forms.TextInput(attrs={'size': '1','min': '0', 'max': '9','pattern' : '[0-9]{1}'}))
    token7 = forms.CharField(widget=forms.TextInput(attrs={'size': '1','min': '0', 'max': '9','pattern' : '[0-9]{1}'}))

    def is_valid(self, authy_id):
        """Stash the Authy user id, then run standard form validation."""
        self.authy_id = authy_id
        return super(TokenVerificationForm, self).is_valid()

    def clean(self):
        """Assemble the 7 digits and verify the token with Authy.

        Bug fixes vs. the original:
        - missing digits raised ``KeyError`` (fields that fail validation are
          absent from ``cleaned_data``); use ``.get`` and report an error.
        - ``add_error('token', ...)`` raised ``ValueError`` because no field
          named 'token' exists; attach the error as a non-field error.
        """
        digits = [self.cleaned_data.get('token%d' % i) for i in range(1, 8)]
        if not all(digits):
            self.add_error(None, 'All token digits are required')
            return self.cleaned_data
        token = ''.join(digits)
        verification = authy_api.tokens.verify(self.authy_id, token)
        if not verification.ok():
            self.add_error(None, 'Invalid token')
        return self.cleaned_data
|
# -*- coding: utf-8 -*-
# for localized messages
from __init__ import _
import os
from enigma import eTimer
from Components.Console import Console
from Components.Harddisk import harddiskmanager #global harddiskmanager
from xml.etree.cElementTree import parse as cet_parse
XML_FSTAB = "/etc/enigma2/automounts.xml"
def rm_rf(d): # only for removing the ipkg stuff from /media/hdd subdirs
    """Recursively delete directory ``d`` and everything below it.

    Best-effort: any failure is printed and swallowed so that mounting can
    proceed even when a file cannot be removed. (Python 2 code.)
    """
    try:
        for path in (os.path.join(d,f) for f in os.listdir(d)):
            if os.path.isdir(path):
                rm_rf(path)
            else:
                os.unlink(path)
        os.rmdir(d)
    except Exception, ex:
        print "AutoMount failed to remove", d, "Error:", ex
class AutoMount():
    """Manages Mounts declared in a XML-Document.

    Reads NFS/CIFS share definitions from /etc/enigma2/automounts.xml,
    (un)mounts each one under /media/net/<sharename> via shell commands run
    asynchronously in a Console, and can serialize the configuration back to
    the XML file. (Python 2 / enigma2 plugin code.)
    """
    def __init__(self):
        # sharename -> mount-description dict; see getAutoMountPoints().
        self.automounts = {}
        self.restartConsole = Console()
        self.MountConsole = Console()
        self.removeConsole = Console()
        self.activeMountsCounter = 0
        # Initialize Timer
        # Timer defers the final callback until the async mount commands
        # have drained (see mountTimeout).
        self.callback = None
        self.timer = eTimer()
        self.timer.callback.append(self.mountTimeout)
        self.getAutoMountPoints()
    def getAutoMountPoints(self, callback = None):
        """Re-read the automounts XML and (re)check every declared mount.

        ``callback`` (if given) is invoked with True once all mounts have
        been processed (possibly deferred via the timer).
        """
        # Initialize mounts to empty list
        # NOTE(review): this local appears unused; self.automounts below is
        # the real accumulator.
        automounts = []
        self.automounts = {}
        self.activeMountsCounter = 0
        if not os.path.exists(XML_FSTAB):
            return
        tree = cet_parse(XML_FSTAB).getroot()
        def getValue(definitions, default):
            # Return the text of the LAST matching element, or `default`
            # when the element list is empty.
            # Initialize Output
            ret = ""
            # How many definitions are present
            Len = len(definitions)
            return Len > 0 and definitions[Len-1].text or default
        # Config is stored in "mountmanager" element
        # Read out NFS Mounts
        for nfs in tree.findall("nfs"):
            for mount in nfs.findall("mount"):
                data = { 'isMounted': False, 'active': False, 'ip': False, 'sharename': False, 'sharedir': False, 'username': False, \
                    'password': False, 'mounttype' : False, 'options' : False, 'hdd_replacement' : False }
                try:
                    data['mounttype'] = 'nfs'.encode("UTF-8")
                    data['active'] = getValue(mount.findall("active"), False).encode("UTF-8")
                    # 'active' is stored as the string 'True'/'False' in XML.
                    if data["active"] == 'True' or data["active"] == True:
                        self.activeMountsCounter +=1
                    data['hdd_replacement'] = getValue(mount.findall("hdd_replacement"), "False").encode("UTF-8")
                    data['ip'] = getValue(mount.findall("ip"), "192.168.0.0").encode("UTF-8")
                    data['sharedir'] = getValue(mount.findall("sharedir"), "/exports/").encode("UTF-8")
                    data['sharename'] = getValue(mount.findall("sharename"), "MEDIA").encode("UTF-8")
                    data['options'] = getValue(mount.findall("options"), "rw,nolock,tcp").encode("UTF-8")
                    self.automounts[data['sharename']] = data
                except Exception, e:
                    print "[MountManager] Error reading Mounts:", e
        # Read out CIFS Mounts
        for nfs in tree.findall("cifs"):
            for mount in nfs.findall("mount"):
                data = { 'isMounted': False, 'active': False, 'ip': False, 'sharename': False, 'sharedir': False, 'username': False, \
                    'password': False, 'mounttype' : False, 'options' : False, 'hdd_replacement' : False }
                try:
                    data['mounttype'] = 'cifs'.encode("UTF-8")
                    data['active'] = getValue(mount.findall("active"), False).encode("UTF-8")
                    if data["active"] == 'True' or data["active"] == True:
                        self.activeMountsCounter +=1
                    data['hdd_replacement'] = getValue(mount.findall("hdd_replacement"), "False").encode("UTF-8")
                    data['ip'] = getValue(mount.findall("ip"), "192.168.0.0").encode("UTF-8")
                    data['sharedir'] = getValue(mount.findall("sharedir"), "/exports/").encode("UTF-8")
                    data['sharename'] = getValue(mount.findall("sharename"), "MEDIA").encode("UTF-8")
                    data['options'] = getValue(mount.findall("options"), "rw,nolock").encode("UTF-8")
                    data['username'] = getValue(mount.findall("username"), "guest").encode("UTF-8")
                    data['password'] = getValue(mount.findall("password"), "").encode("UTF-8")
                    self.automounts[data['sharename']] = data
                except Exception, e:
                    print "[MountManager] Error reading Mounts:", e
        # Process mounts one at a time; CheckMountPointFinished pops the next.
        self.checkList = self.automounts.keys()
        if not self.checkList:
            print "[AutoMount.py] self.automounts without mounts",self.automounts
            if callback is not None:
                callback(True)
        else:
            self.CheckMountPoint(self.checkList.pop(), callback)
    def sanitizeOptions(self, origOptions, cifs=False):
        """Normalize a mount options string (rsize/wsize defaults, tcp for NFS).

        NOTE(review): not called from any code visible here — confirm callers
        before changing.
        """
        options = origOptions.strip()
        if not options:
            options = 'rsize=8192,wsize=8192'
            if not cifs:
                options += ',tcp'
        else:
            if 'rsize' not in options:
                options += ',rsize=8192'
            if 'wsize' not in options:
                options += ',wsize=8192'
            if not cifs and 'tcp' not in options and 'udp' not in options:
                options += ',tcp'
        return options
    def CheckMountPoint(self, item, callback):
        """Mount or unmount a single share (by sharename) asynchronously."""
        data = self.automounts[item]
        if not self.MountConsole:
            self.MountConsole = Console()
        command = None
        path = os.path.join('/media/net', data['sharename'])
        if self.activeMountsCounter == 0:
            # No active mounts declared at all: only unmount inactive shares.
            print "self.automounts without active mounts",self.automounts
            if data['active'] == 'False' or data['active'] is False:
                umountcmd = "umount -fl '%s'" % path
                print "[AutoMount.py] UMOUNT-CMD--->",umountcmd
                self.MountConsole.ePopen(umountcmd, self.CheckMountPointFinished, [data, callback])
        else:
            if data['active'] == 'False' or data['active'] is False:
                command = "umount -fl '%s'" % path
            elif data['active'] == 'True' or data['active'] is True:
                try:
                    if not os.path.exists(path):
                        os.makedirs(path)
                    if data['mounttype'] == 'nfs':
                        if not os.path.ismount(path):
                            if data['options']:
                                options = "tcp,noatime," + data['options']
                            else:
                                options = "tcp,noatime"
                            tmpcmd = "mount -t nfs -o %s '%s' '%s'" % (options, data['ip'] + ':/' + data['sharedir'], path)
                            command = tmpcmd.encode("UTF-8")
                    elif data['mounttype'] == 'cifs':
                        if not os.path.ismount(path):
                            # Escape spaces in the username for the shell.
                            tmpusername = data['username'].replace(" ", "\\ ")
                            options = data['options'] + ',noatime,noserverino,iocharset=utf8,username='+ tmpusername + ',password='+ data['password']
                            tmpcmd = "mount -t cifs -o %s '//%s/%s' '%s'" % (options, data['ip'], data['sharedir'], path)
                            command = tmpcmd.encode("UTF-8")
                except Exception, ex:
                    print "[AutoMount.py] Failed to create", path, "Error:", ex
                    command = None
            if command:
                print "[AutoMount.py] U/MOUNTCMD--->",command
                self.MountConsole.ePopen(command, self.CheckMountPointFinished, [data, callback])
            else:
                # Nothing to do (already mounted, or makedirs failed):
                # advance the state machine directly.
                self.CheckMountPointFinished(None,None, [data, callback])
    def CheckMountPointFinished(self, result, retval, extra_args):
        """Console callback: record mount state, then process the next share."""
        print "[AutoMount.py] CheckMountPointFinished",result,retval
        (data, callback ) = extra_args
        path = os.path.join('/media/net', data['sharename'])
        if os.path.exists(path):
            if os.path.ismount(path):
                if self.automounts.has_key(data['sharename']):
                    self.automounts[data['sharename']]['isMounted'] = True
                    desc = data['sharename']
                    if self.automounts[data['sharename']]['hdd_replacement'] == 'True': #hdd replacement hack
                        self.makeHDDlink(path)
                    harddiskmanager.addMountedPartition(path, desc)
            else:
                # Mount failed / was unmounted: clean up the empty mountpoint.
                if self.automounts.has_key(data['sharename']):
                    self.automounts[data['sharename']]['isMounted'] = False
                if os.path.exists(path):
                    if not os.path.ismount(path):
                        try:
                            os.rmdir(path)
                            harddiskmanager.removeMountedPartition(path)
                        except Exception, ex:
                            print "Failed to remove", path, "Error:", ex
        if self.checkList:
            # Go to next item in list...
            self.CheckMountPoint(self.checkList.pop(), callback)
        if self.MountConsole:
            if len(self.MountConsole.appContainers) == 0:
                # All async commands drained; fire the callback after 1s.
                if callback is not None:
                    self.callback = callback
                    self.timer.startLongTimer(1)
    def makeHDDlink(self, path):
        """Point /media/hdd at ``path`` via symlink (hdd replacement hack)."""
        hdd_dir = '/media/hdd'
        print "[AutoMount.py] symlink %s %s" % (path, hdd_dir)
        if os.path.islink(hdd_dir):
            if os.readlink(hdd_dir) != path:
                os.remove(hdd_dir)
                os.symlink(path, hdd_dir)
        elif os.path.ismount(hdd_dir) is False:
            if os.path.isdir(hdd_dir):
                rm_rf(hdd_dir)
            try:
                os.symlink(path, hdd_dir)
            except OSError, ex:
                print "[AutoMount.py] add symlink fails!", ex
        # Enigma2 expects a movie/ subdirectory on the hdd mount.
        movie = os.path.join(hdd_dir, 'movie')
        if not os.path.exists(movie):
            try:
                os.mkdir(movie)
            except Exception, ex:
                print "[AutoMount.py] Failed to create ", movie, "Error:", ex
    def mountTimeout(self):
        """Timer callback: invoke the stored callback once commands drained."""
        self.timer.stop()
        if self.MountConsole:
            if len(self.MountConsole.appContainers) == 0:
                print "self.automounts after mounting",self.automounts
                if self.callback is not None:
                    self.callback(True)
    def getMountsList(self):
        """Return the sharename -> mount-description mapping."""
        return self.automounts
    def getMountsAttribute(self, mountpoint, attribute):
        """Return one attribute of a mount, or None when unknown."""
        if self.automounts.has_key(mountpoint):
            if self.automounts[mountpoint].has_key(attribute):
                return self.automounts[mountpoint][attribute]
        return None
    def setMountsAttribute(self, mountpoint, attribute, value):
        """Set one attribute of a known mount (silently ignores unknown)."""
        if self.automounts.has_key(mountpoint):
            self.automounts[mountpoint][attribute] = value
    def writeMountsConfig(self):
        """Serialize self.automounts back to the XML config file."""
        # Generate List in RAM
        # NOTE(review): `list` shadows the builtin; kept as-is (doc-only pass).
        list = ['<?xml version="1.0" ?>\n<mountmanager>\n']
        for sharename, sharedata in self.automounts.items():
            mtype = sharedata['mounttype']
            list.append('<' + mtype + '>\n')
            list.append(' <mount>\n')
            list.append("  <active>" + str(sharedata['active']) + "</active>\n")
            list.append("  <hdd_replacement>" + str(sharedata['hdd_replacement']) + "</hdd_replacement>\n")
            list.append("  <ip>" + sharedata['ip'] + "</ip>\n")
            list.append("  <sharename>" + sharedata['sharename'] + "</sharename>\n")
            list.append("  <sharedir>" + sharedata['sharedir'] + "</sharedir>\n")
            list.append("  <options>" + sharedata['options'] + "</options>\n")
            if sharedata['mounttype'] == 'cifs':
                list.append("  <username>" + sharedata['username'] + "</username>\n")
                list.append("  <password>" + sharedata['password'] + "</password>\n")
            list.append(' </mount>\n')
            list.append('</' + mtype + '>\n')
        # Close Mountmanager Tag
        list.append('</mountmanager>\n')
        # Try Saving to Flash
        try:
            open(XML_FSTAB, "w").writelines(list)
        except Exception, e:
            print "[AutoMount.py] Error Saving Mounts List:", e
    def stopMountConsole(self):
        """Drop the mount Console reference (its commands keep running)."""
        if self.MountConsole is not None:
            self.MountConsole = None
    def removeMount(self, mountpoint, callback = None):
        """Forget a mount and unmount its /media/net path asynchronously."""
        print "[AutoMount.py] removing mount: ",mountpoint
        self.newautomounts = {}
        for sharename, sharedata in self.automounts.items():
            # NOTE(review): `is not` compares object identity, not equality —
            # equal strings may be distinct objects, so the entry being
            # removed may survive this filter; should likely be `!=`.
            if sharename is not mountpoint.strip():
                self.newautomounts[sharename] = sharedata
        self.automounts.clear()
        self.automounts = self.newautomounts
        if not self.removeConsole:
            self.removeConsole = Console()
        path = '/media/net/'+ mountpoint
        umountcmd = "umount -fl '%s'" % path
        print "[AutoMount.py] UMOUNT-CMD--->",umountcmd
        self.removeConsole.ePopen(umountcmd, self.removeMountPointFinished, [path, callback])
    def removeMountPointFinished(self, result, retval, extra_args):
        """Console callback for removeMount: delete the now-empty mountpoint."""
        print "[AutoMount.py] removeMountPointFinished result", result, "retval", retval
        (path, callback ) = extra_args
        if os.path.exists(path):
            if not os.path.ismount(path):
                try:
                    os.rmdir(path)
                    harddiskmanager.removeMountedPartition(path)
                except Exception, ex:
                    print "Failed to remove", path, "Error:", ex
        if self.removeConsole:
            if len(self.removeConsole.appContainers) == 0:
                if callback is not None:
                    self.callback = callback
                    self.timer.startLongTimer(1)
iAutoMount = AutoMount()
|
def to_operator(current, new_value):
    """Normalize ``new_value`` into an UpdateOperator.

    Plain values are wrapped in Set. An existing operator is passed through,
    except that a non-Set operator applied to a missing (None) current value
    is rejected, since there is nothing to operate on.
    """
    if isinstance(new_value, UpdateOperator):
        if current is None and not isinstance(new_value, Set):
            raise Exception('Cant use operator on None')
        return new_value
    return Set(new_value)
class UpdateOperator(object):
    """UpdateOperators can be used to automatically generate
    update queries that are understood by mongo. Each of the operators
    can be used as defined in the mongo manual as they're just
    a direct mapping.

    Subclasses implement ``__call__`` (mongo operator name + operand) and
    ``apply`` (the local, client-side equivalent of the update).
    """
    def __init__(self, update):
        # `update` is the operand: the value to set, amount to add, etc.
        super().__init__()
        self.update = update
    def set_original_value(self, value):
        # Remember the current (pre-update) value so apply() can compute
        # the post-update result locally.
        self.original_value = value
class Set(UpdateOperator):
    """Is used to set the specified field to any given value.
    Not using it is the default case and
    functionally the same as just leaving out an UpdateOperator completely.
    Example usage:
    >>> doc.num = 5
    >>> doc.num = Set(5)
    Query:
    >>> Set(5)()
    ('$set', 5)
    """
    def __call__(self):
        # Mongo operator name plus operand for the query builder.
        return '$set', self.update
    def apply(self):
        # Local equivalent: the field simply becomes the new value.
        return self.update
class Unset(UpdateOperator):
    """Is used to remove an entry from a list or dict.
    Example usage:
    >>> del doc.map['test']
    >>> doc.map = Unset('test')
    Query:
    >>> Unset('test')()
    ('$unset', 'test')
    """
    def __call__(self):
        return '$unset', self.update
    def apply(self):
        # NOTE(review): returns the key being unset, not the container
        # without it — confirm this matches callers' expectations.
        return self.update
class Inc(UpdateOperator):
    """Is used to modify a numeric value by a given amount.
    Example usage:
    >>> doc.num = Inc(5)
    >>> doc.num = Inc(-5)
    Query:
    >>> Inc(5)()
    ('$inc', 5)
    """
    def __call__(self):
        return '$inc', self.update
    def apply(self):
        # Local equivalent of $inc: add the delta to the current value.
        return self.original_value + self.update
class Dec(UpdateOperator):
    """Is used to decrease a numeric value.
    Mongo has no $dec, so this maps to $inc with a negated operand.
    Example usage:
    >>> doc.num = Dec(5)
    Query:
    >>> Dec(5)()
    ('$inc', -5)
    """
    def __call__(self):
        return '$inc', -self.update
    def apply(self):
        return self.original_value - self.update
class Max(UpdateOperator):
    """Update the field to the maximum of database and current value.
    Example usage:
    >>> doc.num = Max(5)
    Query:
    >>> Max(5)()
    ('$max', 5)
    """
    def __call__(self):
        return '$max', self.update
    def apply(self):
        return max(self.original_value, self.update)
class Min(UpdateOperator):
    """Update the field to the minimum of database and current value.
    Example usage:
    >>> doc.num = Min(5)
    Query:
    >>> Min(5)()
    ('$min', 5)
    """
    def __call__(self):
        return '$min', self.update
    def apply(self):
        return min(self.original_value, self.update)
class Mul(UpdateOperator):
    """Is used to multipy a numeric value by a given amount.
    Example usage:
    >>> doc.num = Mul(5)
    Query:
    >>> Mul(5)()
    ('$mul', 5)
    """
    def __call__(self):
        return '$mul', self.update
    def apply(self):
        return self.original_value * self.update
class Push(UpdateOperator):
    """Is used to append a value to a list.
    Example usage:
    >>> doc.num_list = Push(5)
    Query:
    >>> Push(5)()
    ('$push', 5)
    """
    def __call__(self):
        return '$push', self.update
    def apply(self):
        """Return the list with the value appended.

        Bug fix: the original returned ``list.append(...)``, which is always
        ``None``. Build a new list instead — consistent with Pull/PullAll,
        which also return new lists rather than mutating original_value.
        """
        return self.original_value + [self.update]
class Pull(UpdateOperator):
    """Is used to pull all entries that match the given value.
    Example usage:
    >>> doc.num_list = Pull(5)
    Query:
    >>> Pull(5)()
    ('$pull', 5)
    """
    def __call__(self):
        return '$pull', self.update
    def apply(self):
        # Local equivalent: a new list without any matching entries.
        return [
            val for val in self.original_value
            if val != self.update
        ]
class PullAll(UpdateOperator):
    """Is used to pull all entries that match a value from a list.
    Example usage:
    >>> doc.num_list = PullAll([5, 6, 7])
    Query:
    >>> PullAll([5, 6, 7])()
    ('$pullAll', [5, 6, 7])
    """
    def __call__(self):
        return '$pullAll', self.update
    def apply(self):
        # Local equivalent: a new list without any entry listed in `update`.
        return [
            val for val in self.original_value
            if val not in self.update
        ]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Struct Manager
"""
import re
from Struct import *
class StructManager(object):
    """Parses C struct declarations from strings and validates them.

    Splits a declaration into header (everything before '{'), body (between
    the braces) and tail (the declarator between '}' and ';'), keeping the
    raw pieces so the struct can later be written back.
    """
    PATTERN_STRUCT_HEADER = re.compile(r'([\w ]*?struct.*?)\{')
    PATTERN_STRUCT_BODY = re.compile(r'\{(.+)\}')
    PATTERN_STRUCT_TAIL = re.compile(r'.*\}(.*);')

    def parse_from_string(self, struct_str):
        """Parse a Struct from ``struct_str``; return None when invalid."""
        # Collapse the declaration onto one line before matching.
        struct_str = re.sub('[\r\n\t]', '', struct_str.strip())
        if not StructManager.valid(struct_str) or not self._is_struct(struct_str):
            return None
        struct_instance = Struct(self._STRUCT_HEADER, self._STRUCT_BODY, self._STRUCT_TAIL)
        return struct_instance

    def _is_struct(self, struct_str):
        """Split the string into header/body/tail; return False if malformed.

        Side effect: stores the parts on self for parse_from_string().
        """
        headers = self.PATTERN_STRUCT_HEADER.findall(struct_str)
        body = self.PATTERN_STRUCT_BODY.findall(struct_str)
        tail = self.PATTERN_STRUCT_TAIL.findall(struct_str)
        if not headers or not body:
            return False
        self._STRUCT_HEADER = headers[0].strip()
        # The header must start the string and the declaration must end in ';'.
        if struct_str.find(self._STRUCT_HEADER) != 0 or struct_str[-1] != ';':
            return False
        self._STRUCT_BODY = body[0].strip()
        self._STRUCT_TAIL = ""
        if tail:
            self._STRUCT_TAIL = tail[0].strip()
        return True

    @staticmethod
    def valid(struct_str):
        """Return True when all brackets in ``struct_str`` are balanced.

        Bug fix: the original lacked both ``self`` and ``@staticmethod``, so
        it only worked when invoked via the class (``StructManager.valid``)
        and raised TypeError when called on an instance.
        """
        brackets_map = {'{':'}', '[':']', '(':')'}
        brackets_stack = []
        for char in struct_str:
            if char in brackets_map.keys():
                brackets_stack.append(char)
            elif char in brackets_map.values():
                if not brackets_stack or char != brackets_map[brackets_stack[-1]]:
                    return False
                else:
                    brackets_stack.pop()
        if brackets_stack:
            return False
        else:
            return True
SM = StructManager
|
"""This script train the {{ modelName }} model"""
__author__ = "{{ authorName }}"
def train(data):
    """Train the {{ modelName }} model.

    Parameters:
        data: training data for the model.
    """
    # TODO: implement the actual training procedure for {{ modelName }}.
    pass
|
# %%
from pymongo import MongoClient
import json
import dota2api
import os
# Dota 2 Web API key, supplied via the environment.
api_key = os.environ.get('DOTA2_API_KEY')
# %%
api = dota2api.Initialise(api_key)
# %%
# MongoDB connection settings; credentials come from the environment.
ip = "10.0.1.4"
port = 27017
username = os.environ.get('MONGO_USER')
pwd = os.environ.get('MONGO_PWD')
client = MongoClient(
    ip,
    port,
    username=username,
    password=pwd,
    authMechanism='SCRAM-SHA-1'
)
# %%
db = client.dota
# %%
def process_heroes():
    """Compute per-hero win rates and head-to-head matchup statistics.

    Reads the `heroes`, `match_hero` and `matches` collections, rebuilds the
    module-level ``hero_win_rates`` list in place, and persists win rates and
    matchup stats into the `heroes` collection (updating matchup sub-documents
    when present, pushing them otherwise).

    NOTE(review): issues one `matches` lookup plus one `match_hero` query per
    match row per hero, so round-trips scale with heroes x matches — slow on
    large datasets; confirm acceptable before reuse.
    """
    heroes_table = db.heroes
    match_hero_table = db.match_hero
    matches_table = db.matches
    hero_matchups_table = db.hero_matchups
    global hero_win_rates
    hero_win_rates = []
    heroes = heroes_table.find()
    heroes = list(heroes)
    for h in heroes:
        hero_win_rates.append({'_id': h['_id'], 'name': h['name'], 'matches_won': 0.0, 'matches_played': 0.0})
    def match_hero_query(hero_id):
        # All match_hero rows in which this hero was picked.
        return {'hero_id': hero_id}
    for h in hero_win_rates:
        h['hero_matchup'] = []
        for he in heroes:
            h['hero_matchup'].append({'hero_id': he['_id'], 'name': he['name'], 'matches_won_against': 0.0, 'matches_played_against': 0.0})
        mh_query = match_hero_query(h['_id'])
        mh_result = match_hero_table.find(mh_query)
        if mh_result:
            for mh in mh_result:
                def matches_query(match_id):
                    return {'_id': match_id}
                m_query = matches_query(mh['match_id'])
                m_result = matches_table.find_one(m_query)
                # player_slot < 128 means the player was on the Radiant side.
                if ((mh['player_slot'] < 128 and m_result['radiant_win']) or (mh['player_slot'] >= 128 and not m_result['radiant_win'])):
                    h['matches_won'] = h['matches_won'] + 1
                h['matches_played'] = h['matches_played'] + 1
                def match_hero_query2(match_id, hero_id):
                    # The other heroes picked in the same match.
                    return {'match_id': match_id, 'hero_id': {'$ne': hero_id}}
                mh_query2 = match_hero_query2(mh['match_id'], mh['hero_id'])
                mh_result2 = match_hero_table.find(mh_query2)
                for mh2 in mh_result2:
                    pl = next((pl for pl in h['hero_matchup'] if pl['hero_id']==mh2['hero_id']), None)
                    if(pl):
                        # Count only encounters where the heroes were on
                        # opposite teams.
                        if(mh['player_slot']<128 and mh2['player_slot']>=128 or mh['player_slot']>=128 and mh2['player_slot']<128):
                            pl['matches_played_against'] += 1
                            if(mh['player_slot']<128 and m_result['radiant_win'] or mh['player_slot']>=128 and not m_result['radiant_win']):
                                pl['matches_won_against'] += 1
    # Earlier draft persisted matchups into the separate hero_matchups
    # collection; superseded by the embedded-matchups approach below.
    # hero_matchups_table
    # for h in hero_win_rates:
    #     h['win_rate'] = None if (h['matches_played']==0) else h['matches_won']/h['matches_played']
    #     key = {'_id': h['_id']}
    #     data = {"$set": {
    #         'win_rate': h['win_rate'],
    #         'matches_played': h['matches_played']
    #     }}
    #     heroes_table.update_one(key, data)
    #     for pl in h['hero_matchup']:
    #         pl['win_rate'] = None if (pl['matches_played_against']==0) else pl['matches_won_against']/pl['matches_played_against']
    #         key = {
    #             '_id': str(h['_id']) + '_' + str(pl['hero_id']),
    #             'hero_id': h['_id'],
    #             'opponent_id': pl['hero_id']
    #         }
    #         data = {"$set": {
    #             'win_rate': pl['win_rate'],
    #             'matches_won_against': pl['matches_won_against'],
    #             'matches_played_against': pl['matches_played_against']
    #         }}
    #         hero_matchups_table.update_one(key, data, upsert=True)
    #         print("id: {}, name: {}, wins: {}, total: {}, win rate: {}".format(h['_id'], h['name'], h['matches_won'], h['matches_played'], "-" if (h['win_rate']==None) else '%.2f' % (100*h['win_rate'])+"%"))
    # in heroes table instead
    for h in hero_win_rates:
        h['win_rate'] = None if (h['matches_played']==0) else h['matches_won']/h['matches_played']
        key = {'_id': h['_id']}
        data = {"$set": {
            'win_rate': h['win_rate'],
            'matches_played': h['matches_played']
        }}
        heroes_table.update_one(key, data)
        for pl in h['hero_matchup']:
            pl['win_rate'] = None if (pl['matches_played_against']==0) else pl['matches_won_against']/pl['matches_played_against']
            # try update
            key = {'_id': h['_id'], "matchups.opponent_id": pl['hero_id']}
            data = {"$set": {
                'matchups.$.win_rate': pl['win_rate'],
                'matchups.$.matches_won_against': pl['matches_won_against'],
                'matchups.$.matches_played_against': pl['matches_played_against']
            }}
            result = heroes_table.update_one(key, data)
            # else push
            if result.modified_count < 1:
                key = {'_id': h['_id']}
                data = {"$push": {'matchups': {
                    "opponent_id": pl['hero_id'],
                    'win_rate': pl['win_rate'],
                    'matches_won_against': pl['matches_won_against'],
                    'matches_played_against': pl['matches_played_against']
                }}}
                heroes_table.update_one(key, data)
        print("id: {}, name: {}, wins: {}, total: {}, win rate: {}".format(h['_id'], h['name'], h['matches_won'], h['matches_played'], "-" if (h['win_rate']==None) else '%.2f' % (100*h['win_rate'])+"%"))
# %%
# def generate_hero_matchups():
# query = ("DELETE FROM hero_matchups")
# cursor.execute(query)
# query = ("SELECT id FROM heroes")
# cursor.execute(query)
# create_mwr_stmt = ("""INSERT INTO hero_matchups (hero_id, opponent_id) VALUES
# (%s, %s)""")
# hid_list = cursor.fetchall()
# for hid1 in hid_list:
# hid1 = hid1[0] # hero_id
# for hid2 in hid_list:
# hid2 = hid2[0] # hero_id
# if hid1 != hid2:
# data = (hid1, hid2)
# cursor.execute(create_mwr_stmt, data)
# %%
process_heroes()  # build win-rate/matchup stats and persist them to MongoDB
# %%
def suggest_1(win_rates=None):
    """Return (and print) the heroes with the best overall win rates.

    Parameters:
        win_rates: optional list of hero stat dicts (keys '_id', 'name',
            'matches_won', 'matches_played', 'win_rate'). Defaults to the
            module-level ``hero_win_rates`` built by ``process_heroes()``.

    Returns:
        list of dict: at least the top ``max_top`` heroes; ties at the
        cut-off win rate are included as well. Empty list for empty input.
    """
    global hero_win_rates
    rates = hero_win_rates if win_rates is None else win_rates
    top_heroes = []
    max_top = 5
    # Unrated heroes (win_rate is None) sort to the bottom via the (-1, -1) key.
    ranked = sorted(rates, key=lambda k: (-1, -1) if (k['win_rate'] == None) else (k['win_rate'], k['matches_played']), reverse=True)
    if not ranked:
        return top_heroes
    temp_wr = ranked[0]['win_rate']
    i = 0
    # Bounds check added: the original indexed past the end of the list when
    # fewer heroes than max_top existed (IndexError).
    while i < len(ranked) and (i < max_top or ranked[i]['win_rate'] == temp_wr):
        temp_wr = ranked[i]['win_rate']
        add_hero = {'_id': ranked[i]['_id'],
                    'matches_played': ranked[i]['matches_played'],
                    'matches_won': ranked[i]['matches_won'],
                    'name': ranked[i]['name'],
                    'win_rate': ranked[i]['win_rate']}
        top_heroes.append(add_hero)
        print(add_hero)
        i = i + 1
    return top_heroes
# %%
def suggest_2(hero_id1, win_rates=None):
    """Suggest counter-picks against hero ``hero_id1``.

    Prints the hero's own stats, then collects (and prints) the opponents
    with the lowest win rate against it.

    Parameters:
        hero_id1: _id of the hero to counter.
        win_rates: optional stats list (defaults to the module-level
            ``hero_win_rates`` built by ``process_heroes()``).

    Returns:
        list of matchup dicts, best counter first; empty list when the hero
        id is unknown or it has no matchups.
    """
    global hero_win_rates
    rates = hero_win_rates if win_rates is None else win_rates
    top_heroes = []
    max_top = 5
    h = next((h for h in rates if h['_id'] == hero_id1), None)
    if h is None:
        # Unknown hero id: the original crashed with TypeError here.
        return top_heroes
    print({'_id': h['_id'],
           'matches_played': h['matches_played'],
           'matches_won': h['matches_won'],
           'name': h['name'],
           'win_rate': h['win_rate']})
    print("suggestions:")
    # Secondary key first (most matches played, None ranked last), then a
    # stable sort on ascending win rate (None sorted to the end via key 2).
    sorted_hero_matchups = sorted(h['hero_matchup'], key=lambda k: -1 if (k['win_rate'] == None) else k['matches_played_against'], reverse=True)
    sorted_hero_matchups = sorted(sorted_hero_matchups, key=lambda k: 2 if (k['win_rate'] == None) else k['win_rate'], reverse=False)
    if not sorted_hero_matchups:
        return top_heroes
    temp_wr = sorted_hero_matchups[0]['win_rate']
    temp_m = sorted_hero_matchups[0]['matches_played_against']
    i = 0
    # Bounds check added: the original indexed past the end of the list when
    # fewer than max_top matchups existed (IndexError).
    while i < len(sorted_hero_matchups) and (i < max_top or (sorted_hero_matchups[i]['win_rate'] >= temp_wr and sorted_hero_matchups[i]['matches_played_against'] >= temp_m)):
        temp_wr = sorted_hero_matchups[i]['win_rate']
        temp_m = sorted_hero_matchups[i]['matches_played_against']
        add_hero = {'hero_id': sorted_hero_matchups[i]['hero_id'],
                    'matches_played_against': sorted_hero_matchups[i]['matches_played_against'],
                    'matches_won_against': sorted_hero_matchups[i]['matches_won_against'],
                    'name': sorted_hero_matchups[i]['name'],
                    'win_rate': sorted_hero_matchups[i]['win_rate']}
        top_heroes.append(add_hero)
        print(add_hero)
        i = i + 1
    return top_heroes
|
from django.contrib import admin
from .models import Profile,Event,registration,feedback,project
# Register your models here.
# Register every app model with the default admin site.
for model in (Profile, Event, registration, feedback, project):
    admin.site.register(model)
|
import json
from datetime import datetime

# Products arrive as a JSON array on the first stdin line.
products = json.loads(input())

# The next five lines each carry "<FILTER_NAME> <value>".
criteria = {}
for _ in range(5):
    key, raw = input().split()
    criteria[key] = raw

DATE_FMT = '%d.%m.%Y'
name_needle = criteria['NAME_CONTAINS'].lower()
price_min = int(criteria['PRICE_GREATER_THAN'])
price_max = int(criteria['PRICE_LESS_THAN'])
date_after = datetime.strptime(criteria['DATE_AFTER'], DATE_FMT)
date_before = datetime.strptime(criteria['DATE_BEFORE'], DATE_FMT)

# Keep products matching all filters (bounds are inclusive).
matching = [
    product for product in products
    if name_needle in product['name'].lower()
    and price_min <= product['price'] <= price_max
    and date_after <= datetime.strptime(product['date'], DATE_FMT) <= date_before
]
matching.sort(key=lambda item: item['id'])
print(json.dumps(matching))
|
__module_name__ = "_load_annoy_index.py"
__author__ = ", ".join(["Michael E. Vinyard"])
__email__ = ", ".join(["vinyard@g.harvard.edu"])
# import packages #
# --------------- #
from annoy import AnnoyIndex
def _load_annoy_index(path, n_features, metric="euclidean"):
    """Load a pre-existing AnnoyIndex from disk.

    Parameters:
    -----------
    path
        File path to the saved annoy index (.ann).
    n_features
        Dimensionality of the indexed vectors.
        type: int
    metric
        Distance metric the index was built with.
        type: str
        default: "euclidean"

    Returns:
    --------
    annoy_idx
        The loaded index.
        type: annoy.AnnoyIndex
    """
    index = AnnoyIndex(n_features, metric)
    index.load(path)
    return index
|
import matplotlib.pyplot as plt
reds = plt.get_cmap('Reds')
blues = plt.get_cmap('Blues')
# NOTE(review): requires a locally installed 'paper' matplotlib style sheet.
plt.style.use('paper')
if __name__ == '__main__':
    # Precomputed free energies (kcal/mol) with standard errors.
    dg_igm, dg_stderr = (-8.641285419935713, 0.3977607904106817)
    # Convert 27.3195 J/(mol K) * 298.15 K to kcal/mol.
    ew_sigma = 27.319517253413203*298.15/ (1000 * 4.184)
    dg_ew_no_del, dg_ew_stderr_no_del = (0.3346700524075954, max(0.39781974977271223, ew_sigma))
    dg_ew, dg_ew_stderr = (0.3190447507612837, 0.3978197497727124)
    plt.figure(figsize=(5, 5))
    plt.bar(x=[0],
            width=0.5,
            height=[dg_igm],
            yerr=[dg_stderr],
            linewidth=0.8,
            edgecolor=reds(0.6),
            label='IGM',
            color=[reds(0.4)],
            capsize=2)
    # ∆G where rather than treating the translational part of one of the waters
    # in the octamer cluster with an EW, use the vibrational contribution and
    # do not delete freqencies
    plt.bar(x=[0.5],
            width=0.5,
            height=[dg_ew_no_del],
            yerr=[dg_ew_stderr_no_del],
            linewidth=0.8,
            edgecolor=blues(0.7),
            color=[blues(0.5)],
            label='exp$_1$',
            capsize=2)
    plt.bar(x=[1],
            width=0.5,
            height=[dg_ew],
            yerr=[dg_ew_stderr],
            linewidth=0.8,
            edgecolor=blues(0.9),
            color=[blues(0.7)],
            label='exp$_2$',
            capsize=2)
    plt.ylabel(r'$\Delta G$ / kcal mol$^{-1}$')
    # Horizontal zero line for reference.
    plt.plot([-0.5, 4], [0, 0], c='k', lw=1.5)
    plt.xticks([])
    plt.legend()
    plt.xlim(-0.5, 1.5)
    plt.ylim(-12, 10)
    plt.tight_layout()
    plt.savefig('water_in_water_dGs.pdf')
|
# -*- coding: utf-8 -*-
import os
import glob
import codecs
from xml.dom.minidom import *
# Collect the <Root> elements of every XML file listed in filelist.txt into
# one document, then rewrite the InDesign-flavoured tags/attributes to
# plain HTML divs with class attributes. (Python 2 script.)
tags_to_div = ["Body","Story","Root"]
impl = getDOMImplementation()
domDest = impl.createDocument(None,"body",None)
with open("filelist.txt","r") as dataSrc:
    files = dataSrc.readlines()
for file in files:
    file = file.replace("\n","")
    print "processing ", file
    with open (file,mode="r") as xmlIn:
        domSrc = parse(xmlIn)
    elems = domSrc.getElementsByTagName("Root")
    for elem in elems:
        domDest.documentElement.appendChild(elem)
    print "processed ", file
data= domDest.getElementsByTagName("body")[0].toxml(encoding="utf-8")
# Plain string replacement on the serialized XML: rename the layout tags to
# <div> and map the aid:* style attributes to class.
for tag in tags_to_div:
    data = data.replace("<"+tag, "<div")
    data = data.replace("</"+tag, "</div")
data=data.replace("aid:pstyle","class")
data=data.replace("aid:cstyle","class")
with open ("dw_full.html", "w") as out:
    out.write(data);
|
# Generated by Django 3.0.2 on 2020-02-04 20:25
from django.db import migrations, models
class Migration(migrations.Migration):
    """Relax the SEO metadata fields on the Page model (nullable + blank)."""

    dependencies = [
        ('pages', '0003_auto_20200203_1646'),
    ]

    operations = [
        migrations.AlterField(
            model_name='page',
            name='seo_description',
            field=models.TextField(
                blank=True,
                default=None,
                help_text='Small description about this page.',
                max_length=512,
                null=True,
            ),
        ),
        migrations.AlterField(
            model_name='page',
            name='seo_keywords',
            field=models.CharField(
                blank=True,
                default=None,
                help_text='eg: keyword1, keyword2, keyword3 ...',
                max_length=512,
                null=True,
            ),
        ),
    ]
|
from typing import List
from domain.connection.GetConnection.GetConnectionDto import GetConnectionDto, GetConnectionTypeDto, GetConnectorTypeDto
from models.dao.connection.Connection import Connection
class GetConnectionMapping:
    """Maps ``Connection`` DAO entities onto ``GetConnectionDto`` objects."""

    @staticmethod
    def to_dto(entity: Connection) -> GetConnectionDto:
        """Build a single DTO from a Connection entity (uses the first server)."""
        connection_type = GetConnectionTypeDto(
            Id=entity.ConnectionType.Id,
            Name=entity.ConnectionType.Name,
        )
        connector_type = GetConnectorTypeDto(
            Id=entity.Database.ConnectorType.Id,
            Name=entity.Database.ConnectorType.Name,
            ConnectionTypeId=entity.Database.ConnectorType.ConnectionTypeId,
        )
        server = entity.ConnectionServers[0]

        dto = GetConnectionDto()
        dto.Id = entity.Id
        dto.Name = entity.Name
        dto.ConnectionType = connection_type
        dto.ConnectorType = connector_type
        dto.Host = server.Host
        dto.Port = server.Port
        dto.Sid = entity.Database.Sid
        dto.ServiceName = entity.Database.ServiceName
        dto.DatabaseName = entity.Database.DatabaseName
        dto.CreationDate = entity.Database.CreationDate
        dto.IsDeleted = entity.IsDeleted
        return dto

    @staticmethod
    def to_dtos(entities: List[Connection]) -> List[GetConnectionDto]:
        """Map every Connection entity in *entities* to a DTO."""
        return [GetConnectionMapping.to_dto(entity=entity) for entity in entities]
|
import logging
import os
import numpy as np
from openff.units import unit as pint_unit
from paprika.restraints.plumed import Plumed
from paprika.restraints.utils import get_bias_potential_type, parse_window
from paprika.utils import check_unit, get_key
logger = logging.getLogger(__name__)
_PI_ = np.pi
class Colvars(Plumed):
    """
    This class converts restraints generated with :class:`paprika.restraints.DAT_restraints` into collective variables
    (`Colvars`) restraints that is available as a plugin in `NAMD` and `LAMMPS`.

    .. note ::
        The ``Colvars`` module is described in the reference below and the source code is available on Github
        https://github.com/Colvars/colvars

        `Fiorin, G., Klein, M. L. & Hénin, J. Using collective variables to drive molecular dynamics simulations.
        Mol. Phys. 111, 3345–3362 (2013).`

    Examples
    --------
    >>> colvars = Colvars()
    >>> colvars.file_name = 'colvars.tcl'
    >>> colvars.path = './windows'
    >>> colvars.window_list = window_list
    >>> colvars.restraint_list = restraint_file
    >>> colvars.dump_to_file()

    The commands above will write the restraints to ``windows/*/colvars.tcl`` and contains the `Colvars`-style
    restraints

    .. code-block::

        ColvarsTrajFrequency 500
        ColvarsRestartFrequency 50000

        # Collective variables
        colvar {
            name c1
            distance {
                forceNoPBC yes
                group1 { atomNumbers 123 }
                group2 { atomNumbers 1 }
            }
        }

        # Bias potentials
        harmonic {
            colvars c1
            centers 6.0
            forceConstant 10.0000
        }

    The positional restraints on dummy atoms, however, is not added automatically. This restraints on the dummy
    atoms can be added to ``windows/*/colvars.tcl`` using the code below.

    >>> for window in window_list:
    >>>     structure = pmd.load_file("topology.prmtop", "coordinates.rst7")
    >>>     colvars.add_dummy_atoms_to_file(structure, window)

    This appends the file with the following

    .. code-block::

        # Dummy atom position restraints
        colvar {
            name dummyAtoms
            cartesian {
                atoms { atomNumbers 123 124 125 }
            }
        }
        harmonic {
            colvars dummyAtoms
            centers ( 0.0, 0.0, -6.0, 0.0, 0.0, -9.0, 0.0, 2.2, -11.2 )
            forceConstant 100.00
        }
    """

    @property
    def output_freq(self) -> int:
        """int: The frequency at which the `colvars` will be printed to ``*.colvars.traj`` file."""
        return self._output_freq

    @output_freq.setter
    def output_freq(self, value: int):
        self._output_freq = value

    def __init__(self):
        super().__init__()

        # Defaults; ``file_name``/``path``/``window_list``/``restraint_list``
        # are managed by the parent Plumed class.
        self._file_name = "colvars.dat"
        self._output_freq = 500
        self._colvars_factor = {}

    def _initialize(self):
        # Set factor for spring constant
        # (legacy k convention stores k/2 -- presumably doubled here to get
        # the full force constant; TODO confirm against Plumed.uses_legacy_k).
        if self.uses_legacy_k:
            self.k_factor = 2.0

        # header line
        self.header_line = (
            f"ColvarsTrajFrequency {self.output_freq}\n"
            f"ColvarsRestartFrequency {self.output_freq*100} "
        )

    def dump_to_file(self):
        """
        Write the `Colvars`-style restraints to file.

        One file per APR window is created (truncated first, then appended to
        by :meth:`_write_colvar_to_file`).
        """
        self._initialize()

        # Loop over APR windows
        for window in self.window_list:
            window_number, phase = parse_window(window)

            # Check if file exist and write header line
            with open(os.path.join(self.path, window, self.file_name), "w") as file:
                file.write(self.header_line + "\n")

            cv_index = 1      # running counter used to name CVs c1, c2, ...
            cv_dict = {}      # cv name -> atom-selection string (dedup table)
            cv_lines = []     # accumulated colvar definition blocks
            bias_lines = []   # accumulated bias (harmonic/walls) blocks

            # Parse each restraint in the list
            for restraint in self.restraint_list:
                # Skip restraint if the target or force constant is not defined.
                # Example: wall restraints only used during the attach phase.
                try:
                    target = restraint.phase[phase]["targets"][window_number]
                    force_constant = (
                        restraint.phase[phase]["force_constants"][window_number]
                        * self.k_factor
                    )
                except TypeError:
                    continue

                # Get atom indices in space separated string
                atom_index = self._get_atom_indices(restraint)
                atom_string = " ".join(map(str, atom_index))

                # Convert units to the correct type for COLVAR module
                energy_units = pint_unit.kcal / pint_unit.mole
                if restraint.restraint_type == "distance":
                    target = target.to(pint_unit.angstrom)
                    force_constant = force_constant.to(
                        energy_units / pint_unit.angstrom**2
                    )
                elif (
                    restraint.restraint_type == "angle"
                    or restraint.restraint_type == "torsion"
                ):
                    target = target.to(pint_unit.degrees)
                    force_constant = force_constant.to(
                        energy_units / pint_unit.degrees**2
                    )

                # Determine bias type for this restraint
                bias_type = get_bias_potential_type(restraint, phase, window_number)

                # Append cv to list
                # The code below prevents duplicate cv definition.
                # While not necessary, it makes the plumed file cleaner.
                if not get_key(cv_dict, atom_string):
                    cv_key = f"c{cv_index}"
                    cv_dict[cv_key] = atom_string

                    # Colvars calls the torsion CV "dihedral".
                    cv_template_lines = [
                        "colvar {",
                        f"    name {cv_key}",
                        f"    {'dihedral' if restraint.restraint_type == 'torsion' else restraint.restraint_type} {{",
                        "        forceNoPBC yes",
                        f"        group1 {{ atomNumbers {atom_index[0]} }}",
                        f"        group2 {{ atomNumbers {atom_index[1]} }}",
                    ]
                    if restraint.restraint_type == "angle":
                        cv_template_lines += [
                            f"        group3 {{ atomNumbers {atom_index[2]} }}"
                        ]
                    if restraint.restraint_type == "torsion":
                        cv_template_lines += [
                            f"        group3 {{ atomNumbers {atom_index[2]} }}",
                            f"        group4 {{ atomNumbers {atom_index[3]} }}",
                        ]
                    cv_template_lines += ["    }", "}"]

                    cv_lines.append(cv_template_lines)
                    bias_lines.append(
                        self._get_bias_block(bias_type, cv_key, target, force_constant)
                    )
                else:
                    # CV already defined for this atom selection: reuse its
                    # name and only emit an additional bias block.
                    cv_key = get_key(cv_dict, atom_string)[0]
                    bias_lines.append(
                        self._get_bias_block(bias_type, cv_key, target, force_constant)
                    )

                # Increment cv index
                cv_index += 1

            # Write collective variables to file
            self._write_colvar_to_file(window, cv_lines, bias_lines)

    def _write_colvar_to_file(self, window, cv_list, bias_list):
        # Append the CV and bias definitions below the header that
        # dump_to_file already wrote for this window.
        with open(os.path.join(self.path, window, self.file_name), "a") as file:
            file.write("# Collective variables\n")
            for colvar in cv_list:
                for line in colvar:
                    file.write(line + "\n")

            file.write("# Bias potentials\n")
            for bias in bias_list:
                for line in bias:
                    file.write(line + "\n")

    @staticmethod
    def _get_bias_block(bias_type, cv_key, target, force_constant):
        # Build the Colvars bias block: "harmonic" for two-sided restraints,
        # "harmonicWalls" for one-sided (upper/lower) wall restraints.
        # An unrecognized bias_type yields an empty list (nothing written).
        bias_template_lines = []

        if bias_type == "restraint":
            bias_template_lines = [
                "harmonic {",
                f"    colvars {cv_key}",
                f"    centers {target.magnitude:.4f}",
                f"    forceConstant {force_constant.magnitude:.4f}",
                "}",
            ]
        elif bias_type == "upper_walls":
            bias_template_lines = [
                "harmonicWalls {",
                f"    colvars {cv_key}",
                f"    upperWalls {target.magnitude:.4f}",
                f"    upperWallConstant {force_constant.magnitude:.4f}",
                "}",
            ]
        elif bias_type == "lower_walls":
            bias_template_lines = [
                "harmonicWalls {",
                f"    colvars {cv_key}",
                f"    lowerWalls {target.magnitude:.4f}",
                f"    lowerWallConstant {force_constant.magnitude:.4f}",
                "}",
            ]

        return bias_template_lines

    @staticmethod
    def _get_atom_indices(restraint):
        # Check atom index setting
        # (Colvars atom numbering is 1-based, like AMBER masks).
        index_shift = 0
        if not restraint.amber_index:
            index_shift = 1
            logger.debug("Atom indices starts from 0 --> shifting indices by 1.")

        # Collect DAT atom indices: one entry per CV group; a group of atoms
        # is encoded as a single space-separated string.
        atom_index = []

        if not restraint.group1:
            atom_index.append("{}".format(restraint.index1[0] + index_shift))
        else:
            igr1 = ""
            for index in restraint.index1:
                igr1 += "{} ".format(index + index_shift)
            atom_index.append(igr1)

        if not restraint.group2:
            atom_index.append("{}".format(restraint.index2[0] + index_shift))
        else:
            igr2 = ""
            for index in restraint.index2:
                igr2 += "{} ".format(index + index_shift)
            atom_index.append(igr2)

        # Groups 3 and 4 only exist for angle/torsion restraints.
        if restraint.index3 and not restraint.group3:
            atom_index.append("{}".format(restraint.index3[0] + index_shift))
        elif restraint.group3:
            igr3 = ""
            for index in restraint.index3:
                igr3 += "{} ".format(index + index_shift)
            atom_index.append(igr3)

        if restraint.index4 and not restraint.group4:
            atom_index.append("{}".format(restraint.index4[0] + index_shift))
        elif restraint.group4:
            igr4 = ""
            for index in restraint.index4:
                igr4 += "{} ".format(index + index_shift)
            atom_index.append(igr4)

        return atom_index

    @staticmethod
    def _write_dummy_to_file(file, dummy_atoms, kpos=100.0):
        """
        Append to the "colvars.dat" file the dummy atoms colvar definition and position restraints

        Parameters
        ----------
        file : class '_io.TextIOWrapper'
            The file object handle to save the plumed file.
        dummy_atoms : dict
            Dictionary containing information about the dummy atoms.
        kpos : float or pint_unit.Quantity
            Spring constant used to restrain dummy atoms (default for float: kcal/mol/A^2).

        Examples
        --------
        .. code :: tcl

            colvar {
                name dummy
                cartesian {
                    atoms { atomNumbers 1 2 3 }
                }
            }
            harmonic {
                colvars dummy
                centers ( 0.0, 0.0, -6.0, 0.0, 0.0, -9.0, 0.0, 2.2, -11.2)
                forceConstant 100.0
            }
        """
        # Check k units
        kpos = check_unit(
            kpos, base_unit=pint_unit.kcal / pint_unit.mole / pint_unit.angstrom**2
        )

        # Get dummy atom indices
        dummy_indices = [dummy_atoms[key]["idx"] for key in dummy_atoms.keys()]
        dummy_index_string = " ".join(map(str, dummy_indices))
        cv_template_lines = [
            "colvar {",
            "    name dummyAtoms",
            "    cartesian {",
            f"        atoms {{ atomNumbers {dummy_index_string} }}",
            "    }",
            "}",
        ]

        # Get dummy atom positions (flattened x,y,z per dummy atom, in the
        # dict's iteration order -- must match dummy_indices above).
        dummy_position = []
        for dummy in dummy_atoms.keys():
            dummy_position += list(dummy_atoms[dummy]["pos"])
        dummy_position_string = ", ".join(map(str, dummy_position))
        bias_template_lines = [
            "harmonic {",
            "    colvars dummyAtoms",
            f"    centers ( {dummy_position_string} )",
            f"    forceConstant {kpos.magnitude:.2f}",
            "}",
        ]

        file.write("# Dummy atom position restraints\n")
        # Write colvar to file
        for line in cv_template_lines:
            file.write(line + "\n")
        # Write bias potential to file
        for line in bias_template_lines:
            file.write(line + "\n")
|
# -*- coding: utf-8 -*-
"""
Created on Tue Sep 14 11:55:55 2021
@author: gawe
"""
import numpy as _np
import matplotlib.pyplot as _plt
# from FFT import hilbert
try:
from . import rectangular_waveguide as _rw
except:
from QO import rectangular_waveguide as _rw
# end try
"""
General filter terminology:
Low Pass Filter - allows frequency below a given frequency to pass
(to be transmitted or received) while rejecting
frequencies above the given frequency.
High pass filter - allows frequencies above a given frequency to pass
through the filter, while rejecting frequencies
below the given frequency (opposite of low pass).
Band pass filter - Passes signal between a lower and an upper frequency
while rejecting signal at frequencies outside of
that range
Band stop (or Band Reject or Notch) - Prevent all signal between an
upper and lower frequency from passing while
allowing all others to pass (opposite of band pass)
Attenuation - measured in dB, degree by which a signal sees a loss in
amplitude after passing through a filter
Bandwidth - width of the passband of a bandpass filter and is expressed
as the frequency difference between lower and upper 3 dB
points
Cutoff - Usually the point at which the response of the filter has
fallen by 3 dB from passband level
Group delay - Group delay is a measure of how different components of a
modulated signal (which is a sum of sine waves at various
frequencies) would propagate through the filter. Measured
in units of time (seconds) and is a derivative of the
filter's phase with respect to frequency
Passband - the portion of the frequency spectrum that the filter allows
to be transmitted
Stopband - the band (frequency) where the filter has reached the
required out-of-band rejection
Shape factor - The ratio of a filters stop band to pass band. The
higher the shape factor, typically the closer the filter
is to theoretical performance.
Selectivity - Measurement of the capability of the filter to pass or
reject specific frequencies relative to the center
frequency of the filter. Selectivity is typically stated
as the loss through a filter that occurs at some
specified distance from the center frequency. A filter
with high selectivity exhibits high slope in the
transition from pass to stop - selectivity is crucial in
environments where adjacent channels are close together
and high selectivity enables designers to make good use
of available bandwidth.
Temperature stability - Concerns how the temperatures performance
varies with temperature. An approach is to
define in ppm/degree C the shift of the
filters, cutoff, passband, etc. in frequency
as temperature varies.
Insertion loss - ratio of a signal level in a test configuration
without a filter present (|V1|) to that when the
filter is present (|V2|). When discussed this is
typically referencing the loss in the passband.
Ripple - Ripple is a measure of the variation of insertion loss within
the passband and is measured in dB
S11 - Scattering parameter that represents the reflection coefficient
(Gamma) at the input. Related to Return Loss.
RL [dB] = -20*log10|S11|
S21 - Scattering parameter that represents the loss of signal while
passing from input to output ports. When the measurement ports
are at the same impedance, it is a measure of the insertion loss.
IL [dB] = -20*log10|S21|
VSWR - A measure of the filters match to a given impedance (ex// to a
50 Ohm system), Voltage Standing Wave Ratio is calculated from
S11 (Gamma).
VSWR = (1+|Gamma|)/(1-|Gamma|)
Return loss - Relative amount of power reflected by an input signal:
Measure of the amount of signal that is returned or
reflected by the filter. Measured in dB, it is the
negative of the magnitude of the reflection coefficient
expressed as power. Return loss is expressed as a
positive number.
RL [dB] = -20*log10|Gamma|
For example:
for -3 dB of return loss
3 dB = (- 20 dB ) * log10 | Gamma |
| Gamma | = 10^(-3/20) = 0.7079 --> 70% reflection
for 10% reflection
-20*log10(0.10) = 20 dB --> "20 dB return loss"
or "-20 dB reflected"
Note: Return loss is mathematically positive, but
colloquially negative... "19 dB of loss"
or "Return loss of 19 dB"
Passband return loss - return loss in the filters passband
Percent bandwidth - Common relative figure of merit that compares
bandwidth with carrier frequency.
normalized measure of how much frequency variation
a component / system can handle.
Commonly calculated as (3 dBW)/(Center Frequency)
BW% = BW/Fc
BW is the absolute bandwidth
Fc is the center frequency
OR
BW%=2(F_H/F_L - 1)/(F_H/F_L+1)
F_H - Upper frequency of the passband
F_L - Lower frequency of the passband
1 filter with 1 GHz passband centered at 10 GHz
has 10% bandwidth
Q Factor - The quality factor (Q) of a resonator is expressed as the
ratio of stored versus lost energy per oscillation cycle.
Overall losses through a resonator increase as Q factor
drops and will increase more rapidly with frequency for
lower values of resonator Q. As a result, the edges of the
passband become more rounded and bandwidth narrows as the
Q-factor decreases.
Rejection - Attenuation of signals outside the passband. Typically
measured in dB or dBc if referenced from insertion loss of
passband
General filter technologies:
Crystal filters - make use of a quartz crystal as the resonant element.
The high Q of a quartz resonator makes for a very
steep band-pass. These filters are usually
implemented at IF frequencies in the range of 10 MHz
and Q factors fall in the range of 10e3 to 100e3
SAQ/BAQ (Surface Acoustic Wave and Bulk Acoustic Wave) -
Acoustic filters cover a range of frequencies up to 6 GHz
and offer a good performance/cost tradeoff, making them
the dominant off chip filter approach in mobile devices.
Ceramic Filters - Cover a range of ~100 MHz to ~8 GHz. Offer similar
performance to discrete lumped element inductor-
capacitor (LC) designs but can be implemented in
small form factor surface mount packages. Performance
and package thickness can be a limiting factor when
comparing ceramic filters with SAW/BAW
Lumped Element - Discrete LC approaches provide a low-cost approach to
implement a filter, but the attainable Q factors are
limited in such devices. Discrete lumped element
filters are usually used around 30 MHz to 300 MHz
range but can in principle be built for applications
up to 40 GHz. At mmWave frequencies though discrete
lumped element filters are very hard to implement
because of the dimensional limitations imposed by the
frequency, since the filter elements must be much
smaller than the wavelength of the transmission lines.
Discrete LC designs are performance and repeatability
limited by the tolerances of the discrete components.
Cavity Filters - Cavity filters are a common approach in the 40 MHz to
960 MHz frequency range and can offer high selectivity
under high power. They can achieve good performance,
but are physically large, and usually only seen in
infrastructure applications, such as for additional
filtering at a cell site.
Planar filters - Such as Microstrip filters - are manufactured using a
thin-film process, and depending on the filter
topology can offer high-Q and a reasonable approach to
achieving performance in a small footprint when
compared with discrete lumped element designs. In a
thin film Lumped Element approach, the filter's
transmission lines are printed in various
configurations, depending on the required performance,
and filter elements are realized through discrete
resistive, capacitive, and inductive elements. Planar
distributed Element Filters rely on carefully
distributed transmission lines to create resonant
structures and can be designed to tighter tolerances
than a lumped element filter. Distributed Element
designs are more practical than Lumped Element designs
at increased frequencies.
Waveguide Filters - Characterized by high power handling capability,
leading to their wide adoption in radar
applications, high selectivity and rejection and
low loss given that the waveguide itself is a low
loss medium.
---> Rule of thumb and fundamental issue:
Passband loss is inversely proportional to filter bandwidth.
->Very narrow bands require very high Q-factors to achieve low passband loss.
Air filled waveguide resonators enable Q factors from 5 to 20e3
Dielectric resonators as the resonant elements can push Q up to 50e3
"""
def percBW(freqc, bw=None):
    """
    Percent bandwidth (BW%): normalized measure of how much frequency
    variation a component/system can handle.

        BW% = BW / Fc

    Parameters
    ----------
    freqc : float
        Center frequency of the passband, Fc.
    bw : float, optional
        Absolute (3 dB) bandwidth, BW. If None (the default, matching the
        old stub behavior), the function returns None.

    Returns
    -------
    float or None
        Fractional bandwidth BW/Fc, or None when no bandwidth is given.
    """
    # Previously an unimplemented stub that always returned None; the
    # optional `bw` argument keeps old calls working while implementing
    # the formula documented in the module header.
    if bw is None:
        return None
    return bw / freqc
def ReturnLoss(S11):
    """
    Return loss in dB: the negative of the magnitude of the reflection
    coefficient expressed as power.

        Lr = -20*log10|Gamma|,  |Gamma| = S11 = (VSWR+1)/(VSWR-1)

    In a perfect transmission line the return loss is infinite.
    """
    gamma = _np.abs(S11)
    return -20 * _np.log10(gamma)
def VSWR(S11):
    """Voltage standing wave ratio: VSWR = (1+|Gamma|)/(1-|Gamma|)."""
    gamma = _np.abs(S11)
    return (1.0 + gamma) / (1.0 - gamma)
def InsertionLoss(S21):
    """
    Relative amount of power absorbed/dissipated while passing through
    the filter: IL [dB] = -20*log10|S21|.
    """
    magnitude = _np.abs(S21)
    return -20.0 * _np.log10(magnitude)
# ==========================================================================
"""
"""
def transfer_losslessTEM(beta, l, Zo, Yo):
"""
Transfer function of a lossless transmission line to the TEM mode
(assuming sinusoidal excitation)
beta - [rad/m] - propagation constant of the line (guide wavenumber)
l - [m] - length of the transmission line
Zo - [Ohms] - characteristic impedance of the ilne
Yo - [Mohrs] - characteristic admittance of the line
"""
return _np.matrix([[_np.cos(beta*l), 1j*Zo*_np.sin(beta*l)], [1j*Yo*_np.sin(beta*l), _np.cos(beta*l)]])
def richards_transform():
    """
    Richards transform: the frequency mapping used to convert lumped-element
    prototypes into distributed (commensurate-line) filters,

        s -> j*omega -> j*alpha*tan(theta)

    NOTE(review): unimplemented placeholder -- no parameters or return value
    are defined yet.
    """
    pass
# ==========================================================================
"""
Impedance inverters
An alternative realization of ladders networks
--> lossless, reciprocal, frequency-independent, two-port network
______
o-----| |------
| | |
Zin | K | Zload
| | |
o-----|_____|-------
Zin(p) = (A*Zload + B)/(C*Zload+D)
= jK/(j*Zload/K) = K^2/Zload
---> K=1 ---> Zin = 1/Zload
Maximally flat prototype (Butterworth approximation)
|S12(1j*omega)|^2 = 1/( 1+omega^(2N) ) (3.3, page 49)
"""
def impedance_inverter(Kimp):
    """
    ABCD matrix of an ideal impedance inverter.

    Kimp is the characteristic impedance or admittance of the inverter:
    Zin = K^2 / Zload.
    """
    upper = 1j * Kimp
    lower = 1j / Kimp
    return _np.matrix([[0, upper], [lower, 0]])
# ==========================================================================
"""
ideal lowpass filter
magnitude of gain is unity in passband, zero in the stopband
|H(jomega)| = 1 |omega|<|omegac|
|H(jomega)| = 0 |omega|>|omegac|
phase response is linear in the passband:
psi(omega)=k*omega
group delay is the derivative
Tg = -d[psi(omega)]/[domega] = k
--> linear phase --> constant group delay
--> zero phase distortion for finite bandwidth signals
impulse response
h(t) = (1/pi) * (sin[(t-k)*omegac])/(t-k)
for omegac = 1
h(t)_(omegac=1) = (1/pi) = sinc(t-k)
--> zeros at t-k = m*pi, m=+-1, +-2, etc.
t= m*pi+k, peak of sync function at +k
--> noncausal... physically unrealizable
(infinite selectivity-->infinite group delay-->infinite elements)
we make filters by truncating the impulse response and tailoring the
frequency spectrum so the selectivity isn't too horrendous
Lumped element filter with transfer function
S12(p) = N(p) / D(p)
p - complex frequency variable (nu in most signals and systems books)
--> Minimum phase network if there are no poles or zeros in the right half p plane
N(p) /= 0, D(p) /= 0, Re[p]>0
Defining N(p) and D(p) == Hurwitz polynomials
when energy transfer between input and output can only take one path,
a physical system is a minimum phase system.
Transfer function of minimum phase network:
H(jomega) = exp[-alpha(omega) - j*psi(omega)]
alpha(omega) - magnitude
psi(omega) - phase
--> magnitude and phase are a Hilbert transform pair
psi(omega) = (omega/pi) * int_-inf^inf[ alpha(y)/(y^2-omega^2) dy ]
alpha(omega) = alpha(0) + (omega^2/pi) * int_-inf^inf[ psi(y)/(y*(y^2-omega^2)) dy ]
Approximation for the amplitude:
|H(jomega)| = 1.0 / An(omega^2);
An(omega^2) is a polynomial degree N in omega^2
--> "All pole" transfer function (this class of ladder network)
Equiripple characteristics provides optimum selectivity for a given degree
Reactance functions and circuit synthesis (lossless networks):
In a linear, time-invariant system, the relationship between voltage and
current can be expressed in terms of their Laplace transforms:
V(p) = Z(p)*I(p)
Z(p) is the input impedance of the network. For a finite lumped network
(not infinite in extent), Z is a rational function of p and may be
expressed as the ratio of two polynomials:
Z(p) = N(p)/D(p)
For passive networks, since all physical networks give rise to real responses for real input, and conservation of energy applies, Z(p) is real for p real. Re{Z(p)}>0 for Re{p}>0. Then the coefficients of N(p) and D(p) are real, and Z(p) has no poles or zeros in the right half-plane (N,P are Hurwitz polynomials).
=======
The reflection coefficient in terms of input impedance:
Gamma(p) = +- (Z(p)-1)/(Z(p)+1); Gamma(p) is real for p real
0 <= |Gamma(p)| <= 1 for Re{p} > 0
Note that the input impedance can be split into its real and imaginary parts:
Z = R + j*X
Gamma = +- (R+j*X - 1) / (R+j*X + 1)
|Gamma|^2 = 1 - 4R/( X^2 + (1+R)^2 )
A lossless network is comprised entirely of reactive components (no resistance)
Z(p) |_(p=j*omega) = Z(j*omega) = R(omega) + j*X(omega)
R(omega) = Re{Z(j*omega)} = 0 (lossless network)
X(omega) = Im{Z(j*omega)}
Z(p) = (m1 + n1)/(m2+n2)
m1, n1 are the even, odd parts of N(p); m2, n2 even/odd parts of D(p)
Split the impedance into even / odd functions: Z(p) = Ev{Z(p)}+Odd{Z(p)}
even polynomials -> even powers -> p=j*omega -> Ev{Z(j*omega)} = Real
odd polynomials -> odd powers -> p = j*omega -> Odd{Z(j*omega)} = Imaginary
In a lossless network:
R-> 0: Ev{Z(p)} = 0 = ( Z(p) + Z(-p) ) / 2
or
(m1 + n1)/(m2+n2) + (m1 - n1)/(m2 - n2) = 0
--> m1/n1 = n2/m2
Z(p) = m1/n2 or n1/m2
If the numerator is even, the denominator is odd and visa-versa
---> These types of impedances are known as "reactance functions"
Generally:
Z(p) = Ainf*p + A0/p + sum_1^m ( 2*Ai*p / (p^2+omega_i^2) )
if p = j*omega
Z(j*omega) = j*X(omega)
= Ainf*omega - A0/omega + sum_1^m ( 2*Ai*omega / (omega_i^2-omega^2) )
And the derivative in frequency space:
dX(omega)/domega > 0
= Ainf + A0/omega^2 + sum_1^m ( 2*Ai*( omega_i^2+omega^2) / (omega_i^2-omega^2)^2 )
---> The impedance of a parallel tuned circuit is identical to the reactance
function for a lossless network ... this is synthesis
____inductor (L)____
Z(p) | |
o-----| |------o
|___ ____|
Capacitor (C)
--> Foster synthesis: circuit is derived by partial fraction decomposition
(expansion) of the impedance circuit, then cascading parallel tuned
circuits following a series inductor + capacitor as below:
___L____ ___L____ ___L____
Z(p) | | | | | |
o---L---| |-o-| |---o---| |--- ...---| |---o
|___ ____| |___ ____| |___ ____|
C C C
--> Ladder synthesis: circuit is derived by alternating reactance / admittances
(Lossless two-port network: series inductor, parallel capacitor) and
subtracting the value from the reactance function until complete
_____ _____ _____ _____
o----| |-----| |-----| |--...---| |---o
| Z1 | | Z2 | | Z3 | | ZN |
o----|____|-----|____|-----|____|--...---|____|---o
ex//
_______ _____ _____ _____
Z(p) | | | | | | | |
o---| Z1(p) |---o---| Z2 |--o---| Z3 |--o...o--| Zn |---o----o
|_______| | |____| | |____| |____| |
_|_ _|_ _|_
| | | | | |
|Y1| |Y2| |Yn|
|__| |__| |__|
| | |
0 0 0
--> Darlington synthesis: any positive real function can be synthesized as
the input impedance of a lossless passive reciprocal two-port network
which is terminated in a load resistor (Rload>=0).
_______
o----| |---o
| Z(p) | Rload>=0
o----|______|---o
==========================================================
Example Synthesis using Ladder networks:
For an impedance in partial fraction form of (and combined after)
Z(p) = 2*p + 1/p + 2p/(p^2+1)
= ( 2*p^4 + 2*p^2 + p^2 + 1 + 2*p^2 ) / (p*(p^2+1))
= ( 2*p^4 + 5*p^2 + 1 ) / (p^3+p))
Evaluate the residue as p->infinity (Ainf above)
Z(p->infinity) -> 2*p (from inspection of the first line, it is obvious)
Z1(p) = 2*p
Z(p) = 2*p + 1/p + 2p/(p^2+1) = ( 2*p^4 + 5*p^2 + 1 ) / (p*(p^2+1))
simple pole of order 1 at 0,
and simple pole at +-j on imaginary axis of order 1
First order pole: no derivative. Residue at infinity evaluated by
Res( Z(p), infinity) = lim (p->infinity) 1/p * Z(p)
= lim (p->infinity) ( 2*p^4 + 5*p^2 + 1 ) / (p^2*(p^2+1))
complete the square on the numerator
= lim (p->infinity) ( (2*p^2+1)*(p^2+1) + 2*p^2) / (p^2*(p^2+1))
factor the removable poles from each term
= lim (p->infinity) ( (2*p^2+1)/p^2 + 2/(p^2+1) )
second term limits to 0 as p tends to infinity
first term need L'hospitals rules to evaluate
= lim (p->infinity) (4*p)/(2p) = 2
therefore
1/p * Z1(p) = Res( Z(p), infinity) = lim (p->infinity) 1/p * Z(p) = 2
Subtract off the reactance:
In a lossless network, this series reactance represents an inductor
of value L=2
Z1(p) = Z(p) - 2p
= ( 2*p^4 + 5*p^2 + 1 ) / (p*(p^2+1)) - 2p
Use long-division on the first term to make drop the order of the numerator
____________(2p+0)
(p*(p^2+1)) |2*p^4 + 5*p^2 + 1
2*p^4 + 2*p^2
_________________
3*p^2 + 1
Z(p) = 2p + (3*p^2+1)/(p^3+p)
Z1(p) = Z(p) - 2p = (3*p^2+1)/(p^3+p)
Now invert Z1 to form the first admittance Y1(p)
Y1(p) = p*(p^2+1) / (3*p^2+1)
Determine the residue as p tends to infinity
1/p*Y1(p)|_(p=infinity) = lim(p->infinity) (p^2+1) / (3*p^2+1)
L'Hospitals
= lim(p->infinity) (2*p) / (6*p) = 1/3
Therefore, the first admittance is a shunt capacitor of value 1/3
Subtract it off from the admittance
1/Z2 = 1/Z1 - Y1(p) = p*(p^2+1) / (3*p^2+1) - p/3
= (3*p*(p^2+1) - 3*p^3-p)/(3*(3*p^2+1))
= 2*p/(9*p^2+3) = (2/3) * (p/(3p^2+1))
Invert to form an impedance, and take the residue as p->infinity
1/p*Z2|_(p=infinity) = lim (p-> infinity) (1/p) * (3/2p) * (3*p^2+1)
= lim (p-> infinity) (3/2p^2) * (3*p^2+1)
= lim (p-> infinity) (9/2 + 3/2p^2)
= 9/2
So subtract off a series inductance of 9/2 and invert to form the 2nd admittance:
1/Y2(p) = Z2(p)- 9*p/2
= (3/2p) * (3*p^2+1) - 9*p/2
= (9*p/2 + 3/2p - 9*p/2) = 3/2p
Y2(p) = 2p/3
Take the residue as p tends to infinity
1/p * Y2(p) |_(p=infinity) = lim (p-> infinity) (1/p) * (2*p/3)
= 2/3
Subtract off a shunt capacitor of value 2/3
1/Z3 = Y2(p) - 2*p/3 = 0 ... so we are done
For an input impedance of
Z(p) = 2*p + 1/p + 2p/(p^2+1)
= ( 2*p^4 + 2*p^2 + p^2 + 1 + 2*p^2 ) / (p*(p^2+1))
= ( 2*p^4 + 5*p^2 + 1 ) / (p^3+p))
poles at p=0 and +-j
zeros at p^2 = -5/4 +- 1/4 * sqrt(25 - 8)
= -5/4 +- sqrt(17/16) = -0.219.. or -2.28..
p = +- 0.468...*j and p = +- 1.510...*j
The 2-port lossless network, ladder circuit synthesis consists of
- series inductor: L=2
- shunt capacitor: C=1/3
- series inductor: L=9/2
- shunt capacitor: C=2/3
==========================================================
A lossless two-port network may be decomposed into a cascade of first-,
second- and fourth-degree networks depending on the locations of the zeros
of the even part of Z(p). These are the transmission zeros of the network
transmission zeros: Ev{Z(p)}=0
Zero on jomega-axis corresponds to zero transmission at real omega
--> measured zero in frequency response
Lossless network driven by 1-Ohm generator and terminated in 1-Ohm load:
Zin(p) = (m1+n1) / (m2+n2)
Ev{Zin} = 0.5*(Z(p) + Z(-p)) = (m1*m2 - n1*n2) / (m2^2 - n2^2)
Input power: Pin = |Iin(jomega)|^2 * Re{Zin(jomega)}
= 0.5*Iin(jomega)*Iin(-jomega)*[ Zin(jomega)+Zin(-jomega) ]
= Vg*conj(Vg)*[ Zin(p)+Zin(-p) ]/[(1+Zin(p))*(1+Zin(-p))]
Power transmission zeros at [Zin(p)+Zin(-p)]/[(1+Zin(p))*(1+Zin(-p))] = 0
---> zeros of the even part of the input impedance
---> Additional zeros at poles of Zin(p) and Zin(-p) (at d.c., infinity or
on the imaginary axis) can be removed as elements of a reactance
function by Foster synthesis
---> Remaining transmission zeros are not poles of Zin(p) and Zin(-p) and
can be removed by second-order or fourth-order networks
Second-order - Brune section: Finite real-frequency transmission zeros
extracted in complex conj. pairs
Second-order - Darlington C section: Remove transmission zeros on the real-axis
Fourth-order - Darlington D section: Remove complex transmission zeros
Cascade Synthesis: Synthesize an input impedance as a cascade of
Brune, C, and D sections terminated in a resistor
(assuming any zeros that are also poles of Z(p) and Z(-p) have been removed)
Zeros occur where m1*m2-n1*n2 = 0.
Assume it is a perfect square
m1*m2-n1*n2 = {
product_(i=1^q)[1+p^2/omegai^2] *
product_(i=1^r)[1-p^2/sigmai^2] *
product_(i=1^s)[p^4+2(omegai^2-sigmai^2)*p^2+(omegai^2+sigmai^2)^2]
}^2
----> three types of transmission zeros
(i) imaginary axis pair
(ii) real axis pair
(iii) complex quadruplet
(asymmetric zeros come later)
"""
"""
==========================================================
Quick refresher: Evaluating residues
A mathematically rigorous way to evaluate the residue
In a Laurent series: f(z) = sum_(n=-infty^infty) a_n*(z-zo)^n
of f(z) about a point z0, a_-1 is the residue of f(z)
if f(z) is analytic at z0, the residue is 0
a_-1 = 1/(m-1)! * d^(m-1) / dz^(m-1) [(z-z0)^m * f(z)]_z=z0
the residue of f(z) as z approaches z0, is related to the (m-1)th
weighted derivative of f near that point for an order m pole
simple pole of the function, f, given by c:
Res(f, c) = lim(z->c) of (z-c)*f(z)
if f(z) = g(z) / h(z) (holomorphic functions near c)
Note that holomorphic is a synonym for analytic
and h(c) = 0 and h'(c) /= 0, then use L'Hospitals rule
Res(f, c) = lim(z->c) of (z-c)*f(z)
= lim(z->c) of (g(z) + z*g'(z) - c*g'(z)) / h'(z)
= lim(z->c) of g(z) / h'(z)
higher order poles of function, f, given by c (order of pole: n)
Res(f, c) = 1/(n-1)! lim(z->c) of d^(n-1)/dz^(n-1) ( (z-c)^n f(z) )
(n-1)th derivative
Special case of z-> infinity:
Res( f(z), infinity) = - Res(1/z^2*f(1/z), 0)
(i) if lim(z->infinity) f(z)=0
then
Res( f(z), infinity) = - lim(z->infinity) z*f(z)
(ii) if lim(z->infinity) f(z)= c /= 0
then
Res( f(z), infinity) = lim( |z|->infinity) z^2*f'(z)
Special case where part or all of f(z) can be expanded into a
Taylor or Laurent series: it is easier
==========================================================
"""
"""
==========================================================
Scaling the 1-Ohm prototypes to arbitrary impedance / cutoff frequency
Most microwave filters operate at 50-Ohms. Historically, 50-Ohms was
chosen as a compromise between the losses and power handling capacity
of coax cable.
To convert from 1-Ohm to Zo-Ohms, scale the impedance of each element
Inductors: Z = L*p --> Zo*L*p = (Zo*L)*p; L-> Zo*L
Capacitors: Z = 1/(C*p) --> Zo/Cp = 1/(C/Zo)p; C-> C/Zo
Impedance invertors: K--> Zo*K (characteristic impedance)
Ladder coupled filters have series inductors + shunt-capacitors
Admittance inverter coupled filters have shunt capacitors and impedance inverters
==========================================================
Converting the lowpass prototype to an arbitrary cutoff frequency, omega_c
Lowpass prototype networks have a cut-off of omega = 1 rad/s
lowpass transmission characteristic: |S12(jomega)|^2 = 1/(1+Fn(omega)^2)
omega --> omega/omega_c
|S12(jomega)|^2 = 1/(1+Fn(omega/omega_c)^2)
Fn(omega=1) = Fn(omega/omega_c=1)
Inductors: Z = L*p; Z(jomega)=jomegaL --> jomegaL/omega_c; L-> L/omega_c
Capacitors: Z=1/C*p; Z(jomega)=-j/omegaC --> -j/(omega/omega_c)C; C-> C/omega_c
Impedance inverters are frequency independent
==========================================================
Converting the lowpass prototype to a highpass filter with arbitrary omega_c
|S12(jomega)|^2 = 1/(1+Fn(omega)^2)
transform omega--> -omega_c/omega to map dc to infinite frequency (and vice versa)
|S12(jomega)|^2 = 1/(1+Fn(-omega_c/omega)^2)
inductors: Z(jomega)=jomegaL--> -jomega_c*L/omega
= -j/(omega*(1/(omega_c*L)))
= -j/(omega*Cprime);
Cprime = (1/omega_C)/L
capacitors: Z(jomega)=-j/omegaC--> -jomega/(omega_c*C) = jomega*Lprime
Lprime = 1/(omega_c*C)
Impedance inverters are unaffected.
Shifts transmission zeros from omega=infinity to omega=0
Note: first convert designed load impedance to match system (50 Ohms),
then convert to high pass filter
==========================================================
Converting the lowpass prototype to a bandpass filter with arbitrary omega_1,2
map omega = +-1 in the lowpass to omega_1 and omega_2
transmission zeros in lowpass --> both omega=0 and omega=infinity
midband of the lowpass prototype (omega=0)
--> center of passband in bandpass: omega_o
Transformation:
omega --> alpha*(omega/omega_o - omega_o/omega)
omega=-1:
-1 = alpha*(omega_1/omega_o - omega_o/omega_1)
omega=+1:
+1 = alpha*(omega_2/omega_o - omega_o/omega_2)
Solving:
omega_o = _np.sqrt(omega_1*omega_2) --- geometric mean
alpha = omega_o/(omega_2-omega_1) --- bandwidth scaling factor
Inductor: Z=jomegaL--> j*alpha*L*(omega/omega_o-omega_o/omega)
=j*(alpha*L/omega_o)*omega - j/(omega*(1/(alpha*L*omega_o))
--> A series connected LC circuit
o---L---o ==> o---Lprime--| Cprime |----o
Lprime = alpha*L/omega_o
Cprime = 1/(alpha*L*omega_o)
Capacitor: Y=jomegaC--> j*alpha*C*(omega/omega_o-omega_o/omega)
= j*(alpha*C/omega_o)*omega - j/(omega*(1/(alpha*C*omega_o)))
--> admittance of a parallel connected LC circuit
o---|C|---o ==> Lprime = 1 / (alpha*C*omega_o)
Cprime = alpha*C/omega_o
____Lprime___
Z(p) | |
o-----| |------o
|____Cprime__|
Impedance inverter: invariant under the frequency transformation
==========================================================
Realizing an impedance inverter in a circuit:
Use a pi-network of reactances to simulate an impedance inverter
o--------- Y=jB ------------o
| |
Y=-jB Y=-jB
| |
o---------------------------o
Transfer matrix
| 1 0 || 1 -j/B || 1 0 | | 0 -j/B |
[T] = | || || | = | |
| -jB 1 || 0 1 || -jB 1 | | -jB 0 |
| 0 j/K |
= | | where K=-B
| jK 0 |
---> a pi-network of reactance elements equates eactly to an inverter
of characteristic admittance K=-B.
---> Approximate the ideal reactive element jB with a series capacitor
of admittance Y=jB=jomegaC
Then K = -omega*C
o - no longer frequency independent, but if the filter is
sufficiently narrowband, then it is okay.
o - negative sign on capacitance doesn't matter
... just flips the phase response of the filter.
o - realize inverters with capacitive pi-sections, and you
can use the positive (filter) shunt capacitances to absorb
negative capacitances required by the pi-network
Then the lowpass prototype ladder with impedance inverters and shunt capacitors
______ ______ ______ __________
o---o---| |--o---| |--o---| |--...---| |--o-----o
C1 | K12 | C2 | K23 | C3 | K34 | | K(n-1,n) | CN Load (1-Ohm)
o---o---|_____|--o---|_____|--o---|_____|--...---|__________|--o-----o
becomes a bandpass prototype with impedance inverters and parallel shunt inductors/capacitors
______ ______ __________
o---o--o---| |---o--o---| |---o--o---...---o--o---| |---o--o--------o
L1',C1' | K12 | L2',C2' | K23 | L3',C3' L',C'(n-1)'| K(n-1,n) | Ln',Cn' Load (1-Ohm)
o---o--o---|_____|---o--o---|_____|---o--o---...---o--o---|__________|---o--o--------o
where L1' = 1/(alpha*C1*omega_o), C1'=(alpha*C1)/omega_o , etc.
And has a pi-network of capacitors inserted to replace the inverters
C12 C23 C(n-1,nn)
o---o--o---| |---o--o---| |---o--o---...---o--o-------| |---o--o--------o
L11,C11 L22,C22 L33,C33 L,C(n-1,n-1) Lnn,Cnn Load (1-Ohm)
o---o--o---------o--o---------o--o---...---o--o-------------o--o--------o
The rth shunt inductor is L_rr = 1/(alpha*C_r*omegao)
The rth shunt capacitor is C_rr = alpha C_r/omega_o - C_(r-1,r) - C_(r,r+1)
bandpass xform pi-net1 pi-net2
and finally, C_(r,r+1) = K_(r,r+1)/omega_o
-----> The same could be achieved by inductively coupling the resonators
(make an admittance inverter from inductors)
or by alternating inductors/capacitors
-----> Note that for very small bandwidths, the admittance of the filter
may need to be scaled by 1/alpha to make the components realizable
...
admittance of the rth shunt resonator prior to forming capacitive inverters:
--> Y_r(jomega) = j*[alpha*Cr/omega_o)*omega - 1/(omega/(alpha*Cr*omega_o))]
alpha = omega_o/(delta_omega)
delta_omega = omega_2 - omega_1
for delta_omega very small compared to omega_o, alpha is very large
the inductance of the rth shunt inductor is
Lrr = 1/(alpha*Cr*omega_o)
Lrr is very small if alpha is very large (bad for manufacturing)
... the system impedance then needs to be transformed to match the load / generator
Insert the impedance transformer between the filter and its terminations:
Y(jomega) = jomegaC_a + 1/(1-j/(omegaC_b))
= jomegaC_a + (1.0+j/omegaC_b)/(1+1/(omega^2C_b^2))
Real part must have denominator equal to 1/alpha at omega_o:
Re{Y(jomega)} = 1.0/(1.0+1.0/(omega^2*C_b^2))
force equal to 1/alpha at omega=omega_o (our scaling factor)
1.0 + 1.0/(omega^2*C_b^2) = alpha
--> Cb = 1.0/(omega_o * _np.sqrt(alpha-1))
Imaginary part must
Im{Y(jomega)} = omega*Ca + 1/(omega*Cb) / (1+1/(omega^2*Cb^2))
must be equal to zero at omega=omega_o
omega*Ca = - 1/(omega*Cb) / (1+1/(omega^2*Cb^2))
Ca = -sqrt(alpha-1) / (omega_o * alpha)
---> Cb is the first and last series capacitor coupling into/out of network
Cb = 1.0/(omega_o * _np.sqrt(alpha-1))
---> -Ca is absorbed into the capacitance of the first and last resonators
Ca = -sqrt(alpha-1) / (omega_o * alpha)
C01 C12 C23 C(n-1,nn) C(nn,nn+1)
o-| |--o--o---| |---o--o---| |---o--o---...---o--o-------| |---o--o---| |--o
L11,C11 L22,C22 L33,C33 L,C(n-1,n-1) Lnn,Cnn Load (1-Ohm)
o------o--o---------o--o---------o--o---...---o--o-------------o--o--------o
C01 = C(n, n+1) = 1.0/(omega_o*sqrt(alpha-1))
C(r,r+1) = K(r,r+1)/(alpha*omega_o)
C11 = C1/omega_o - sqrt(alpha-1)/(omega_o*alpha) - C12
Cnn = Cn/omega_o - sqrt(alpha-1)/(omega_o*alpha) - C(n+1,n)
Crr = Cr/omega_o - C(r-1,r) - C(r,r+1) (r=2 ... N-1)
Lrr = 1/(Cr*omega_o)
The rth shunt inductor is L_rr = 1/(alpha*C_r*omegao)
The rth shunt capacitor is C_rr = alpha C_r/omega_o - C_(r-1,r) - C_(r,r+1)
bandpass xform pi-net1 pi-net2
and finally, C_(r,r+1) = K_(r,r+1)/omega_o
"""
# ==========================================================================
class prototypeFilter(object):
    """
    Container for a lowpass-prototype filter specification.

    Initialization Inputs
        La    - [dB]  - stopband insertion loss
        Lr    - [dB]  - passband return loss
        BW    - [MHz] - passband bandwidth (3 dB)
        delta - [MHz] - stopband bandwidth (point at which stopband insertion
                        loss is achieved); defaults to twice the passband
                        bandwidth when not given
    """
    def __init__(self, La=3, Lr=20, BW=100, delta=None):
        # Default stopband bandwidth: twice the passband bandwidth.
        if delta is None:
            delta = 2*BW
        # end if
        # Store the specification.  The original computed `delta` and then
        # discarded it (and every other argument) with a bare `pass`, which
        # made the class useless; keep the attribute names consistent with
        # ButterworthLPF / ChebyshevLPF below.
        self.La = La        # rejection [dB]
        self.Lr = Lr        # return loss [dB]
        self.BW = BW        # passband bandwidth
        self.delta = delta  # stopband bandwidth
    # end def __init__
# end class
# ==========================================================================
class ButterworthLPF(object):
    """
    low-pass prototype network based on a Butterworth filter:
        two-port lumped element network
        with angular cutoff = 1 rad/s
        and operating in a 1-Ohm system
    The simplest 1 degree low-pass filter is a series resistor + parallel
    capacitor. An N-degree low-pass filter just cascades these lumped elements.
    In this class the series resistor is modeled as a parallel admittance.

    Initialization Inputs
        N  - [-]  - Order of the prototype filter
        La - [dB] - stopband insertion loss
        Lr - [dB] - passband return loss
        S  - [-]  - Shape factor (ratio of stopband to passband frequencies)

    NOTE(review): this class calls ButterworthLPF.degree(), self.Kinverter()
    and self.Cshunt(), none of which are defined here (unlike ChebyshevLPF,
    which this class otherwise mirrors line-for-line).  Instantiating it
    raises AttributeError -- presumably the Butterworth element-value
    formulas were still to be filled in; confirm before use.
    """
    def __init__(self, La = 3, Lr = 20, S = 1.2, N= None):
        if N is None:
            # NOTE(review): ButterworthLPF.degree is not defined on this
            # class -> AttributeError at runtime.
            N = ButterworthLPF.degree(La, Lr, S)
        # end if
        self.N = N          # order of filter
        self.La = La        # rejection [dB]
        self.Lr = Lr        # return loss [dB]
        self.S = S          # Shape factor
        self.Krn = _np.zeros((N,), dtype=_np.float64)   # Admittance of each stage of the filter
        self.Crn = _np.zeros_like(self.Krn)             # Capacitance of each stage of filter
        for nn in range(N):
            rr = nn+1       # 1-based stage index
            # This sets the admittance and capacitance of each stage.
            # NOTE(review): Kinverter/Cshunt are likewise undefined on this
            # class; they exist only on ChebyshevLPF.
            self.Krn[nn] = self.Kinverter(self.N, self.Lr, rr)
            self.Crn[nn] = self.Cshunt(self.N, self.Lr, rr)
        # end for
    # end def
# ==========================================================================
class ChebyshevLPF(object):
    """
    low-pass prototype network based on a Chebyshev filter:
        two-port lumped element network
        with angular cutoff = 1 rad/s
        and operating in a 1-Ohm system
    The simplest 1 degree low-pass filter is a series resistor + parallel
    capacitor. An N-degree low-pass filter just cascades these lumped elements.
    In this class the series resistor is modeled as a parallel admittance.

    Initialization Inputs
        N  - [-]  - Order of the prototype Chebyshev filter (derived from
                    La/Lr/S via degree() when None)
        La - [dB] - stopband insertion loss
        Lr - [dB] - passband return loss
        S  - [-]  - Shape factor (ratio of stopband to passband frequencies)
    """
    def __init__(self, La = 3, Lr = 20, S = 1.2, N= None):
        if N is None:
            N = ChebyshevLPF.degree(La, Lr, S)
        # end if
        self.N = N          # order of filter
        self.La = La        # rejection [dB]
        self.Lr = Lr        # return loss [dB]
        self.S = S          # Shape factor
        self.Krn = _np.zeros((N,), dtype=_np.float64)   # Admittance of each stage of the filter
        self.Crn = _np.zeros_like(self.Krn)             # Capacitance of each stage of filter
        # Bug fix: the original called the mangled name ``__eta_prototype``
        # as a bare function; inside a class body Python rewrites that to a
        # lookup of the (nonexistent) module-level
        # ``_ChebyshevLPF__eta_prototype`` and raises NameError.  Call the
        # static method through the class instead.
        self._eta = ChebyshevLPF.__eta_prototype(N, Lr)
        for nn in range(N):
            rr = nn+1       # 1-based stage index r
            # Admittance inverter K_(r,r+1) and shunt capacitor C_r for
            # each stage of the prototype network.
            self.Krn[nn] = self.Kinverter(self.N, self.Lr, rr)
            self.Crn[nn] = self.Cshunt(self.N, self.Lr, rr)
        # end for
    # end def
    @staticmethod
    def degree(La, Lr, S, returnint=True):
        """
        Formula for calculating the minimum degree of a Chebyshev low-pass filter
        Inputs
            La - [dB] - stopband insertion loss
            Lr - [dB] - passband return loss
            S  - [-]  - Shape factor (ratio of stopband to passband frequencies)
            returnint - when True (default) round up to the next integer order
        Outputs
            Nmin - minimum degree of the Chebyshev filter
            N >= (La + Lr + 6)/(20*log10[S+sqrt(S^2-1)])
        """
        Nmin = (La + Lr + 6.0)/(20.0*_np.log10(S+_np.sqrt(S*S-1.0)))    # eqn 3.71
        if returnint:
            return int(_np.ceil(Nmin))
        else:
            return Nmin
        # end if
    # end def
    @staticmethod
    def __eta_prototype(N, Lr):
        """
        intermediate eta variable shared by the element-value formulas
        """
        epsl = (10.0**(Lr/10.0) - 1.0)**(-0.5)      # ripple level; formula (2)
        return _np.sinh(_np.arcsinh(1.0/epsl)/N)    # formula (3)
    @staticmethod
    def Kinverter(N, Lr, r):
        """
        Element value K_(r,r+1) for a Chebyshev low-pass prototype filter
        rth admittance inverter in the prototype network
        """
        eta = ChebyshevLPF.__eta_prototype(N, Lr)
        return _np.sqrt( eta*eta + (_np.sin(r*_np.pi/N)**2.0) )/eta     # formula (4)
    @staticmethod
    def Cshunt(N, Lr, r):
        """
        Element value C_Lr for a Chebyshev low-pass prototype filter
        rth shunt-capacitor in the prototype network
        """
        eta = ChebyshevLPF.__eta_prototype(N, Lr)
        return (2.0/eta)*_np.sin((2.0*r-1.0)*_np.pi/(2.0*N))            # formula (5)
# end class
# ==========================================================================
"""
The design of this tuneable cavity filter loosely is based on the paper:
Design of Low-Loss Coaxial Cavity Bandpass Filter with Post-Manufacturing
Tuning Capabilities
2012 IEEE Symposium on Business, Engineering and Industrial Applications
by Z. Zakaria, A. Sabah, and W. Y. Sam
-----
The majority of the math / physics comes from:
Theory and Design of Microwave Filters, published 2001
(IEE electromagnetic waves series; no.48), by Ian. Hunter.
(ISBN: 0852967772, or 978-0-85296-777-5)
"""
def Cheb2CombLine_Admittance(alpha, theta, Cshunt):
    """Characteristic admittance of a short-circuited combline stub (formula 6).

    Maps the rth shunt capacitor of the lowpass prototype onto the
    short-circuited stub of a combline filter.

    inputs
        alpha  - [-]       - bandwidth scaling factor
        theta  - [radians] - electrical length of the resonators at the
                             center frequency omega0 of the filter
        Cshunt - [Farad?]  - capacitance of the rth capacitor in the prototype network
    outputs
        Yr - [Siemens] - characteristic admittance of the stub
    """
    tan_theta = _np.tan(theta)
    return alpha * Cshunt * tan_theta   # formula (6)
def Cheb2CombLine_Beta(omega0, theta):
    """Scaling parameter between the rth admittance and capacitance (formula 8).

    inputs
        omega0 - [rad/s]   - resonant cyclic frequency of filter
        theta  - [radians] - electrical length of the resonators at the
                             center frequency omega0 of the filter
    outputs
        beta - [s/rad] - scaling parameter, beta = 1/(omega0*tan(theta))
    """
    tangent = _np.tan(theta)
    return 1.0 / (omega0 * tangent)     # formula (8)
def Cheb2CombLine_Capacitance(omega0, alpha, theta, Cshunt):
    """Equivalent rth capacitance in the combline filter (formula 7): Cr = beta*Yr.

    inputs
        omega0 - [rad/s]   - resonant cyclic frequency of filter
        alpha  - [-]       - bandwidth scaling factor
        theta  - [radians] - electrical length of the resonators at the
                             center frequency omega0 of the filter
        Cshunt - [Farad?]  - capacitance of the rth capacitor in the prototype network
    outputs
        Cr - [Farad] - equivalent rth capacitance in the combline filter
    """
    stub_admittance = Cheb2CombLine_Admittance(alpha, theta, Cshunt)
    scale = Cheb2CombLine_Beta(omega0, theta)
    return scale * stub_admittance      # formula (7)
# ==========================================================================
# ==========================================================================
def prototype_LPF(La=30, Lr=3, S=4, N=None):
    """
    Build a 1-Ohm Chebyshev low-pass prototype filter.

    Inputs
        La - [dB] - stopband insertion loss
        Lr - [dB] - passband return loss
        S  - [-]  - shape factor (ratio of stopband to passband frequencies)
        N  - [-]  - filter order; derived from La/Lr/S when None

    Returns the ChebyshevLPF instance.  (The original constructed the
    filter, bound it to a local, and implicitly returned None, so the
    result was unreachable by callers.)
    """
    # 1-Ohm low pass filter based on a Chebyshev filter characteristic
    chebLPF = ChebyshevLPF(La=La, Lr=Lr, S=S, N=N)
    return chebLPF
def example451():
    """
    example from page 114 of "Theory and Design of Microwave Filters - I. Hunter"
    Design a capacitively coupled Chebyshev bandpass filter to meet the
    following specifications:
        Center frequency:           1 GHz
        Passband bandwidth:         50 MHz
        Passband return loss:       >= 20 dB
        Stopband insertion loss:    >40 dB at fo +-100 MHz
        System impedance:           50 Ohms
    - First design the lowpass filter prototype
    - Next convert to a bandpass filter
    - Then check the value of the inductors / capacitors
    - Scale impedances by 1/alpha to make them reasonable
    - Use capacitive impedance transformer to match system impedance of 50 Ohms
    (i) Chebyshev prototype
        - inverter coupled and
          |S12(jomega)|^2 = 1/(1+eps^2*T_N^2(omega));
          T_N is the Chebyshev polynomial, determined by condition
          for equiripple response (Cheb) s. eqn. 3.54-3.59
          C_N determined so that T_N(omega) is an nth degree polynomial in omega
          T_N(omega) = cos(C_N*theta)
          is zero when C_N*theta = (2*r-1)*pi/2   (r=1,2,...)
          --> theta = (2*r-1)*pi/(2*C_N) therefore C_N = N
          T_N(omega) = cos(N*arccos(omega))
    Generating function for T_N(omega):
        T_(N+1)(omega) = 2*omega*T_N(omega) - T_(N-1)(omega)
    Initial conditions:
        T_0(omega) = 1 and T_1(omega) = omega
    4th order Chebyshev prototype:
        T_0 = 1
        T_1 = omega
        T_2 = 2*omega*T_1 - T_0 = 2*omega^2-1                       eqn 3.62
        T_3 = 2*omega*T_2 - T_1 = 2*omega*(2*omega^2-1) - omega
            = 4*omega^3 - 3*omega                                   eqn 3.63
        T_4 = 2*omega*T_3 - T_2 = 2*omega*(4*omega^3 - 3*omega) - 2*omega^2 + 1
            = 8*omega^4 - 6*omega^2 - 2*omega^2 + 1
            = 8*omega^4 - 8*omega^2 + 1
    |S12(jomega)|^2 = 1/(1+eps^2*T_N^2(omega))
                    = 1/(1+eps^2*cos^2(N*arccos(omega)))
        ---> poles at T_N^2(omega) = -1/eps^2
             cos^2(N*arccos(omega)) = -1/eps^2                      eqn 3.74
        solved using
             eta = sinh( arcsinh(1/eps)/N )                         eqn 3.75
        or   1/eps = sinh(N*arcsinh(eta))                           eqn 3.76
    |S11(jomega)|^2 = 1 - |S12(jomega)|^2
                    = eps^2*T_N^2(omega)/(1+eps^2*T_N^2(omega))
    """
    def genTN(N, omega):
        # Chebyshev polynomials of the first kind via the recurrence
        # T_(n+1) = 2*omega*T_n - T_(n-1), seeded with T_0=1, T_1=omega.
        # NOTE(review): the loop stops at range(2, N), so the returned list
        # holds T_0 .. T_(N-1) only (and always contains T_1, even for N=0);
        # range(2, N+1) would be needed to include T_N.  This helper is not
        # called anywhere in this example -- confirm intent before use.
        TN = [1.0, omega]
        if N>1:
            for nn in range(2,N):
                TN += [2*omega*TN[-1] - TN[-2]]
            # end for
        # end if
        return TN
    # end def
    def S12_squared(eps, omega, N):
        """ Transmission response |S12|^2 of the Chebyshev prototype. """
        return 1.0/(1.0+eps*eps*(_np.cos(N*_np.arccos(omega)))**2.0)
    def S11_squared(eps, omega, N):
        """ Reflection response |S11|^2 = 1 - |S12|^2. """
        return 1.0-S12_squared(eps,omega, N)
    def minN(La, Lr, S):
        """ Return the minimum order required for a Chebyshev filter """
        return (La+Lr+6.0)/(20.0*_np.log10(S+_np.sqrt(S*S-1.0)))    # eqn 3.71
    def selectivity(delta, BW):
        """ Return the selectivity of the filter: ratio of stopband to passband """
        return delta/BW
    def ripple(Lr):
        """ passband ripple determined from return loss """
        # eqn. 3.47; passband return loss at ripple level
        return 1.0/_np.sqrt(_np.power(10.0, Lr/10.0) - 1.0)
    def chebyshev_eta(eps, N):
        """ eta parameter for a Chebyshev filter """
        # used in coordinate transformation for filter design
        return _np.sinh( _np.arcsinh(1.0/eps)/N )
    # --- General Nth degree Chebyshev prototype network with series
    #     inductors / impedance transformers
    def Kcheb_rr1(r, eta, N):
        """ Characteristic impedance of the inverters pg 63 """
        return _np.sqrt(eta*eta+_np.sin(r*_np.pi/N)**2.0)/eta
    def Lcheb_r(r, eta, N):
        """ inductance of series inductors pg 63 """
        return (2.0/eta)*_np.sin((2.0*r-1.0)*_np.pi/(2.0*N))
    # --- General Nth degree Chebyshev prototype network with shunt
    #     capacitors / impedance transformers
    # Note that the formula is identical to that for inductance; in the dual
    # network the same numbers are read as admittances.
    def Ccheb_r(r, eta, N):
        """ capacitance of the shunt capacitors """
        return Lcheb_r(r, eta, N)
    def create_cheb_prototype(eta, N):
        """
        A loop that generates the prototype values of capacitance and
        characteristic impedance of the inverters
        """
        _Klpf = _np.zeros((N-1,), dtype=_np.float64)    # N-1 inverters
        _Clpf = _np.zeros((N,), dtype=_np.float64)      # N shunt capacitors
        for ii in range(N):
            if ii+1<N:
                _Klpf[ii] = Kcheb_rr1(ii+1, eta, N)
            # end if
            _Clpf[ii] = Ccheb_r(ii+1, eta, N)
        # end for
        return _Klpf, _Clpf
    def alpha(fc, BW):
        """ bandwidth scaling factor """
        return fc/BW
    # ========================== #
    # Design specification for this example.
    La = 40         # [dB], stopband insertion loss
    Lr = 20         # [dB], passband return loss
    fc = 1e9        # [Hz], center frequency of filter
    BW = 50e6       # [Hz], passband bandwidth
    delta = 200e6   # [Hz], stopband bandwidth (points at which the insertion loss meets requirements)
    # First evaluate the degree of the lowpass prototype
    #   N >= (La+Lr+6)/(20*log10(S+sqrt(S^2-1)))
    #   S = 200/50 = 4
    #   N >= 3.68 --> N >= 4
    S = selectivity(delta, BW)  # [-], selectivity of the filter: ratio of stopband to passband
    minN = minN(La, Lr, S)      # NOTE: rebinding deliberately shadows the helper above
    N = int(_np.ceil(minN))     # 4
    # Ripple level determined from passband return loss: epsrl=0.1005...
    #   Lr = 10*log10(1+ 1/eps^2)   # eqn. 3.47; passband return loss at ripple level
    epsrl = ripple(Lr)
    # eta parameter of filter
    eta = chebyshev_eta(epsrl, N)   # 0.8201...
    # Scattering parameters at the ripple level:
    #   S12_squared = 1.0/(1.0+epsrl*epsrl)         # 0.99; transmission
    #   S11_squared = epsrl*epsrl/(1+epsrl*epsrl)   # 0.01; reflection
    # ============================ #
    #
    # plot the response of the general low pass filter
    # ff = _np.linspace(-1, 1.0, num=100, endpoint=True)
    ff = _np.linspace(0, 2.5, num=100, endpoint=True)
    S12 = S12_squared(epsrl, ff, N)
    S11 = S11_squared(epsrl, ff, N)
    _plt.figure()
    _ax1 = _plt.subplot(2,1,1)
    _ax1.plot(ff, 10*_np.log10(S12), '-')
    _ax2 = _plt.subplot(2,1,2, sharex=_ax1)
    _ax2.plot(ff, 10*_np.log10(S11), '-')
    # _ax1.set_xlim((0,2))
    # _ax1.set_xlim((-1,1))
    # _ax1.set_xlim((0, 1))
    _ax1.set_ylabel('|S12| [dB]')
    _ax2.set_ylabel('|S11| [dB]')
    _ax1.set_title('Prototype LPF')
    # ============================ #
    # Create the prototype Chebyshev lowpass filter using impedance inverters
    # and parallel caps
    _Klpf, _Clpf = create_cheb_prototype(eta, N)
    # ------------------ transform to bandpass filter
    # Use the shunt capacitor admittance example
    # (same value as series inductor impedance)
    # scale the lowpass prototype to a bandpass filter
    alpha = alpha(fc, BW)       # bandwidth scaling factor (NOTE: shadows the helper above)
    omega_o = 2.0*_np.pi*fc     # [rad/s], cyclic frequency at center of bandpass filter
    # Admittance, Y=jomegaC
    # --> admittance of a parallel connected LC circuit
    #
    #   o---|C|---o   ==>   Lprime = 1 / (alpha*C*omega_o)
    #                       Cprime = alpha*C/omega_o
    #
    #          ____Lprime___
    #   Z(p)   |           |
    #   o-----|             |------o
    #          |____Cprime__|
    #
    # These are the values of the shunt inductors and shunt capacitors used
    # for the bandpass Chebyshev filter: in parallel to the impedance inverters
    # --> this would be the intermediary result, but we need to add impedance
    #     transformers at the input and output to make the values independent
    #     of bandwidth
    #   Lprime = 1/ (alpha*_Clpf*omega_o)
    #   Cprime = alpha*_Clpf/omega_o
    def scaled_impedance_inverter_caps(_Klpf, alpha, omega_o):
        """
        return the scaled capacitances necessary to realize an impedance inverter
        """
        return _Klpf/(alpha*omega_o)
    def scaled_bpf_caps(_Clpf, omega_o):
        """
        return the scaled shunt capacitances necessary for this band pass filter
        """
        return _Clpf/omega_o
    def scaled_bpf_inductors(_Clpf, omega_o):
        """
        return the scaled shunt inductors for this band pass filter
        """
        return 1.0/(_Clpf*omega_o)  # Inductors get put in parallel
    def impedance_transformer_caps(alpha, omega_o):
        """
        return the capacitances required to make a capacitively coupled
        impedance transformer on the input and output terminations of the filter
        """
        _Ca = 1.0 * _np.sqrt(alpha-1)/(omega_o*alpha)   # negative in application to others
        _Cb = 1.0/(omega_o*_np.sqrt(alpha-1))
        return _Ca, _Cb
    # ===================== #
    def cap_coupled_bpf(N, omega_o, _Clpf, _Ckk, _Cyy, _Ca, _Cb, verbose=1):
        """
        the capacitors (parallel / series by columns) necessary to generate a
        capacitively coupled chebyshev band pass filter
        ... combine these with shunt inductors generated by scaled_bpf_inductors
        """
        Lbpf = scaled_bpf_inductors(_Clpf, omega_o)     # Inductors get put in parallel, but don't change with cap coupling
        Cbpf = _np.zeros((N+1,2), dtype=_np.float64)    # rows: node index; columns: [shunt, series]
        # First and last series capacitors are from the impedance transformers
        Cbpf[0,1] += _Cb
        Cbpf[N,1] += _Cb
        # First and last shunt capacitors absorb the neg. shunt capacitance from the imp. transformers
        Cbpf[1,0] -= _Ca
        Cbpf[N,0] -= _Ca
        # Series capacitors in the middle come from the impedance inverter realization
        Cbpf[1:N , 1] += _Ckk
        # Shunt capacitors in the middle come from the original filter
        Cbpf[1:N+1, 0] += _Cyy
        # First and last shunt capacitor only are adjacent to one impedance
        # inverter realization, so they absorb neg. capacitance from those individually
        # ... the others in the middle are adjacent to two impedance inverters
        Cbpf[1:N , 0] -= _Ckk[:]
        Cbpf[2:N+1, 0] -= _Ckk[:]
        if verbose:
            print(Lbpf)
            print(Cbpf)
        # end if
        return Lbpf, Cbpf
    def scale_impedances_to_system(Lbpf, Cbpf, Zo=50.0):
        """
        scale the components to match the required system impedance (input and output)
        """
        # Zo = 50.0 # [Ohms]
        # Inductors: Z = L*p --> Zo*L*p = (Zo*L)*p; L-> Zo*L
        Lbpf *= Zo
        # Capacitors: Z = 1/(C*p) --> Zo/Cp = 1/(C/Zo)p; C-> C/Zo
        Cbpf /= Zo
        return Lbpf, Cbpf
    # ===================== #
    # Realize each impedance inverter by a pi-network connected set of capacitors
    # 4th order network with capacitive coupling impedance transformers on
    # input / output
    _Ca, _Cb = impedance_transformer_caps(alpha, omega_o)
    _Ckk = scaled_impedance_inverter_caps(_Klpf, alpha, omega_o)
    _Cyy = scaled_bpf_caps(_Clpf, omega_o)
    Lbpf, Cbpf = cap_coupled_bpf(N, omega_o, _Clpf, _Ckk, _Cyy, _Ca, _Cb, verbose=1)
    # Now scale the impedances to match 50 ohms at the input and output
    Lbpf, Cbpf = scale_impedances_to_system(Lbpf, Cbpf, Zo=50.0)
    return Lbpf, Cbpf
# end def
def cot(x):
    """Cotangent of *x* (radians): the reciprocal of the tangent."""
    tangent = _np.tan(x)
    return 1.0 / tangent
def physical_filter():
    """Worked physical-dimension example: print resonator spacings and lid gaps."""
    # Loading capacitances of the four resonators (symmetric filter).
    C1 = C4 = 15.9894e-12   # [F] (~16 pF)
    C2 = C3 = 38.1670e-12   # [F] (~38 pF)
    # Characteristic admittances of the resonators (recorded, unused below).
    Y1 = Y4 = 9.1327        # mho
    Y2 = Y3 = 22.0488       # mho
    # Admittance-inverter values between adjacent resonators.
    K12 = K34 = 0.026
    K23 = 0.031
    fo = 2.5e9  # [Hz] design frequency
    l = 12e-3   # [m]  resonator length
    b = 24e-3   # [m]  cavity diameter
    d = 8e-3    # [m]  resonator diameter
    # Spacing between adjacent resonators for each inverter value.
    # Expected: 0.07346..., 0.07212..., 0.07346...
    for coupling in [K12, K23, K34]:
        spacing = Scomb_ij(fo, l, b, d, coupling)
        print(spacing)
    # end for
    # Lid-to-resonator gap for the first two loading capacitances;
    # Mgap() takes the capacitance in pF, hence the 1e12 factor.
    for loading in [C1, C2]:
        gap = Mgap(d, 1e12*loading)
        print(gap)
    # end for
# end def
def loading_capacitance(theta0, Zs, Zr, fc):
    """
    Loading capacitance Cl of a resonator.
        theta0 - electrical length [radians] --> must be less than 90 degrees
        Zs     - system impedance (50 Ohms)
        Zr     - resonator impedance (equation 11)
        fc     - center frequency [Hz]
    NOTE(review): Zs appears in both numerator and denominator and cancels
    algebraically -- confirm this matches the source equation.
    """
    omega = 2.0*_np.pi*fc
    numerator = Zs*_np.tan(theta0)
    return numerator/(Zs*Zr*omega)
def inverse_loadCap(Cl, Zs, Zr, fc):
    """Electrical length [radians] recovered from the loading capacitance Cl
    (inverse of loading_capacitance)."""
    omega = 2.0*_np.pi*fc
    ratio = Cl*(Zs*Zr*omega)/Zs
    return _np.arctan(ratio)
def ewall(b, d):
    """
    Distance to the wall from the resonator edge that matches the cavity
    diameter (equation 12): half the sum of cavity diameter b and resonator
    diameter d.
    """
    return (b + d) / 2.0
def Mgap(d, Cij):
    """
    Resonator gap between the lid and resonator providing the necessary
    capacitance (equation 13).
        d   - resonator diameter
        Cij - required capacitance (in pF, per the caller)
    """
    numerator = 0.695 * d * d
    denominator = 100 * Cij - 2.61 * d
    return numerator / denominator
def Scomb_ij(fo, l, b, d, Kij):
    """
    Distance between resonators i and j (equation 14), based on:
        Kij - the admittance inverter value
        fo  - design frequency [Hz] (with resonator length l, sets the
              electrical length)
        l   - resonator length
        b   - cavity diameter
        d   - resonator diameter
    """
    shape_factor = f_theta(theta(fo, l))
    log_term = _np.log10(4*shape_factor*Kij/_np.pi)
    return (b/1.37) * ((0.91*b/d) + 0.048 - log_term)
def theta(fo, l):
    """
    Electrical length of a resonator of length l at frequency fo (equation 16).
    Uses the free-space wavelength; the original author notes the guide
    wavelength would arguably be more appropriate.
    """
    free_space_wavelength = 3e8 / fo
    return 2.0 * _np.pi * l / free_space_wavelength
def f_theta(theta):
    """Shape function f(theta) of equation 15."""
    two_theta = 2 * theta
    return 0.5 * (1.0 + two_theta / _np.sin(two_theta))
# Script entry point: run the physical-dimension example.
if __name__ == '__main__':
    # Lbpf, Cbpf = example451()
    physical_filter()
# end if
import argparse
import re
from wte.src.util import Log
# ========== create_book ==========
import wte.src as src
# Map of URL regex pattern -> downloader module; get_module_of_book()
# returns the module whose pattern matches a given URL.
URL_MATCHS = {
    "^(https?://)?(www\\.)?wattpad\\.com/.+$": src.wattpad,
}
def get_module_of_book(url):
    """Return the downloader module whose URL pattern matches *url*, or None."""
    stripped = url.strip()
    return next(
        (module for pattern, module in URL_MATCHS.items()
         if re.match(pattern, stripped)),
        None,
    )
def try_create_book(url):
    """Resolve *url* to a downloader module and download the book.

    Terminates the process when no module recognizes the URL.  Bug fix:
    the original called ``exit(0)`` on this failure path, reporting
    success to the calling shell; exit with a nonzero status instead.
    """
    module = get_module_of_book(url)
    if module is None:
        Log.error(
            f"The content at {url} doesn't match any pattern and is not recognized")
        raise SystemExit(1)  # nonzero: this is an error, not a clean exit
    return module.download_book(url)
# ========== Main ==========
# One-line program description shown in argparse --help.
# NOTE(review): the name misspells "DESCRIPTION"; left unchanged because
# get_cmd_args() references it by this exact name.
SHORT_DESCRIPTIOM = """Easily convert websites into book"""
def get_cmd_args():
    """Parse and return the command-line arguments (url, -o/--output, -q/--quiet)."""
    parser = argparse.ArgumentParser(description=SHORT_DESCRIPTIOM)
    parser.add_argument('-o', '--output', type=str, default=None,
                        help='Output file')
    parser.add_argument('-q', '--quiet', action='store_true', default=False,
                        help='Quiet mode')
    parser.add_argument(
        'url', type=str, help='Url of the page you want to convert to an ebook')
    return parser.parse_args()
def main():
    """CLI entry point: download the requested book and write it as an EPUB."""
    args = get_cmd_args()
    if args.quiet:
        Log.silent = True
    book = try_create_book(args.url)
    # Fall back to the book's own name when no output path was given.
    target = args.output if args.output else book.epub_name()
    if not target.endswith('.epub'):
        target = target + '.epub'
    book.to_epub(target)
# Run the CLI when executed as a script.
if __name__ == "__main__":
    main()
|
#!/usr/bin/env python
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import logging
from contextlib import contextmanager
from subprocess import call
# Ambari agent working directory; appended to PATH for spawned scripts.
ROOT_DIR = '/var/lib/ambari-agent'
# Directory receiving the per-command stdout/stderr capture files.
LOG_DIR = '/var/lib/ambari-agent/data'
# Log file for this wrapper's own messages.
TOP_LOG = '/var/log/adcm.log'
TMP_DIR = '/tmp'
# Log level handed to the spawned Ambari python scripts.
LOG_LEVEL = 'INFO'
log = logging.getLogger('command')
log.setLevel(logging.DEBUG)
def get_log_handler(fname):
    """Build an appending UTF-8 file handler with the wrapper's log format."""
    formatter = logging.Formatter(
        "%(asctime)s %(levelname)s %(module)s %(message)s", "%m-%d %H:%M:%S")
    handler = logging.FileHandler(fname, 'a', 'utf-8')
    handler.setFormatter(formatter)
    return handler
@contextmanager
def open_file(root, tag, command_id):
    """Context manager yielding "<root>/<command_id>-<tag>.txt" opened for
    UTF-8 text writing; the file is closed when the ``with`` block exits.

    Bug fix: the original used ``yield from file``, which tries to *iterate*
    the file object (illegal on a write-mode handle, and in any case it
    would yield lines, not the handle).  ``yield file`` hands the open file
    to the ``with`` block, as @contextmanager requires.
    """
    fname = "{}/{}-{}.txt".format(root, command_id, tag)
    with open(fname, 'w', encoding='utf_8') as file:
        yield file
def print_log(root, tag, command_id):
    """Dump the captured file "<root>/<command_id>-<tag>.txt" to stderr."""
    path = "{}/{}-{}.txt".format(root, command_id, tag)
    with open(path, 'r', encoding='utf_8') as handle:
        contents = handle.read()
    sys.stderr.write(contents)
def add_path(path):
    """Return a copy of the current environment with *path* appended to PATH.

    Bug fix: the original bound ``env = os.environ`` (a reference, not a
    copy) and assigned into it, mutating the wrapper's own process
    environment and growing PATH by one more ":path" segment on every
    call.  Working on a copy keeps the change local to the spawned
    subprocess.
    """
    env = dict(os.environ)
    env['PATH'] = "{}:{}".format(env['PATH'], path)
    return env
# pylint: disable-next=too-many-arguments
def run_python_script(base_dir, py_script, command, json_config, out_file, err_file):
    """Run one Ambari python script and return its exit status.

    Invokes ``python <py_script> <COMMAND> <json_config> <base_dir> ...``
    with stdout/stderr redirected to the given file objects and ROOT_DIR
    appended to PATH.  Returns the subprocess return code, or 42 when
    spawning the subprocess itself raised.
    """
    try:
        res = call(
            [
                'python',
                py_script,
                command.upper(),
                json_config,
                base_dir,
                '/tmp/structured_out.json',
                LOG_LEVEL,
                TMP_DIR,
            ],
            stdout=out_file,
            stderr=err_file,
            env=add_path(ROOT_DIR),
        )
    # Narrowed from the original bare ``except``, which also swallowed
    # SystemExit/KeyboardInterrupt; log.exception records the traceback.
    # (Also fixes the "runnung" typo in the log message.)
    except Exception:
        log.exception("exception running python script")
        res = 42
    log.info("script %s ret: %s", py_script, res)
    return res
def cook_hook(root, hook, command):
    """Build the (base_dir, hook_script_path, command) triple for one hook stage."""
    base = '{}/{}'.format(root, hook)
    script = '{}/{}/scripts/hook.py'.format(root, hook)
    return (base, script, command)
def cook_command_pipe(hook_dir, command_tuple):
    """Wrap *command_tuple* with the hook invocations its command requires.

    install gets before/after INSTALL hooks; start gets a before-START hook;
    every other command gets a generic before-ANY hook.
    """
    command = command_tuple[2]
    if command == 'install':
        return [
            cook_hook(hook_dir, 'before-INSTALL', 'install'),
            command_tuple,
            cook_hook(hook_dir, 'after-INSTALL', 'install'),
        ]
    if command == 'start':
        return [cook_hook(hook_dir, 'before-START', 'start'), command_tuple]
    return [cook_hook(hook_dir, 'before-ANY', 'any'), command_tuple]
def cook_hook_folder(root, folder):
    """Return the hooks directory of the stack that owns *folder*."""
    stack, _, _ = folder.partition('/services/')
    return "{}/cache/{}/hooks".format(root, stack)
def run_ambari_command(folder, script, command, command_id):
    """Run *script* for *command* together with its hook pipeline.

    stdout/stderr of every step are captured to per-command files in
    LOG_DIR; on the first non-zero exit code the pipeline stops, the
    captured stderr is echoed to this process's stderr, and the process
    exits with that code.
    """
    base_dir = '{}/cache/{}'.format(ROOT_DIR, folder)
    hook_dir = cook_hook_folder(ROOT_DIR, folder)
    json_config = "{}/data/command-{}.json".format(ROOT_DIR, command_id)
    py_script = '{}/{}'.format(base_dir, script)
    log.debug("command.py called as: %s", sys.argv)
    log.info('%s run %s', command_id, command)
    with open_file(LOG_DIR, 'out', command_id) as out_file, open_file(LOG_DIR, 'err', command_id) as err_file:
        pipe = cook_command_pipe(hook_dir, (base_dir, py_script, command))
        log.debug('%s %s pipe: %s', command_id, command, pipe)
        for step_base, step_script, step_command in pipe:
            res = run_python_script(step_base, step_script, step_command,
                                    json_config, out_file, err_file)
            if res != 0:
                break
    # Outside the ``with`` so the capture files are flushed/closed before
    # print_log re-reads the stderr capture.
    if res != 0:
        print_log(LOG_DIR, 'err', command_id)
        sys.exit(res)
def print_usage():
    """Print command-line usage for this wrapper.

    Bug fix: the usage line misspelled "command" as "commnad".
    """
    print(
        '''
    command.py folder script.py command command_id
'''
    )
if __name__ == '__main__':
    # Expect four positional arguments: folder, script, command, command_id.
    if len(sys.argv) < 5:
        print_usage()
        sys.exit(4)  # distinct exit code for "bad invocation"
    else:
        # Only attach the file handler when actually running a command,
        # so importing this module never touches TOP_LOG.
        log.addHandler(get_log_handler(TOP_LOG))
        run_ambari_command(sys.argv[1], sys.argv[2], sys.argv[3], sys.argv[4])
|
# Thin launcher: configure default logging, then hand control to the
# qsmcli command-line application.
# NOTE(review): ``os`` and ``sys`` are not referenced below -- presumably
# leftovers; confirm before removing since this is the file's import block.
import os,sys
import logging
import qsmcli.qsmcli
logging.basicConfig(level=logging.WARNING)
qsmcli.qsmcli.Qsmcli().run()
|
import numpy
import xarray
import scipp as sc
from scipp.compat import from_xarray
def test_empty_attrs_dataarray():
    """An attribute-less 1-d DataArray converts with its dim kept and no attrs/masks."""
    original = xarray.DataArray(data=numpy.zeros((1, )), dims={"x"}, attrs={})
    converted = from_xarray(original)
    assert len(converted.attrs) == 0
    assert len(converted.dims) == 1
    assert "x" in converted.dims
    assert len(converted.masks) == 0
def test_attrs_dataarray():
    """int, float, and str attributes survive conversion with their values intact."""
    attributes = {
        "attrib_int": 5,
        "attrib_float": 6.54321,
        "attrib_str": "test-string",
    }
    converted = from_xarray(
        xarray.DataArray(data=numpy.zeros((1, )), dims=["x"], attrs=attributes))
    assert converted.attrs["attrib_int"].values == 5
    assert converted.attrs["attrib_float"].values == 6.54321
    assert converted.attrs["attrib_str"].values == "test-string"
def test_named_dataarray():
    """The xarray name is carried over onto the scipp DataArray."""
    named = xarray.DataArray(data=numpy.zeros((1, )),
                             dims={"x"},
                             name="my-test-dataarray")
    assert from_xarray(named).name == "my-test-dataarray"
def test_1d_1_element_dataarray():
    """A 1-element 1-d zeros array converts to the equivalent scipp zeros array."""
    converted = from_xarray(
        xarray.DataArray(data=numpy.zeros((1, )), dims=["x"], attrs={}))
    expected = sc.DataArray(data=sc.zeros(dims=["x"], shape=(1, )))
    assert sc.identical(converted, expected)
def test_1d_100_element_dataarray():
    """A 100-element 1-d zeros array converts to the equivalent scipp zeros array."""
    converted = from_xarray(
        xarray.DataArray(data=numpy.zeros((100, )), dims=["x"], attrs={}))
    expected = sc.DataArray(data=sc.zeros(dims=["x"], shape=(100, )))
    assert sc.identical(converted, expected)
def test_2d_100x100_element_dataarray():
    """A 100x100 2-d zeros array converts with both dims preserved."""
    converted = from_xarray(
        xarray.DataArray(data=numpy.zeros((100, 100)), dims=["x", "y"], attrs={}))
    expected = sc.DataArray(data=sc.zeros(dims=["x", "y"], shape=(100, 100)))
    assert sc.identical(converted, expected)
def test_empty_dataset():
    """An empty xarray Dataset converts to an empty scipp Dataset."""
    converted = from_xarray(xarray.Dataset(data_vars={}))
    assert sc.identical(converted, sc.Dataset(data={}))
def test_dataset_with_data():
    """Each data variable of the Dataset becomes an entry of the scipp Dataset."""
    source = xarray.Dataset(
        data_vars={
            "array1": xarray.DataArray(data=numpy.zeros((100, )), dims=["x"], attrs={}),
            "array2": xarray.DataArray(data=numpy.zeros((50, )), dims=["y"], attrs={}),
        })
    expected = sc.Dataset(
        data={
            "array1": sc.DataArray(data=sc.zeros(dims=["x"], shape=(100, ))),
            "array2": sc.DataArray(data=sc.zeros(dims=["y"], shape=(50, ))),
        })
    assert sc.identical(from_xarray(source), expected)
def test_dataset_with_units():
    """A "units" attribute on a data variable maps onto the scipp unit."""
    source = xarray.Dataset(
        data_vars={
            "array1":
            xarray.DataArray(data=numpy.zeros((100, )),
                             dims=["x"],
                             attrs={"units": "m"}),
            "array2":
            xarray.DataArray(data=numpy.zeros((50, )),
                             dims=["y"],
                             attrs={"units": "s"}),
        })
    expected = sc.Dataset(
        data={
            "array1":
            sc.DataArray(data=sc.zeros(dims=["x"], shape=(100, ), unit=sc.Unit("m"))),
            "array2":
            sc.DataArray(data=sc.zeros(dims=["y"], shape=(50, ), unit=sc.Unit("s"))),
        })
    assert sc.identical(from_xarray(source), expected)
def test_dataset_with_non_indexed_coords():
    """Dimension coords become scipp coords; the non-index coord "z" becomes an attr."""
    array1 = xarray.DataArray(data=numpy.zeros((100, )),
                              dims=["x"],
                              coords={"x": numpy.arange(100, dtype="int64")})
    array2 = xarray.DataArray(
        data=numpy.zeros((50, )),
        dims=["y"],
        coords={
            "y": numpy.arange(50, dtype="int64"),
            # "z" rides along dim "y" without being an index coordinate.
            "z": ("y", numpy.arange(0, 100, 2, dtype="int64")),
        })
    converted = from_xarray(
        xarray.Dataset(data_vars={"array1": array1, "array2": array2}))
    expected = sc.Dataset(
        data={
            "array1":
            sc.DataArray(data=sc.zeros(dims=["x"], shape=(100, ), dtype="float64")),
            "array2":
            sc.DataArray(data=sc.zeros(dims=["y"], shape=(50, ), dtype="float64"),
                         attrs={"z": sc.arange("y", 0, 100, 2, dtype="int64")}),
        },
        coords={
            "x": sc.arange("x", 100, dtype="int64"),
            "y": sc.arange("y", 50, dtype="int64"),
        })
    assert sc.identical(converted, expected)
|
import numpy
from icecube.photospline.glam.bspline import *
import Gnuplot
# Python 2/3 compatibility: use raw_input for the pause prompt on Python 2.
try:
    input = raw_input
except NameError:
    pass

numpts = 20   # number of sample points to interpolate
order = 3     # B-spline order

x1 = numpy.linspace(0, 25, numpts)

# Pick a random complicated function to interpolate: Poisson noise around a
# smooth cos^2 + parabola + offset shape.
z = numpy.random.poisson(numpy.cos(x1)*numpy.cos(x1) + (x1 - 3.0)*(x1 - 3.0) + 10)
#z = numpy.cos(x1)*numpy.cos(x1) + (x1 -3.0)*(x1-3.0) + 10
#z = numpy.ones(x1.shape) + 5. + 4*numpy.sin(x1)
#z = numpy.ones(x1.shape) + 5. + x1**2/6.

gp = Gnuplot.Gnuplot()
# Dense, randomly spaced evaluation grid for plotting smooth curves.
xfine = numpy.sort(numpy.random.uniform(0, 25, size=1000))
rawdat = Gnuplot.Data(x1, z, title="Data")

# See if we can jump to the answer weighting by z and shifting the knots.
# We want the center of the spline with coefficient z to be at the point x
# such that f(x) = z. This has to do with the spacing between knots
# ((max - min)/numpts), as well as the middle of the number of inter-knot
# cells spanned by a spline ((order - 1)/2).
baseknots = x1 + (numpy.max(x1) - numpy.min(x1))/(2.0*numpts)*(order - 1)
# Pad the knot vector: `order` extra knots before the first base knot and
# one extra after the last, so every basis function is fully supported.
interpknots = []
for i in range(order, 0, -1):
    interpknots.append(baseknots[0] - i*(baseknots[1] - baseknots[0]))
interpknots.extend(baseknots)
interpknots.append(interpknots[len(interpknots)-1] + (interpknots[len(interpknots)-1] - interpknots[len(interpknots)-2]))
# Direct interpolation attempt: evaluate sum_n z_n * B_n(x) on the fine grid.
splinterp = Gnuplot.Data(xfine, [sum([z[n]*bspline(interpknots, x, n, order) for n in range(0, len(interpknots)-order-1)]) for x in xfine],
                         with_="lines", title="Direct Spline Interpolation Attempt")
knotcoeff = Gnuplot.Data(baseknots, z, title="Knots and Coefficients")
knots = interpknots

# Do an overparameterized least-squares fit for comparison.
A = splinebasis(knots, order, x1)
result = numpy.linalg.lstsq(A, z)

# Plot the least-squares result alongside the direct attempt.
spline = Gnuplot.Data(xfine, [sum([result[0][n]*bspline(knots, x, n, order) for n in range(0, len(knots)-order-1)]) for x in xfine],
                      with_="lines", title="Least Squares")
#gp.plot(rawdat,splinterp,spline)
#gp.set_range("yrange",(-1,17))
gp.set_range("xrange", (-1, 26))
gp.plot(rawdat, splinterp, spline, knotcoeff)
input("Press ENTER to continue")
|
def sublime():
    """Return (platform label, download URL, local target path) triples for Sublime Text build 3126."""
    downloads = [
        ('MacOS',
         'https://download.sublimetext.com/Sublime%20Text%20Build%203126.dmg',
         'sublime/sublime.dmg'),
        ('Windows (32-bit)',
         'https://download.sublimetext.com/Sublime%20Text%20Build%203126%20Setup.exe',
         'sublime/sublime-x86.exe'),
        ('Windows (64-bit)',
         'https://download.sublimetext.com/Sublime%20Text%20Build%203126%20x64%20Setup.exe',
         'sublime/sublime-amd64.exe'),
        ('Ubuntu (32-bit)',
         'https://download.sublimetext.com/sublime-text_build-3126_i386.deb',
         'sublime/sublime-x86.deb'),
        ('Ubuntu (64-bit)',
         'https://download.sublimetext.com/sublime-text_build-3126_amd64.deb',
         'sublime/sublime-amd64.deb'),
    ]
    return downloads
|
"""
Copyright (c) 2020 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
The initial implementation is taken from https://github.com/ZitongYu/CDCN (MIT License)
"""
import math
import torch
import torch.nn as nn
import torch.nn.functional as F
class Conv2d_cd(nn.Module):
    """Central-difference convolution (CDC) from CDCN.

    Output = vanilla conv(x) - theta * diff_conv(x), where diff_conv uses
    each kernel collapsed to its spatial sum (a 1x1 kernel).  With the
    default ``theta=0`` this reduces to a plain ``F.conv2d``.
    """

    def __init__(self, in_channels, out_channels, kernel_size=3, stride=1,
                 padding=1, dilation=1, groups=1, bias=False, theta=0):
        super().__init__()
        self.theta = theta
        # Bug fix: the original stored ``bias or None``, so bias=True handed
        # the bool ``True`` to F.conv2d (a crash).  Create a real zero-init
        # bias parameter instead; bias=False still yields None as before.
        self.bias = nn.Parameter(torch.zeros(out_channels)) if bias else None
        self.stride = stride
        self.dilation = dilation
        self.groups = groups
        # NOTE(review): for groups > 1 the original used
        # in_channels // in_channels (== 1) input channels per group, i.e.
        # depthwise-style weights; preserved as-is -- confirm intent.
        weight_in = 1 if groups > 1 else in_channels
        # He-style init: normal weights scaled by sqrt(2 / fan_channels),
        # matching the module-level kaiming_init helper.
        self.weight = nn.Parameter(
            torch.randn(out_channels, weight_in, kernel_size, kernel_size)
            * math.sqrt(2. / weight_in))
        self.padding = padding
        self.i = 0  # unused counter; kept for attribute compatibility

    def forward(self, x):
        out_normal = F.conv2d(input=x, weight=self.weight, bias=self.bias,
                              dilation=self.dilation, stride=self.stride,
                              padding=self.padding, groups=self.groups)
        # theta == 0: no central-difference component, plain convolution.
        if math.fabs(self.theta - 0.0) < 1e-8:
            return out_normal
        # Collapse each kernel to its spatial sum -> 1x1 kernel; the diff
        # term subtracts theta * (kernel sum) * local input.
        kernel_diff = self.weight.sum(dim=(2, 3), keepdim=True)
        out_diff = F.conv2d(input=x, weight=kernel_diff, bias=self.bias,
                            dilation=self.dilation, stride=self.stride,
                            padding=0, groups=self.groups)
        return out_normal - self.theta * out_diff
def kaiming_init(c_out, c_in, k):
    """He-style init: normal (c_out, c_in, k, k) weights scaled by sqrt(2 / c_in)."""
    scale = math.sqrt(2. / c_in)
    return scale * torch.randn(c_out, c_in, k, k)
class Dropout(nn.Module):
    """Dropout with selectable distribution: bernoulli (1-d or channel-wise 2-d),
    gaussian multiplicative noise, or a pass-through 'none' mode."""

    DISTRIBUTIONS = ['bernoulli', 'gaussian', 'none']

    def __init__(self, p=0.5, mu=0.5, sigma=0.3, dist='bernoulli', linear=False):
        super().__init__()
        self.dist = dist
        assert self.dist in Dropout.DISTRIBUTIONS
        self.p = float(p)
        assert 0. <= self.p <= 1.
        self.mu = float(mu)
        self.sigma = float(sigma)
        assert self.sigma > 0.
        # linear=True selects 1-d F.dropout; otherwise channel-wise F.dropout2d.
        self.linear = linear

    def forward(self, x):
        if self.dist == 'bernoulli':
            drop_fn = F.dropout if self.linear else F.dropout2d
            return drop_fn(x, self.p, self.training)
        if self.dist == 'gaussian':
            if not self.training:
                return x
            # Soft multiplicative mask: N(mu, sigma) clamped to [0, 1],
            # rescaled by 1/mu to keep the expected activation unchanged.
            with torch.no_grad():
                soft_mask = x.new_empty(x.size()).normal_(self.mu, self.sigma).clamp_(0., 1.)
            return (1. / self.mu) * soft_mask * x
        # 'none': identity.
        return x
class h_sigmoid(nn.Module):
    """Hard sigmoid: relu6(x + 3) / 6, a cheap piecewise-linear sigmoid."""

    def __init__(self, inplace=True):
        super().__init__()
        self.relu = nn.ReLU6(inplace=inplace)

    def forward(self, x):
        shifted = self.relu(x + 3)
        return shifted / 6
class h_swish(nn.Module):
    """Hard swish: x * h_sigmoid(x)."""

    def __init__(self, inplace=True):
        super().__init__()
        self.sigmoid = h_sigmoid(inplace=inplace)

    def forward(self, x):
        gate = self.sigmoid(x)
        return gate * x
class SELayer(nn.Module):
    """Squeeze-and-excitation block: global average pool, bottleneck MLP
    (hidden width rounded via make_divisible), hard-sigmoid gate, channel rescale."""

    def __init__(self, channel, reduction=4):
        super().__init__()
        self.avg_pool = nn.AdaptiveAvgPool2d(1)
        hidden = make_divisible(channel // reduction, 8)
        self.fc = nn.Sequential(
            nn.Linear(channel, hidden),
            nn.ReLU(inplace=True),
            nn.Linear(hidden, channel),
            h_sigmoid(),
        )

    def forward(self, x):
        batch, channels = x.size(0), x.size(1)
        weights = self.avg_pool(x).view(batch, channels)
        weights = self.fc(weights).view(batch, channels, 1, 1)
        return x * weights
def conv_3x3_in(inp, oup, stride, theta):
    """3x3 central-difference conv -> InstanceNorm -> hard-swish."""
    layers = [
        Conv2d_cd(inp, oup, 3, stride, 1, bias=False, theta=theta),
        nn.InstanceNorm2d(oup),
        h_swish(),
    ]
    return nn.Sequential(*layers)
def conv_3x3_bn(inp, oup, stride, theta):
    """3x3 central-difference conv -> BatchNorm -> hard-swish."""
    layers = [
        Conv2d_cd(inp, oup, 3, stride, 1, bias=False, theta=theta),
        nn.BatchNorm2d(oup),
        h_swish(),
    ]
    return nn.Sequential(*layers)
def conv_1x1_bn(inp, oup):
    """Pointwise (1x1) conv -> BatchNorm -> hard-swish."""
    layers = [
        nn.Conv2d(inp, oup, 1, 1, 0, bias=False),
        nn.BatchNorm2d(oup),
        h_swish(),
    ]
    return nn.Sequential(*layers)
def conv_1x1_in(inp, oup):
    """Pointwise (1x1) conv -> InstanceNorm -> hard-swish."""
    layers = [
        nn.Conv2d(inp, oup, 1, 1, 0, bias=False),
        nn.InstanceNorm2d(oup),
        h_swish(),
    ]
    return nn.Sequential(*layers)
def make_divisible(v, divisor, min_value=None):
    """Round *v* to the nearest multiple of *divisor*, never below *min_value*
    (default: *divisor*) and never more than 10% below *v*.

    Taken from the TensorFlow mobilenet reference implementation:
    https://github.com/tensorflow/models/blob/master/research/slim/nets/mobilenet/mobilenet.py
    """
    floor = divisor if min_value is None else min_value
    rounded = max(floor, int(v + divisor / 2) // divisor * divisor)
    # Rounding down by more than 10% would shrink the layer too much.
    if rounded < 0.9 * v:
        rounded += divisor
    return rounded
class MobileNet(nn.Module):
    """parent class for mobilenets

    Subclasses are expected to replace ``self.features`` and
    ``self.conv_last`` with real modules.  NOTE(review): here they are
    assigned the ``nn.Identity`` *class itself*, not an instance --
    calling ``forward`` on this base class as-is would not behave like an
    identity; presumably subclasses always override both.  Confirm.
    """
    def __init__(self, width_mult, prob_dropout, type_dropout,
                 prob_dropout_linear, embeding_dim, mu, sigma,
                 theta, scaling, multi_heads):
        super().__init__()
        # Hyper-parameters stored verbatim for subclasses to consume.
        self.prob_dropout = prob_dropout
        self.type_dropout = type_dropout
        self.width_mult = width_mult
        self.prob_dropout_linear = prob_dropout_linear
        self.embeding_dim = embeding_dim
        self.mu = mu
        self.sigma = sigma
        self.theta = theta
        self.scaling = scaling          # logit temperature used in forward_to_onnx
        self.multi_heads = multi_heads  # enables auxiliary classification heads
        self.features = nn.Identity
        # building last several layers
        self.conv_last = nn.Identity
        self.avgpool = nn.AdaptiveAvgPool2d((1, 1))
        # Main head: binary spoof / not-spoof classifier.
        self.spoofer = nn.Linear(embeding_dim, 2)
        if self.multi_heads:
            # Auxiliary heads: 5 lighting classes, 11 spoof types,
            # 40 real-face attributes (multi-label via sigmoid).
            self.lightning = nn.Linear(embeding_dim, 5)
            self.spoof_type = nn.Linear(embeding_dim, 11)
            self.real_atr = nn.Linear(embeding_dim, 40)
    def forward(self, x):
        # Returns pooled feature maps; logits come from make_logits().
        x = self.features(x)
        x = self.conv_last(x)
        x = self.avgpool(x)
        return x
    def make_logits(self, features, all=False):
        # NOTE(review): parameter name ``all`` shadows the builtin all();
        # kept because callers may pass it by keyword.
        all = all if self.multi_heads else False
        output = features.view(features.size(0), -1)
        spoof_out = self.spoofer(output)
        if all:
            type_spoof = self.spoof_type(output)
            lightning_type = self.lightning(output)
            real_atr = torch.sigmoid(self.real_atr(output))
            return spoof_out, type_spoof, lightning_type, real_atr
        return spoof_out
    def forward_to_onnx(self,x):
        # Export path: single head, temperature-scaled softmax probabilities.
        x = self.features(x)
        x = self.conv_last(x)
        x = self.avgpool(x)
        x = x.view(x.size(0), -1)
        spoof_out = self.spoofer(x)
        if isinstance(spoof_out, tuple):
            spoof_out = spoof_out[0]
        probab = F.softmax(spoof_out*self.scaling, dim=-1)
        return probab
|
""" openconfig_mpls
This module provides data definitions for configuration of
Multiprotocol Label Switching (MPLS) and associated protocols for
signaling and traffic engineering.
RFC 3031\: Multiprotocol Label Switching Architecture
The MPLS / TE data model consists of several modules and
submodules as shown below. The top\-level MPLS module describes
the overall framework. Three types of LSPs are supported\:
i) traffic\-engineered (or constrained\-path)
ii) IGP\-congruent (LSPs that follow the IGP path)
iii) static LSPs which are not signaled
The structure of each of these LSP configurations is defined in
corresponding submodules. Companion modules define the relevant
configuration and operational data specific to key signaling
protocols used in operational practice.
+\-\-\-\-\-\-\-+
+\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\->\| MPLS \|<\-\-\-\-\-\-\-\-\-\-\-\-\-\-+
\| +\-\-\-\-\-\-\-+ \|
\| ^ \|
\| \| \|
+\-\-\-\-+\-\-\-\-\-+ +\-\-\-\-\-\-\-\-+\-\-\-\-\-\-\-+ +\-\-\-\-\-+\-\-\-\-\-+
\| TE LSPs \| \| IGP\-based LSPs \| \|static LSPs\|
\| \| \| \| \| \|
+\-\-\-\-\-\-\-\-\-\-+ +\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-+ +\-\-\-\-\-\-\-\-\-\-\-+
^ ^ ^ ^
\| +\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-+ \| +\-\-\-\-\-\-\-\-+
\| \| \| \|
\| +\-\-\-\-\-\-+ +\-+\-\-\-+\-+ +\-\-+\-\-+
+\-\-\-+ RSVP \| \|SEGMENT\| \| LDP \|
+\-\-\-\-\-\-+ \|ROUTING\| +\-\-\-\-\-+
+\-\-\-\-\-\-\-+
"""
from collections import OrderedDict
from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64
from ydk.filters import YFilter
from ydk.errors import YError, YModelError
from ydk.errors.error_handler import handle_type_error as _handle_type_error
class CspfTieBreaking(Enum):
    """
    CspfTieBreaking (Enum Class)

    type to indicate the CSPF selection policy when
    multiple equal cost paths are available

    .. data:: RANDOM = 0

        CSPF calculation selects a random path among
        multiple equal-cost paths to the destination

    .. data:: LEAST_FILL = 1

        CSPF calculation selects the path with greatest
        available bandwidth

    .. data:: MOST_FILL = 2

        CSPF calculation selects the path with the least
        available bandwidth

    """

    # YDK enum binding: each member wraps a YANG (value, name) pair.
    RANDOM = Enum.YLeaf(0, "RANDOM")

    LEAST_FILL = Enum.YLeaf(1, "LEAST_FILL")

    MOST_FILL = Enum.YLeaf(2, "MOST_FILL")
class MplsHopType(Enum):
    """
    MplsHopType (Enum Class)

    enumerated type for specifying loose or strict
    paths

    .. data:: LOOSE = 0

        loose hop in an explicit path

    .. data:: STRICT = 1

        strict hop in an explicit path

    """

    # YDK enum binding: each member wraps a YANG (value, name) pair.
    LOOSE = Enum.YLeaf(0, "LOOSE")

    STRICT = Enum.YLeaf(1, "STRICT")
class MplsSrlgFloodingType(Enum):
    """
    MplsSrlgFloodingType (Enum Class)

    Enumerated type for specifying how the SRLG is flooded

    .. data:: FLOODED_SRLG = 0

        SRLG is flooded in the IGP

    .. data:: STATIC_SRLG = 1

        SRLG is not flooded, the members are
        statically configured

    """

    # YDK enum binding; note the YANG names use hyphens, the Python
    # attribute names use underscores.
    FLOODED_SRLG = Enum.YLeaf(0, "FLOODED-SRLG")

    STATIC_SRLG = Enum.YLeaf(1, "STATIC-SRLG")
class TeBandwidthType(Enum):
    """
    TeBandwidthType (Enum Class)

    enumerated type for specifying whether bandwidth is
    explicitly specified or automatically computed

    .. data:: SPECIFIED = 0

        Bandwidth is explicitly specified

    .. data:: AUTO = 1

        Bandwidth is automatically computed

    """

    # YDK enum binding: each member wraps a YANG (value, name) pair.
    SPECIFIED = Enum.YLeaf(0, "SPECIFIED")

    AUTO = Enum.YLeaf(1, "AUTO")
class TeMetricType(Enum):
    """
    TeMetricType (Enum Class)

    union type for setting the LSP TE metric to a
    static value, or to track the IGP metric

    .. data:: IGP = 0

        set the LSP metric to track the underlying
        IGP metric

    """

    # Only the enum branch of the YANG union is represented here; static
    # numeric values take a plain integer elsewhere in the model.
    IGP = Enum.YLeaf(0, "IGP")
class PathComputationMethod(Identity):
    """
    base identity for supported path computation
    mechanisms
    """

    # YANG module metadata for this identity.
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        # Register the identity under its (namespace, module, qualified name).
        super(PathComputationMethod, self).__init__("http://openconfig.net/yang/mpls", "openconfig-mpls", "openconfig-mpls:path-computation-method")
class Mpls(Entity):
"""
Anchor point for mpls configuration and operational
data
.. attribute:: global_
general mpls configuration applicable to any type of LSP and signaling protocol \- label ranges, entropy label supportmay be added here
**type**\: :py:class:`Global <ydk.models.openconfig.openconfig_mpls.Mpls.Global>`
.. attribute:: te_global_attributes
traffic\-engineering global attributes
**type**\: :py:class:`TeGlobalAttributes <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes>`
.. attribute:: te_interface_attributes
traffic engineering attributes specific for interfaces
**type**\: :py:class:`TeInterfaceAttributes <ydk.models.openconfig.openconfig_mpls.Mpls.TeInterfaceAttributes>`
.. attribute:: signaling_protocols
top\-level signaling protocol configuration
**type**\: :py:class:`SignalingProtocols <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols>`
.. attribute:: lsps
LSP definitions and configuration
**type**\: :py:class:`Lsps <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps>`
This class is a :ref:`presence class<presence-class>`
"""
_prefix = 'mpls'
_revision = '2015-11-05'
def __init__(self):
super(Mpls, self).__init__()
self._top_entity = None
self.yang_name = "mpls"
self.yang_parent_name = "openconfig-mpls"
self.is_top_level_class = True
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_container_classes = OrderedDict([("global", ("global_", Mpls.Global)), ("te-global-attributes", ("te_global_attributes", Mpls.TeGlobalAttributes)), ("te-interface-attributes", ("te_interface_attributes", Mpls.TeInterfaceAttributes)), ("signaling-protocols", ("signaling_protocols", Mpls.SignalingProtocols)), ("lsps", ("lsps", Mpls.Lsps))])
self._child_list_classes = OrderedDict([])
self.is_presence_container = True
self._leafs = OrderedDict()
self.global_ = Mpls.Global()
self.global_.parent = self
self._children_name_map["global_"] = "global"
self._children_yang_names.add("global")
self.te_global_attributes = Mpls.TeGlobalAttributes()
self.te_global_attributes.parent = self
self._children_name_map["te_global_attributes"] = "te-global-attributes"
self._children_yang_names.add("te-global-attributes")
self.te_interface_attributes = Mpls.TeInterfaceAttributes()
self.te_interface_attributes.parent = self
self._children_name_map["te_interface_attributes"] = "te-interface-attributes"
self._children_yang_names.add("te-interface-attributes")
self.signaling_protocols = Mpls.SignalingProtocols()
self.signaling_protocols.parent = self
self._children_name_map["signaling_protocols"] = "signaling-protocols"
self._children_yang_names.add("signaling-protocols")
self.lsps = Mpls.Lsps()
self.lsps.parent = self
self._children_name_map["lsps"] = "lsps"
self._children_yang_names.add("lsps")
self._segment_path = lambda: "openconfig-mpls:mpls"
class Global(Entity):
"""
general mpls configuration applicable to any
type of LSP and signaling protocol \- label ranges,
entropy label supportmay be added here
.. attribute:: config
Top level global MPLS configuration
**type**\: :py:class:`Config <ydk.models.openconfig.openconfig_mpls.Mpls.Global.Config>`
.. attribute:: state
Top level global MPLS state
**type**\: :py:class:`State <ydk.models.openconfig.openconfig_mpls.Mpls.Global.State>`
.. attribute:: mpls_interface_attributes
Parameters related to MPLS interfaces
**type**\: :py:class:`MplsInterfaceAttributes <ydk.models.openconfig.openconfig_mpls.Mpls.Global.MplsInterfaceAttributes>`
"""
_prefix = 'mpls'
_revision = '2015-11-05'
def __init__(self):
super(Mpls.Global, self).__init__()
self.yang_name = "global"
self.yang_parent_name = "mpls"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_container_classes = OrderedDict([("config", ("config", Mpls.Global.Config)), ("state", ("state", Mpls.Global.State)), ("mpls-interface-attributes", ("mpls_interface_attributes", Mpls.Global.MplsInterfaceAttributes))])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict()
self.config = Mpls.Global.Config()
self.config.parent = self
self._children_name_map["config"] = "config"
self._children_yang_names.add("config")
self.state = Mpls.Global.State()
self.state.parent = self
self._children_name_map["state"] = "state"
self._children_yang_names.add("state")
self.mpls_interface_attributes = Mpls.Global.MplsInterfaceAttributes()
self.mpls_interface_attributes.parent = self
self._children_name_map["mpls_interface_attributes"] = "mpls-interface-attributes"
self._children_yang_names.add("mpls-interface-attributes")
self._segment_path = lambda: "global"
self._absolute_path = lambda: "openconfig-mpls:mpls/%s" % self._segment_path()
class Config(Entity):
"""
Top level global MPLS configuration
.. attribute:: null_label
The null\-label type used, implicit or explicit
**type**\: :py:class:`NullLabelType <ydk.models.openconfig.openconfig_mpls_types.NullLabelType>`
**default value**\: mplst:IMPLICIT
"""
_prefix = 'mpls'
_revision = '2015-11-05'
def __init__(self):
super(Mpls.Global.Config, self).__init__()
self.yang_name = "config"
self.yang_parent_name = "global"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('null_label', YLeaf(YType.identityref, 'null-label')),
])
self.null_label = None
self._segment_path = lambda: "config"
self._absolute_path = lambda: "openconfig-mpls:mpls/global/%s" % self._segment_path()
def __setattr__(self, name, value):
self._perform_setattr(Mpls.Global.Config, ['null_label'], name, value)
class State(Entity):
"""
Top level global MPLS state
.. attribute:: null_label
The null\-label type used, implicit or explicit
**type**\: :py:class:`NullLabelType <ydk.models.openconfig.openconfig_mpls_types.NullLabelType>`
**default value**\: mplst:IMPLICIT
"""
_prefix = 'mpls'
_revision = '2015-11-05'
def __init__(self):
super(Mpls.Global.State, self).__init__()
self.yang_name = "state"
self.yang_parent_name = "global"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('null_label', YLeaf(YType.identityref, 'null-label')),
])
self.null_label = None
self._segment_path = lambda: "state"
self._absolute_path = lambda: "openconfig-mpls:mpls/global/%s" % self._segment_path()
def __setattr__(self, name, value):
self._perform_setattr(Mpls.Global.State, ['null_label'], name, value)
class MplsInterfaceAttributes(Entity):
"""
Parameters related to MPLS interfaces
.. attribute:: interface
List of TE interfaces
**type**\: list of :py:class:`Interface <ydk.models.openconfig.openconfig_mpls.Mpls.Global.MplsInterfaceAttributes.Interface>`
"""
_prefix = 'mpls'
_revision = '2015-11-05'
def __init__(self):
super(Mpls.Global.MplsInterfaceAttributes, self).__init__()
self.yang_name = "mpls-interface-attributes"
self.yang_parent_name = "global"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([("interface", ("interface", Mpls.Global.MplsInterfaceAttributes.Interface))])
self._leafs = OrderedDict()
self.interface = YList(self)
self._segment_path = lambda: "mpls-interface-attributes"
self._absolute_path = lambda: "openconfig-mpls:mpls/global/%s" % self._segment_path()
def __setattr__(self, name, value):
self._perform_setattr(Mpls.Global.MplsInterfaceAttributes, [], name, value)
class Interface(Entity):
"""
List of TE interfaces
.. attribute:: name (key)
The interface name
**type**\: str
**refers to**\: :py:class:`name <ydk.models.openconfig.openconfig_mpls.Mpls.Global.MplsInterfaceAttributes.Interface.Config>`
.. attribute:: config
Configuration parameters related to MPLS interfaces\:
**type**\: :py:class:`Config <ydk.models.openconfig.openconfig_mpls.Mpls.Global.MplsInterfaceAttributes.Interface.Config>`
.. attribute:: state
State parameters related to TE interfaces
**type**\: :py:class:`State <ydk.models.openconfig.openconfig_mpls.Mpls.Global.MplsInterfaceAttributes.Interface.State>`
"""
_prefix = 'mpls'
_revision = '2015-11-05'
def __init__(self):
super(Mpls.Global.MplsInterfaceAttributes.Interface, self).__init__()
self.yang_name = "interface"
self.yang_parent_name = "mpls-interface-attributes"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = ['name']
self._child_container_classes = OrderedDict([("config", ("config", Mpls.Global.MplsInterfaceAttributes.Interface.Config)), ("state", ("state", Mpls.Global.MplsInterfaceAttributes.Interface.State))])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('name', YLeaf(YType.str, 'name')),
])
self.name = None
self.config = Mpls.Global.MplsInterfaceAttributes.Interface.Config()
self.config.parent = self
self._children_name_map["config"] = "config"
self._children_yang_names.add("config")
self.state = Mpls.Global.MplsInterfaceAttributes.Interface.State()
self.state.parent = self
self._children_name_map["state"] = "state"
self._children_yang_names.add("state")
self._segment_path = lambda: "interface" + "[name='" + str(self.name) + "']"
self._absolute_path = lambda: "openconfig-mpls:mpls/global/mpls-interface-attributes/%s" % self._segment_path()
def __setattr__(self, name, value):
self._perform_setattr(Mpls.Global.MplsInterfaceAttributes.Interface, ['name'], name, value)
class Config(Entity):
"""
Configuration parameters related to MPLS interfaces\:
.. attribute:: name
reference to interface name
**type**\: str
**refers to**\: :py:class:`name <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface>`
.. attribute:: mpls_enabled
Enable MPLS forwarding on this interfacek
**type**\: bool
**default value**\: false
"""
_prefix = 'mpls'
_revision = '2015-11-05'
def __init__(self):
super(Mpls.Global.MplsInterfaceAttributes.Interface.Config, self).__init__()
self.yang_name = "config"
self.yang_parent_name = "interface"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('name', YLeaf(YType.str, 'name')),
('mpls_enabled', YLeaf(YType.boolean, 'mpls-enabled')),
])
self.name = None
self.mpls_enabled = None
self._segment_path = lambda: "config"
def __setattr__(self, name, value):
self._perform_setattr(Mpls.Global.MplsInterfaceAttributes.Interface.Config, ['name', 'mpls_enabled'], name, value)
class State(Entity):
"""
State parameters related to TE interfaces
.. attribute:: name
reference to interface name
**type**\: str
**refers to**\: :py:class:`name <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface>`
.. attribute:: mpls_enabled
Enable MPLS forwarding on this interfacek
**type**\: bool
**default value**\: false
"""
_prefix = 'mpls'
_revision = '2015-11-05'
def __init__(self):
super(Mpls.Global.MplsInterfaceAttributes.Interface.State, self).__init__()
self.yang_name = "state"
self.yang_parent_name = "interface"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('name', YLeaf(YType.str, 'name')),
('mpls_enabled', YLeaf(YType.boolean, 'mpls-enabled')),
])
self.name = None
self.mpls_enabled = None
self._segment_path = lambda: "state"
def __setattr__(self, name, value):
self._perform_setattr(Mpls.Global.MplsInterfaceAttributes.Interface.State, ['name', 'mpls_enabled'], name, value)
class TeGlobalAttributes(Entity):
"""
traffic\-engineering global attributes
.. attribute:: srlg
Shared risk link groups attributes
**type**\: :py:class:`Srlg <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.Srlg>`
.. attribute:: igp_flooding_bandwidth
Interface bandwidth change percentages that trigger update events into the IGP traffic engineering database (TED)
**type**\: :py:class:`IgpFloodingBandwidth <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.IgpFloodingBandwidth>`
.. attribute:: mpls_admin_groups
Top\-level container for admin\-groups configuration and state
**type**\: :py:class:`MplsAdminGroups <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.MplsAdminGroups>`
.. attribute:: te_lsp_timers
Definition for delays associated with setup and cleanup of TE LSPs
**type**\: :py:class:`TeLspTimers <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.TeLspTimers>`
"""
_prefix = 'mpls'
_revision = '2015-11-05'
def __init__(self):
super(Mpls.TeGlobalAttributes, self).__init__()
self.yang_name = "te-global-attributes"
self.yang_parent_name = "mpls"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_container_classes = OrderedDict([("srlg", ("srlg", Mpls.TeGlobalAttributes.Srlg)), ("igp-flooding-bandwidth", ("igp_flooding_bandwidth", Mpls.TeGlobalAttributes.IgpFloodingBandwidth)), ("mpls-admin-groups", ("mpls_admin_groups", Mpls.TeGlobalAttributes.MplsAdminGroups)), ("te-lsp-timers", ("te_lsp_timers", Mpls.TeGlobalAttributes.TeLspTimers))])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict()
self.srlg = Mpls.TeGlobalAttributes.Srlg()
self.srlg.parent = self
self._children_name_map["srlg"] = "srlg"
self._children_yang_names.add("srlg")
self.igp_flooding_bandwidth = Mpls.TeGlobalAttributes.IgpFloodingBandwidth()
self.igp_flooding_bandwidth.parent = self
self._children_name_map["igp_flooding_bandwidth"] = "igp-flooding-bandwidth"
self._children_yang_names.add("igp-flooding-bandwidth")
self.mpls_admin_groups = Mpls.TeGlobalAttributes.MplsAdminGroups()
self.mpls_admin_groups.parent = self
self._children_name_map["mpls_admin_groups"] = "mpls-admin-groups"
self._children_yang_names.add("mpls-admin-groups")
self.te_lsp_timers = Mpls.TeGlobalAttributes.TeLspTimers()
self.te_lsp_timers.parent = self
self._children_name_map["te_lsp_timers"] = "te-lsp-timers"
self._children_yang_names.add("te-lsp-timers")
self._segment_path = lambda: "te-global-attributes"
self._absolute_path = lambda: "openconfig-mpls:mpls/%s" % self._segment_path()
class Srlg(Entity):
    """
    Shared risk link groups attributes

    .. attribute:: srlg

        List of shared risk link groups

        **type**\: list of :py:class:`Srlg_ <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.Srlg.Srlg_>`
    """

    # YANG module metadata emitted by the ydk-gen code generator.
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        """Build the srlg container, which only holds the srlg list."""
        super(Mpls.TeGlobalAttributes.Srlg, self).__init__()
        self.yang_name = "srlg"
        self.yang_parent_name = "te-global-attributes"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        # yang-name -> (python attribute name, entry class) for the child list.
        self._child_list_classes = OrderedDict([("srlg", ("srlg", Mpls.TeGlobalAttributes.Srlg.Srlg_))])
        self._leafs = OrderedDict()  # no leaf nodes directly under this container
        self.srlg = YList(self)  # ydk list wrapper; entries are Srlg_ instances
        # Relative and absolute XPath builders used for payload/path construction.
        self._segment_path = lambda: "srlg"
        self._absolute_path = lambda: "openconfig-mpls:mpls/te-global-attributes/%s" % self._segment_path()

    def __setattr__(self, name, value):
        # Route all attribute writes through the base-class validation hook;
        # the empty list means this class has no directly settable leafs.
        self._perform_setattr(Mpls.TeGlobalAttributes.Srlg, [], name, value)
class Srlg_(Entity):
    """
    List of shared risk link groups

    .. attribute:: name (key)

        The SRLG group identifier

        **type**\: str

        **refers to**\: :py:class:`name <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.Srlg.Srlg_.Config>`

    .. attribute:: config

        Configuration parameters related to the SRLG

        **type**\: :py:class:`Config <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.Srlg.Srlg_.Config>`

    .. attribute:: state

        State parameters related to the SRLG

        **type**\: :py:class:`State <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.Srlg.Srlg_.State>`

    .. attribute:: static_srlg_members

        SRLG members for static (not flooded) SRLGs

        **type**\: :py:class:`StaticSrlgMembers <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.Srlg.Srlg_.StaticSrlgMembers>`
    """

    # YANG module metadata emitted by the ydk-gen code generator.
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        """Build one srlg list entry, keyed by 'name'."""
        super(Mpls.TeGlobalAttributes.Srlg.Srlg_, self).__init__()
        self.yang_name = "srlg"
        self.yang_parent_name = "srlg"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = ['name']  # YANG list key leaf
        # yang-name -> (python attribute name, generated class) for child containers.
        self._child_container_classes = OrderedDict([("config", ("config", Mpls.TeGlobalAttributes.Srlg.Srlg_.Config)), ("state", ("state", Mpls.TeGlobalAttributes.Srlg.Srlg_.State)), ("static-srlg-members", ("static_srlg_members", Mpls.TeGlobalAttributes.Srlg.Srlg_.StaticSrlgMembers))])
        self._child_list_classes = OrderedDict([])
        # python attr -> YLeaf(type, yang-name) descriptor for each leaf node.
        self._leafs = OrderedDict([
            ('name', YLeaf(YType.str, 'name')),
        ])
        self.name = None  # list key; must be set before the entry is encoded
        self.config = Mpls.TeGlobalAttributes.Srlg.Srlg_.Config()
        self.config.parent = self
        self._children_name_map["config"] = "config"
        self._children_yang_names.add("config")
        self.state = Mpls.TeGlobalAttributes.Srlg.Srlg_.State()
        self.state.parent = self
        self._children_name_map["state"] = "state"
        self._children_yang_names.add("state")
        self.static_srlg_members = Mpls.TeGlobalAttributes.Srlg.Srlg_.StaticSrlgMembers()
        self.static_srlg_members.parent = self
        self._children_name_map["static_srlg_members"] = "static-srlg-members"
        self._children_yang_names.add("static-srlg-members")
        # List-entry path embeds the key predicate, e.g. srlg[name='foo'].
        self._segment_path = lambda: "srlg" + "[name='" + str(self.name) + "']"
        self._absolute_path = lambda: "openconfig-mpls:mpls/te-global-attributes/srlg/%s" % self._segment_path()

    def __setattr__(self, name, value):
        # Validate writes via the base class; 'name' is the only settable leaf.
        self._perform_setattr(Mpls.TeGlobalAttributes.Srlg.Srlg_, ['name'], name, value)
class Config(Entity):
    """
    Configuration parameters related to the SRLG

    .. attribute:: name

        SRLG group identifier

        **type**\: str

    .. attribute:: value

        group ID for the SRLG

        **type**\: int

        **range:** 0..4294967295

    .. attribute:: cost

        The cost of the SRLG to the computation algorithm

        **type**\: int

        **range:** 0..4294967295

    .. attribute:: flooding_type

        The type of SRLG, either flooded in the IGP or statically configured

        **type**\: :py:class:`MplsSrlgFloodingType <ydk.models.openconfig.openconfig_mpls.MplsSrlgFloodingType>`

        **default value**\: FLOODED-SRLG
    """

    # YANG module metadata emitted by the ydk-gen code generator.
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        """Build the srlg config container (leaf-only, no children)."""
        super(Mpls.TeGlobalAttributes.Srlg.Srlg_.Config, self).__init__()
        self.yang_name = "config"
        self.yang_parent_name = "srlg"
        self.is_top_level_class = False
        self.has_list_ancestor = True  # lives under the keyed srlg list
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        # python attr -> YLeaf(type, yang-name) descriptor for each leaf node.
        self._leafs = OrderedDict([
            ('name', YLeaf(YType.str, 'name')),
            ('value', YLeaf(YType.uint32, 'value')),
            ('cost', YLeaf(YType.uint32, 'cost')),
            ('flooding_type', YLeaf(YType.enumeration, 'flooding-type')),
        ])
        self.name = None
        self.value = None
        self.cost = None
        self.flooding_type = None
        self._segment_path = lambda: "config"

    def __setattr__(self, name, value):
        # Validate writes via the base class; list holds the settable leaf names.
        self._perform_setattr(Mpls.TeGlobalAttributes.Srlg.Srlg_.Config, ['name', 'value', 'cost', 'flooding_type'], name, value)
class State(Entity):
    """
    State parameters related to the SRLG

    .. attribute:: name

        SRLG group identifier

        **type**\: str

    .. attribute:: value

        group ID for the SRLG

        **type**\: int

        **range:** 0..4294967295

    .. attribute:: cost

        The cost of the SRLG to the computation algorithm

        **type**\: int

        **range:** 0..4294967295

    .. attribute:: flooding_type

        The type of SRLG, either flooded in the IGP or statically configured

        **type**\: :py:class:`MplsSrlgFloodingType <ydk.models.openconfig.openconfig_mpls.MplsSrlgFloodingType>`

        **default value**\: FLOODED-SRLG
    """

    # YANG module metadata emitted by the ydk-gen code generator.
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        """Build the srlg operational-state container (mirrors Config's leafs)."""
        super(Mpls.TeGlobalAttributes.Srlg.Srlg_.State, self).__init__()
        self.yang_name = "state"
        self.yang_parent_name = "srlg"
        self.is_top_level_class = False
        self.has_list_ancestor = True  # lives under the keyed srlg list
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        # python attr -> YLeaf(type, yang-name) descriptor for each leaf node.
        self._leafs = OrderedDict([
            ('name', YLeaf(YType.str, 'name')),
            ('value', YLeaf(YType.uint32, 'value')),
            ('cost', YLeaf(YType.uint32, 'cost')),
            ('flooding_type', YLeaf(YType.enumeration, 'flooding-type')),
        ])
        self.name = None
        self.value = None
        self.cost = None
        self.flooding_type = None
        self._segment_path = lambda: "state"

    def __setattr__(self, name, value):
        # Validate writes via the base class; list holds the settable leaf names.
        self._perform_setattr(Mpls.TeGlobalAttributes.Srlg.Srlg_.State, ['name', 'value', 'cost', 'flooding_type'], name, value)
class StaticSrlgMembers(Entity):
    """
    SRLG members for static (not flooded) SRLGs

    .. attribute:: members_list

        List of SRLG members, which are expressed as IP address endpoints of links contained in the SRLG

        **type**\: list of :py:class:`MembersList <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.Srlg.Srlg_.StaticSrlgMembers.MembersList>`
    """

    # YANG module metadata emitted by the ydk-gen code generator.
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        """Build the static-srlg-members container, which only holds members-list."""
        super(Mpls.TeGlobalAttributes.Srlg.Srlg_.StaticSrlgMembers, self).__init__()
        self.yang_name = "static-srlg-members"
        self.yang_parent_name = "srlg"
        self.is_top_level_class = False
        self.has_list_ancestor = True  # lives under the keyed srlg list
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        # yang-name -> (python attribute name, entry class) for the child list.
        self._child_list_classes = OrderedDict([("members-list", ("members_list", Mpls.TeGlobalAttributes.Srlg.Srlg_.StaticSrlgMembers.MembersList))])
        self._leafs = OrderedDict()  # no leaf nodes directly under this container
        self.members_list = YList(self)  # ydk list wrapper; entries are MembersList instances
        self._segment_path = lambda: "static-srlg-members"

    def __setattr__(self, name, value):
        # Route all attribute writes through the base-class validation hook;
        # the empty list means this class has no directly settable leafs.
        self._perform_setattr(Mpls.TeGlobalAttributes.Srlg.Srlg_.StaticSrlgMembers, [], name, value)
class MembersList(Entity):
    """
    List of SRLG members, which are expressed
    as IP address endpoints of links contained in the
    SRLG

    .. attribute:: from_address (key)

        The from address of the link in the SRLG

        **type**\: union of the below types:

        **type**\: str

        **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?

        **type**\: str

        **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?

        **refers to**\: :py:class:`from_address <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.Srlg.Srlg_.StaticSrlgMembers.MembersList.Config>`

    .. attribute:: config

        Configuration parameters relating to the SRLG members

        **type**\: :py:class:`Config <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.Srlg.Srlg_.StaticSrlgMembers.MembersList.Config>`

    .. attribute:: state

        State parameters relating to the SRLG members

        **type**\: :py:class:`State <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.Srlg.Srlg_.StaticSrlgMembers.MembersList.State>`
    """

    # YANG module metadata emitted by the ydk-gen code generator.
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        """Build one members-list entry, keyed by 'from_address'."""
        super(Mpls.TeGlobalAttributes.Srlg.Srlg_.StaticSrlgMembers.MembersList, self).__init__()
        self.yang_name = "members-list"
        self.yang_parent_name = "static-srlg-members"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = ['from_address']  # YANG list key leaf
        # yang-name -> (python attribute name, generated class) for child containers.
        self._child_container_classes = OrderedDict([("config", ("config", Mpls.TeGlobalAttributes.Srlg.Srlg_.StaticSrlgMembers.MembersList.Config)), ("state", ("state", Mpls.TeGlobalAttributes.Srlg.Srlg_.StaticSrlgMembers.MembersList.State))])
        self._child_list_classes = OrderedDict([])
        # python attr -> YLeaf(type, yang-name) descriptor for each leaf node.
        self._leafs = OrderedDict([
            ('from_address', YLeaf(YType.str, 'from-address')),
        ])
        self.from_address = None  # list key; must be set before the entry is encoded
        self.config = Mpls.TeGlobalAttributes.Srlg.Srlg_.StaticSrlgMembers.MembersList.Config()
        self.config.parent = self
        self._children_name_map["config"] = "config"
        self._children_yang_names.add("config")
        self.state = Mpls.TeGlobalAttributes.Srlg.Srlg_.StaticSrlgMembers.MembersList.State()
        self.state.parent = self
        self._children_name_map["state"] = "state"
        self._children_yang_names.add("state")
        # List-entry path embeds the key predicate, e.g. members-list[from-address='...'].
        self._segment_path = lambda: "members-list" + "[from-address='" + str(self.from_address) + "']"

    def __setattr__(self, name, value):
        # Validate writes via the base class; 'from_address' is the only settable leaf.
        self._perform_setattr(Mpls.TeGlobalAttributes.Srlg.Srlg_.StaticSrlgMembers.MembersList, ['from_address'], name, value)
class Config(Entity):
    """
    Configuration parameters relating to the
    SRLG members

    .. attribute:: from_address

        IP address of the a\-side of the SRLG link

        **type**\: union of the below types:

        **type**\: str

        **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?

        **type**\: str

        **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?

    .. attribute:: to_address

        IP address of the z\-side of the SRLG link

        **type**\: union of the below types:

        **type**\: str

        **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?

        **type**\: str

        **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
    """

    # YANG module metadata emitted by the ydk-gen code generator.
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        """Build the members-list config container (leaf-only, no children)."""
        super(Mpls.TeGlobalAttributes.Srlg.Srlg_.StaticSrlgMembers.MembersList.Config, self).__init__()
        self.yang_name = "config"
        self.yang_parent_name = "members-list"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        # python attr -> YLeaf(type, yang-name) descriptor for each leaf node.
        self._leafs = OrderedDict([
            ('from_address', YLeaf(YType.str, 'from-address')),
            ('to_address', YLeaf(YType.str, 'to-address')),
        ])
        self.from_address = None
        self.to_address = None
        self._segment_path = lambda: "config"

    def __setattr__(self, name, value):
        # Validate writes via the base class; list holds the settable leaf names.
        self._perform_setattr(Mpls.TeGlobalAttributes.Srlg.Srlg_.StaticSrlgMembers.MembersList.Config, ['from_address', 'to_address'], name, value)
class State(Entity):
    """
    State parameters relating to the SRLG
    members

    .. attribute:: from_address

        IP address of the a\-side of the SRLG link

        **type**\: union of the below types:

        **type**\: str

        **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?

        **type**\: str

        **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?

    .. attribute:: to_address

        IP address of the z\-side of the SRLG link

        **type**\: union of the below types:

        **type**\: str

        **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?

        **type**\: str

        **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
    """

    # YANG module metadata emitted by the ydk-gen code generator.
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        """Build the members-list operational-state container (mirrors Config's leafs)."""
        super(Mpls.TeGlobalAttributes.Srlg.Srlg_.StaticSrlgMembers.MembersList.State, self).__init__()
        self.yang_name = "state"
        self.yang_parent_name = "members-list"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        # python attr -> YLeaf(type, yang-name) descriptor for each leaf node.
        self._leafs = OrderedDict([
            ('from_address', YLeaf(YType.str, 'from-address')),
            ('to_address', YLeaf(YType.str, 'to-address')),
        ])
        self.from_address = None
        self.to_address = None
        self._segment_path = lambda: "state"

    def __setattr__(self, name, value):
        # Validate writes via the base class; list holds the settable leaf names.
        self._perform_setattr(Mpls.TeGlobalAttributes.Srlg.Srlg_.StaticSrlgMembers.MembersList.State, ['from_address', 'to_address'], name, value)
class IgpFloodingBandwidth(Entity):
    """
    Interface bandwidth change percentages
    that trigger update events into the IGP traffic
    engineering database (TED)

    .. attribute:: config

        Configuration parameters for TED update threshold

        **type**\: :py:class:`Config <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.IgpFloodingBandwidth.Config>`

    .. attribute:: state

        State parameters for TED update threshold

        **type**\: :py:class:`State <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.IgpFloodingBandwidth.State>`
    """

    # YANG module metadata emitted by the ydk-gen code generator.
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        """Build the igp-flooding-bandwidth container with config/state children."""
        super(Mpls.TeGlobalAttributes.IgpFloodingBandwidth, self).__init__()
        self.yang_name = "igp-flooding-bandwidth"
        self.yang_parent_name = "te-global-attributes"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        # yang-name -> (python attribute name, generated class) for child containers.
        self._child_container_classes = OrderedDict([("config", ("config", Mpls.TeGlobalAttributes.IgpFloodingBandwidth.Config)), ("state", ("state", Mpls.TeGlobalAttributes.IgpFloodingBandwidth.State))])
        self._child_list_classes = OrderedDict([])
        self._leafs = OrderedDict()  # no leaf nodes directly under this container
        self.config = Mpls.TeGlobalAttributes.IgpFloodingBandwidth.Config()
        self.config.parent = self
        self._children_name_map["config"] = "config"
        self._children_yang_names.add("config")
        self.state = Mpls.TeGlobalAttributes.IgpFloodingBandwidth.State()
        self.state.parent = self
        self._children_name_map["state"] = "state"
        self._children_yang_names.add("state")
        # Relative and absolute XPath builders used for payload/path construction.
        self._segment_path = lambda: "igp-flooding-bandwidth"
        self._absolute_path = lambda: "openconfig-mpls:mpls/te-global-attributes/%s" % self._segment_path()
class Config(Entity):
    """
    Configuration parameters for TED
    update threshold

    .. attribute:: threshold_type

        The type of threshold that should be used to specify the values at which bandwidth is flooded. DELTA indicates that the local system should flood IGP updates when a change in reserved bandwidth >= the specified delta occurs on the interface. Where THRESHOLD\-CROSSED is specified, the local system should trigger an update (and hence flood) the reserved bandwidth when the reserved bandwidth changes such that it crosses, or becomes equal to one of the threshold values

        **type**\: :py:class:`ThresholdType <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.IgpFloodingBandwidth.Config.ThresholdType>`

    .. attribute:: delta_percentage

        The percentage of the maximum\-reservable\-bandwidth considered as the delta that results in an IGP update being flooded

        **type**\: int

        **range:** 0..100

    .. attribute:: threshold_specification

        This value specifies whether a single set of threshold values should be used for both increasing and decreasing bandwidth when determining whether to trigger updated bandwidth values to be flooded in the IGP TE extensions. MIRRORED\-UP\-DOWN indicates that a single value (or set of values) should be used for both increasing and decreasing values, where SEPARATE\-UP\-DOWN specifies that the increasing and decreasing values will be separately specified

        **type**\: :py:class:`ThresholdSpecification <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.IgpFloodingBandwidth.Config.ThresholdSpecification>`

    .. attribute:: up_thresholds

        The thresholds (expressed as a percentage of the maximum reservable bandwidth) at which bandwidth updates are to be triggered when the bandwidth is increasing

        **type**\: list of int

        **range:** 0..100

    .. attribute:: down_thresholds

        The thresholds (expressed as a percentage of the maximum reservable bandwidth) at which bandwidth updates are to be triggered when the bandwidth is decreasing

        **type**\: list of int

        **range:** 0..100

    .. attribute:: up_down_thresholds

        The thresholds (expressed as a percentage of the maximum reservable bandwidth of the interface) at which bandwidth updates are flooded \- used both when the bandwidth is increasing and decreasing

        **type**\: list of int

        **range:** 0..100
    """

    # YANG module metadata emitted by the ydk-gen code generator.
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        """Build the TED update-threshold config container (leaf-only)."""
        super(Mpls.TeGlobalAttributes.IgpFloodingBandwidth.Config, self).__init__()
        self.yang_name = "config"
        self.yang_parent_name = "igp-flooding-bandwidth"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        # python attr -> YLeaf/YLeafList(type, yang-name) descriptor per leaf.
        self._leafs = OrderedDict([
            ('threshold_type', YLeaf(YType.enumeration, 'threshold-type')),
            ('delta_percentage', YLeaf(YType.uint8, 'delta-percentage')),
            ('threshold_specification', YLeaf(YType.enumeration, 'threshold-specification')),
            ('up_thresholds', YLeafList(YType.uint8, 'up-thresholds')),
            ('down_thresholds', YLeafList(YType.uint8, 'down-thresholds')),
            ('up_down_thresholds', YLeafList(YType.uint8, 'up-down-thresholds')),
        ])
        self.threshold_type = None
        self.delta_percentage = None
        self.threshold_specification = None
        self.up_thresholds = []  # leaf-list leafs start as empty lists
        self.down_thresholds = []
        self.up_down_thresholds = []
        self._segment_path = lambda: "config"
        self._absolute_path = lambda: "openconfig-mpls:mpls/te-global-attributes/igp-flooding-bandwidth/%s" % self._segment_path()

    def __setattr__(self, name, value):
        # Validate writes via the base class; list holds the settable leaf names.
        self._perform_setattr(Mpls.TeGlobalAttributes.IgpFloodingBandwidth.Config, ['threshold_type', 'delta_percentage', 'threshold_specification', 'up_thresholds', 'down_thresholds', 'up_down_thresholds'], name, value)

    class ThresholdSpecification(Enum):
        """
        ThresholdSpecification (Enum Class)

        This value specifies whether a single set of threshold
        values should be used for both increasing and decreasing
        bandwidth when determining whether to trigger updated
        bandwidth values to be flooded in the IGP TE extensions.
        MIRRORED\-UP\-DOWN indicates that a single value (or set of
        values) should be used for both increasing and decreasing
        values, where SEPARATE\-UP\-DOWN specifies that the increasing
        and decreasing values will be separately specified

        .. data:: MIRRORED_UP_DOWN = 0

            MIRRORED-UP-DOWN indicates that a single set of
            threshold values should be used for both increasing
            and decreasing bandwidth when determining whether
            to trigger updated bandwidth values to be flooded
            in the IGP TE extensions.

        .. data:: SEPARATE_UP_DOWN = 1

            SEPARATE-UP-DOWN indicates that a separate
            threshold values should be used for the increasing
            and decreasing bandwidth when determining whether
            to trigger updated bandwidth values to be flooded
            in the IGP TE extensions.
        """

        # Each member maps (numeric value, YANG enum literal).
        MIRRORED_UP_DOWN = Enum.YLeaf(0, "MIRRORED-UP-DOWN")

        SEPARATE_UP_DOWN = Enum.YLeaf(1, "SEPARATE-UP-DOWN")

    class ThresholdType(Enum):
        """
        ThresholdType (Enum Class)

        The type of threshold that should be used to specify the
        values at which bandwidth is flooded. DELTA indicates that
        the local system should flood IGP updates when a change in
        reserved bandwidth >= the specified delta occurs on the
        interface. Where THRESHOLD\-CROSSED is specified, the local
        system should trigger an update (and hence flood) the
        reserved bandwidth when the reserved bandwidth changes such
        that it crosses, or becomes equal to one of the threshold
        values

        .. data:: DELTA = 0

            DELTA indicates that the local
            system should flood IGP updates when a
            change in reserved bandwidth >= the specified
            delta occurs on the interface.

        .. data:: THRESHOLD_CROSSED = 1

            THRESHOLD-CROSSED indicates that
            the local system should trigger an update (and
            hence flood) the reserved bandwidth when the
            reserved bandwidth changes such that it crosses,
            or becomes equal to one of the threshold values.
        """

        # Each member maps (numeric value, YANG enum literal).
        DELTA = Enum.YLeaf(0, "DELTA")

        THRESHOLD_CROSSED = Enum.YLeaf(1, "THRESHOLD-CROSSED")
class State(Entity):
    """
    State parameters for TED update threshold

    .. attribute:: threshold_type

        The type of threshold that should be used to specify the values at which bandwidth is flooded. DELTA indicates that the local system should flood IGP updates when a change in reserved bandwidth >= the specified delta occurs on the interface. Where THRESHOLD\-CROSSED is specified, the local system should trigger an update (and hence flood) the reserved bandwidth when the reserved bandwidth changes such that it crosses, or becomes equal to one of the threshold values

        **type**\: :py:class:`ThresholdType <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.IgpFloodingBandwidth.State.ThresholdType>`

    .. attribute:: delta_percentage

        The percentage of the maximum\-reservable\-bandwidth considered as the delta that results in an IGP update being flooded

        **type**\: int

        **range:** 0..100

    .. attribute:: threshold_specification

        This value specifies whether a single set of threshold values should be used for both increasing and decreasing bandwidth when determining whether to trigger updated bandwidth values to be flooded in the IGP TE extensions. MIRRORED\-UP\-DOWN indicates that a single value (or set of values) should be used for both increasing and decreasing values, where SEPARATE\-UP\-DOWN specifies that the increasing and decreasing values will be separately specified

        **type**\: :py:class:`ThresholdSpecification <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.IgpFloodingBandwidth.State.ThresholdSpecification>`

    .. attribute:: up_thresholds

        The thresholds (expressed as a percentage of the maximum reservable bandwidth) at which bandwidth updates are to be triggered when the bandwidth is increasing

        **type**\: list of int

        **range:** 0..100

    .. attribute:: down_thresholds

        The thresholds (expressed as a percentage of the maximum reservable bandwidth) at which bandwidth updates are to be triggered when the bandwidth is decreasing

        **type**\: list of int

        **range:** 0..100

    .. attribute:: up_down_thresholds

        The thresholds (expressed as a percentage of the maximum reservable bandwidth of the interface) at which bandwidth updates are flooded \- used both when the bandwidth is increasing and decreasing

        **type**\: list of int

        **range:** 0..100
    """

    # YANG module metadata emitted by the ydk-gen code generator.
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        """Build the TED update-threshold state container (mirrors Config's leafs)."""
        super(Mpls.TeGlobalAttributes.IgpFloodingBandwidth.State, self).__init__()
        self.yang_name = "state"
        self.yang_parent_name = "igp-flooding-bandwidth"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        # python attr -> YLeaf/YLeafList(type, yang-name) descriptor per leaf.
        self._leafs = OrderedDict([
            ('threshold_type', YLeaf(YType.enumeration, 'threshold-type')),
            ('delta_percentage', YLeaf(YType.uint8, 'delta-percentage')),
            ('threshold_specification', YLeaf(YType.enumeration, 'threshold-specification')),
            ('up_thresholds', YLeafList(YType.uint8, 'up-thresholds')),
            ('down_thresholds', YLeafList(YType.uint8, 'down-thresholds')),
            ('up_down_thresholds', YLeafList(YType.uint8, 'up-down-thresholds')),
        ])
        self.threshold_type = None
        self.delta_percentage = None
        self.threshold_specification = None
        self.up_thresholds = []  # leaf-list leafs start as empty lists
        self.down_thresholds = []
        self.up_down_thresholds = []
        self._segment_path = lambda: "state"
        self._absolute_path = lambda: "openconfig-mpls:mpls/te-global-attributes/igp-flooding-bandwidth/%s" % self._segment_path()

    def __setattr__(self, name, value):
        # Validate writes via the base class; list holds the settable leaf names.
        self._perform_setattr(Mpls.TeGlobalAttributes.IgpFloodingBandwidth.State, ['threshold_type', 'delta_percentage', 'threshold_specification', 'up_thresholds', 'down_thresholds', 'up_down_thresholds'], name, value)

    class ThresholdSpecification(Enum):
        """
        ThresholdSpecification (Enum Class)

        This value specifies whether a single set of threshold
        values should be used for both increasing and decreasing
        bandwidth when determining whether to trigger updated
        bandwidth values to be flooded in the IGP TE extensions.
        MIRRORED\-UP\-DOWN indicates that a single value (or set of
        values) should be used for both increasing and decreasing
        values, where SEPARATE\-UP\-DOWN specifies that the increasing
        and decreasing values will be separately specified

        .. data:: MIRRORED_UP_DOWN = 0

            MIRRORED-UP-DOWN indicates that a single set of
            threshold values should be used for both increasing
            and decreasing bandwidth when determining whether
            to trigger updated bandwidth values to be flooded
            in the IGP TE extensions.

        .. data:: SEPARATE_UP_DOWN = 1

            SEPARATE-UP-DOWN indicates that a separate
            threshold values should be used for the increasing
            and decreasing bandwidth when determining whether
            to trigger updated bandwidth values to be flooded
            in the IGP TE extensions.
        """

        # Each member maps (numeric value, YANG enum literal).
        MIRRORED_UP_DOWN = Enum.YLeaf(0, "MIRRORED-UP-DOWN")

        SEPARATE_UP_DOWN = Enum.YLeaf(1, "SEPARATE-UP-DOWN")

    class ThresholdType(Enum):
        """
        ThresholdType (Enum Class)

        The type of threshold that should be used to specify the
        values at which bandwidth is flooded. DELTA indicates that
        the local system should flood IGP updates when a change in
        reserved bandwidth >= the specified delta occurs on the
        interface. Where THRESHOLD\-CROSSED is specified, the local
        system should trigger an update (and hence flood) the
        reserved bandwidth when the reserved bandwidth changes such
        that it crosses, or becomes equal to one of the threshold
        values

        .. data:: DELTA = 0

            DELTA indicates that the local
            system should flood IGP updates when a
            change in reserved bandwidth >= the specified
            delta occurs on the interface.

        .. data:: THRESHOLD_CROSSED = 1

            THRESHOLD-CROSSED indicates that
            the local system should trigger an update (and
            hence flood) the reserved bandwidth when the
            reserved bandwidth changes such that it crosses,
            or becomes equal to one of the threshold values.
        """

        # Each member maps (numeric value, YANG enum literal).
        DELTA = Enum.YLeaf(0, "DELTA")

        THRESHOLD_CROSSED = Enum.YLeaf(1, "THRESHOLD-CROSSED")
class MplsAdminGroups(Entity):
    """
    Top\-level container for admin\-groups configuration
    and state

    .. attribute:: admin_group

        configuration of value to name mapping for mpls affinities/admin\-groups

        **type**\: list of :py:class:`AdminGroup <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.MplsAdminGroups.AdminGroup>`
    """

    # YANG module metadata emitted by the ydk-gen code generator.
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        """Build the mpls-admin-groups container, which only holds the admin-group list."""
        super(Mpls.TeGlobalAttributes.MplsAdminGroups, self).__init__()
        self.yang_name = "mpls-admin-groups"
        self.yang_parent_name = "te-global-attributes"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        # yang-name -> (python attribute name, entry class) for the child list.
        self._child_list_classes = OrderedDict([("admin-group", ("admin_group", Mpls.TeGlobalAttributes.MplsAdminGroups.AdminGroup))])
        self._leafs = OrderedDict()  # no leaf nodes directly under this container
        self.admin_group = YList(self)  # ydk list wrapper; entries are AdminGroup instances
        # Relative and absolute XPath builders used for payload/path construction.
        self._segment_path = lambda: "mpls-admin-groups"
        self._absolute_path = lambda: "openconfig-mpls:mpls/te-global-attributes/%s" % self._segment_path()

    def __setattr__(self, name, value):
        # Route all attribute writes through the base-class validation hook;
        # the empty list means this class has no directly settable leafs.
        self._perform_setattr(Mpls.TeGlobalAttributes.MplsAdminGroups, [], name, value)
class AdminGroup(Entity):
    """
    configuration of value to name mapping
    for mpls affinities/admin\-groups

    .. attribute:: admin_group_name (key)

        name for mpls admin\-group

        **type**\: str

        **refers to**\: :py:class:`admin_group_name <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.MplsAdminGroups.AdminGroup.Config>`

    .. attribute:: config

        Configurable items for admin\-groups

        **type**\: :py:class:`Config <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.MplsAdminGroups.AdminGroup.Config>`

    .. attribute:: state

        Operational state for admin\-groups

        **type**\: :py:class:`State <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.MplsAdminGroups.AdminGroup.State>`
    """

    # YANG module metadata emitted by the ydk-gen code generator.
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        """Build one admin-group list entry, keyed by 'admin_group_name'."""
        super(Mpls.TeGlobalAttributes.MplsAdminGroups.AdminGroup, self).__init__()
        self.yang_name = "admin-group"
        self.yang_parent_name = "mpls-admin-groups"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = ['admin_group_name']  # YANG list key leaf
        # yang-name -> (python attribute name, generated class) for child containers.
        self._child_container_classes = OrderedDict([("config", ("config", Mpls.TeGlobalAttributes.MplsAdminGroups.AdminGroup.Config)), ("state", ("state", Mpls.TeGlobalAttributes.MplsAdminGroups.AdminGroup.State))])
        self._child_list_classes = OrderedDict([])
        # python attr -> YLeaf(type, yang-name) descriptor for each leaf node.
        self._leafs = OrderedDict([
            ('admin_group_name', YLeaf(YType.str, 'admin-group-name')),
        ])
        self.admin_group_name = None  # list key; must be set before the entry is encoded
        self.config = Mpls.TeGlobalAttributes.MplsAdminGroups.AdminGroup.Config()
        self.config.parent = self
        self._children_name_map["config"] = "config"
        self._children_yang_names.add("config")
        self.state = Mpls.TeGlobalAttributes.MplsAdminGroups.AdminGroup.State()
        self.state.parent = self
        self._children_name_map["state"] = "state"
        self._children_yang_names.add("state")
        # List-entry path embeds the key predicate, e.g. admin-group[admin-group-name='...'].
        self._segment_path = lambda: "admin-group" + "[admin-group-name='" + str(self.admin_group_name) + "']"
        self._absolute_path = lambda: "openconfig-mpls:mpls/te-global-attributes/mpls-admin-groups/%s" % self._segment_path()

    def __setattr__(self, name, value):
        # Validate writes via the base class; 'admin_group_name' is the only settable leaf.
        self._perform_setattr(Mpls.TeGlobalAttributes.MplsAdminGroups.AdminGroup, ['admin_group_name'], name, value)
class Config(Entity):
    """
    Configurable items for admin\-groups

    .. attribute:: admin_group_name

        name for mpls admin\-group

        **type**\: str

    .. attribute:: bit_position

        bit\-position value for mpls admin\-group. The value for the admin group is an integer that represents one of the bit positions in the admin\-group bitmask. Values between 0 and 31 are interpreted as the original limit of 32 admin groups. Values >=32 are interpreted as extended admin group values as per RFC7308

        **type**\: int

        **range:** 0..4294967295
    """

    # YANG module metadata emitted by the ydk-gen code generator.
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        """Build the admin-group config container (leaf-only, no children)."""
        super(Mpls.TeGlobalAttributes.MplsAdminGroups.AdminGroup.Config, self).__init__()
        self.yang_name = "config"
        self.yang_parent_name = "admin-group"
        self.is_top_level_class = False
        self.has_list_ancestor = True  # lives under the keyed admin-group list
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        # python attr -> YLeaf(type, yang-name) descriptor for each leaf node.
        self._leafs = OrderedDict([
            ('admin_group_name', YLeaf(YType.str, 'admin-group-name')),
            ('bit_position', YLeaf(YType.uint32, 'bit-position')),
        ])
        self.admin_group_name = None
        self.bit_position = None
        self._segment_path = lambda: "config"

    def __setattr__(self, name, value):
        # Validate writes via the base class; list holds the settable leaf names.
        self._perform_setattr(Mpls.TeGlobalAttributes.MplsAdminGroups.AdminGroup.Config, ['admin_group_name', 'bit_position'], name, value)
class State(Entity):
    """
    Operational state for admin\-groups

    .. attribute:: admin_group_name

        name for mpls admin\-group

        **type**\: str

    .. attribute:: bit_position

        bit\-position value for mpls admin\-group. The value for the admin group is an integer that represents one of the bit positions in the admin\-group bitmask. Values between 0 and 31 are interpreted as the original limit of 32 admin groups. Values >=32 are interpreted as extended admin group values as per RFC7308

        **type**\: int

        **range:** 0..4294967295
    """

    # YANG module metadata emitted by the ydk-gen code generator.
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        """Build the admin-group operational-state container (mirrors Config's leafs)."""
        super(Mpls.TeGlobalAttributes.MplsAdminGroups.AdminGroup.State, self).__init__()
        self.yang_name = "state"
        self.yang_parent_name = "admin-group"
        self.is_top_level_class = False
        self.has_list_ancestor = True  # lives under the keyed admin-group list
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        # python attr -> YLeaf(type, yang-name) descriptor for each leaf node.
        self._leafs = OrderedDict([
            ('admin_group_name', YLeaf(YType.str, 'admin-group-name')),
            ('bit_position', YLeaf(YType.uint32, 'bit-position')),
        ])
        self.admin_group_name = None
        self.bit_position = None
        self._segment_path = lambda: "state"

    def __setattr__(self, name, value):
        # Validate writes via the base class; list holds the settable leaf names.
        self._perform_setattr(Mpls.TeGlobalAttributes.MplsAdminGroups.AdminGroup.State, ['admin_group_name', 'bit_position'], name, value)
class TeLspTimers(Entity):
    """
    Definition for delays associated with setup
    and cleanup of TE LSPs

    .. attribute:: config
        Configuration parameters related to timers for TE LSPs
        **type**\: :py:class:`Config <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.TeLspTimers.Config>`

    .. attribute:: state
        State related to timers for TE LSPs
        **type**\: :py:class:`State <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.TeLspTimers.State>`
    """

    # YANG module metadata used by ydk for namespace/revision resolution.
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        super(Mpls.TeGlobalAttributes.TeLspTimers, self).__init__()

        self.yang_name = "te-lsp-timers"
        self.yang_parent_name = "te-global-attributes"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        # Child containers: YANG name -> (python attribute name, class).
        self._child_container_classes = OrderedDict([("config", ("config", Mpls.TeGlobalAttributes.TeLspTimers.Config)), ("state", ("state", Mpls.TeGlobalAttributes.TeLspTimers.State))])
        self._child_list_classes = OrderedDict([])
        self._leafs = OrderedDict()  # no leafs directly under this container

        # Instantiate the config/state children and link them back to this node.
        self.config = Mpls.TeGlobalAttributes.TeLspTimers.Config()
        self.config.parent = self
        self._children_name_map["config"] = "config"
        self._children_yang_names.add("config")

        self.state = Mpls.TeGlobalAttributes.TeLspTimers.State()
        self.state.parent = self
        self._children_name_map["state"] = "state"
        self._children_yang_names.add("state")

        self._segment_path = lambda: "te-lsp-timers"
        self._absolute_path = lambda: "openconfig-mpls:mpls/te-global-attributes/%s" % self._segment_path()
class Config(Entity):
    """
    Configuration parameters related
    to timers for TE LSPs

    .. attribute:: install_delay
        delay the use of newly installed te lsp for a specified amount of time
        **type**\: int
        **range:** 0..3600
        **units**\: seconds

    .. attribute:: cleanup_delay
        delay the removal of old te lsp for a specified amount of time
        **type**\: int
        **range:** 0..65535
        **units**\: seconds

    .. attribute:: reoptimize_timer
        frequency of reoptimization of a traffic engineered LSP
        **type**\: int
        **range:** 0..65535
        **units**\: seconds
    """

    # YANG module metadata used by ydk for namespace/revision resolution.
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        super(Mpls.TeGlobalAttributes.TeLspTimers.Config, self).__init__()

        self.yang_name = "config"
        self.yang_parent_name = "te-lsp-timers"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        # Python attribute name -> YANG leaf descriptor; all three timers
        # are modeled as uint16 (seconds).
        self._leafs = OrderedDict([
            ('install_delay', YLeaf(YType.uint16, 'install-delay')),
            ('cleanup_delay', YLeaf(YType.uint16, 'cleanup-delay')),
            ('reoptimize_timer', YLeaf(YType.uint16, 'reoptimize-timer')),
        ])
        self.install_delay = None
        self.cleanup_delay = None
        self.reoptimize_timer = None

        self._segment_path = lambda: "config"
        self._absolute_path = lambda: "openconfig-mpls:mpls/te-global-attributes/te-lsp-timers/%s" % self._segment_path()

    def __setattr__(self, name, value):
        # Only the three declared timer leafs are assignable as YANG data.
        self._perform_setattr(Mpls.TeGlobalAttributes.TeLspTimers.Config, ['install_delay', 'cleanup_delay', 'reoptimize_timer'], name, value)
class State(Entity):
    """
    State related to timers for TE LSPs

    .. attribute:: install_delay
        delay the use of newly installed te lsp for a specified amount of time
        **type**\: int
        **range:** 0..3600
        **units**\: seconds

    .. attribute:: cleanup_delay
        delay the removal of old te lsp for a specified amount of time
        **type**\: int
        **range:** 0..65535
        **units**\: seconds

    .. attribute:: reoptimize_timer
        frequency of reoptimization of a traffic engineered LSP
        **type**\: int
        **range:** 0..65535
        **units**\: seconds
    """

    # YANG module metadata used by ydk for namespace/revision resolution.
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        super(Mpls.TeGlobalAttributes.TeLspTimers.State, self).__init__()

        # Operational-state counterpart of Config: same leafs, segment "state".
        self.yang_name = "state"
        self.yang_parent_name = "te-lsp-timers"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        # Python attribute name -> YANG leaf descriptor; all three timers
        # are modeled as uint16 (seconds).
        self._leafs = OrderedDict([
            ('install_delay', YLeaf(YType.uint16, 'install-delay')),
            ('cleanup_delay', YLeaf(YType.uint16, 'cleanup-delay')),
            ('reoptimize_timer', YLeaf(YType.uint16, 'reoptimize-timer')),
        ])
        self.install_delay = None
        self.cleanup_delay = None
        self.reoptimize_timer = None

        self._segment_path = lambda: "state"
        self._absolute_path = lambda: "openconfig-mpls:mpls/te-global-attributes/te-lsp-timers/%s" % self._segment_path()

    def __setattr__(self, name, value):
        # Only the three declared timer leafs are assignable as YANG data.
        self._perform_setattr(Mpls.TeGlobalAttributes.TeLspTimers.State, ['install_delay', 'cleanup_delay', 'reoptimize_timer'], name, value)
class TeInterfaceAttributes(Entity):
    """
    traffic engineering attributes specific
    for interfaces

    .. attribute:: interface
        List of TE interfaces
        **type**\: list of :py:class:`Interface <ydk.models.openconfig.openconfig_mpls.Mpls.TeInterfaceAttributes.Interface>`
    """

    # YANG module metadata used by ydk for namespace/revision resolution.
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        super(Mpls.TeInterfaceAttributes, self).__init__()

        self.yang_name = "te-interface-attributes"
        self.yang_parent_name = "mpls"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        # Child YANG lists: "interface" entries are Interface instances.
        self._child_list_classes = OrderedDict([("interface", ("interface", Mpls.TeInterfaceAttributes.Interface))])
        self._leafs = OrderedDict()  # no leafs directly under this container

        # ydk list wrapper holding the Interface entries.
        self.interface = YList(self)

        self._segment_path = lambda: "te-interface-attributes"
        self._absolute_path = lambda: "openconfig-mpls:mpls/%s" % self._segment_path()

    def __setattr__(self, name, value):
        # No assignable leafs on this node; writes still go through ydk
        # so that child lists/containers are handled consistently.
        self._perform_setattr(Mpls.TeInterfaceAttributes, [], name, value)
class Interface(Entity):
    """
    List of TE interfaces

    .. attribute:: name (key)
        The interface name
        **type**\: str
        **refers to**\: :py:class:`name <ydk.models.openconfig.openconfig_mpls.Mpls.TeInterfaceAttributes.Interface.Config>`

    .. attribute:: config
        Configuration parameters related to TE interfaces\:
        **type**\: :py:class:`Config <ydk.models.openconfig.openconfig_mpls.Mpls.TeInterfaceAttributes.Interface.Config>`

    .. attribute:: state
        State parameters related to TE interfaces
        **type**\: :py:class:`State <ydk.models.openconfig.openconfig_mpls.Mpls.TeInterfaceAttributes.Interface.State>`

    .. attribute:: igp_flooding_bandwidth
        Interface bandwidth change percentages that trigger update events into the IGP traffic engineering database (TED)
        **type**\: :py:class:`IgpFloodingBandwidth <ydk.models.openconfig.openconfig_mpls.Mpls.TeInterfaceAttributes.Interface.IgpFloodingBandwidth>`
    """

    # YANG module metadata used by ydk for namespace/revision resolution.
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        super(Mpls.TeInterfaceAttributes.Interface, self).__init__()

        self.yang_name = "interface"
        self.yang_parent_name = "te-interface-attributes"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        # 'name' is the YANG list key for this entry.
        self.ylist_key_names = ['name']
        # Child containers: YANG name -> (python attribute name, class).
        self._child_container_classes = OrderedDict([("config", ("config", Mpls.TeInterfaceAttributes.Interface.Config)), ("state", ("state", Mpls.TeInterfaceAttributes.Interface.State)), ("igp-flooding-bandwidth", ("igp_flooding_bandwidth", Mpls.TeInterfaceAttributes.Interface.IgpFloodingBandwidth))])
        self._child_list_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('name', YLeaf(YType.str, 'name')),
        ])
        self.name = None

        # Instantiate child containers and link them back to this node.
        self.config = Mpls.TeInterfaceAttributes.Interface.Config()
        self.config.parent = self
        self._children_name_map["config"] = "config"
        self._children_yang_names.add("config")

        self.state = Mpls.TeInterfaceAttributes.Interface.State()
        self.state.parent = self
        self._children_name_map["state"] = "state"
        self._children_yang_names.add("state")

        self.igp_flooding_bandwidth = Mpls.TeInterfaceAttributes.Interface.IgpFloodingBandwidth()
        self.igp_flooding_bandwidth.parent = self
        self._children_name_map["igp_flooding_bandwidth"] = "igp-flooding-bandwidth"
        self._children_yang_names.add("igp-flooding-bandwidth")

        # Segment path embeds the list-key predicate, e.g. interface[name='ge-0/0/0'].
        self._segment_path = lambda: "interface" + "[name='" + str(self.name) + "']"
        self._absolute_path = lambda: "openconfig-mpls:mpls/te-interface-attributes/%s" % self._segment_path()

    def __setattr__(self, name, value):
        # Only the list key 'name' is assignable as a YANG leaf here.
        self._perform_setattr(Mpls.TeInterfaceAttributes.Interface, ['name'], name, value)
class Config(Entity):
    """
    Configuration parameters related to TE interfaces\:

    .. attribute:: name
        reference to interface name
        **type**\: str
        **refers to**\: :py:class:`name <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface>`

    .. attribute:: te_metric
        TE specific metric for the link
        **type**\: int
        **range:** 0..4294967295

    .. attribute:: srlg_membership
        list of references to named shared risk link groups that the interface belongs to
        **type**\: list of str
        **refers to**\: :py:class:`name <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.Srlg.Srlg_>`

    .. attribute:: admin_group
        list of admin groups (by name) on the interface
        **type**\: list of str
    """

    # YANG module metadata used by ydk for namespace/revision resolution.
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        super(Mpls.TeInterfaceAttributes.Interface.Config, self).__init__()

        self.yang_name = "config"
        self.yang_parent_name = "interface"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        # Python attribute name -> YANG leaf descriptor; the last two are
        # leaf-lists (YLeafList) and are initialized to empty Python lists.
        self._leafs = OrderedDict([
            ('name', YLeaf(YType.str, 'name')),
            ('te_metric', YLeaf(YType.uint32, 'te-metric')),
            ('srlg_membership', YLeafList(YType.str, 'srlg-membership')),
            ('admin_group', YLeafList(YType.str, 'admin-group')),
        ])
        self.name = None
        self.te_metric = None
        self.srlg_membership = []
        self.admin_group = []

        self._segment_path = lambda: "config"

    def __setattr__(self, name, value):
        # Only the four declared leafs/leaf-lists are assignable as YANG data.
        self._perform_setattr(Mpls.TeInterfaceAttributes.Interface.Config, ['name', 'te_metric', 'srlg_membership', 'admin_group'], name, value)
class State(Entity):
    """
    State parameters related to TE interfaces

    .. attribute:: name
        reference to interface name
        **type**\: str
        **refers to**\: :py:class:`name <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface>`

    .. attribute:: te_metric
        TE specific metric for the link
        **type**\: int
        **range:** 0..4294967295

    .. attribute:: srlg_membership
        list of references to named shared risk link groups that the interface belongs to
        **type**\: list of str
        **refers to**\: :py:class:`name <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.Srlg.Srlg_>`

    .. attribute:: admin_group
        list of admin groups (by name) on the interface
        **type**\: list of str
    """

    # YANG module metadata used by ydk for namespace/revision resolution.
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        super(Mpls.TeInterfaceAttributes.Interface.State, self).__init__()

        # Operational-state counterpart of Config: same leafs, segment "state".
        self.yang_name = "state"
        self.yang_parent_name = "interface"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        # Python attribute name -> YANG leaf descriptor; the last two are
        # leaf-lists (YLeafList) and are initialized to empty Python lists.
        self._leafs = OrderedDict([
            ('name', YLeaf(YType.str, 'name')),
            ('te_metric', YLeaf(YType.uint32, 'te-metric')),
            ('srlg_membership', YLeafList(YType.str, 'srlg-membership')),
            ('admin_group', YLeafList(YType.str, 'admin-group')),
        ])
        self.name = None
        self.te_metric = None
        self.srlg_membership = []
        self.admin_group = []

        self._segment_path = lambda: "state"

    def __setattr__(self, name, value):
        # Only the four declared leafs/leaf-lists are assignable as YANG data.
        self._perform_setattr(Mpls.TeInterfaceAttributes.Interface.State, ['name', 'te_metric', 'srlg_membership', 'admin_group'], name, value)
class IgpFloodingBandwidth(Entity):
    """
    Interface bandwidth change percentages
    that trigger update events into the IGP traffic
    engineering database (TED)

    .. attribute:: config
        Configuration parameters for TED update threshold
        **type**\: :py:class:`Config <ydk.models.openconfig.openconfig_mpls.Mpls.TeInterfaceAttributes.Interface.IgpFloodingBandwidth.Config>`

    .. attribute:: state
        State parameters for TED update threshold
        **type**\: :py:class:`State <ydk.models.openconfig.openconfig_mpls.Mpls.TeInterfaceAttributes.Interface.IgpFloodingBandwidth.State>`
    """

    # YANG module metadata used by ydk for namespace/revision resolution.
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        super(Mpls.TeInterfaceAttributes.Interface.IgpFloodingBandwidth, self).__init__()

        self.yang_name = "igp-flooding-bandwidth"
        self.yang_parent_name = "interface"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Child containers: YANG name -> (python attribute name, class).
        self._child_container_classes = OrderedDict([("config", ("config", Mpls.TeInterfaceAttributes.Interface.IgpFloodingBandwidth.Config)), ("state", ("state", Mpls.TeInterfaceAttributes.Interface.IgpFloodingBandwidth.State))])
        self._child_list_classes = OrderedDict([])
        self._leafs = OrderedDict()  # no leafs directly under this container

        # Instantiate the config/state children and link them back to this node.
        self.config = Mpls.TeInterfaceAttributes.Interface.IgpFloodingBandwidth.Config()
        self.config.parent = self
        self._children_name_map["config"] = "config"
        self._children_yang_names.add("config")

        self.state = Mpls.TeInterfaceAttributes.Interface.IgpFloodingBandwidth.State()
        self.state.parent = self
        self._children_name_map["state"] = "state"
        self._children_yang_names.add("state")

        self._segment_path = lambda: "igp-flooding-bandwidth"
class Config(Entity):
    """
    Configuration parameters for TED
    update threshold

    .. attribute:: threshold_type
        The type of threshold that should be used to specify the values at which bandwidth is flooded. DELTA indicates that the local system should flood IGP updates when a change in reserved bandwidth >= the specified delta occurs on the interface. Where THRESHOLD\-CROSSED is specified, the local system should trigger an update (and hence flood) the reserved bandwidth when the reserved bandwidth changes such that it crosses, or becomes equal to one of the threshold values
        **type**\: :py:class:`ThresholdType <ydk.models.openconfig.openconfig_mpls.Mpls.TeInterfaceAttributes.Interface.IgpFloodingBandwidth.Config.ThresholdType>`

    .. attribute:: delta_percentage
        The percentage of the maximum\-reservable\-bandwidth considered as the delta that results in an IGP update being flooded
        **type**\: int
        **range:** 0..100

    .. attribute:: threshold_specification
        This value specifies whether a single set of threshold values should be used for both increasing and decreasing bandwidth when determining whether to trigger updated bandwidth values to be flooded in the IGP TE extensions. MIRRORED\-UP\-DOWN indicates that a single value (or set of values) should be used for both increasing and decreasing values, where SEPARATE\-UP\-DOWN specifies that the increasing and decreasing values will be separately specified
        **type**\: :py:class:`ThresholdSpecification <ydk.models.openconfig.openconfig_mpls.Mpls.TeInterfaceAttributes.Interface.IgpFloodingBandwidth.Config.ThresholdSpecification>`

    .. attribute:: up_thresholds
        The thresholds (expressed as a percentage of the maximum reservable bandwidth) at which bandwidth updates are to be triggered when the bandwidth is increasing
        **type**\: list of int
        **range:** 0..100

    .. attribute:: down_thresholds
        The thresholds (expressed as a percentage of the maximum reservable bandwidth) at which bandwidth updates are to be triggered when the bandwidth is decreasing
        **type**\: list of int
        **range:** 0..100

    .. attribute:: up_down_thresholds
        The thresholds (expressed as a percentage of the maximum reservable bandwidth of the interface) at which bandwidth updates are flooded \- used both when the bandwidth is increasing and decreasing
        **type**\: list of int
        **range:** 0..100
    """

    # YANG module metadata used by ydk for namespace/revision resolution.
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        super(Mpls.TeInterfaceAttributes.Interface.IgpFloodingBandwidth.Config, self).__init__()

        self.yang_name = "config"
        self.yang_parent_name = "igp-flooding-bandwidth"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        # Python attribute name -> YANG leaf descriptor.  Two enumeration
        # leafs, one uint8 percentage, and three uint8 leaf-lists.
        self._leafs = OrderedDict([
            ('threshold_type', YLeaf(YType.enumeration, 'threshold-type')),
            ('delta_percentage', YLeaf(YType.uint8, 'delta-percentage')),
            ('threshold_specification', YLeaf(YType.enumeration, 'threshold-specification')),
            ('up_thresholds', YLeafList(YType.uint8, 'up-thresholds')),
            ('down_thresholds', YLeafList(YType.uint8, 'down-thresholds')),
            ('up_down_thresholds', YLeafList(YType.uint8, 'up-down-thresholds')),
        ])
        self.threshold_type = None
        self.delta_percentage = None
        self.threshold_specification = None
        self.up_thresholds = []
        self.down_thresholds = []
        self.up_down_thresholds = []

        self._segment_path = lambda: "config"

    def __setattr__(self, name, value):
        # Only the six declared leafs/leaf-lists are assignable as YANG data.
        self._perform_setattr(Mpls.TeInterfaceAttributes.Interface.IgpFloodingBandwidth.Config, ['threshold_type', 'delta_percentage', 'threshold_specification', 'up_thresholds', 'down_thresholds', 'up_down_thresholds'], name, value)
class ThresholdSpecification(Enum):
    """
    ThresholdSpecification (Enum Class)

    This value specifies whether a single set of threshold
    values should be used for both increasing and decreasing
    bandwidth when determining whether to trigger updated
    bandwidth values to be flooded in the IGP TE extensions.
    MIRRORED\-UP\-DOWN indicates that a single value (or set of
    values) should be used for both increasing and decreasing
    values, where SEPARATE\-UP\-DOWN specifies that the increasing
    and decreasing values will be separately specified

    .. data:: MIRRORED_UP_DOWN = 0
        MIRRORED-UP-DOWN indicates that a single set of
        threshold values should be used for both increasing
        and decreasing bandwidth when determining whether
        to trigger updated bandwidth values to be flooded
        in the IGP TE extensions.

    .. data:: SEPARATE_UP_DOWN = 1
        SEPARATE-UP-DOWN indicates that a separate
        threshold values should be used for the increasing
        and decreasing bandwidth when determining whether
        to trigger updated bandwidth values to be flooded
        in the IGP TE extensions.
    """

    # Each member maps the Python identifier to (value, YANG enum name).
    MIRRORED_UP_DOWN = Enum.YLeaf(0, "MIRRORED-UP-DOWN")

    SEPARATE_UP_DOWN = Enum.YLeaf(1, "SEPARATE-UP-DOWN")
class ThresholdType(Enum):
    """
    ThresholdType (Enum Class)

    The type of threshold that should be used to specify the
    values at which bandwidth is flooded. DELTA indicates that
    the local system should flood IGP updates when a change in
    reserved bandwidth >= the specified delta occurs on the
    interface. Where THRESHOLD\-CROSSED is specified, the local
    system should trigger an update (and hence flood) the
    reserved bandwidth when the reserved bandwidth changes such
    that it crosses, or becomes equal to one of the threshold
    values

    .. data:: DELTA = 0
        DELTA indicates that the local
        system should flood IGP updates when a
        change in reserved bandwidth >= the specified
        delta occurs on the interface.

    .. data:: THRESHOLD_CROSSED = 1
        THRESHOLD-CROSSED indicates that
        the local system should trigger an update (and
        hence flood) the reserved bandwidth when the
        reserved bandwidth changes such that it crosses,
        or becomes equal to one of the threshold values.
    """

    # Each member maps the Python identifier to (value, YANG enum name).
    DELTA = Enum.YLeaf(0, "DELTA")

    THRESHOLD_CROSSED = Enum.YLeaf(1, "THRESHOLD-CROSSED")
class State(Entity):
    """
    State parameters for TED update threshold

    .. attribute:: threshold_type
        The type of threshold that should be used to specify the values at which bandwidth is flooded. DELTA indicates that the local system should flood IGP updates when a change in reserved bandwidth >= the specified delta occurs on the interface. Where THRESHOLD\-CROSSED is specified, the local system should trigger an update (and hence flood) the reserved bandwidth when the reserved bandwidth changes such that it crosses, or becomes equal to one of the threshold values
        **type**\: :py:class:`ThresholdType <ydk.models.openconfig.openconfig_mpls.Mpls.TeInterfaceAttributes.Interface.IgpFloodingBandwidth.State.ThresholdType>`

    .. attribute:: delta_percentage
        The percentage of the maximum\-reservable\-bandwidth considered as the delta that results in an IGP update being flooded
        **type**\: int
        **range:** 0..100

    .. attribute:: threshold_specification
        This value specifies whether a single set of threshold values should be used for both increasing and decreasing bandwidth when determining whether to trigger updated bandwidth values to be flooded in the IGP TE extensions. MIRRORED\-UP\-DOWN indicates that a single value (or set of values) should be used for both increasing and decreasing values, where SEPARATE\-UP\-DOWN specifies that the increasing and decreasing values will be separately specified
        **type**\: :py:class:`ThresholdSpecification <ydk.models.openconfig.openconfig_mpls.Mpls.TeInterfaceAttributes.Interface.IgpFloodingBandwidth.State.ThresholdSpecification>`

    .. attribute:: up_thresholds
        The thresholds (expressed as a percentage of the maximum reservable bandwidth) at which bandwidth updates are to be triggered when the bandwidth is increasing
        **type**\: list of int
        **range:** 0..100

    .. attribute:: down_thresholds
        The thresholds (expressed as a percentage of the maximum reservable bandwidth) at which bandwidth updates are to be triggered when the bandwidth is decreasing
        **type**\: list of int
        **range:** 0..100

    .. attribute:: up_down_thresholds
        The thresholds (expressed as a percentage of the maximum reservable bandwidth of the interface) at which bandwidth updates are flooded \- used both when the bandwidth is increasing and decreasing
        **type**\: list of int
        **range:** 0..100
    """

    # YANG module metadata used by ydk for namespace/revision resolution.
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        super(Mpls.TeInterfaceAttributes.Interface.IgpFloodingBandwidth.State, self).__init__()

        # Operational-state counterpart of Config: same leafs, segment "state".
        self.yang_name = "state"
        self.yang_parent_name = "igp-flooding-bandwidth"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        # Python attribute name -> YANG leaf descriptor.  Two enumeration
        # leafs, one uint8 percentage, and three uint8 leaf-lists.
        self._leafs = OrderedDict([
            ('threshold_type', YLeaf(YType.enumeration, 'threshold-type')),
            ('delta_percentage', YLeaf(YType.uint8, 'delta-percentage')),
            ('threshold_specification', YLeaf(YType.enumeration, 'threshold-specification')),
            ('up_thresholds', YLeafList(YType.uint8, 'up-thresholds')),
            ('down_thresholds', YLeafList(YType.uint8, 'down-thresholds')),
            ('up_down_thresholds', YLeafList(YType.uint8, 'up-down-thresholds')),
        ])
        self.threshold_type = None
        self.delta_percentage = None
        self.threshold_specification = None
        self.up_thresholds = []
        self.down_thresholds = []
        self.up_down_thresholds = []

        self._segment_path = lambda: "state"

    def __setattr__(self, name, value):
        # Only the six declared leafs/leaf-lists are assignable as YANG data.
        self._perform_setattr(Mpls.TeInterfaceAttributes.Interface.IgpFloodingBandwidth.State, ['threshold_type', 'delta_percentage', 'threshold_specification', 'up_thresholds', 'down_thresholds', 'up_down_thresholds'], name, value)
class ThresholdSpecification(Enum):
    """
    ThresholdSpecification (Enum Class)

    This value specifies whether a single set of threshold
    values should be used for both increasing and decreasing
    bandwidth when determining whether to trigger updated
    bandwidth values to be flooded in the IGP TE extensions.
    MIRRORED\-UP\-DOWN indicates that a single value (or set of
    values) should be used for both increasing and decreasing
    values, where SEPARATE\-UP\-DOWN specifies that the increasing
    and decreasing values will be separately specified

    .. data:: MIRRORED_UP_DOWN = 0
        MIRRORED-UP-DOWN indicates that a single set of
        threshold values should be used for both increasing
        and decreasing bandwidth when determining whether
        to trigger updated bandwidth values to be flooded
        in the IGP TE extensions.

    .. data:: SEPARATE_UP_DOWN = 1
        SEPARATE-UP-DOWN indicates that a separate
        threshold values should be used for the increasing
        and decreasing bandwidth when determining whether
        to trigger updated bandwidth values to be flooded
        in the IGP TE extensions.
    """

    # Each member maps the Python identifier to (value, YANG enum name).
    MIRRORED_UP_DOWN = Enum.YLeaf(0, "MIRRORED-UP-DOWN")

    SEPARATE_UP_DOWN = Enum.YLeaf(1, "SEPARATE-UP-DOWN")
class ThresholdType(Enum):
    """
    ThresholdType (Enum Class)

    The type of threshold that should be used to specify the
    values at which bandwidth is flooded. DELTA indicates that
    the local system should flood IGP updates when a change in
    reserved bandwidth >= the specified delta occurs on the
    interface. Where THRESHOLD\-CROSSED is specified, the local
    system should trigger an update (and hence flood) the
    reserved bandwidth when the reserved bandwidth changes such
    that it crosses, or becomes equal to one of the threshold
    values

    .. data:: DELTA = 0
        DELTA indicates that the local
        system should flood IGP updates when a
        change in reserved bandwidth >= the specified
        delta occurs on the interface.

    .. data:: THRESHOLD_CROSSED = 1
        THRESHOLD-CROSSED indicates that
        the local system should trigger an update (and
        hence flood) the reserved bandwidth when the
        reserved bandwidth changes such that it crosses,
        or becomes equal to one of the threshold values.
    """

    # Each member maps the Python identifier to (value, YANG enum name).
    DELTA = Enum.YLeaf(0, "DELTA")

    THRESHOLD_CROSSED = Enum.YLeaf(1, "THRESHOLD-CROSSED")
class SignalingProtocols(Entity):
"""
top\-level signaling protocol configuration
.. attribute:: rsvp_te
RSVP\-TE global signaling protocol configuration
**type**\: :py:class:`RsvpTe <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe>`
.. attribute:: segment_routing
SR global signaling config
**type**\: :py:class:`SegmentRouting <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.SegmentRouting>`
.. attribute:: ldp
LDP global signaling configuration
**type**\: :py:class:`Ldp <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.Ldp>`
"""
_prefix = 'mpls'
_revision = '2015-11-05'
def __init__(self):
super(Mpls.SignalingProtocols, self).__init__()
self.yang_name = "signaling-protocols"
self.yang_parent_name = "mpls"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_container_classes = OrderedDict([("rsvp-te", ("rsvp_te", Mpls.SignalingProtocols.RsvpTe)), ("segment-routing", ("segment_routing", Mpls.SignalingProtocols.SegmentRouting)), ("ldp", ("ldp", Mpls.SignalingProtocols.Ldp))])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict()
self.rsvp_te = Mpls.SignalingProtocols.RsvpTe()
self.rsvp_te.parent = self
self._children_name_map["rsvp_te"] = "rsvp-te"
self._children_yang_names.add("rsvp-te")
self.segment_routing = Mpls.SignalingProtocols.SegmentRouting()
self.segment_routing.parent = self
self._children_name_map["segment_routing"] = "segment-routing"
self._children_yang_names.add("segment-routing")
self.ldp = Mpls.SignalingProtocols.Ldp()
self.ldp.parent = self
self._children_name_map["ldp"] = "ldp"
self._children_yang_names.add("ldp")
self._segment_path = lambda: "signaling-protocols"
self._absolute_path = lambda: "openconfig-mpls:mpls/%s" % self._segment_path()
class RsvpTe(Entity):
"""
RSVP\-TE global signaling protocol configuration
.. attribute:: sessions
Configuration and state of RSVP sessions
**type**\: :py:class:`Sessions <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe.Sessions>`
.. attribute:: neighbors
Configuration and state for RSVP neighbors connecting to the device
**type**\: :py:class:`Neighbors <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe.Neighbors>`
.. attribute:: global_
Platform wide RSVP configuration and state
**type**\: :py:class:`Global <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe.Global>`
.. attribute:: interface_attributes
Attributes relating to RSVP\-TE enabled interfaces
**type**\: :py:class:`InterfaceAttributes <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes>`
"""
_prefix = 'mpls'
_revision = '2015-11-05'
def __init__(self):
super(Mpls.SignalingProtocols.RsvpTe, self).__init__()
self.yang_name = "rsvp-te"
self.yang_parent_name = "signaling-protocols"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_container_classes = OrderedDict([("sessions", ("sessions", Mpls.SignalingProtocols.RsvpTe.Sessions)), ("neighbors", ("neighbors", Mpls.SignalingProtocols.RsvpTe.Neighbors)), ("global", ("global_", Mpls.SignalingProtocols.RsvpTe.Global)), ("interface-attributes", ("interface_attributes", Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes))])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict()
self.sessions = Mpls.SignalingProtocols.RsvpTe.Sessions()
self.sessions.parent = self
self._children_name_map["sessions"] = "sessions"
self._children_yang_names.add("sessions")
self.neighbors = Mpls.SignalingProtocols.RsvpTe.Neighbors()
self.neighbors.parent = self
self._children_name_map["neighbors"] = "neighbors"
self._children_yang_names.add("neighbors")
self.global_ = Mpls.SignalingProtocols.RsvpTe.Global()
self.global_.parent = self
self._children_name_map["global_"] = "global"
self._children_yang_names.add("global")
self.interface_attributes = Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes()
self.interface_attributes.parent = self
self._children_name_map["interface_attributes"] = "interface-attributes"
self._children_yang_names.add("interface-attributes")
self._segment_path = lambda: "rsvp-te"
self._absolute_path = lambda: "openconfig-mpls:mpls/signaling-protocols/%s" % self._segment_path()
class Sessions(Entity):
"""
Configuration and state of RSVP sessions
.. attribute:: config
Configuration of RSVP sessions on the device
**type**\: :py:class:`Config <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe.Sessions.Config>`
.. attribute:: state
State information relating to RSVP sessions on the device
**type**\: :py:class:`State <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe.Sessions.State>`
"""
_prefix = 'mpls'
_revision = '2015-11-05'
def __init__(self):
super(Mpls.SignalingProtocols.RsvpTe.Sessions, self).__init__()
self.yang_name = "sessions"
self.yang_parent_name = "rsvp-te"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_container_classes = OrderedDict([("config", ("config", Mpls.SignalingProtocols.RsvpTe.Sessions.Config)), ("state", ("state", Mpls.SignalingProtocols.RsvpTe.Sessions.State))])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict()
self.config = Mpls.SignalingProtocols.RsvpTe.Sessions.Config()
self.config.parent = self
self._children_name_map["config"] = "config"
self._children_yang_names.add("config")
self.state = Mpls.SignalingProtocols.RsvpTe.Sessions.State()
self.state.parent = self
self._children_name_map["state"] = "state"
self._children_yang_names.add("state")
self._segment_path = lambda: "sessions"
self._absolute_path = lambda: "openconfig-mpls:mpls/signaling-protocols/rsvp-te/%s" % self._segment_path()
class Config(Entity):
"""
Configuration of RSVP sessions on the device
"""
_prefix = 'mpls'
_revision = '2015-11-05'
def __init__(self):
super(Mpls.SignalingProtocols.RsvpTe.Sessions.Config, self).__init__()
self.yang_name = "config"
self.yang_parent_name = "sessions"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict()
self._segment_path = lambda: "config"
self._absolute_path = lambda: "openconfig-mpls:mpls/signaling-protocols/rsvp-te/sessions/%s" % self._segment_path()
class State(Entity):
    """
    State information relating to RSVP sessions
    on the device

    .. attribute:: session

    List of RSVP sessions

    **type**\: list of :py:class:`Session <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe.Sessions.State.Session>`

    """

    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        """Build the 'state' container holding the keyed 'session' list."""
        super(Mpls.SignalingProtocols.RsvpTe.Sessions.State, self).__init__()
        self.yang_name = "state"
        self.yang_parent_name = "sessions"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        # One YANG list child: the keyed "session" entries (class below).
        self._child_list_classes = OrderedDict([("session", ("session", Mpls.SignalingProtocols.RsvpTe.Sessions.State.Session))])
        self._leafs = OrderedDict()
        # YList keeps parent links for each appended Session entry.
        self.session = YList(self)
        # Lazily-evaluated XPath segments used when building instance paths.
        self._segment_path = lambda: "state"
        self._absolute_path = lambda: "openconfig-mpls:mpls/signaling-protocols/rsvp-te/sessions/%s" % self._segment_path()

    def __setattr__(self, name, value):
        # Route attribute writes through YDK validation; no leaf names at this level.
        self._perform_setattr(Mpls.SignalingProtocols.RsvpTe.Sessions.State, [], name, value)

    class Session(Entity):
        """
        List of RSVP sessions

        .. attribute:: source_port  (key)

        RSVP source port

        **type**\: int

        **range:** 0..65535

        .. attribute:: destination_port  (key)

        RSVP destination port

        **type**\: int

        **range:** 0..65535

        .. attribute:: source_address  (key)

        Origin address of RSVP session

        **type**\: union of the below types:

        **type**\: str

        **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?

        **type**\: str

        **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?

        .. attribute:: destination_address  (key)

        Destination address of RSVP session

        **type**\: union of the below types:

        **type**\: str

        **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?

        **type**\: str

        **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?

        .. attribute:: status

        Enumeration of RSVP session states

        **type**\: :py:class:`Status <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe.Sessions.State.Session.Status>`

        .. attribute:: type

        Enumeration of possible RSVP session types

        **type**\: :py:class:`Type <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe.Sessions.State.Session.Type>`

        .. attribute:: tunnel_id

        Unique identifier of RSVP session

        **type**\: int

        **range:** 0..65535

        .. attribute:: label_in

        Incoming MPLS label associated with this RSVP session

        **type**\: union of the below types:

        **type**\: int

        **range:** 16..1048575

        **type**\: :py:class:`MplsLabel <ydk.models.openconfig.openconfig_segment_routing.MplsLabel>`

        .. attribute:: label_out

        Outgoing MPLS label associated with this RSVP session

        **type**\: union of the below types:

        **type**\: int

        **range:** 16..1048575

        **type**\: :py:class:`MplsLabel <ydk.models.openconfig.openconfig_segment_routing.MplsLabel>`

        .. attribute:: associated_lsps

        List of label switched paths associated with this RSVP session

        **type**\: list of str

        **refers to**\: :py:class:`name <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.ConstrainedPath.Tunnel.Config>`

        """

        _prefix = 'mpls'
        _revision = '2015-11-05'

        def __init__(self):
            """Build one 'session' list entry; its four keys form the path predicate."""
            super(Mpls.SignalingProtocols.RsvpTe.Sessions.State.Session, self).__init__()
            self.yang_name = "session"
            self.yang_parent_name = "state"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            # YANG list keys, in the order used in the segment-path predicate.
            self.ylist_key_names = ['source_port','destination_port','source_address','destination_address']
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([])
            # Leaf descriptors: python attribute -> YLeaf(type, yang-name).
            self._leafs = OrderedDict([
                ('source_port', YLeaf(YType.uint16, 'source-port')),
                ('destination_port', YLeaf(YType.uint16, 'destination-port')),
                ('source_address', YLeaf(YType.str, 'source-address')),
                ('destination_address', YLeaf(YType.str, 'destination-address')),
                ('status', YLeaf(YType.enumeration, 'status')),
                ('type', YLeaf(YType.enumeration, 'type')),
                ('tunnel_id', YLeaf(YType.uint16, 'tunnel-id')),
                ('label_in', YLeaf(YType.str, 'label-in')),
                ('label_out', YLeaf(YType.str, 'label-out')),
                ('associated_lsps', YLeafList(YType.str, 'associated-lsps')),
            ])
            # Leaf values start unset; assignment is validated via __setattr__.
            self.source_port = None
            self.destination_port = None
            self.source_address = None
            self.destination_address = None
            self.status = None
            self.type = None
            self.tunnel_id = None
            self.label_in = None
            self.label_out = None
            self.associated_lsps = []
            # Segment path embeds the current key values as XPath predicates.
            self._segment_path = lambda: "session" + "[source-port='" + str(self.source_port) + "']" + "[destination-port='" + str(self.destination_port) + "']" + "[source-address='" + str(self.source_address) + "']" + "[destination-address='" + str(self.destination_address) + "']"
            self._absolute_path = lambda: "openconfig-mpls:mpls/signaling-protocols/rsvp-te/sessions/state/%s" % self._segment_path()

        def __setattr__(self, name, value):
            # Route attribute writes through YDK leaf validation for the listed leaves.
            self._perform_setattr(Mpls.SignalingProtocols.RsvpTe.Sessions.State.Session, ['source_port', 'destination_port', 'source_address', 'destination_address', 'status', 'type', 'tunnel_id', 'label_in', 'label_out', 'associated_lsps'], name, value)

        class Status(Enum):
            """
            Status (Enum Class)

            Enumeration of RSVP session states

            .. data:: UP = 0

            RSVP session is up

            .. data:: DOWN = 1

            RSVP session is down

            """

            UP = Enum.YLeaf(0, "UP")

            DOWN = Enum.YLeaf(1, "DOWN")

        class Type(Enum):
            """
            Type (Enum Class)

            Enumeration of possible RSVP session types

            .. data:: SOURCE = 0

            RSVP session originates on this device

            .. data:: TRANSIT = 1

            RSVP session transits this device only

            .. data:: DESTINATION = 2

            RSVP session terminates on this device

            """

            SOURCE = Enum.YLeaf(0, "SOURCE")

            TRANSIT = Enum.YLeaf(1, "TRANSIT")

            DESTINATION = Enum.YLeaf(2, "DESTINATION")
class Neighbors(Entity):
    """
    Configuration and state for RSVP neighbors connecting
    to the device

    .. attribute:: config

    Configuration of RSVP neighbor information

    **type**\: :py:class:`Config <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe.Neighbors.Config>`

    .. attribute:: state

    State information relating to RSVP neighbors

    **type**\: :py:class:`State <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe.Neighbors.State>`

    """

    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        """Build the 'neighbors' container with its 'config' and 'state' children."""
        super(Mpls.SignalingProtocols.RsvpTe.Neighbors, self).__init__()
        self.yang_name = "neighbors"
        self.yang_parent_name = "rsvp-te"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        # Child containers, keyed by YANG name -> (python attribute name, class).
        self._child_container_classes = OrderedDict([("config", ("config", Mpls.SignalingProtocols.RsvpTe.Neighbors.Config)), ("state", ("state", Mpls.SignalingProtocols.RsvpTe.Neighbors.State))])
        self._child_list_classes = OrderedDict([])
        self._leafs = OrderedDict()
        # Instantiate and parent the child containers.
        self.config = Mpls.SignalingProtocols.RsvpTe.Neighbors.Config()
        self.config.parent = self
        self._children_name_map["config"] = "config"
        self._children_yang_names.add("config")
        self.state = Mpls.SignalingProtocols.RsvpTe.Neighbors.State()
        self.state.parent = self
        self._children_name_map["state"] = "state"
        self._children_yang_names.add("state")
        # Lazily-evaluated XPath segments used when building instance paths.
        self._segment_path = lambda: "neighbors"
        self._absolute_path = lambda: "openconfig-mpls:mpls/signaling-protocols/rsvp-te/%s" % self._segment_path()

    class Config(Entity):
        """
        Configuration of RSVP neighbor information
        """

        _prefix = 'mpls'
        _revision = '2015-11-05'

        def __init__(self):
            """Build the (empty) 'config' container under 'neighbors'."""
            super(Mpls.SignalingProtocols.RsvpTe.Neighbors.Config, self).__init__()
            self.yang_name = "config"
            self.yang_parent_name = "neighbors"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            # This container carries no children and no leaves in the model.
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([])
            self._leafs = OrderedDict()
            self._segment_path = lambda: "config"
            self._absolute_path = lambda: "openconfig-mpls:mpls/signaling-protocols/rsvp-te/neighbors/%s" % self._segment_path()

    class State(Entity):
        """
        State information relating to RSVP neighbors

        .. attribute:: neighbor

        List of RSVP neighbors connecting to the device, keyed by neighbor address

        **type**\: list of :py:class:`Neighbor <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe.Neighbors.State.Neighbor>`

        """

        _prefix = 'mpls'
        _revision = '2015-11-05'

        def __init__(self):
            """Build the 'state' container holding the keyed 'neighbor' list."""
            super(Mpls.SignalingProtocols.RsvpTe.Neighbors.State, self).__init__()
            self.yang_name = "state"
            self.yang_parent_name = "neighbors"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_container_classes = OrderedDict([])
            # One YANG list child: the keyed "neighbor" entries (class below).
            self._child_list_classes = OrderedDict([("neighbor", ("neighbor", Mpls.SignalingProtocols.RsvpTe.Neighbors.State.Neighbor))])
            self._leafs = OrderedDict()
            # YList keeps parent links for each appended Neighbor entry.
            self.neighbor = YList(self)
            self._segment_path = lambda: "state"
            self._absolute_path = lambda: "openconfig-mpls:mpls/signaling-protocols/rsvp-te/neighbors/%s" % self._segment_path()

        def __setattr__(self, name, value):
            # Route attribute writes through YDK validation; no leaf names at this level.
            self._perform_setattr(Mpls.SignalingProtocols.RsvpTe.Neighbors.State, [], name, value)

        class Neighbor(Entity):
            """
            List of RSVP neighbors connecting to the device,
            keyed by neighbor address

            .. attribute:: address  (key)

            Address of RSVP neighbor

            **type**\: union of the below types:

            **type**\: str

            **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?

            **type**\: str

            **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?

            .. attribute:: detected_interface

            Interface where RSVP neighbor was detected

            **type**\: str

            .. attribute:: neighbor_status

            Enumeration of possible RSVP neighbor states

            **type**\: :py:class:`NeighborStatus <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe.Neighbors.State.Neighbor.NeighborStatus>`

            .. attribute:: refresh_reduction

            Support of neighbor for RSVP refresh reduction

            **type**\: bool

            """

            _prefix = 'mpls'
            _revision = '2015-11-05'

            def __init__(self):
                """Build one 'neighbor' list entry; 'address' is the list key."""
                super(Mpls.SignalingProtocols.RsvpTe.Neighbors.State.Neighbor, self).__init__()
                self.yang_name = "neighbor"
                self.yang_parent_name = "state"
                self.is_top_level_class = False
                self.has_list_ancestor = False
                self.ylist_key_names = ['address']
                self._child_container_classes = OrderedDict([])
                self._child_list_classes = OrderedDict([])
                # Leaf descriptors: python attribute -> YLeaf(type, yang-name).
                self._leafs = OrderedDict([
                    ('address', YLeaf(YType.str, 'address')),
                    ('detected_interface', YLeaf(YType.str, 'detected-interface')),
                    ('neighbor_status', YLeaf(YType.enumeration, 'neighbor-status')),
                    ('refresh_reduction', YLeaf(YType.boolean, 'refresh-reduction')),
                ])
                # Leaf values start unset; assignment is validated via __setattr__.
                self.address = None
                self.detected_interface = None
                self.neighbor_status = None
                self.refresh_reduction = None
                # Segment path embeds the key value as an XPath predicate.
                self._segment_path = lambda: "neighbor" + "[address='" + str(self.address) + "']"
                self._absolute_path = lambda: "openconfig-mpls:mpls/signaling-protocols/rsvp-te/neighbors/state/%s" % self._segment_path()

            def __setattr__(self, name, value):
                # Route attribute writes through YDK leaf validation for the listed leaves.
                self._perform_setattr(Mpls.SignalingProtocols.RsvpTe.Neighbors.State.Neighbor, ['address', 'detected_interface', 'neighbor_status', 'refresh_reduction'], name, value)

            class NeighborStatus(Enum):
                """
                NeighborStatus (Enum Class)

                Enumeration of possible RSVP neighbor states

                .. data:: UP = 0

                RSVP hello messages are detected from the neighbor

                .. data:: DOWN = 1

                RSVP neighbor not detected as up, due to a
                communication failure or IGP notification
                the neighbor is unavailable

                """

                UP = Enum.YLeaf(0, "UP")

                DOWN = Enum.YLeaf(1, "DOWN")
class Global(Entity):
"""
Platform wide RSVP configuration and state
.. attribute:: graceful_restart
Operational state and configuration parameters relating to graceful\-restart for RSVP
**type**\: :py:class:`GracefulRestart <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe.Global.GracefulRestart>`
.. attribute:: soft_preemption
Protocol options relating to RSVP soft preemption
**type**\: :py:class:`SoftPreemption <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe.Global.SoftPreemption>`
.. attribute:: hellos
Top level container for RSVP hello parameters
**type**\: :py:class:`Hellos <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe.Global.Hellos>`
.. attribute:: state
Platform wide RSVP state, including counters
**type**\: :py:class:`State <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe.Global.State>`
"""
_prefix = 'mpls'
_revision = '2015-11-05'
def __init__(self):
    """Build the 'global' container with graceful-restart, soft-preemption, hellos and state children."""
    super(Mpls.SignalingProtocols.RsvpTe.Global, self).__init__()
    self.yang_name = "global"
    self.yang_parent_name = "rsvp-te"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = []
    # Child containers, keyed by YANG name -> (python attribute name, class).
    self._child_container_classes = OrderedDict([("graceful-restart", ("graceful_restart", Mpls.SignalingProtocols.RsvpTe.Global.GracefulRestart)), ("soft-preemption", ("soft_preemption", Mpls.SignalingProtocols.RsvpTe.Global.SoftPreemption)), ("hellos", ("hellos", Mpls.SignalingProtocols.RsvpTe.Global.Hellos)), ("state", ("state", Mpls.SignalingProtocols.RsvpTe.Global.State))])
    self._child_list_classes = OrderedDict([])
    self._leafs = OrderedDict()
    # Instantiate and parent each child container.
    self.graceful_restart = Mpls.SignalingProtocols.RsvpTe.Global.GracefulRestart()
    self.graceful_restart.parent = self
    self._children_name_map["graceful_restart"] = "graceful-restart"
    self._children_yang_names.add("graceful-restart")
    self.soft_preemption = Mpls.SignalingProtocols.RsvpTe.Global.SoftPreemption()
    self.soft_preemption.parent = self
    self._children_name_map["soft_preemption"] = "soft-preemption"
    self._children_yang_names.add("soft-preemption")
    self.hellos = Mpls.SignalingProtocols.RsvpTe.Global.Hellos()
    self.hellos.parent = self
    self._children_name_map["hellos"] = "hellos"
    self._children_yang_names.add("hellos")
    self.state = Mpls.SignalingProtocols.RsvpTe.Global.State()
    self.state.parent = self
    self._children_name_map["state"] = "state"
    self._children_yang_names.add("state")
    # Lazily-evaluated XPath segments used when building instance paths.
    self._segment_path = lambda: "global"
    self._absolute_path = lambda: "openconfig-mpls:mpls/signaling-protocols/rsvp-te/%s" % self._segment_path()
class GracefulRestart(Entity):
    """
    Operational state and configuration parameters relating to
    graceful\-restart for RSVP

    .. attribute:: config

    Configuration parameters relating to graceful\-restart

    **type**\: :py:class:`Config <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe.Global.GracefulRestart.Config>`

    .. attribute:: state

    State information associated with RSVP graceful\-restart

    **type**\: :py:class:`State <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe.Global.GracefulRestart.State>`

    """

    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        """Build the 'graceful-restart' container with its 'config' and 'state' children."""
        super(Mpls.SignalingProtocols.RsvpTe.Global.GracefulRestart, self).__init__()
        self.yang_name = "graceful-restart"
        self.yang_parent_name = "global"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        # Child containers, keyed by YANG name -> (python attribute name, class).
        self._child_container_classes = OrderedDict([("config", ("config", Mpls.SignalingProtocols.RsvpTe.Global.GracefulRestart.Config)), ("state", ("state", Mpls.SignalingProtocols.RsvpTe.Global.GracefulRestart.State))])
        self._child_list_classes = OrderedDict([])
        self._leafs = OrderedDict()
        self.config = Mpls.SignalingProtocols.RsvpTe.Global.GracefulRestart.Config()
        self.config.parent = self
        self._children_name_map["config"] = "config"
        self._children_yang_names.add("config")
        self.state = Mpls.SignalingProtocols.RsvpTe.Global.GracefulRestart.State()
        self.state.parent = self
        self._children_name_map["state"] = "state"
        self._children_yang_names.add("state")
        # Lazily-evaluated XPath segments used when building instance paths.
        self._segment_path = lambda: "graceful-restart"
        self._absolute_path = lambda: "openconfig-mpls:mpls/signaling-protocols/rsvp-te/global/%s" % self._segment_path()

    class Config(Entity):
        """
        Configuration parameters relating to
        graceful\-restart

        .. attribute:: enable

        Enables graceful restart on the node

        **type**\: bool

        **default value**\: false

        .. attribute:: restart_time

        Graceful restart time (seconds)

        **type**\: int

        **range:** 0..4294967295

        .. attribute:: recovery_time

        RSVP state recovery time

        **type**\: int

        **range:** 0..4294967295

        """

        _prefix = 'mpls'
        _revision = '2015-11-05'

        def __init__(self):
            """Build the graceful-restart 'config' container and its three leaves."""
            super(Mpls.SignalingProtocols.RsvpTe.Global.GracefulRestart.Config, self).__init__()
            self.yang_name = "config"
            self.yang_parent_name = "graceful-restart"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([])
            # Leaf descriptors: python attribute -> YLeaf(type, yang-name).
            self._leafs = OrderedDict([
                ('enable', YLeaf(YType.boolean, 'enable')),
                ('restart_time', YLeaf(YType.uint32, 'restart-time')),
                ('recovery_time', YLeaf(YType.uint32, 'recovery-time')),
            ])
            # Leaf values start unset; assignment is validated via __setattr__.
            self.enable = None
            self.restart_time = None
            self.recovery_time = None
            self._segment_path = lambda: "config"
            self._absolute_path = lambda: "openconfig-mpls:mpls/signaling-protocols/rsvp-te/global/graceful-restart/%s" % self._segment_path()

        def __setattr__(self, name, value):
            # Route attribute writes through YDK leaf validation for the listed leaves.
            self._perform_setattr(Mpls.SignalingProtocols.RsvpTe.Global.GracefulRestart.Config, ['enable', 'restart_time', 'recovery_time'], name, value)

    class State(Entity):
        """
        State information associated with
        RSVP graceful\-restart

        .. attribute:: enable

        Enables graceful restart on the node

        **type**\: bool

        **default value**\: false

        .. attribute:: restart_time

        Graceful restart time (seconds)

        **type**\: int

        **range:** 0..4294967295

        .. attribute:: recovery_time

        RSVP state recovery time

        **type**\: int

        **range:** 0..4294967295

        """

        _prefix = 'mpls'
        _revision = '2015-11-05'

        def __init__(self):
            """Build the graceful-restart 'state' container; mirrors the 'config' leaves."""
            super(Mpls.SignalingProtocols.RsvpTe.Global.GracefulRestart.State, self).__init__()
            self.yang_name = "state"
            self.yang_parent_name = "graceful-restart"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([])
            # Leaf descriptors: python attribute -> YLeaf(type, yang-name).
            self._leafs = OrderedDict([
                ('enable', YLeaf(YType.boolean, 'enable')),
                ('restart_time', YLeaf(YType.uint32, 'restart-time')),
                ('recovery_time', YLeaf(YType.uint32, 'recovery-time')),
            ])
            # Leaf values start unset; assignment is validated via __setattr__.
            self.enable = None
            self.restart_time = None
            self.recovery_time = None
            self._segment_path = lambda: "state"
            self._absolute_path = lambda: "openconfig-mpls:mpls/signaling-protocols/rsvp-te/global/graceful-restart/%s" % self._segment_path()

        def __setattr__(self, name, value):
            # Route attribute writes through YDK leaf validation for the listed leaves.
            self._perform_setattr(Mpls.SignalingProtocols.RsvpTe.Global.GracefulRestart.State, ['enable', 'restart_time', 'recovery_time'], name, value)
class SoftPreemption(Entity):
    """
    Protocol options relating to RSVP
    soft preemption

    .. attribute:: config

    Configuration parameters relating to RSVP soft preemption support

    **type**\: :py:class:`Config <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe.Global.SoftPreemption.Config>`

    .. attribute:: state

    State parameters relating to RSVP soft preemption support

    **type**\: :py:class:`State <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe.Global.SoftPreemption.State>`

    """

    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        """Build the 'soft-preemption' container with its 'config' and 'state' children."""
        super(Mpls.SignalingProtocols.RsvpTe.Global.SoftPreemption, self).__init__()
        self.yang_name = "soft-preemption"
        self.yang_parent_name = "global"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        # Child containers, keyed by YANG name -> (python attribute name, class).
        self._child_container_classes = OrderedDict([("config", ("config", Mpls.SignalingProtocols.RsvpTe.Global.SoftPreemption.Config)), ("state", ("state", Mpls.SignalingProtocols.RsvpTe.Global.SoftPreemption.State))])
        self._child_list_classes = OrderedDict([])
        self._leafs = OrderedDict()
        self.config = Mpls.SignalingProtocols.RsvpTe.Global.SoftPreemption.Config()
        self.config.parent = self
        self._children_name_map["config"] = "config"
        self._children_yang_names.add("config")
        self.state = Mpls.SignalingProtocols.RsvpTe.Global.SoftPreemption.State()
        self.state.parent = self
        self._children_name_map["state"] = "state"
        self._children_yang_names.add("state")
        # Lazily-evaluated XPath segments used when building instance paths.
        self._segment_path = lambda: "soft-preemption"
        self._absolute_path = lambda: "openconfig-mpls:mpls/signaling-protocols/rsvp-te/global/%s" % self._segment_path()

    class Config(Entity):
        """
        Configuration parameters relating to RSVP
        soft preemption support

        .. attribute:: enable

        Enables soft preemption on a node

        **type**\: bool

        **default value**\: false

        .. attribute:: soft_preemption_timeout

        Timeout value for soft preemption to revert to hard preemption

        **type**\: int

        **range:** 0..65535

        **default value**\: 0

        """

        _prefix = 'mpls'
        _revision = '2015-11-05'

        def __init__(self):
            """Build the soft-preemption 'config' container and its two leaves."""
            super(Mpls.SignalingProtocols.RsvpTe.Global.SoftPreemption.Config, self).__init__()
            self.yang_name = "config"
            self.yang_parent_name = "soft-preemption"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([])
            # Leaf descriptors: python attribute -> YLeaf(type, yang-name).
            self._leafs = OrderedDict([
                ('enable', YLeaf(YType.boolean, 'enable')),
                ('soft_preemption_timeout', YLeaf(YType.uint16, 'soft-preemption-timeout')),
            ])
            # Leaf values start unset; assignment is validated via __setattr__.
            self.enable = None
            self.soft_preemption_timeout = None
            self._segment_path = lambda: "config"
            self._absolute_path = lambda: "openconfig-mpls:mpls/signaling-protocols/rsvp-te/global/soft-preemption/%s" % self._segment_path()

        def __setattr__(self, name, value):
            # Route attribute writes through YDK leaf validation for the listed leaves.
            self._perform_setattr(Mpls.SignalingProtocols.RsvpTe.Global.SoftPreemption.Config, ['enable', 'soft_preemption_timeout'], name, value)

    class State(Entity):
        """
        State parameters relating to RSVP
        soft preemption support

        .. attribute:: enable

        Enables soft preemption on a node

        **type**\: bool

        **default value**\: false

        .. attribute:: soft_preemption_timeout

        Timeout value for soft preemption to revert to hard preemption

        **type**\: int

        **range:** 0..65535

        **default value**\: 0

        """

        _prefix = 'mpls'
        _revision = '2015-11-05'

        def __init__(self):
            """Build the soft-preemption 'state' container; mirrors the 'config' leaves."""
            super(Mpls.SignalingProtocols.RsvpTe.Global.SoftPreemption.State, self).__init__()
            self.yang_name = "state"
            self.yang_parent_name = "soft-preemption"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([])
            # Leaf descriptors: python attribute -> YLeaf(type, yang-name).
            self._leafs = OrderedDict([
                ('enable', YLeaf(YType.boolean, 'enable')),
                ('soft_preemption_timeout', YLeaf(YType.uint16, 'soft-preemption-timeout')),
            ])
            # Leaf values start unset; assignment is validated via __setattr__.
            self.enable = None
            self.soft_preemption_timeout = None
            self._segment_path = lambda: "state"
            self._absolute_path = lambda: "openconfig-mpls:mpls/signaling-protocols/rsvp-te/global/soft-preemption/%s" % self._segment_path()

        def __setattr__(self, name, value):
            # Route attribute writes through YDK leaf validation for the listed leaves.
            self._perform_setattr(Mpls.SignalingProtocols.RsvpTe.Global.SoftPreemption.State, ['enable', 'soft_preemption_timeout'], name, value)
class Hellos(Entity):
    """
    Top level container for RSVP hello parameters

    .. attribute:: config

    Configuration parameters relating to RSVP hellos

    **type**\: :py:class:`Config <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe.Global.Hellos.Config>`

    .. attribute:: state

    State information associated with RSVP hellos

    **type**\: :py:class:`State <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe.Global.Hellos.State>`

    """

    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        """Build the 'hellos' container with its 'config' and 'state' children."""
        super(Mpls.SignalingProtocols.RsvpTe.Global.Hellos, self).__init__()
        self.yang_name = "hellos"
        self.yang_parent_name = "global"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        # Child containers, keyed by YANG name -> (python attribute name, class).
        self._child_container_classes = OrderedDict([("config", ("config", Mpls.SignalingProtocols.RsvpTe.Global.Hellos.Config)), ("state", ("state", Mpls.SignalingProtocols.RsvpTe.Global.Hellos.State))])
        self._child_list_classes = OrderedDict([])
        self._leafs = OrderedDict()
        self.config = Mpls.SignalingProtocols.RsvpTe.Global.Hellos.Config()
        self.config.parent = self
        self._children_name_map["config"] = "config"
        self._children_yang_names.add("config")
        self.state = Mpls.SignalingProtocols.RsvpTe.Global.Hellos.State()
        self.state.parent = self
        self._children_name_map["state"] = "state"
        self._children_yang_names.add("state")
        # Lazily-evaluated XPath segments used when building instance paths.
        self._segment_path = lambda: "hellos"
        self._absolute_path = lambda: "openconfig-mpls:mpls/signaling-protocols/rsvp-te/global/%s" % self._segment_path()

    class Config(Entity):
        """
        Configuration parameters relating to RSVP
        hellos

        .. attribute:: hello_interval

        set the interval in ms between RSVP hello messages

        **type**\: int

        **range:** 1000..60000

        **units**\: milliseconds

        **default value**\: 9000

        .. attribute:: refresh_reduction

        enables all RSVP refresh reduction message bundling, RSVP message ID, reliable message delivery and summary refresh

        **type**\: bool

        **default value**\: true

        """

        _prefix = 'mpls'
        _revision = '2015-11-05'

        def __init__(self):
            """Build the hellos 'config' container and its two leaves."""
            super(Mpls.SignalingProtocols.RsvpTe.Global.Hellos.Config, self).__init__()
            self.yang_name = "config"
            self.yang_parent_name = "hellos"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([])
            # Leaf descriptors: python attribute -> YLeaf(type, yang-name).
            self._leafs = OrderedDict([
                ('hello_interval', YLeaf(YType.uint16, 'hello-interval')),
                ('refresh_reduction', YLeaf(YType.boolean, 'refresh-reduction')),
            ])
            # Leaf values start unset; assignment is validated via __setattr__.
            self.hello_interval = None
            self.refresh_reduction = None
            self._segment_path = lambda: "config"
            self._absolute_path = lambda: "openconfig-mpls:mpls/signaling-protocols/rsvp-te/global/hellos/%s" % self._segment_path()

        def __setattr__(self, name, value):
            # Route attribute writes through YDK leaf validation for the listed leaves.
            self._perform_setattr(Mpls.SignalingProtocols.RsvpTe.Global.Hellos.Config, ['hello_interval', 'refresh_reduction'], name, value)

    class State(Entity):
        """
        State information associated with RSVP hellos

        .. attribute:: hello_interval

        set the interval in ms between RSVP hello messages

        **type**\: int

        **range:** 1000..60000

        **units**\: milliseconds

        **default value**\: 9000

        .. attribute:: refresh_reduction

        enables all RSVP refresh reduction message bundling, RSVP message ID, reliable message delivery and summary refresh

        **type**\: bool

        **default value**\: true

        """

        _prefix = 'mpls'
        _revision = '2015-11-05'

        def __init__(self):
            """Build the hellos 'state' container; mirrors the 'config' leaves."""
            super(Mpls.SignalingProtocols.RsvpTe.Global.Hellos.State, self).__init__()
            self.yang_name = "state"
            self.yang_parent_name = "hellos"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([])
            # Leaf descriptors: python attribute -> YLeaf(type, yang-name).
            self._leafs = OrderedDict([
                ('hello_interval', YLeaf(YType.uint16, 'hello-interval')),
                ('refresh_reduction', YLeaf(YType.boolean, 'refresh-reduction')),
            ])
            # Leaf values start unset; assignment is validated via __setattr__.
            self.hello_interval = None
            self.refresh_reduction = None
            self._segment_path = lambda: "state"
            self._absolute_path = lambda: "openconfig-mpls:mpls/signaling-protocols/rsvp-te/global/hellos/%s" % self._segment_path()

        def __setattr__(self, name, value):
            # Route attribute writes through YDK leaf validation for the listed leaves.
            self._perform_setattr(Mpls.SignalingProtocols.RsvpTe.Global.Hellos.State, ['hello_interval', 'refresh_reduction'], name, value)
class State(Entity):
"""
Platform wide RSVP state, including counters
.. attribute:: counters
Platform wide RSVP statistics and counters
**type**\: :py:class:`Counters <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe.Global.State.Counters>`
"""
_prefix = 'mpls'
_revision = '2015-11-05'
def __init__(self):
    """Build the platform-wide RSVP 'state' container with its 'counters' child."""
    super(Mpls.SignalingProtocols.RsvpTe.Global.State, self).__init__()
    self.yang_name = "state"
    self.yang_parent_name = "global"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = []
    # Single child container: the RSVP statistics/counters node.
    self._child_container_classes = OrderedDict([("counters", ("counters", Mpls.SignalingProtocols.RsvpTe.Global.State.Counters))])
    self._child_list_classes = OrderedDict([])
    self._leafs = OrderedDict()
    self.counters = Mpls.SignalingProtocols.RsvpTe.Global.State.Counters()
    self.counters.parent = self
    self._children_name_map["counters"] = "counters"
    self._children_yang_names.add("counters")
    # Lazily-evaluated XPath segments used when building instance paths.
    self._segment_path = lambda: "state"
    self._absolute_path = lambda: "openconfig-mpls:mpls/signaling-protocols/rsvp-te/global/%s" % self._segment_path()
class Counters(Entity):
"""
Platform wide RSVP statistics and counters
.. attribute:: path_timeouts
TODO
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: reservation_timeouts
TODO
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: rate_limited_messages
RSVP messages dropped due to rate limiting
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: in_path_messages
Number of received RSVP Path messages
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: in_path_error_messages
Number of received RSVP Path Error messages
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: in_path_tear_messages
Number of received RSVP Path Tear messages
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: in_reservation_messages
Number of received RSVP Resv messages
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: in_reservation_error_messages
Number of received RSVP Resv Error messages
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: in_reservation_tear_messages
Number of received RSVP Resv Tear messages
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: in_hello_messages
Number of received RSVP hello messages
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: in_srefresh_messages
Number of received RSVP summary refresh messages
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: in_ack_messages
Number of received RSVP refresh reduction ack messages
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: out_path_messages
Number of sent RSVP PATH messages
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: out_path_error_messages
Number of sent RSVP Path Error messages
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: out_path_tear_messages
Number of sent RSVP Path Tear messages
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: out_reservation_messages
Number of sent RSVP Resv messages
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: out_reservation_error_messages
Number of sent RSVP Resv Error messages
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: out_reservation_tear_messages
Number of sent RSVP Resv Tear messages
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: out_hello_messages
Number of sent RSVP hello messages
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: out_srefresh_messages
Number of sent RSVP summary refresh messages
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: out_ack_messages
Number of sent RSVP refresh reduction ack messages
**type**\: int
**range:** 0..18446744073709551615
"""
_prefix = 'mpls'
_revision = '2015-11-05'
def __init__(self):
    """Build the YDK entity for the global RSVP-TE ``counters`` state container."""
    super(Mpls.SignalingProtocols.RsvpTe.Global.State.Counters, self).__init__()

    # Position of this node in the YANG schema tree.
    self.yang_name = "counters"
    self.yang_parent_name = "state"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = []
    # This container holds only leafs -- no child containers or lists.
    self._child_container_classes = OrderedDict([])
    self._child_list_classes = OrderedDict([])
    # Map of Python attribute name -> YANG leaf; every counter is a uint64.
    self._leafs = OrderedDict([
        ('path_timeouts', YLeaf(YType.uint64, 'path-timeouts')),
        ('reservation_timeouts', YLeaf(YType.uint64, 'reservation-timeouts')),
        ('rate_limited_messages', YLeaf(YType.uint64, 'rate-limited-messages')),
        ('in_path_messages', YLeaf(YType.uint64, 'in-path-messages')),
        ('in_path_error_messages', YLeaf(YType.uint64, 'in-path-error-messages')),
        ('in_path_tear_messages', YLeaf(YType.uint64, 'in-path-tear-messages')),
        ('in_reservation_messages', YLeaf(YType.uint64, 'in-reservation-messages')),
        ('in_reservation_error_messages', YLeaf(YType.uint64, 'in-reservation-error-messages')),
        ('in_reservation_tear_messages', YLeaf(YType.uint64, 'in-reservation-tear-messages')),
        ('in_hello_messages', YLeaf(YType.uint64, 'in-hello-messages')),
        ('in_srefresh_messages', YLeaf(YType.uint64, 'in-srefresh-messages')),
        ('in_ack_messages', YLeaf(YType.uint64, 'in-ack-messages')),
        ('out_path_messages', YLeaf(YType.uint64, 'out-path-messages')),
        ('out_path_error_messages', YLeaf(YType.uint64, 'out-path-error-messages')),
        ('out_path_tear_messages', YLeaf(YType.uint64, 'out-path-tear-messages')),
        ('out_reservation_messages', YLeaf(YType.uint64, 'out-reservation-messages')),
        ('out_reservation_error_messages', YLeaf(YType.uint64, 'out-reservation-error-messages')),
        ('out_reservation_tear_messages', YLeaf(YType.uint64, 'out-reservation-tear-messages')),
        ('out_hello_messages', YLeaf(YType.uint64, 'out-hello-messages')),
        ('out_srefresh_messages', YLeaf(YType.uint64, 'out-srefresh-messages')),
        ('out_ack_messages', YLeaf(YType.uint64, 'out-ack-messages')),
    ])
    # Counter values start unset; they are populated when data is read from a device.
    self.path_timeouts = None
    self.reservation_timeouts = None
    self.rate_limited_messages = None
    self.in_path_messages = None
    self.in_path_error_messages = None
    self.in_path_tear_messages = None
    self.in_reservation_messages = None
    self.in_reservation_error_messages = None
    self.in_reservation_tear_messages = None
    self.in_hello_messages = None
    self.in_srefresh_messages = None
    self.in_ack_messages = None
    self.out_path_messages = None
    self.out_path_error_messages = None
    self.out_path_tear_messages = None
    self.out_reservation_messages = None
    self.out_reservation_error_messages = None
    self.out_reservation_tear_messages = None
    self.out_hello_messages = None
    self.out_srefresh_messages = None
    self.out_ack_messages = None
    # XPath fragments used when serializing this node.
    self._segment_path = lambda: "counters"
    self._absolute_path = lambda: "openconfig-mpls:mpls/signaling-protocols/rsvp-te/global/state/%s" % self._segment_path()
def __setattr__(self, name, value):
    """Route every attribute write through YDK's validating setter."""
    leaf_names = [
        'path_timeouts',
        'reservation_timeouts',
        'rate_limited_messages',
        'in_path_messages',
        'in_path_error_messages',
        'in_path_tear_messages',
        'in_reservation_messages',
        'in_reservation_error_messages',
        'in_reservation_tear_messages',
        'in_hello_messages',
        'in_srefresh_messages',
        'in_ack_messages',
        'out_path_messages',
        'out_path_error_messages',
        'out_path_tear_messages',
        'out_reservation_messages',
        'out_reservation_error_messages',
        'out_reservation_tear_messages',
        'out_hello_messages',
        'out_srefresh_messages',
        'out_ack_messages',
    ]
    self._perform_setattr(
        Mpls.SignalingProtocols.RsvpTe.Global.State.Counters,
        leaf_names, name, value)
class InterfaceAttributes(Entity):
"""
Attributes relating to RSVP\-TE enabled interfaces
.. attribute:: interface
list of per\-interface RSVP configurations
**type**\: list of :py:class:`Interface <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface>`
"""
_prefix = 'mpls'
_revision = '2015-11-05'
def __init__(self):
    """Build the YDK entity for the RSVP-TE ``interface-attributes`` container."""
    super(Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes, self).__init__()

    # Position of this node in the YANG schema tree.
    self.yang_name = "interface-attributes"
    self.yang_parent_name = "rsvp-te"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = []
    self._child_container_classes = OrderedDict([])
    # Single child list: keyed per-interface RSVP configuration entries.
    self._child_list_classes = OrderedDict([("interface", ("interface", Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface))])
    # This container itself carries no leafs.
    self._leafs = OrderedDict()

    # YList keeps the ordered collection of Interface entries bound to this parent.
    self.interface = YList(self)
    self._segment_path = lambda: "interface-attributes"
    self._absolute_path = lambda: "openconfig-mpls:mpls/signaling-protocols/rsvp-te/%s" % self._segment_path()
def __setattr__(self, name, value):
    """Delegate attribute writes to YDK's validating setter (no leafs on this node)."""
    leaf_names = []
    self._perform_setattr(
        Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes,
        leaf_names, name, value)
class Interface(Entity):
"""
list of per\-interface RSVP configurations
.. attribute:: interface_name (key)
references a configured IP interface
**type**\: str
**refers to**\: :py:class:`interface_name <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.Config>`
.. attribute:: config
Configuration of per\-interface RSVP parameters
**type**\: :py:class:`Config <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.Config>`
.. attribute:: state
Per\-interface RSVP protocol and state information
**type**\: :py:class:`State <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.State>`
.. attribute:: hellos
Top level container for RSVP hello parameters
**type**\: :py:class:`Hellos <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.Hellos>`
.. attribute:: authentication
Configuration and state parameters relating to RSVP authentication as per RFC2747
**type**\: :py:class:`Authentication <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.Authentication>`
.. attribute:: subscription
Bandwidth percentage reservable by RSVP on an interface
**type**\: :py:class:`Subscription <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.Subscription>`
.. attribute:: protection
link\-protection (NHOP) related configuration
**type**\: :py:class:`Protection <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.Protection>`
"""
_prefix = 'mpls'
_revision = '2015-11-05'
def __init__(self):
    """Build the YDK entity for one per-interface RSVP ``interface`` list entry."""
    super(Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface, self).__init__()

    # Position of this node in the YANG schema tree.
    self.yang_name = "interface"
    self.yang_parent_name = "interface-attributes"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    # The list is keyed by the referenced interface name.
    self.ylist_key_names = ['interface_name']
    # Six child containers: intended config, operational state, and the
    # hellos / authentication / subscription / protection feature blocks.
    self._child_container_classes = OrderedDict([("config", ("config", Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.Config)), ("state", ("state", Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.State)), ("hellos", ("hellos", Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.Hellos)), ("authentication", ("authentication", Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.Authentication)), ("subscription", ("subscription", Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.Subscription)), ("protection", ("protection", Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.Protection))])
    self._child_list_classes = OrderedDict([])
    # Only leaf on the list entry itself: the key.
    self._leafs = OrderedDict([
        ('interface_name', YLeaf(YType.str, 'interface-name')),
    ])
    self.interface_name = None

    # Instantiate and wire each child container under this entry.
    self.config = Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.Config()
    self.config.parent = self
    self._children_name_map["config"] = "config"
    self._children_yang_names.add("config")

    self.state = Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.State()
    self.state.parent = self
    self._children_name_map["state"] = "state"
    self._children_yang_names.add("state")

    self.hellos = Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.Hellos()
    self.hellos.parent = self
    self._children_name_map["hellos"] = "hellos"
    self._children_yang_names.add("hellos")

    self.authentication = Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.Authentication()
    self.authentication.parent = self
    self._children_name_map["authentication"] = "authentication"
    self._children_yang_names.add("authentication")

    self.subscription = Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.Subscription()
    self.subscription.parent = self
    self._children_name_map["subscription"] = "subscription"
    self._children_yang_names.add("subscription")

    self.protection = Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.Protection()
    self.protection.parent = self
    self._children_name_map["protection"] = "protection"
    self._children_yang_names.add("protection")

    # XPath segment includes the list-key predicate.
    self._segment_path = lambda: "interface" + "[interface-name='" + str(self.interface_name) + "']"
    self._absolute_path = lambda: "openconfig-mpls:mpls/signaling-protocols/rsvp-te/interface-attributes/%s" % self._segment_path()
def __setattr__(self, name, value):
    """Funnel attribute writes through YDK validation; the list key is the only leaf."""
    key_leafs = ['interface_name']
    self._perform_setattr(
        Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface,
        key_leafs, name, value)
class Config(Entity):
    """
    Per-interface RSVP configuration parameters.

    .. attribute:: interface_name

    	Name of configured IP interface

    	**type**\: str

    	**refers to**\: :py:class:`name <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface>`

    """

    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        super(Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.Config, self).__init__()

        # Schema-tree placement of this container.
        self.yang_name = "config"
        self.yang_parent_name = "interface"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # No nested containers or lists under this node.
        self._child_container_classes = OrderedDict()
        self._child_list_classes = OrderedDict()
        # Single string leaf naming the configured IP interface.
        self._leafs = OrderedDict(
            [('interface_name', YLeaf(YType.str, 'interface-name'))])
        self.interface_name = None
        self._segment_path = lambda: "config"

    def __setattr__(self, name, value):
        """Route attribute writes through YDK's validating setter."""
        writable_leafs = ['interface_name']
        self._perform_setattr(
            Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.Config,
            writable_leafs, name, value)
class State(Entity):
    """
    Per\-interface RSVP protocol and state information.

    .. attribute:: bandwidth

    	Available and reserved bandwidth by priority on the interface

    	**type**\: list of :py:class:`Bandwidth <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.State.Bandwidth>`

    .. attribute:: highwater_mark

    	Maximum bandwidth ever reserved

    	**type**\: int

    	**range:** 0..18446744073709551615

    .. attribute:: active_reservation_count

    	Number of active RSVP reservations

    	**type**\: int

    	**range:** 0..18446744073709551615

    .. attribute:: counters

    	Interface specific RSVP statistics and counters

    	**type**\: :py:class:`Counters <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.State.Counters>`

    """
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        """Build the YDK entity for the per-interface RSVP ``state`` container."""
        super(Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.State, self).__init__()

        # Position of this node in the YANG schema tree.
        self.yang_name = "state"
        self.yang_parent_name = "interface"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # One child container (counters) and one keyed child list (bandwidth).
        self._child_container_classes = OrderedDict([("counters", ("counters", Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.State.Counters))])
        self._child_list_classes = OrderedDict([("bandwidth", ("bandwidth", Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.State.Bandwidth))])
        # Two uint64 leafs on this node.
        self._leafs = OrderedDict([
            ('highwater_mark', YLeaf(YType.uint64, 'highwater-mark')),
            ('active_reservation_count', YLeaf(YType.uint64, 'active-reservation-count')),
        ])
        self.highwater_mark = None
        self.active_reservation_count = None

        self.counters = Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.State.Counters()
        self.counters.parent = self
        self._children_name_map["counters"] = "counters"
        self._children_yang_names.add("counters")

        self.bandwidth = YList(self)
        self._segment_path = lambda: "state"

    def __setattr__(self, name, value):
        self._perform_setattr(Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.State, ['highwater_mark', 'active_reservation_count'], name, value)


    class Bandwidth(Entity):
        """
        Available and reserved bandwidth by priority on
        the interface.

        .. attribute:: priority  (key)

        	RSVP priority level for LSPs traversing the interface

        	**type**\: int

        	**range:** 0..7

        .. attribute:: available_bandwidth

        	Bandwidth currently available

        	**type**\: int

        	**range:** 0..18446744073709551615

        .. attribute:: reserved_bandwidth

        	Bandwidth currently reserved

        	**type**\: int

        	**range:** 0..18446744073709551615

        """
        _prefix = 'mpls'
        _revision = '2015-11-05'

        def __init__(self):
            """Build the YDK entity for one ``bandwidth`` list entry (keyed by priority)."""
            super(Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.State.Bandwidth, self).__init__()

            self.yang_name = "bandwidth"
            self.yang_parent_name = "state"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            # The list is keyed by RSVP priority level.
            self.ylist_key_names = ['priority']
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('priority', YLeaf(YType.uint8, 'priority')),
                ('available_bandwidth', YLeaf(YType.uint64, 'available-bandwidth')),
                ('reserved_bandwidth', YLeaf(YType.uint64, 'reserved-bandwidth')),
            ])
            self.priority = None
            self.available_bandwidth = None
            self.reserved_bandwidth = None
            # XPath segment includes the priority key predicate.
            self._segment_path = lambda: "bandwidth" + "[priority='" + str(self.priority) + "']"

        def __setattr__(self, name, value):
            self._perform_setattr(Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.State.Bandwidth, ['priority', 'available_bandwidth', 'reserved_bandwidth'], name, value)


    class Counters(Entity):
        """
        Interface specific RSVP statistics and counters.

        Each attribute below is an ``int`` counter with
        **range:** 0..18446744073709551615 (YANG uint64).

        .. attribute:: in_path_messages

        	Number of received RSVP Path messages

        .. attribute:: in_path_error_messages

        	Number of received RSVP Path Error messages

        .. attribute:: in_path_tear_messages

        	Number of received RSVP Path Tear messages

        .. attribute:: in_reservation_messages

        	Number of received RSVP Resv messages

        .. attribute:: in_reservation_error_messages

        	Number of received RSVP Resv Error messages

        .. attribute:: in_reservation_tear_messages

        	Number of received RSVP Resv Tear messages

        .. attribute:: in_hello_messages

        	Number of received RSVP hello messages

        .. attribute:: in_srefresh_messages

        	Number of received RSVP summary refresh messages

        .. attribute:: in_ack_messages

        	Number of received RSVP refresh reduction ack messages

        .. attribute:: out_path_messages

        	Number of sent RSVP PATH messages

        .. attribute:: out_path_error_messages

        	Number of sent RSVP Path Error messages

        .. attribute:: out_path_tear_messages

        	Number of sent RSVP Path Tear messages

        .. attribute:: out_reservation_messages

        	Number of sent RSVP Resv messages

        .. attribute:: out_reservation_error_messages

        	Number of sent RSVP Resv Error messages

        .. attribute:: out_reservation_tear_messages

        	Number of sent RSVP Resv Tear messages

        .. attribute:: out_hello_messages

        	Number of sent RSVP hello messages

        .. attribute:: out_srefresh_messages

        	Number of sent RSVP summary refresh messages

        .. attribute:: out_ack_messages

        	Number of sent RSVP refresh reduction ack messages

        """
        _prefix = 'mpls'
        _revision = '2015-11-05'

        def __init__(self):
            """Build the YDK entity for the per-interface RSVP ``counters`` container."""
            super(Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.State.Counters, self).__init__()

            self.yang_name = "counters"
            self.yang_parent_name = "state"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            # Leafs only -- no nested containers or lists.
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([])
            # Map of Python attribute name -> YANG leaf; every counter is a uint64.
            self._leafs = OrderedDict([
                ('in_path_messages', YLeaf(YType.uint64, 'in-path-messages')),
                ('in_path_error_messages', YLeaf(YType.uint64, 'in-path-error-messages')),
                ('in_path_tear_messages', YLeaf(YType.uint64, 'in-path-tear-messages')),
                ('in_reservation_messages', YLeaf(YType.uint64, 'in-reservation-messages')),
                ('in_reservation_error_messages', YLeaf(YType.uint64, 'in-reservation-error-messages')),
                ('in_reservation_tear_messages', YLeaf(YType.uint64, 'in-reservation-tear-messages')),
                ('in_hello_messages', YLeaf(YType.uint64, 'in-hello-messages')),
                ('in_srefresh_messages', YLeaf(YType.uint64, 'in-srefresh-messages')),
                ('in_ack_messages', YLeaf(YType.uint64, 'in-ack-messages')),
                ('out_path_messages', YLeaf(YType.uint64, 'out-path-messages')),
                ('out_path_error_messages', YLeaf(YType.uint64, 'out-path-error-messages')),
                ('out_path_tear_messages', YLeaf(YType.uint64, 'out-path-tear-messages')),
                ('out_reservation_messages', YLeaf(YType.uint64, 'out-reservation-messages')),
                ('out_reservation_error_messages', YLeaf(YType.uint64, 'out-reservation-error-messages')),
                ('out_reservation_tear_messages', YLeaf(YType.uint64, 'out-reservation-tear-messages')),
                ('out_hello_messages', YLeaf(YType.uint64, 'out-hello-messages')),
                ('out_srefresh_messages', YLeaf(YType.uint64, 'out-srefresh-messages')),
                ('out_ack_messages', YLeaf(YType.uint64, 'out-ack-messages')),
            ])
            # Counter values start unset; they are populated from the device.
            self.in_path_messages = None
            self.in_path_error_messages = None
            self.in_path_tear_messages = None
            self.in_reservation_messages = None
            self.in_reservation_error_messages = None
            self.in_reservation_tear_messages = None
            self.in_hello_messages = None
            self.in_srefresh_messages = None
            self.in_ack_messages = None
            self.out_path_messages = None
            self.out_path_error_messages = None
            self.out_path_tear_messages = None
            self.out_reservation_messages = None
            self.out_reservation_error_messages = None
            self.out_reservation_tear_messages = None
            self.out_hello_messages = None
            self.out_srefresh_messages = None
            self.out_ack_messages = None
            self._segment_path = lambda: "counters"

        def __setattr__(self, name, value):
            self._perform_setattr(Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.State.Counters, ['in_path_messages', 'in_path_error_messages', 'in_path_tear_messages', 'in_reservation_messages', 'in_reservation_error_messages', 'in_reservation_tear_messages', 'in_hello_messages', 'in_srefresh_messages', 'in_ack_messages', 'out_path_messages', 'out_path_error_messages', 'out_path_tear_messages', 'out_reservation_messages', 'out_reservation_error_messages', 'out_reservation_tear_messages', 'out_hello_messages', 'out_srefresh_messages', 'out_ack_messages'], name, value)
class Hellos(Entity):
    """
    Top level container for RSVP hello parameters.

    .. attribute:: config

    	Configuration parameters relating to RSVP hellos

    	**type**\: :py:class:`Config <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.Hellos.Config>`

    .. attribute:: state

    	State information associated with RSVP hellos

    	**type**\: :py:class:`State <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.Hellos.State>`

    """
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        """Build the YDK entity for the per-interface ``hellos`` container."""
        super(Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.Hellos, self).__init__()

        self.yang_name = "hellos"
        self.yang_parent_name = "interface"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # config/state pair holding intended and applied hello parameters.
        self._child_container_classes = OrderedDict([("config", ("config", Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.Hellos.Config)), ("state", ("state", Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.Hellos.State))])
        self._child_list_classes = OrderedDict([])
        self._leafs = OrderedDict()

        self.config = Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.Hellos.Config()
        self.config.parent = self
        self._children_name_map["config"] = "config"
        self._children_yang_names.add("config")

        self.state = Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.Hellos.State()
        self.state.parent = self
        self._children_name_map["state"] = "state"
        self._children_yang_names.add("state")

        self._segment_path = lambda: "hellos"


    class Config(Entity):
        """
        Configuration parameters relating to RSVP
        hellos.

        .. attribute:: hello_interval

        	set the interval in ms between RSVP hello messages

        	**type**\: int

        	**range:** 1000..60000

        	**units**\: milliseconds

        	**default value**\: 9000

        .. attribute:: refresh_reduction

        	enables all RSVP refresh reduction message bundling, RSVP message ID, reliable message delivery and summary refresh

        	**type**\: bool

        	**default value**\: true

        """
        _prefix = 'mpls'
        _revision = '2015-11-05'

        def __init__(self):
            """Build the YDK entity for the hellos ``config`` container."""
            super(Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.Hellos.Config, self).__init__()

            self.yang_name = "config"
            self.yang_parent_name = "hellos"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('hello_interval', YLeaf(YType.uint16, 'hello-interval')),
                ('refresh_reduction', YLeaf(YType.boolean, 'refresh-reduction')),
            ])
            self.hello_interval = None
            self.refresh_reduction = None
            self._segment_path = lambda: "config"

        def __setattr__(self, name, value):
            self._perform_setattr(Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.Hellos.Config, ['hello_interval', 'refresh_reduction'], name, value)


    class State(Entity):
        """
        State information associated with RSVP hellos.

        .. attribute:: hello_interval

        	set the interval in ms between RSVP hello messages

        	**type**\: int

        	**range:** 1000..60000

        	**units**\: milliseconds

        	**default value**\: 9000

        .. attribute:: refresh_reduction

        	enables all RSVP refresh reduction message bundling, RSVP message ID, reliable message delivery and summary refresh

        	**type**\: bool

        	**default value**\: true

        """
        _prefix = 'mpls'
        _revision = '2015-11-05'

        def __init__(self):
            """Build the YDK entity for the hellos ``state`` container."""
            super(Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.Hellos.State, self).__init__()

            self.yang_name = "state"
            self.yang_parent_name = "hellos"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('hello_interval', YLeaf(YType.uint16, 'hello-interval')),
                ('refresh_reduction', YLeaf(YType.boolean, 'refresh-reduction')),
            ])
            self.hello_interval = None
            self.refresh_reduction = None
            self._segment_path = lambda: "state"

        def __setattr__(self, name, value):
            self._perform_setattr(Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.Hellos.State, ['hello_interval', 'refresh_reduction'], name, value)
class Authentication(Entity):
    """
    Configuration and state parameters relating to RSVP
    authentication as per RFC2747.

    .. attribute:: config

    	Configuration parameters relating to authentication

    	**type**\: :py:class:`Config <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.Authentication.Config>`

    .. attribute:: state

    	State information associated with authentication

    	**type**\: :py:class:`State <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.Authentication.State>`

    """
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        """Build the YDK entity for the per-interface ``authentication`` container."""
        super(Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.Authentication, self).__init__()

        self.yang_name = "authentication"
        self.yang_parent_name = "interface"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # config/state pair holding intended and applied authentication parameters.
        self._child_container_classes = OrderedDict([("config", ("config", Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.Authentication.Config)), ("state", ("state", Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.Authentication.State))])
        self._child_list_classes = OrderedDict([])
        self._leafs = OrderedDict()

        self.config = Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.Authentication.Config()
        self.config.parent = self
        self._children_name_map["config"] = "config"
        self._children_yang_names.add("config")

        self.state = Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.Authentication.State()
        self.state.parent = self
        self._children_name_map["state"] = "state"
        self._children_yang_names.add("state")

        self._segment_path = lambda: "authentication"


    class Config(Entity):
        """
        Configuration parameters relating
        to authentication.

        .. attribute:: enable

        	Enables RSVP authentication on the node

        	**type**\: bool

        	**default value**\: false

        .. attribute:: authentication_key

        	authenticate RSVP signaling messages

        	**type**\: str

        	**length:** 1..32

        """
        _prefix = 'mpls'
        _revision = '2015-11-05'

        def __init__(self):
            """Build the YDK entity for the authentication ``config`` container."""
            super(Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.Authentication.Config, self).__init__()

            self.yang_name = "config"
            self.yang_parent_name = "authentication"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('enable', YLeaf(YType.boolean, 'enable')),
                ('authentication_key', YLeaf(YType.str, 'authentication-key')),
            ])
            self.enable = None
            self.authentication_key = None
            self._segment_path = lambda: "config"

        def __setattr__(self, name, value):
            self._perform_setattr(Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.Authentication.Config, ['enable', 'authentication_key'], name, value)


    class State(Entity):
        """
        State information associated
        with authentication.

        .. attribute:: enable

        	Enables RSVP authentication on the node

        	**type**\: bool

        	**default value**\: false

        .. attribute:: authentication_key

        	authenticate RSVP signaling messages

        	**type**\: str

        	**length:** 1..32

        """
        _prefix = 'mpls'
        _revision = '2015-11-05'

        def __init__(self):
            """Build the YDK entity for the authentication ``state`` container."""
            super(Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.Authentication.State, self).__init__()

            self.yang_name = "state"
            self.yang_parent_name = "authentication"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('enable', YLeaf(YType.boolean, 'enable')),
                ('authentication_key', YLeaf(YType.str, 'authentication-key')),
            ])
            self.enable = None
            self.authentication_key = None
            self._segment_path = lambda: "state"

        def __setattr__(self, name, value):
            self._perform_setattr(Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.Authentication.State, ['enable', 'authentication_key'], name, value)
class Subscription(Entity):
    """
    Bandwidth percentage reservable by RSVP
    on an interface.

    .. attribute:: config

    	Configuration parameters relating to RSVP subscription options

    	**type**\: :py:class:`Config <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.Subscription.Config>`

    .. attribute:: state

    	State parameters relating to RSVP subscription options

    	**type**\: :py:class:`State <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.Subscription.State>`

    """

    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        """Build the YDK entity for the per-interface ``subscription`` container."""
        # Local aliases keep the deeply nested class paths readable.
        cfg_cls = Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.Subscription.Config
        st_cls = Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.Subscription.State
        super(Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.Subscription, self).__init__()

        self.yang_name = "subscription"
        self.yang_parent_name = "interface"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # config/state pair holding intended and applied subscription values.
        self._child_container_classes = OrderedDict([
            ("config", ("config", cfg_cls)),
            ("state", ("state", st_cls)),
        ])
        self._child_list_classes = OrderedDict([])
        self._leafs = OrderedDict()

        self.config = cfg_cls()
        self.config.parent = self
        self._children_name_map["config"] = "config"
        self._children_yang_names.add("config")

        self.state = st_cls()
        self.state.parent = self
        self._children_name_map["state"] = "state"
        self._children_yang_names.add("state")

        self._segment_path = lambda: "subscription"


    class Config(Entity):
        """
        Configuration parameters relating to RSVP
        subscription options.

        .. attribute:: subscription

        	percentage of the interface bandwidth that RSVP can reserve

        	**type**\: int

        	**range:** 0..100

        """

        _prefix = 'mpls'
        _revision = '2015-11-05'

        def __init__(self):
            super(Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.Subscription.Config, self).__init__()

            self.yang_name = "config"
            self.yang_parent_name = "subscription"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_container_classes = OrderedDict()
            self._child_list_classes = OrderedDict()
            # Single uint8 leaf: reservable bandwidth percentage (0..100).
            self._leafs = OrderedDict(
                [('subscription', YLeaf(YType.uint8, 'subscription'))]
            )
            self.subscription = None
            self._segment_path = lambda: "config"

        def __setattr__(self, name, value):
            """Route attribute writes through YDK's validating setter."""
            writable_leafs = ['subscription']
            self._perform_setattr(
                Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.Subscription.Config,
                writable_leafs, name, value)


    class State(Entity):
        """
        State parameters relating to RSVP
        subscription options.

        .. attribute:: subscription

        	percentage of the interface bandwidth that RSVP can reserve

        	**type**\: int

        	**range:** 0..100

        """

        _prefix = 'mpls'
        _revision = '2015-11-05'

        def __init__(self):
            super(Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.Subscription.State, self).__init__()

            self.yang_name = "state"
            self.yang_parent_name = "subscription"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_container_classes = OrderedDict()
            self._child_list_classes = OrderedDict()
            # Single uint8 leaf: applied reservable bandwidth percentage (0..100).
            self._leafs = OrderedDict(
                [('subscription', YLeaf(YType.uint8, 'subscription'))]
            )
            self.subscription = None
            self._segment_path = lambda: "state"

        def __setattr__(self, name, value):
            """Route attribute writes through YDK's validating setter."""
            writable_leafs = ['subscription']
            self._perform_setattr(
                Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.Subscription.State,
                writable_leafs, name, value)
class Protection(Entity):
"""
link\-protection (NHOP) related configuration
.. attribute:: config
Configuration for link\-protection
**type**\: :py:class:`Config <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.Protection.Config>`
.. attribute:: state
State for link\-protection
**type**\: :py:class:`State <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.Protection.State>`
"""
_prefix = 'mpls'
_revision = '2015-11-05'
def __init__(self):
    """Build the YDK entity for the per-interface ``protection`` container."""
    super(Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.Protection, self).__init__()

    self.yang_name = "protection"
    self.yang_parent_name = "interface"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # config/state pair holding intended and applied link-protection parameters.
    self._child_container_classes = OrderedDict([("config", ("config", Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.Protection.Config)), ("state", ("state", Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.Protection.State))])
    self._child_list_classes = OrderedDict([])
    self._leafs = OrderedDict()

    self.config = Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.Protection.Config()
    self.config.parent = self
    self._children_name_map["config"] = "config"
    self._children_yang_names.add("config")

    self.state = Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.Protection.State()
    self.state.parent = self
    self._children_name_map["state"] = "state"
    self._children_yang_names.add("state")

    self._segment_path = lambda: "protection"
class Config(Entity):
"""
Configuration for link\-protection
.. attribute:: link_protection_style_requested
Style of mpls frr protection desired\: link, link\-node, or unprotected
**type**\: :py:class:`ProtectionType <ydk.models.openconfig.openconfig_mpls_types.ProtectionType>`
**default value**\: mplst:link-node-protection-requested
.. attribute:: bypass_optimize_interval
interval between periodic optimization of the bypass LSPs
**type**\: int
**range:** 0..65535
**units**\: seconds
"""
_prefix = 'mpls'
_revision = '2015-11-05'
def __init__(self):
super(Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.Protection.Config, self).__init__()
self.yang_name = "config"
self.yang_parent_name = "protection"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('link_protection_style_requested', YLeaf(YType.identityref, 'link-protection-style-requested')),
('bypass_optimize_interval', YLeaf(YType.uint16, 'bypass-optimize-interval')),
])
self.link_protection_style_requested = None
self.bypass_optimize_interval = None
self._segment_path = lambda: "config"
def __setattr__(self, name, value):
self._perform_setattr(Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.Protection.Config, ['link_protection_style_requested', 'bypass_optimize_interval'], name, value)
class State(Entity):
"""
State for link\-protection
.. attribute:: link_protection_style_requested
Style of mpls frr protection desired\: link, link\-node, or unprotected
**type**\: :py:class:`ProtectionType <ydk.models.openconfig.openconfig_mpls_types.ProtectionType>`
**default value**\: mplst:link-node-protection-requested
.. attribute:: bypass_optimize_interval
interval between periodic optimization of the bypass LSPs
**type**\: int
**range:** 0..65535
**units**\: seconds
"""
_prefix = 'mpls'
_revision = '2015-11-05'
def __init__(self):
super(Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.Protection.State, self).__init__()
self.yang_name = "state"
self.yang_parent_name = "protection"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('link_protection_style_requested', YLeaf(YType.identityref, 'link-protection-style-requested')),
('bypass_optimize_interval', YLeaf(YType.uint16, 'bypass-optimize-interval')),
])
self.link_protection_style_requested = None
self.bypass_optimize_interval = None
self._segment_path = lambda: "state"
def __setattr__(self, name, value):
self._perform_setattr(Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes.Interface.Protection.State, ['link_protection_style_requested', 'bypass_optimize_interval'], name, value)
class SegmentRouting(Entity):
    """
    SR global signaling config
    .. attribute:: srgb
    List of Segment Routing Global Block (SRGB) entries. These label blocks are reserved to be allocated as domain\-wide entries
    **type**\: list of :py:class:`Srgb <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.SegmentRouting.Srgb>`
    .. attribute:: interfaces
    List of interfaces with associated segment routing configuration
    **type**\: list of :py:class:`Interfaces <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.SegmentRouting.Interfaces>`
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        super(Mpls.SignalingProtocols.SegmentRouting, self).__init__()
        # Schema bookkeeping consumed by the YDK Entity base class.
        self.yang_name = "segment-routing"
        self.yang_parent_name = "signaling-protocols"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        # YANG list name -> (python attribute name, element class).
        self._child_list_classes = OrderedDict([("srgb", ("srgb", Mpls.SignalingProtocols.SegmentRouting.Srgb)), ("interfaces", ("interfaces", Mpls.SignalingProtocols.SegmentRouting.Interfaces))])
        self._leafs = OrderedDict()
        # YANG lists are held in YList containers owned by this node.
        self.srgb = YList(self)
        self.interfaces = YList(self)
        self._segment_path = lambda: "segment-routing"
        self._absolute_path = lambda: "openconfig-mpls:mpls/signaling-protocols/%s" % self._segment_path()

    def __setattr__(self, name, value):
        # Delegate to Entity; this node has no leafs, hence the empty list.
        self._perform_setattr(Mpls.SignalingProtocols.SegmentRouting, [], name, value)

    class Srgb(Entity):
        """
        List of Segment Routing Global Block (SRGB) entries. These
        label blocks are reserved to be allocated as domain\-wide
        entries.
        .. attribute:: lower_bound (key)
        Lower value in the block
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: upper_bound (key)
        Upper value in the block
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: config
        Configuration parameters relating to the Segment Routing Global Block (SRGB)
        **type**\: :py:class:`Config <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.SegmentRouting.Srgb.Config>`
        .. attribute:: state
        State parameters relating to the Segment Routing Global Block (SRGB)
        **type**\: :py:class:`State <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.SegmentRouting.Srgb.State>`
        """

        _prefix = 'mpls'
        _revision = '2015-11-05'

        def __init__(self):
            super(Mpls.SignalingProtocols.SegmentRouting.Srgb, self).__init__()
            self.yang_name = "srgb"
            self.yang_parent_name = "segment-routing"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            # This list entry is keyed by (lower_bound, upper_bound).
            self.ylist_key_names = ['lower_bound','upper_bound']
            self._child_container_classes = OrderedDict([("config", ("config", Mpls.SignalingProtocols.SegmentRouting.Srgb.Config)), ("state", ("state", Mpls.SignalingProtocols.SegmentRouting.Srgb.State))])
            self._child_list_classes = OrderedDict([])
            # NOTE: _leafs must be populated before the leaf attributes below
            # are assigned; assignments go through the __setattr__ override.
            self._leafs = OrderedDict([
                ('lower_bound', YLeaf(YType.uint32, 'lower-bound')),
                ('upper_bound', YLeaf(YType.uint32, 'upper-bound')),
            ])
            self.lower_bound = None
            self.upper_bound = None
            self.config = Mpls.SignalingProtocols.SegmentRouting.Srgb.Config()
            self.config.parent = self
            self._children_name_map["config"] = "config"
            self._children_yang_names.add("config")
            self.state = Mpls.SignalingProtocols.SegmentRouting.Srgb.State()
            self.state.parent = self
            self._children_name_map["state"] = "state"
            self._children_yang_names.add("state")
            # Keyed-list path segment: both keys appear as XPath predicates.
            self._segment_path = lambda: "srgb" + "[lower-bound='" + str(self.lower_bound) + "']" + "[upper-bound='" + str(self.upper_bound) + "']"
            self._absolute_path = lambda: "openconfig-mpls:mpls/signaling-protocols/segment-routing/%s" % self._segment_path()

        def __setattr__(self, name, value):
            self._perform_setattr(Mpls.SignalingProtocols.SegmentRouting.Srgb, ['lower_bound', 'upper_bound'], name, value)

        class Config(Entity):
            """
            Configuration parameters relating to the Segment Routing
            Global Block (SRGB)
            .. attribute:: lower_bound
            Lower value in the block
            **type**\: int
            **range:** 0..4294967295
            .. attribute:: upper_bound
            Upper value in the block
            **type**\: int
            **range:** 0..4294967295
            """

            _prefix = 'mpls'
            _revision = '2015-11-05'

            def __init__(self):
                super(Mpls.SignalingProtocols.SegmentRouting.Srgb.Config, self).__init__()
                self.yang_name = "config"
                self.yang_parent_name = "srgb"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_container_classes = OrderedDict([])
                self._child_list_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('lower_bound', YLeaf(YType.uint32, 'lower-bound')),
                    ('upper_bound', YLeaf(YType.uint32, 'upper-bound')),
                ])
                self.lower_bound = None
                self.upper_bound = None
                self._segment_path = lambda: "config"

            def __setattr__(self, name, value):
                self._perform_setattr(Mpls.SignalingProtocols.SegmentRouting.Srgb.Config, ['lower_bound', 'upper_bound'], name, value)

        class State(Entity):
            """
            State parameters relating to the Segment Routing Global
            Block (SRGB)
            .. attribute:: lower_bound
            Lower value in the block
            **type**\: int
            **range:** 0..4294967295
            .. attribute:: upper_bound
            Upper value in the block
            **type**\: int
            **range:** 0..4294967295
            .. attribute:: size
            Number of indexes in the SRGB block
            **type**\: int
            **range:** 0..4294967295
            .. attribute:: free
            Number of SRGB indexes that have not yet been allocated
            **type**\: int
            **range:** 0..4294967295
            .. attribute:: used
            Number of SRGB indexes that are currently allocated
            **type**\: int
            **range:** 0..4294967295
            """

            _prefix = 'mpls'
            _revision = '2015-11-05'

            def __init__(self):
                super(Mpls.SignalingProtocols.SegmentRouting.Srgb.State, self).__init__()
                self.yang_name = "state"
                self.yang_parent_name = "srgb"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_container_classes = OrderedDict([])
                self._child_list_classes = OrderedDict([])
                # Operational-state leafs: the configured bounds plus counters.
                self._leafs = OrderedDict([
                    ('lower_bound', YLeaf(YType.uint32, 'lower-bound')),
                    ('upper_bound', YLeaf(YType.uint32, 'upper-bound')),
                    ('size', YLeaf(YType.uint32, 'size')),
                    ('free', YLeaf(YType.uint32, 'free')),
                    ('used', YLeaf(YType.uint32, 'used')),
                ])
                self.lower_bound = None
                self.upper_bound = None
                self.size = None
                self.free = None
                self.used = None
                self._segment_path = lambda: "state"

            def __setattr__(self, name, value):
                self._perform_setattr(Mpls.SignalingProtocols.SegmentRouting.Srgb.State, ['lower_bound', 'upper_bound', 'size', 'free', 'used'], name, value)

    class Interfaces(Entity):
        """
        List of interfaces with associated segment routing
        configuration
        .. attribute:: interface (key)
        Reference to the interface for which segment routing configuration is to be applied
        **type**\: str
        **refers to**\: :py:class:`name <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface>`
        .. attribute:: config
        Interface configuration parameters for Segment Routing relating to the specified interface
        **type**\: :py:class:`Config <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.SegmentRouting.Interfaces.Config>`
        .. attribute:: state
        State parameters for Segment Routing features relating to the specified interface
        **type**\: :py:class:`State <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.SegmentRouting.Interfaces.State>`
        .. attribute:: adjacency_sid
        Configuration for Adjacency SIDs that are related to the specified interface
        **type**\: :py:class:`AdjacencySid <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.SegmentRouting.Interfaces.AdjacencySid>`
        """

        _prefix = 'mpls'
        _revision = '2015-11-05'

        def __init__(self):
            super(Mpls.SignalingProtocols.SegmentRouting.Interfaces, self).__init__()
            self.yang_name = "interfaces"
            self.yang_parent_name = "segment-routing"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            # Keyed by the interface leafref.
            self.ylist_key_names = ['interface']
            self._child_container_classes = OrderedDict([("config", ("config", Mpls.SignalingProtocols.SegmentRouting.Interfaces.Config)), ("state", ("state", Mpls.SignalingProtocols.SegmentRouting.Interfaces.State)), ("adjacency-sid", ("adjacency_sid", Mpls.SignalingProtocols.SegmentRouting.Interfaces.AdjacencySid))])
            self._child_list_classes = OrderedDict([])
            # 'interface' is a leafref, represented as a string leaf.
            self._leafs = OrderedDict([
                ('interface', YLeaf(YType.str, 'interface')),
            ])
            self.interface = None
            self.config = Mpls.SignalingProtocols.SegmentRouting.Interfaces.Config()
            self.config.parent = self
            self._children_name_map["config"] = "config"
            self._children_yang_names.add("config")
            self.state = Mpls.SignalingProtocols.SegmentRouting.Interfaces.State()
            self.state.parent = self
            self._children_name_map["state"] = "state"
            self._children_yang_names.add("state")
            self.adjacency_sid = Mpls.SignalingProtocols.SegmentRouting.Interfaces.AdjacencySid()
            self.adjacency_sid.parent = self
            self._children_name_map["adjacency_sid"] = "adjacency-sid"
            self._children_yang_names.add("adjacency-sid")
            self._segment_path = lambda: "interfaces" + "[interface='" + str(self.interface) + "']"
            self._absolute_path = lambda: "openconfig-mpls:mpls/signaling-protocols/segment-routing/%s" % self._segment_path()

        def __setattr__(self, name, value):
            self._perform_setattr(Mpls.SignalingProtocols.SegmentRouting.Interfaces, ['interface'], name, value)

        class Config(Entity):
            """
            Interface configuration parameters for Segment Routing
            relating to the specified interface
            .. attribute:: interface
            Reference to the interface for which segment routing configuration is to be applied
            **type**\: str
            **refers to**\: :py:class:`name <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface>`
            """

            _prefix = 'mpls'
            _revision = '2015-11-05'

            def __init__(self):
                super(Mpls.SignalingProtocols.SegmentRouting.Interfaces.Config, self).__init__()
                self.yang_name = "config"
                self.yang_parent_name = "interfaces"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_container_classes = OrderedDict([])
                self._child_list_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('interface', YLeaf(YType.str, 'interface')),
                ])
                self.interface = None
                self._segment_path = lambda: "config"

            def __setattr__(self, name, value):
                self._perform_setattr(Mpls.SignalingProtocols.SegmentRouting.Interfaces.Config, ['interface'], name, value)

        class State(Entity):
            """
            State parameters for Segment Routing features relating
            to the specified interface
            .. attribute:: interface
            Reference to the interface for which segment routing configuration is to be applied
            **type**\: str
            **refers to**\: :py:class:`name <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface>`
            """

            _prefix = 'mpls'
            _revision = '2015-11-05'

            def __init__(self):
                super(Mpls.SignalingProtocols.SegmentRouting.Interfaces.State, self).__init__()
                self.yang_name = "state"
                self.yang_parent_name = "interfaces"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_container_classes = OrderedDict([])
                self._child_list_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('interface', YLeaf(YType.str, 'interface')),
                ])
                self.interface = None
                self._segment_path = lambda: "state"

            def __setattr__(self, name, value):
                self._perform_setattr(Mpls.SignalingProtocols.SegmentRouting.Interfaces.State, ['interface'], name, value)

        class AdjacencySid(Entity):
            """
            Configuration for Adjacency SIDs that are related to
            the specified interface
            .. attribute:: config
            Configuration parameters for the Adjacency\-SIDs that are related to this interface
            **type**\: :py:class:`Config <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.SegmentRouting.Interfaces.AdjacencySid.Config>`
            .. attribute:: state
            State parameters for the Adjacency\-SIDs that are related to this interface
            **type**\: :py:class:`State <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.SegmentRouting.Interfaces.AdjacencySid.State>`
            """

            _prefix = 'mpls'
            _revision = '2015-11-05'

            def __init__(self):
                super(Mpls.SignalingProtocols.SegmentRouting.Interfaces.AdjacencySid, self).__init__()
                self.yang_name = "adjacency-sid"
                self.yang_parent_name = "interfaces"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_container_classes = OrderedDict([("config", ("config", Mpls.SignalingProtocols.SegmentRouting.Interfaces.AdjacencySid.Config)), ("state", ("state", Mpls.SignalingProtocols.SegmentRouting.Interfaces.AdjacencySid.State))])
                self._child_list_classes = OrderedDict([])
                # No leafs of its own, hence no __setattr__ override on this class.
                self._leafs = OrderedDict()
                self.config = Mpls.SignalingProtocols.SegmentRouting.Interfaces.AdjacencySid.Config()
                self.config.parent = self
                self._children_name_map["config"] = "config"
                self._children_yang_names.add("config")
                self.state = Mpls.SignalingProtocols.SegmentRouting.Interfaces.AdjacencySid.State()
                self.state.parent = self
                self._children_name_map["state"] = "state"
                self._children_yang_names.add("state")
                self._segment_path = lambda: "adjacency-sid"

            class Config(Entity):
                """
                Configuration parameters for the Adjacency\-SIDs
                that are related to this interface
                .. attribute:: advertise
                Specifies the type of adjacency SID which should be advertised for the specified entity
                **type**\: list of :py:class:`Advertise <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.SegmentRouting.Interfaces.AdjacencySid.Config.Advertise>`
                .. attribute:: groups
                Specifies the groups to which this interface belongs. Setting a value in this list results in an additional AdjSID being advertised, with the S\-bit set to 1. The AdjSID is assumed to be protected
                **type**\: list of int
                **range:** 0..4294967295
                """

                _prefix = 'mpls'
                _revision = '2015-11-05'

                def __init__(self):
                    super(Mpls.SignalingProtocols.SegmentRouting.Interfaces.AdjacencySid.Config, self).__init__()
                    self.yang_name = "config"
                    self.yang_parent_name = "adjacency-sid"
                    self.is_top_level_class = False
                    self.has_list_ancestor = True
                    self.ylist_key_names = []
                    self._child_container_classes = OrderedDict([])
                    self._child_list_classes = OrderedDict([])
                    # Both leafs are leaf-lists, hence YLeafList descriptors
                    # and plain-list initial values below.
                    self._leafs = OrderedDict([
                        ('advertise', YLeafList(YType.enumeration, 'advertise')),
                        ('groups', YLeafList(YType.uint32, 'groups')),
                    ])
                    self.advertise = []
                    self.groups = []
                    self._segment_path = lambda: "config"

                def __setattr__(self, name, value):
                    self._perform_setattr(Mpls.SignalingProtocols.SegmentRouting.Interfaces.AdjacencySid.Config, ['advertise', 'groups'], name, value)

                class Advertise(Enum):
                    """
                    Advertise (Enum Class)
                    Specifies the type of adjacency SID which should be
                    advertised for the specified entity.
                    .. data:: PROTECTED = 0
                    Advertise an Adjacency-SID for this interface, which is
                    eligible to be protected using a local protection
                    mechanism on the local LSR. The local protection
                    mechanism selected is dependent upon the configuration
                    of RSVP-TE FRR or LFA elsewhere on the system
                    .. data:: UNPROTECTED = 1
                    Advertise an Adjacency-SID for this interface, which is
                    explicitly excluded from being protected by any local
                    protection mechanism
                    """

                    PROTECTED = Enum.YLeaf(0, "PROTECTED")
                    UNPROTECTED = Enum.YLeaf(1, "UNPROTECTED")

            class State(Entity):
                """
                State parameters for the Adjacency\-SIDs that are
                related to this interface
                .. attribute:: advertise
                Specifies the type of adjacency SID which should be advertised for the specified entity
                **type**\: list of :py:class:`Advertise <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.SegmentRouting.Interfaces.AdjacencySid.State.Advertise>`
                .. attribute:: groups
                Specifies the groups to which this interface belongs. Setting a value in this list results in an additional AdjSID being advertised, with the S\-bit set to 1. The AdjSID is assumed to be protected
                **type**\: list of int
                **range:** 0..4294967295
                """

                _prefix = 'mpls'
                _revision = '2015-11-05'

                def __init__(self):
                    super(Mpls.SignalingProtocols.SegmentRouting.Interfaces.AdjacencySid.State, self).__init__()
                    self.yang_name = "state"
                    self.yang_parent_name = "adjacency-sid"
                    self.is_top_level_class = False
                    self.has_list_ancestor = True
                    self.ylist_key_names = []
                    self._child_container_classes = OrderedDict([])
                    self._child_list_classes = OrderedDict([])
                    self._leafs = OrderedDict([
                        ('advertise', YLeafList(YType.enumeration, 'advertise')),
                        ('groups', YLeafList(YType.uint32, 'groups')),
                    ])
                    self.advertise = []
                    self.groups = []
                    self._segment_path = lambda: "state"

                def __setattr__(self, name, value):
                    self._perform_setattr(Mpls.SignalingProtocols.SegmentRouting.Interfaces.AdjacencySid.State, ['advertise', 'groups'], name, value)

                class Advertise(Enum):
                    """
                    Advertise (Enum Class)
                    Specifies the type of adjacency SID which should be
                    advertised for the specified entity.
                    .. data:: PROTECTED = 0
                    Advertise an Adjacency-SID for this interface, which is
                    eligible to be protected using a local protection
                    mechanism on the local LSR. The local protection
                    mechanism selected is dependent upon the configuration
                    of RSVP-TE FRR or LFA elsewhere on the system
                    .. data:: UNPROTECTED = 1
                    Advertise an Adjacency-SID for this interface, which is
                    explicitly excluded from being protected by any local
                    protection mechanism
                    """

                    PROTECTED = Enum.YLeaf(0, "PROTECTED")
                    UNPROTECTED = Enum.YLeaf(1, "UNPROTECTED")
class Ldp(Entity):
    """
    LDP global signaling configuration
    .. attribute:: timers
    LDP timers
    **type**\: :py:class:`Timers <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.Ldp.Timers>`
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        super(Mpls.SignalingProtocols.Ldp, self).__init__()
        self.yang_name = "ldp"
        self.yang_parent_name = "signaling-protocols"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        # Single fixed child container: the LDP timers node.
        self._child_container_classes = OrderedDict([("timers", ("timers", Mpls.SignalingProtocols.Ldp.Timers))])
        self._child_list_classes = OrderedDict([])
        # No leafs, so no __setattr__ override is generated for this class.
        self._leafs = OrderedDict()
        self.timers = Mpls.SignalingProtocols.Ldp.Timers()
        self.timers.parent = self
        self._children_name_map["timers"] = "timers"
        self._children_yang_names.add("timers")
        self._segment_path = lambda: "ldp"
        self._absolute_path = lambda: "openconfig-mpls:mpls/signaling-protocols/%s" % self._segment_path()

    class Timers(Entity):
        """
        LDP timers
        """

        _prefix = 'mpls'
        _revision = '2015-11-05'

        def __init__(self):
            super(Mpls.SignalingProtocols.Ldp.Timers, self).__init__()
            self.yang_name = "timers"
            self.yang_parent_name = "ldp"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            # Empty container in this model revision: no children, no leafs.
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([])
            self._leafs = OrderedDict()
            self._segment_path = lambda: "timers"
            self._absolute_path = lambda: "openconfig-mpls:mpls/signaling-protocols/ldp/%s" % self._segment_path()
class Lsps(Entity):
"""
LSP definitions and configuration
.. attribute:: constrained_path
traffic\-engineered LSPs supporting different path computation and signaling methods
**type**\: :py:class:`ConstrainedPath <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.ConstrainedPath>`
.. attribute:: unconstrained_path
LSPs that use the IGP\-determined path, i.e., non traffic\-engineered, or non constrained\-path
**type**\: :py:class:`UnconstrainedPath <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.UnconstrainedPath>`
.. attribute:: static_lsps
statically configured LSPs, without dynamic signaling
**type**\: :py:class:`StaticLsps <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.StaticLsps>`
"""
_prefix = 'mpls'
_revision = '2015-11-05'
def __init__(self):
    # Set up the 'lsps' container with its three fixed child containers:
    # constrained-path, unconstrained-path and static-lsps.
    super(Mpls.Lsps, self).__init__()
    self.yang_name = "lsps"
    self.yang_parent_name = "mpls"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = []
    # YANG child name -> (python attribute name, child class).
    self._child_container_classes = OrderedDict([("constrained-path", ("constrained_path", Mpls.Lsps.ConstrainedPath)), ("unconstrained-path", ("unconstrained_path", Mpls.Lsps.UnconstrainedPath)), ("static-lsps", ("static_lsps", Mpls.Lsps.StaticLsps))])
    self._child_list_classes = OrderedDict([])
    self._leafs = OrderedDict()
    # Instantiate the child containers and wire them back to this parent.
    self.constrained_path = Mpls.Lsps.ConstrainedPath()
    self.constrained_path.parent = self
    self._children_name_map["constrained_path"] = "constrained-path"
    self._children_yang_names.add("constrained-path")
    self.unconstrained_path = Mpls.Lsps.UnconstrainedPath()
    self.unconstrained_path.parent = self
    self._children_name_map["unconstrained_path"] = "unconstrained-path"
    self._children_yang_names.add("unconstrained-path")
    self.static_lsps = Mpls.Lsps.StaticLsps()
    self.static_lsps.parent = self
    self._children_name_map["static_lsps"] = "static-lsps"
    self._children_yang_names.add("static-lsps")
    self._segment_path = lambda: "lsps"
    self._absolute_path = lambda: "openconfig-mpls:mpls/%s" % self._segment_path()
class ConstrainedPath(Entity):
"""
traffic\-engineered LSPs supporting different
path computation and signaling methods
.. attribute:: named_explicit_paths
A list of explicit paths
**type**\: list of :py:class:`NamedExplicitPaths <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.ConstrainedPath.NamedExplicitPaths>`
.. attribute:: tunnel
List of TE tunnels
**type**\: list of :py:class:`Tunnel <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.ConstrainedPath.Tunnel>`
"""
_prefix = 'mpls'
_revision = '2015-11-05'
def __init__(self):
    # Set up the 'constrained-path' container, which holds two YANG lists:
    # named-explicit-paths and tunnel.
    super(Mpls.Lsps.ConstrainedPath, self).__init__()
    self.yang_name = "constrained-path"
    self.yang_parent_name = "lsps"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = []
    self._child_container_classes = OrderedDict([])
    # YANG list name -> (python attribute name, element class).
    self._child_list_classes = OrderedDict([("named-explicit-paths", ("named_explicit_paths", Mpls.Lsps.ConstrainedPath.NamedExplicitPaths)), ("tunnel", ("tunnel", Mpls.Lsps.ConstrainedPath.Tunnel))])
    self._leafs = OrderedDict()
    # List entries live in YList containers owned by this node.
    self.named_explicit_paths = YList(self)
    self.tunnel = YList(self)
    self._segment_path = lambda: "constrained-path"
    self._absolute_path = lambda: "openconfig-mpls:mpls/lsps/%s" % self._segment_path()
def __setattr__(self, name, value):
    # This container has no leafs, so the leaf-name list handed to the
    # Entity machinery is empty; all writes are still routed through it.
    leaf_names = []
    self._perform_setattr(
        Mpls.Lsps.ConstrainedPath,
        leaf_names,
        name,
        value)
class NamedExplicitPaths(Entity):
"""
A list of explicit paths
.. attribute:: name (key)
A string name that uniquely identifies an explicit path
**type**\: str
**refers to**\: :py:class:`name <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.ConstrainedPath.NamedExplicitPaths.Config>`
.. attribute:: config
Configuration parameters relating to named explicit paths
**type**\: :py:class:`Config <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.ConstrainedPath.NamedExplicitPaths.Config>`
.. attribute:: state
Operational state parameters relating to the named explicit paths
**type**\: :py:class:`State <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.ConstrainedPath.NamedExplicitPaths.State>`
.. attribute:: explicit_route_objects
List of explicit route objects
**type**\: list of :py:class:`ExplicitRouteObjects <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.ConstrainedPath.NamedExplicitPaths.ExplicitRouteObjects>`
"""
_prefix = 'mpls'
_revision = '2015-11-05'
def __init__(self):
super(Mpls.Lsps.ConstrainedPath.NamedExplicitPaths, self).__init__()
self.yang_name = "named-explicit-paths"
self.yang_parent_name = "constrained-path"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = ['name']
self._child_container_classes = OrderedDict([("config", ("config", Mpls.Lsps.ConstrainedPath.NamedExplicitPaths.Config)), ("state", ("state", Mpls.Lsps.ConstrainedPath.NamedExplicitPaths.State))])
self._child_list_classes = OrderedDict([("explicit-route-objects", ("explicit_route_objects", Mpls.Lsps.ConstrainedPath.NamedExplicitPaths.ExplicitRouteObjects))])
self._leafs = OrderedDict([
('name', YLeaf(YType.str, 'name')),
])
self.name = None
self.config = Mpls.Lsps.ConstrainedPath.NamedExplicitPaths.Config()
self.config.parent = self
self._children_name_map["config"] = "config"
self._children_yang_names.add("config")
self.state = Mpls.Lsps.ConstrainedPath.NamedExplicitPaths.State()
self.state.parent = self
self._children_name_map["state"] = "state"
self._children_yang_names.add("state")
self.explicit_route_objects = YList(self)
self._segment_path = lambda: "named-explicit-paths" + "[name='" + str(self.name) + "']"
self._absolute_path = lambda: "openconfig-mpls:mpls/lsps/constrained-path/%s" % self._segment_path()
def __setattr__(self, name, value):
self._perform_setattr(Mpls.Lsps.ConstrainedPath.NamedExplicitPaths, ['name'], name, value)
class Config(Entity):
"""
Configuration parameters relating to named explicit
paths
.. attribute:: name
A string name that uniquely identifies an explicit path
**type**\: str
"""
_prefix = 'mpls'
_revision = '2015-11-05'
def __init__(self):
super(Mpls.Lsps.ConstrainedPath.NamedExplicitPaths.Config, self).__init__()
self.yang_name = "config"
self.yang_parent_name = "named-explicit-paths"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('name', YLeaf(YType.str, 'name')),
])
self.name = None
self._segment_path = lambda: "config"
def __setattr__(self, name, value):
self._perform_setattr(Mpls.Lsps.ConstrainedPath.NamedExplicitPaths.Config, ['name'], name, value)
class State(Entity):
"""
Operational state parameters relating to the named
explicit paths
.. attribute:: name
A string name that uniquely identifies an explicit path
**type**\: str
"""
_prefix = 'mpls'
_revision = '2015-11-05'
def __init__(self):
super(Mpls.Lsps.ConstrainedPath.NamedExplicitPaths.State, self).__init__()
self.yang_name = "state"
self.yang_parent_name = "named-explicit-paths"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('name', YLeaf(YType.str, 'name')),
])
self.name = None
self._segment_path = lambda: "state"
def __setattr__(self, name, value):
self._perform_setattr(Mpls.Lsps.ConstrainedPath.NamedExplicitPaths.State, ['name'], name, value)
class ExplicitRouteObjects(Entity):
"""
List of explicit route objects
.. attribute:: index (key)
Index of this explicit route object, to express the order of hops in path
**type**\: int
**range:** 0..255
**refers to**\: :py:class:`index <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.ConstrainedPath.NamedExplicitPaths.ExplicitRouteObjects.Config>`
.. attribute:: config
Configuration parameters relating to an explicit route
**type**\: :py:class:`Config <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.ConstrainedPath.NamedExplicitPaths.ExplicitRouteObjects.Config>`
.. attribute:: state
State parameters relating to an explicit route
**type**\: :py:class:`State <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.ConstrainedPath.NamedExplicitPaths.ExplicitRouteObjects.State>`
"""
_prefix = 'mpls'
_revision = '2015-11-05'
def __init__(self):
super(Mpls.Lsps.ConstrainedPath.NamedExplicitPaths.ExplicitRouteObjects, self).__init__()
self.yang_name = "explicit-route-objects"
self.yang_parent_name = "named-explicit-paths"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = ['index']
self._child_container_classes = OrderedDict([("config", ("config", Mpls.Lsps.ConstrainedPath.NamedExplicitPaths.ExplicitRouteObjects.Config)), ("state", ("state", Mpls.Lsps.ConstrainedPath.NamedExplicitPaths.ExplicitRouteObjects.State))])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('index', YLeaf(YType.str, 'index')),
])
self.index = None
self.config = Mpls.Lsps.ConstrainedPath.NamedExplicitPaths.ExplicitRouteObjects.Config()
self.config.parent = self
self._children_name_map["config"] = "config"
self._children_yang_names.add("config")
self.state = Mpls.Lsps.ConstrainedPath.NamedExplicitPaths.ExplicitRouteObjects.State()
self.state.parent = self
self._children_name_map["state"] = "state"
self._children_yang_names.add("state")
self._segment_path = lambda: "explicit-route-objects" + "[index='" + str(self.index) + "']"
def __setattr__(self, name, value):
self._perform_setattr(Mpls.Lsps.ConstrainedPath.NamedExplicitPaths.ExplicitRouteObjects, ['index'], name, value)
class Config(Entity):
    """
    Configuration parameters relating to an explicit
    route
    .. attribute:: address
    router hop for the LSP path
    **type**\: union of the below types:
    **type**\: str
    **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
    **type**\: str
    **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
    .. attribute:: hop_type
    strict or loose hop
    **type**\: :py:class:`MplsHopType <ydk.models.openconfig.openconfig_mpls.MplsHopType>`
    .. attribute:: index
    Index of this explicit route object to express the order of hops in the path
    **type**\: int
    **range:** 0..255
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        # NOTE: all attribute assignments below go through the overridden
        # __setattr__; the internal maps (_leafs etc.) must be in place
        # before the leaf values are assigned.
        super(Mpls.Lsps.ConstrainedPath.NamedExplicitPaths.ExplicitRouteObjects.Config, self).__init__()
        self.yang_name = "config"
        self.yang_parent_name = "explicit-route-objects"
        self.is_top_level_class = False
        self.has_list_ancestor = True  # an ancestor node is a keyed YANG list
        self.ylist_key_names = []      # container node, not a list: no keys
        self._child_container_classes = OrderedDict([])  # no child containers
        self._child_list_classes = OrderedDict([])       # no child lists
        # Python attr name -> YLeaf(YANG type, YANG leaf name) descriptors.
        self._leafs = OrderedDict([
            ('address', YLeaf(YType.str, 'address')),
            ('hop_type', YLeaf(YType.enumeration, 'hop-type')),
            ('index', YLeaf(YType.uint8, 'index')),
        ])
        # Leaf values start unset; assignment is routed via _perform_setattr.
        self.address = None
        self.hop_type = None
        self.index = None
        self._segment_path = lambda: "config"  # this node's XPath segment

    def __setattr__(self, name, value):
        # Route assignments through YDK so the listed leafs are validated/encoded.
        self._perform_setattr(Mpls.Lsps.ConstrainedPath.NamedExplicitPaths.ExplicitRouteObjects.Config, ['address', 'hop_type', 'index'], name, value)
class State(Entity):
    """
    State parameters relating to an explicit route
    .. attribute:: address
    router hop for the LSP path
    **type**\: union of the below types:
    **type**\: str
    **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
    **type**\: str
    **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
    .. attribute:: hop_type
    strict or loose hop
    **type**\: :py:class:`MplsHopType <ydk.models.openconfig.openconfig_mpls.MplsHopType>`
    .. attribute:: index
    Index of this explicit route object to express the order of hops in the path
    **type**\: int
    **range:** 0..255
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        # Mirror of the sibling Config container (operational state leafs).
        # Attribute assignments below go through the overridden __setattr__.
        super(Mpls.Lsps.ConstrainedPath.NamedExplicitPaths.ExplicitRouteObjects.State, self).__init__()
        self.yang_name = "state"
        self.yang_parent_name = "explicit-route-objects"
        self.is_top_level_class = False
        self.has_list_ancestor = True  # an ancestor node is a keyed YANG list
        self.ylist_key_names = []      # container node, not a list: no keys
        self._child_container_classes = OrderedDict([])  # no child containers
        self._child_list_classes = OrderedDict([])       # no child lists
        # Python attr name -> YLeaf(YANG type, YANG leaf name) descriptors.
        self._leafs = OrderedDict([
            ('address', YLeaf(YType.str, 'address')),
            ('hop_type', YLeaf(YType.enumeration, 'hop-type')),
            ('index', YLeaf(YType.uint8, 'index')),
        ])
        # Leaf values start unset; assignment is routed via _perform_setattr.
        self.address = None
        self.hop_type = None
        self.index = None
        self._segment_path = lambda: "state"  # this node's XPath segment

    def __setattr__(self, name, value):
        # Route assignments through YDK so the listed leafs are validated/encoded.
        self._perform_setattr(Mpls.Lsps.ConstrainedPath.NamedExplicitPaths.ExplicitRouteObjects.State, ['address', 'hop_type', 'index'], name, value)
class Tunnel(Entity):
"""
List of TE tunnels
.. attribute:: name (key)
The tunnel name
**type**\: str
**refers to**\: :py:class:`name <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.ConstrainedPath.Tunnel.Config>`
.. attribute:: type (key)
The tunnel type, p2p or p2mp
**type**\: :py:class:`TunnelType <ydk.models.openconfig.openconfig_mpls_types.TunnelType>`
.. attribute:: config
Configuration parameters related to TE tunnels\:
**type**\: :py:class:`Config <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.ConstrainedPath.Tunnel.Config>`
.. attribute:: state
State parameters related to TE tunnels
**type**\: :py:class:`State <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.ConstrainedPath.Tunnel.State>`
.. attribute:: bandwidth
Bandwidth configuration for TE LSPs
**type**\: :py:class:`Bandwidth <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.ConstrainedPath.Tunnel.Bandwidth>`
.. attribute:: p2p_tunnel_attributes
Parameters related to LSPs of type P2P
**type**\: :py:class:`P2PTunnelAttributes <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes>`
"""
_prefix = 'mpls'
_revision = '2015-11-05'
def __init__(self):
    """Initialize the Tunnel list entry: YANG metadata, child containers,
    and the two list keys (name, type)."""
    # Attribute assignments below go through the overridden __setattr__,
    # so the internal maps must be set up (by Entity.__init__) first.
    super(Mpls.Lsps.ConstrainedPath.Tunnel, self).__init__()
    self.yang_name = "tunnel"
    self.yang_parent_name = "constrained-path"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = ['name','type']  # YANG list keys, in key order
    # YANG child name -> (python attr name, binding class) for child containers.
    self._child_container_classes = OrderedDict([("config", ("config", Mpls.Lsps.ConstrainedPath.Tunnel.Config)), ("state", ("state", Mpls.Lsps.ConstrainedPath.Tunnel.State)), ("bandwidth", ("bandwidth", Mpls.Lsps.ConstrainedPath.Tunnel.Bandwidth)), ("p2p-tunnel-attributes", ("p2p_tunnel_attributes", Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes))])
    self._child_list_classes = OrderedDict([])  # no child lists
    # Key leafs of this list entry.
    self._leafs = OrderedDict([
        ('name', YLeaf(YType.str, 'name')),
        ('type', YLeaf(YType.identityref, 'type')),
    ])
    self.name = None  # key: tunnel name
    self.type = None  # key: tunnel type (p2p or p2mp identity)
    # Instantiate and wire up each child container: set parent back-reference
    # and register the python-name -> yang-name mapping.
    self.config = Mpls.Lsps.ConstrainedPath.Tunnel.Config()
    self.config.parent = self
    self._children_name_map["config"] = "config"
    self._children_yang_names.add("config")
    self.state = Mpls.Lsps.ConstrainedPath.Tunnel.State()
    self.state.parent = self
    self._children_name_map["state"] = "state"
    self._children_yang_names.add("state")
    self.bandwidth = Mpls.Lsps.ConstrainedPath.Tunnel.Bandwidth()
    self.bandwidth.parent = self
    self._children_name_map["bandwidth"] = "bandwidth"
    self._children_yang_names.add("bandwidth")
    self.p2p_tunnel_attributes = Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes()
    self.p2p_tunnel_attributes.parent = self
    self._children_name_map["p2p_tunnel_attributes"] = "p2p-tunnel-attributes"
    self._children_yang_names.add("p2p-tunnel-attributes")
    # XPath segment carries both list-key predicates; evaluated lazily so it
    # reflects the current key values.
    self._segment_path = lambda: "tunnel" + "[name='" + str(self.name) + "']" + "[type='" + str(self.type) + "']"
    self._absolute_path = lambda: "openconfig-mpls:mpls/lsps/constrained-path/%s" % self._segment_path()
def __setattr__(self, name, value):
    # Route assignments through YDK so the key leafs are validated/encoded.
    self._perform_setattr(Mpls.Lsps.ConstrainedPath.Tunnel, ['name', 'type'], name, value)
class Config(Entity):
    """
    Configuration parameters related to TE tunnels\:
    .. attribute:: name
    The tunnel name
    **type**\: str
    .. attribute:: type
    Tunnel type, p2p or p2mp
    **type**\: :py:class:`TunnelType <ydk.models.openconfig.openconfig_mpls_types.TunnelType>`
    .. attribute:: signaling_protocol
    Signaling protocol used to set up this tunnel
    **type**\: :py:class:`TunnelType <ydk.models.openconfig.openconfig_mpls_types.TunnelType>`
    .. attribute:: local_id
    locally significant optional identifier for the tunnel; may be a numerical or string value
    **type**\: union of the below types:
    **type**\: int
    **range:** 0..4294967295
    **type**\: str
    .. attribute:: description
    optional text description for the tunnel
    **type**\: str
    .. attribute:: admin_status
    TE tunnel administrative state
    **type**\: :py:class:`TunnelAdminStatus <ydk.models.openconfig.openconfig_mpls_types.TunnelAdminStatus>`
    **default value**\: mplst:ADMIN_UP
    .. attribute:: preference
    Specifies a preference for this tunnel. A lower number signifies a better preference
    **type**\: int
    **range:** 1..255
    .. attribute:: metric
    LSP metric, either explicit or IGP
    **type**\: union of the below types:
    **type**\: :py:class:`TeMetricType <ydk.models.openconfig.openconfig_mpls.TeMetricType>`
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: protection_style_requested
    style of mpls frr protection desired\: can be link, link\-node or unprotected
    **type**\: :py:class:`ProtectionType <ydk.models.openconfig.openconfig_mpls_types.ProtectionType>`
    **default value**\: mplst:unprotected
    .. attribute:: reoptimize_timer
    frequency of reoptimization of a traffic engineered LSP
    **type**\: int
    **range:** 0..65535
    **units**\: seconds
    .. attribute:: source
    RSVP\-TE tunnel source address
    **type**\: union of the below types:
    **type**\: str
    **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
    **type**\: str
    **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
    .. attribute:: soft_preemption
    Enables RSVP soft\-preemption on this LSP
    **type**\: bool
    **default value**\: false
    .. attribute:: setup_priority
    RSVP\-TE preemption priority during LSP setup, lower is higher priority; default 7 indicates that LSP will not preempt established LSPs during setup
    **type**\: int
    **range:** 0..7
    **default value**\: 7
    .. attribute:: hold_priority
    preemption priority once the LSP is established, lower is higher priority; default 0 indicates other LSPs will not preempt the LSPs once established
    **type**\: int
    **range:** 0..7
    **default value**\: 0
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        # Attribute assignments below go through the overridden __setattr__.
        super(Mpls.Lsps.ConstrainedPath.Tunnel.Config, self).__init__()
        self.yang_name = "config"
        self.yang_parent_name = "tunnel"
        self.is_top_level_class = False
        self.has_list_ancestor = True  # parent tunnel entry is a keyed list
        self.ylist_key_names = []      # container node, not a list: no keys
        self._child_container_classes = OrderedDict([])  # no child containers
        self._child_list_classes = OrderedDict([])       # no child lists
        # Python attr name -> YLeaf(YANG type, YANG leaf name) descriptors.
        # Union-typed leafs (local_id, metric) are carried as YType.str.
        self._leafs = OrderedDict([
            ('name', YLeaf(YType.str, 'name')),
            ('type', YLeaf(YType.identityref, 'type')),
            ('signaling_protocol', YLeaf(YType.identityref, 'signaling-protocol')),
            ('local_id', YLeaf(YType.str, 'local-id')),
            ('description', YLeaf(YType.str, 'description')),
            ('admin_status', YLeaf(YType.identityref, 'admin-status')),
            ('preference', YLeaf(YType.uint8, 'preference')),
            ('metric', YLeaf(YType.str, 'metric')),
            ('protection_style_requested', YLeaf(YType.identityref, 'protection-style-requested')),
            ('reoptimize_timer', YLeaf(YType.uint16, 'reoptimize-timer')),
            ('source', YLeaf(YType.str, 'source')),
            ('soft_preemption', YLeaf(YType.boolean, 'soft-preemption')),
            ('setup_priority', YLeaf(YType.uint8, 'setup-priority')),
            ('hold_priority', YLeaf(YType.uint8, 'hold-priority')),
        ])
        # Leaf values start unset; assignment is routed via _perform_setattr.
        self.name = None
        self.type = None
        self.signaling_protocol = None
        self.local_id = None
        self.description = None
        self.admin_status = None
        self.preference = None
        self.metric = None
        self.protection_style_requested = None
        self.reoptimize_timer = None
        self.source = None
        self.soft_preemption = None
        self.setup_priority = None
        self.hold_priority = None
        self._segment_path = lambda: "config"  # this node's XPath segment

    def __setattr__(self, name, value):
        # Route assignments through YDK so the listed leafs are validated/encoded.
        self._perform_setattr(Mpls.Lsps.ConstrainedPath.Tunnel.Config, ['name', 'type', 'signaling_protocol', 'local_id', 'description', 'admin_status', 'preference', 'metric', 'protection_style_requested', 'reoptimize_timer', 'source', 'soft_preemption', 'setup_priority', 'hold_priority'], name, value)
class State(Entity):
    """
    State parameters related to TE tunnels
    .. attribute:: name
    The tunnel name
    **type**\: str
    .. attribute:: type
    Tunnel type, p2p or p2mp
    **type**\: :py:class:`TunnelType <ydk.models.openconfig.openconfig_mpls_types.TunnelType>`
    .. attribute:: signaling_protocol
    Signaling protocol used to set up this tunnel
    **type**\: :py:class:`TunnelType <ydk.models.openconfig.openconfig_mpls_types.TunnelType>`
    .. attribute:: local_id
    locally significant optional identifier for the tunnel; may be a numerical or string value
    **type**\: union of the below types:
    **type**\: int
    **range:** 0..4294967295
    **type**\: str
    .. attribute:: description
    optional text description for the tunnel
    **type**\: str
    .. attribute:: admin_status
    TE tunnel administrative state
    **type**\: :py:class:`TunnelAdminStatus <ydk.models.openconfig.openconfig_mpls_types.TunnelAdminStatus>`
    **default value**\: mplst:ADMIN_UP
    .. attribute:: preference
    Specifies a preference for this tunnel. A lower number signifies a better preference
    **type**\: int
    **range:** 1..255
    .. attribute:: metric
    LSP metric, either explicit or IGP
    **type**\: union of the below types:
    **type**\: :py:class:`TeMetricType <ydk.models.openconfig.openconfig_mpls.TeMetricType>`
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: protection_style_requested
    style of mpls frr protection desired\: can be link, link\-node or unprotected
    **type**\: :py:class:`ProtectionType <ydk.models.openconfig.openconfig_mpls_types.ProtectionType>`
    **default value**\: mplst:unprotected
    .. attribute:: reoptimize_timer
    frequency of reoptimization of a traffic engineered LSP
    **type**\: int
    **range:** 0..65535
    **units**\: seconds
    .. attribute:: source
    RSVP\-TE tunnel source address
    **type**\: union of the below types:
    **type**\: str
    **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
    **type**\: str
    **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
    .. attribute:: soft_preemption
    Enables RSVP soft\-preemption on this LSP
    **type**\: bool
    **default value**\: false
    .. attribute:: setup_priority
    RSVP\-TE preemption priority during LSP setup, lower is higher priority; default 7 indicates that LSP will not preempt established LSPs during setup
    **type**\: int
    **range:** 0..7
    **default value**\: 7
    .. attribute:: hold_priority
    preemption priority once the LSP is established, lower is higher priority; default 0 indicates other LSPs will not preempt the LSPs once established
    **type**\: int
    **range:** 0..7
    **default value**\: 0
    .. attribute:: oper_status
    The operational status of the TE tunnel
    **type**\: :py:class:`LspOperStatus <ydk.models.openconfig.openconfig_mpls_types.LspOperStatus>`
    .. attribute:: role
    The lsp role at the current node, whether it is headend, transit or tailend
    **type**\: :py:class:`LspRole <ydk.models.openconfig.openconfig_mpls_types.LspRole>`
    .. attribute:: counters
    State data for MPLS label switched paths. This state data is specific to a single label switched path
    **type**\: :py:class:`Counters <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.ConstrainedPath.Tunnel.State.Counters>`
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        # Mirror of the sibling Config container plus operational-only leafs
        # (oper_status, role) and the counters child container.
        # Attribute assignments below go through the overridden __setattr__.
        super(Mpls.Lsps.ConstrainedPath.Tunnel.State, self).__init__()
        self.yang_name = "state"
        self.yang_parent_name = "tunnel"
        self.is_top_level_class = False
        self.has_list_ancestor = True  # parent tunnel entry is a keyed list
        self.ylist_key_names = []      # container node, not a list: no keys
        # YANG child name -> (python attr name, binding class).
        self._child_container_classes = OrderedDict([("counters", ("counters", Mpls.Lsps.ConstrainedPath.Tunnel.State.Counters))])
        self._child_list_classes = OrderedDict([])  # no child lists
        # Python attr name -> YLeaf(YANG type, YANG leaf name) descriptors.
        self._leafs = OrderedDict([
            ('name', YLeaf(YType.str, 'name')),
            ('type', YLeaf(YType.identityref, 'type')),
            ('signaling_protocol', YLeaf(YType.identityref, 'signaling-protocol')),
            ('local_id', YLeaf(YType.str, 'local-id')),
            ('description', YLeaf(YType.str, 'description')),
            ('admin_status', YLeaf(YType.identityref, 'admin-status')),
            ('preference', YLeaf(YType.uint8, 'preference')),
            ('metric', YLeaf(YType.str, 'metric')),
            ('protection_style_requested', YLeaf(YType.identityref, 'protection-style-requested')),
            ('reoptimize_timer', YLeaf(YType.uint16, 'reoptimize-timer')),
            ('source', YLeaf(YType.str, 'source')),
            ('soft_preemption', YLeaf(YType.boolean, 'soft-preemption')),
            ('setup_priority', YLeaf(YType.uint8, 'setup-priority')),
            ('hold_priority', YLeaf(YType.uint8, 'hold-priority')),
            ('oper_status', YLeaf(YType.identityref, 'oper-status')),
            ('role', YLeaf(YType.identityref, 'role')),
        ])
        # Leaf values start unset; assignment is routed via _perform_setattr.
        self.name = None
        self.type = None
        self.signaling_protocol = None
        self.local_id = None
        self.description = None
        self.admin_status = None
        self.preference = None
        self.metric = None
        self.protection_style_requested = None
        self.reoptimize_timer = None
        self.source = None
        self.soft_preemption = None
        self.setup_priority = None
        self.hold_priority = None
        self.oper_status = None
        self.role = None
        # Wire up the counters child container.
        self.counters = Mpls.Lsps.ConstrainedPath.Tunnel.State.Counters()
        self.counters.parent = self
        self._children_name_map["counters"] = "counters"
        self._children_yang_names.add("counters")
        self._segment_path = lambda: "state"  # this node's XPath segment

    def __setattr__(self, name, value):
        # Route assignments through YDK so the listed leafs are validated/encoded.
        self._perform_setattr(Mpls.Lsps.ConstrainedPath.Tunnel.State, ['name', 'type', 'signaling_protocol', 'local_id', 'description', 'admin_status', 'preference', 'metric', 'protection_style_requested', 'reoptimize_timer', 'source', 'soft_preemption', 'setup_priority', 'hold_priority', 'oper_status', 'role'], name, value)

    class Counters(Entity):
        """
        State data for MPLS label switched paths. This state
        data is specific to a single label switched path.
        .. attribute:: bytes
        Number of bytes that have been forwarded over the label switched path
        **type**\: int
        **range:** 0..18446744073709551615
        .. attribute:: packets
        Number of packets that have been forwarded over the label switched path
        **type**\: int
        **range:** 0..18446744073709551615
        .. attribute:: path_changes
        Number of path changes for the label switched path
        **type**\: int
        **range:** 0..18446744073709551615
        .. attribute:: state_changes
        Number of state changes for the label switched path
        **type**\: int
        **range:** 0..18446744073709551615
        .. attribute:: online_time
        Indication of the time the label switched path transitioned to an Oper Up or in\-service state
        **type**\: str
        **pattern:** \\d{4}\-\\d{2}\-\\d{2}T\\d{2}\:\\d{2}\:\\d{2}(\\.\\d+)?(Z\|[\\+\\\-]\\d{2}\:\\d{2})
        .. attribute:: current_path_time
        Indicates the time the LSP switched onto its current path. This is reset upon a LSP path change
        **type**\: str
        **pattern:** \\d{4}\-\\d{2}\-\\d{2}T\\d{2}\:\\d{2}\:\\d{2}(\\.\\d+)?(Z\|[\\+\\\-]\\d{2}\:\\d{2})
        .. attribute:: next_reoptimization_time
        Indicates the next scheduled time the LSP will be reoptimized
        **type**\: str
        **pattern:** \\d{4}\-\\d{2}\-\\d{2}T\\d{2}\:\\d{2}\:\\d{2}(\\.\\d+)?(Z\|[\\+\\\-]\\d{2}\:\\d{2})
        """

        # YANG module prefix and revision this binding was generated from.
        _prefix = 'mpls'
        _revision = '2015-11-05'

        def __init__(self):
            # Attribute assignments below go through the overridden __setattr__.
            super(Mpls.Lsps.ConstrainedPath.Tunnel.State.Counters, self).__init__()
            self.yang_name = "counters"
            self.yang_parent_name = "state"
            self.is_top_level_class = False
            self.has_list_ancestor = True  # ancestor tunnel entry is a keyed list
            self.ylist_key_names = []      # container node, not a list: no keys
            self._child_container_classes = OrderedDict([])  # no child containers
            self._child_list_classes = OrderedDict([])       # no child lists
            # Python attr name -> YLeaf(YANG type, YANG leaf name) descriptors;
            # the timestamp leafs are date-and-time strings carried as YType.str.
            self._leafs = OrderedDict([
                ('bytes', YLeaf(YType.uint64, 'bytes')),
                ('packets', YLeaf(YType.uint64, 'packets')),
                ('path_changes', YLeaf(YType.uint64, 'path-changes')),
                ('state_changes', YLeaf(YType.uint64, 'state-changes')),
                ('online_time', YLeaf(YType.str, 'online-time')),
                ('current_path_time', YLeaf(YType.str, 'current-path-time')),
                ('next_reoptimization_time', YLeaf(YType.str, 'next-reoptimization-time')),
            ])
            # Leaf values start unset; assignment is routed via _perform_setattr.
            self.bytes = None
            self.packets = None
            self.path_changes = None
            self.state_changes = None
            self.online_time = None
            self.current_path_time = None
            self.next_reoptimization_time = None
            self._segment_path = lambda: "counters"  # this node's XPath segment

        def __setattr__(self, name, value):
            # Route assignments through YDK so the listed leafs are validated/encoded.
            self._perform_setattr(Mpls.Lsps.ConstrainedPath.Tunnel.State.Counters, ['bytes', 'packets', 'path_changes', 'state_changes', 'online_time', 'current_path_time', 'next_reoptimization_time'], name, value)
class Bandwidth(Entity):
"""
Bandwidth configuration for TE LSPs
.. attribute:: config
Configuration parameters related to bandwidth on TE tunnels\:
**type**\: :py:class:`Config <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.ConstrainedPath.Tunnel.Bandwidth.Config>`
.. attribute:: state
State parameters related to bandwidth configuration of TE tunnels
**type**\: :py:class:`State <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.ConstrainedPath.Tunnel.Bandwidth.State>`
.. attribute:: auto_bandwidth
Parameters related to auto\-bandwidth
**type**\: :py:class:`AutoBandwidth <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.ConstrainedPath.Tunnel.Bandwidth.AutoBandwidth>`
"""
_prefix = 'mpls'
_revision = '2015-11-05'
def __init__(self):
    """Initialize the bandwidth container: pure grouping node (no leafs)
    holding the config, state, and auto-bandwidth child containers."""
    # Attribute assignments below go through the overridden Entity __setattr__
    # machinery; internal maps must be set up first.
    super(Mpls.Lsps.ConstrainedPath.Tunnel.Bandwidth, self).__init__()
    self.yang_name = "bandwidth"
    self.yang_parent_name = "tunnel"
    self.is_top_level_class = False
    self.has_list_ancestor = True  # parent tunnel entry is a keyed list
    self.ylist_key_names = []      # container node, not a list: no keys
    # YANG child name -> (python attr name, binding class).
    self._child_container_classes = OrderedDict([("config", ("config", Mpls.Lsps.ConstrainedPath.Tunnel.Bandwidth.Config)), ("state", ("state", Mpls.Lsps.ConstrainedPath.Tunnel.Bandwidth.State)), ("auto-bandwidth", ("auto_bandwidth", Mpls.Lsps.ConstrainedPath.Tunnel.Bandwidth.AutoBandwidth))])
    self._child_list_classes = OrderedDict([])
    self._leafs = OrderedDict()  # no leafs of its own
    # Instantiate and wire up each child container.
    self.config = Mpls.Lsps.ConstrainedPath.Tunnel.Bandwidth.Config()
    self.config.parent = self
    self._children_name_map["config"] = "config"
    self._children_yang_names.add("config")
    self.state = Mpls.Lsps.ConstrainedPath.Tunnel.Bandwidth.State()
    self.state.parent = self
    self._children_name_map["state"] = "state"
    self._children_yang_names.add("state")
    self.auto_bandwidth = Mpls.Lsps.ConstrainedPath.Tunnel.Bandwidth.AutoBandwidth()
    self.auto_bandwidth.parent = self
    self._children_name_map["auto_bandwidth"] = "auto-bandwidth"
    self._children_yang_names.add("auto-bandwidth")
    self._segment_path = lambda: "bandwidth"  # this node's XPath segment
class Config(Entity):
    """
    Configuration parameters related to bandwidth on TE
    tunnels\:
    .. attribute:: specification_type
    The method used for setting the bandwidth, either explicitly specified or configured
    **type**\: :py:class:`TeBandwidthType <ydk.models.openconfig.openconfig_mpls.TeBandwidthType>`
    **default value**\: SPECIFIED
    .. attribute:: set_bandwidth
    set bandwidth explicitly, e.g., using offline calculation
    **type**\: int
    **range:** 0..4294967295
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        # Attribute assignments below go through the overridden __setattr__.
        super(Mpls.Lsps.ConstrainedPath.Tunnel.Bandwidth.Config, self).__init__()
        self.yang_name = "config"
        self.yang_parent_name = "bandwidth"
        self.is_top_level_class = False
        self.has_list_ancestor = True  # ancestor tunnel entry is a keyed list
        self.ylist_key_names = []      # container node, not a list: no keys
        self._child_container_classes = OrderedDict([])  # no child containers
        self._child_list_classes = OrderedDict([])       # no child lists
        # Python attr name -> YLeaf(YANG type, YANG leaf name) descriptors.
        self._leafs = OrderedDict([
            ('specification_type', YLeaf(YType.enumeration, 'specification-type')),
            ('set_bandwidth', YLeaf(YType.uint32, 'set-bandwidth')),
        ])
        # Leaf values start unset; assignment is routed via _perform_setattr.
        self.specification_type = None
        self.set_bandwidth = None
        self._segment_path = lambda: "config"  # this node's XPath segment

    def __setattr__(self, name, value):
        # Route assignments through YDK so the listed leafs are validated/encoded.
        self._perform_setattr(Mpls.Lsps.ConstrainedPath.Tunnel.Bandwidth.Config, ['specification_type', 'set_bandwidth'], name, value)
class State(Entity):
    """
    State parameters related to bandwidth
    configuration of TE tunnels
    .. attribute:: specification_type
    The method used for setting the bandwidth, either explicitly specified or configured
    **type**\: :py:class:`TeBandwidthType <ydk.models.openconfig.openconfig_mpls.TeBandwidthType>`
    **default value**\: SPECIFIED
    .. attribute:: set_bandwidth
    set bandwidth explicitly, e.g., using offline calculation
    **type**\: int
    **range:** 0..4294967295
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        # Mirror of the sibling Config container (operational state leafs).
        # Attribute assignments below go through the overridden __setattr__.
        super(Mpls.Lsps.ConstrainedPath.Tunnel.Bandwidth.State, self).__init__()
        self.yang_name = "state"
        self.yang_parent_name = "bandwidth"
        self.is_top_level_class = False
        self.has_list_ancestor = True  # ancestor tunnel entry is a keyed list
        self.ylist_key_names = []      # container node, not a list: no keys
        self._child_container_classes = OrderedDict([])  # no child containers
        self._child_list_classes = OrderedDict([])       # no child lists
        # Python attr name -> YLeaf(YANG type, YANG leaf name) descriptors.
        self._leafs = OrderedDict([
            ('specification_type', YLeaf(YType.enumeration, 'specification-type')),
            ('set_bandwidth', YLeaf(YType.uint32, 'set-bandwidth')),
        ])
        # Leaf values start unset; assignment is routed via _perform_setattr.
        self.specification_type = None
        self.set_bandwidth = None
        self._segment_path = lambda: "state"  # this node's XPath segment

    def __setattr__(self, name, value):
        # Route assignments through YDK so the listed leafs are validated/encoded.
        self._perform_setattr(Mpls.Lsps.ConstrainedPath.Tunnel.Bandwidth.State, ['specification_type', 'set_bandwidth'], name, value)
class AutoBandwidth(Entity):
"""
Parameters related to auto\-bandwidth
.. attribute:: config
Configuration parameters relating to MPLS auto\-bandwidth on the tunnel
**type**\: :py:class:`Config <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.ConstrainedPath.Tunnel.Bandwidth.AutoBandwidth.Config>`
.. attribute:: state
State parameters relating to MPLS auto\-bandwidth on the tunnel
**type**\: :py:class:`State <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.ConstrainedPath.Tunnel.Bandwidth.AutoBandwidth.State>`
.. attribute:: overflow
configuration of MPLS overflow bandwidth adjustement for the LSP
**type**\: :py:class:`Overflow <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.ConstrainedPath.Tunnel.Bandwidth.AutoBandwidth.Overflow>`
.. attribute:: underflow
configuration of MPLS underflow bandwidth adjustement for the LSP
**type**\: :py:class:`Underflow <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.ConstrainedPath.Tunnel.Bandwidth.AutoBandwidth.Underflow>`
"""
_prefix = 'mpls'
_revision = '2015-11-05'
def __init__(self):
    """Initialize the auto-bandwidth container: pure grouping node (no leafs)
    holding the config, state, overflow, and underflow child containers."""
    # Attribute assignments below go through the overridden Entity __setattr__
    # machinery; internal maps must be set up first.
    super(Mpls.Lsps.ConstrainedPath.Tunnel.Bandwidth.AutoBandwidth, self).__init__()
    self.yang_name = "auto-bandwidth"
    self.yang_parent_name = "bandwidth"
    self.is_top_level_class = False
    self.has_list_ancestor = True  # ancestor tunnel entry is a keyed list
    self.ylist_key_names = []      # container node, not a list: no keys
    # YANG child name -> (python attr name, binding class).
    self._child_container_classes = OrderedDict([("config", ("config", Mpls.Lsps.ConstrainedPath.Tunnel.Bandwidth.AutoBandwidth.Config)), ("state", ("state", Mpls.Lsps.ConstrainedPath.Tunnel.Bandwidth.AutoBandwidth.State)), ("overflow", ("overflow", Mpls.Lsps.ConstrainedPath.Tunnel.Bandwidth.AutoBandwidth.Overflow)), ("underflow", ("underflow", Mpls.Lsps.ConstrainedPath.Tunnel.Bandwidth.AutoBandwidth.Underflow))])
    self._child_list_classes = OrderedDict([])
    self._leafs = OrderedDict()  # no leafs of its own
    # Instantiate and wire up each child container.
    self.config = Mpls.Lsps.ConstrainedPath.Tunnel.Bandwidth.AutoBandwidth.Config()
    self.config.parent = self
    self._children_name_map["config"] = "config"
    self._children_yang_names.add("config")
    self.state = Mpls.Lsps.ConstrainedPath.Tunnel.Bandwidth.AutoBandwidth.State()
    self.state.parent = self
    self._children_name_map["state"] = "state"
    self._children_yang_names.add("state")
    self.overflow = Mpls.Lsps.ConstrainedPath.Tunnel.Bandwidth.AutoBandwidth.Overflow()
    self.overflow.parent = self
    self._children_name_map["overflow"] = "overflow"
    self._children_yang_names.add("overflow")
    self.underflow = Mpls.Lsps.ConstrainedPath.Tunnel.Bandwidth.AutoBandwidth.Underflow()
    self.underflow.parent = self
    self._children_name_map["underflow"] = "underflow"
    self._children_yang_names.add("underflow")
    self._segment_path = lambda: "auto-bandwidth"  # this node's XPath segment
class Config(Entity):
    """
    Configuration parameters relating to MPLS
    auto\-bandwidth on the tunnel.
    .. attribute:: enabled
    enables mpls auto\-bandwidth on the lsp
    **type**\: bool
    **default value**\: false
    .. attribute:: min_bw
    set the minimum bandwidth in Mbps for an auto\-bandwidth LSP
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: max_bw
    set the maximum bandwidth in Mbps for an auto\-bandwidth LSP
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: adjust_interval
    time in seconds between adjustments to LSP bandwidth
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: adjust_threshold
    percentage difference between the LSP's specified bandwidth and its current bandwidth allocation \-\- if the difference is greater than the specified percentage, auto\-bandwidth adjustment is triggered
    **type**\: int
    **range:** 0..100
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        # Attribute assignments below go through the overridden __setattr__.
        super(Mpls.Lsps.ConstrainedPath.Tunnel.Bandwidth.AutoBandwidth.Config, self).__init__()
        self.yang_name = "config"
        self.yang_parent_name = "auto-bandwidth"
        self.is_top_level_class = False
        self.has_list_ancestor = True  # ancestor tunnel entry is a keyed list
        self.ylist_key_names = []      # container node, not a list: no keys
        self._child_container_classes = OrderedDict([])  # no child containers
        self._child_list_classes = OrderedDict([])       # no child lists
        # Python attr name -> YLeaf(YANG type, YANG leaf name) descriptors.
        self._leafs = OrderedDict([
            ('enabled', YLeaf(YType.boolean, 'enabled')),
            ('min_bw', YLeaf(YType.uint32, 'min-bw')),
            ('max_bw', YLeaf(YType.uint32, 'max-bw')),
            ('adjust_interval', YLeaf(YType.uint32, 'adjust-interval')),
            ('adjust_threshold', YLeaf(YType.uint8, 'adjust-threshold')),
        ])
        # Leaf values start unset; assignment is routed via _perform_setattr.
        self.enabled = None
        self.min_bw = None
        self.max_bw = None
        self.adjust_interval = None
        self.adjust_threshold = None
        self._segment_path = lambda: "config"  # this node's XPath segment

    def __setattr__(self, name, value):
        # Route assignments through YDK so the listed leafs are validated/encoded.
        self._perform_setattr(Mpls.Lsps.ConstrainedPath.Tunnel.Bandwidth.AutoBandwidth.Config, ['enabled', 'min_bw', 'max_bw', 'adjust_interval', 'adjust_threshold'], name, value)
class State(Entity):
    """
    State parameters relating to MPLS
    auto\-bandwidth on the tunnel.
    .. attribute:: enabled
    enables mpls auto\-bandwidth on the lsp
    **type**\: bool
    **default value**\: false
    .. attribute:: min_bw
    set the minimum bandwidth in Mbps for an auto\-bandwidth LSP
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: max_bw
    set the maximum bandwidth in Mbps for an auto\-bandwidth LSP
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: adjust_interval
    time in seconds between adjustments to LSP bandwidth
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: adjust_threshold
    percentage difference between the LSP's specified bandwidth and its current bandwidth allocation \-\- if the difference is greater than the specified percentage, auto\-bandwidth adjustment is triggered
    **type**\: int
    **range:** 0..100
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        # Mirror of the sibling Config container (operational state leafs).
        # Attribute assignments below go through the overridden __setattr__.
        super(Mpls.Lsps.ConstrainedPath.Tunnel.Bandwidth.AutoBandwidth.State, self).__init__()
        self.yang_name = "state"
        self.yang_parent_name = "auto-bandwidth"
        self.is_top_level_class = False
        self.has_list_ancestor = True  # ancestor tunnel entry is a keyed list
        self.ylist_key_names = []      # container node, not a list: no keys
        self._child_container_classes = OrderedDict([])  # no child containers
        self._child_list_classes = OrderedDict([])       # no child lists
        # Python attr name -> YLeaf(YANG type, YANG leaf name) descriptors.
        self._leafs = OrderedDict([
            ('enabled', YLeaf(YType.boolean, 'enabled')),
            ('min_bw', YLeaf(YType.uint32, 'min-bw')),
            ('max_bw', YLeaf(YType.uint32, 'max-bw')),
            ('adjust_interval', YLeaf(YType.uint32, 'adjust-interval')),
            ('adjust_threshold', YLeaf(YType.uint8, 'adjust-threshold')),
        ])
        # Leaf values start unset; assignment is routed via _perform_setattr.
        self.enabled = None
        self.min_bw = None
        self.max_bw = None
        self.adjust_interval = None
        self.adjust_threshold = None
        self._segment_path = lambda: "state"  # this node's XPath segment

    def __setattr__(self, name, value):
        # Route assignments through YDK so the listed leafs are validated/encoded.
        self._perform_setattr(Mpls.Lsps.ConstrainedPath.Tunnel.Bandwidth.AutoBandwidth.State, ['enabled', 'min_bw', 'max_bw', 'adjust_interval', 'adjust_threshold'], name, value)
class Overflow(Entity):
    """
    configuration of MPLS overflow bandwidth
    adjustment for the LSP
    .. attribute:: config
    Config information for MPLS overflow bandwidth adjustment
    **type**\: :py:class:`Config <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.ConstrainedPath.Tunnel.Bandwidth.AutoBandwidth.Overflow.Config>`
    .. attribute:: state
    Config information for MPLS overflow bandwidth adjustment
    **type**\: :py:class:`State <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.ConstrainedPath.Tunnel.Bandwidth.AutoBandwidth.Overflow.State>`
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        # Pure grouping node (no leafs) holding config/state children.
        # Attribute assignments below go through the Entity __setattr__ machinery.
        super(Mpls.Lsps.ConstrainedPath.Tunnel.Bandwidth.AutoBandwidth.Overflow, self).__init__()
        self.yang_name = "overflow"
        self.yang_parent_name = "auto-bandwidth"
        self.is_top_level_class = False
        self.has_list_ancestor = True  # ancestor tunnel entry is a keyed list
        self.ylist_key_names = []      # container node, not a list: no keys
        # YANG child name -> (python attr name, binding class).
        self._child_container_classes = OrderedDict([("config", ("config", Mpls.Lsps.ConstrainedPath.Tunnel.Bandwidth.AutoBandwidth.Overflow.Config)), ("state", ("state", Mpls.Lsps.ConstrainedPath.Tunnel.Bandwidth.AutoBandwidth.Overflow.State))])
        self._child_list_classes = OrderedDict([])
        self._leafs = OrderedDict()  # no leafs of its own
        # Instantiate and wire up each child container.
        self.config = Mpls.Lsps.ConstrainedPath.Tunnel.Bandwidth.AutoBandwidth.Overflow.Config()
        self.config.parent = self
        self._children_name_map["config"] = "config"
        self._children_yang_names.add("config")
        self.state = Mpls.Lsps.ConstrainedPath.Tunnel.Bandwidth.AutoBandwidth.Overflow.State()
        self.state.parent = self
        self._children_name_map["state"] = "state"
        self._children_yang_names.add("state")
        self._segment_path = lambda: "overflow"  # this node's XPath segment

    class Config(Entity):
        """
        Config information for MPLS overflow bandwidth
        adjustment
        .. attribute:: enabled
        enables mpls lsp bandwidth overflow adjustment on the lsp
        **type**\: bool
        **default value**\: false
        .. attribute:: overflow_threshold
        bandwidth percentage change to trigger an overflow event
        **type**\: int
        **range:** 0..100
        .. attribute:: trigger_event_count
        number of consecutive overflow sample events needed to trigger an overflow adjustment
        **type**\: int
        **range:** 0..65535
        """

        # YANG module prefix and revision this binding was generated from.
        _prefix = 'mpls'
        _revision = '2015-11-05'

        def __init__(self):
            # Attribute assignments below go through the overridden __setattr__.
            super(Mpls.Lsps.ConstrainedPath.Tunnel.Bandwidth.AutoBandwidth.Overflow.Config, self).__init__()
            self.yang_name = "config"
            self.yang_parent_name = "overflow"
            self.is_top_level_class = False
            self.has_list_ancestor = True  # ancestor tunnel entry is a keyed list
            self.ylist_key_names = []      # container node, not a list: no keys
            self._child_container_classes = OrderedDict([])  # no child containers
            self._child_list_classes = OrderedDict([])       # no child lists
            # Python attr name -> YLeaf(YANG type, YANG leaf name) descriptors.
            self._leafs = OrderedDict([
                ('enabled', YLeaf(YType.boolean, 'enabled')),
                ('overflow_threshold', YLeaf(YType.uint8, 'overflow-threshold')),
                ('trigger_event_count', YLeaf(YType.uint16, 'trigger-event-count')),
            ])
            # Leaf values start unset; assignment is routed via _perform_setattr.
            self.enabled = None
            self.overflow_threshold = None
            self.trigger_event_count = None
            self._segment_path = lambda: "config"  # this node's XPath segment

        def __setattr__(self, name, value):
            # Route assignments through YDK so the listed leafs are validated/encoded.
            self._perform_setattr(Mpls.Lsps.ConstrainedPath.Tunnel.Bandwidth.AutoBandwidth.Overflow.Config, ['enabled', 'overflow_threshold', 'trigger_event_count'], name, value)

    class State(Entity):
        """
        Config information for MPLS overflow bandwidth
        adjustment
        .. attribute:: enabled
        enables mpls lsp bandwidth overflow adjustment on the lsp
        **type**\: bool
        **default value**\: false
        .. attribute:: overflow_threshold
        bandwidth percentage change to trigger an overflow event
        **type**\: int
        **range:** 0..100
        .. attribute:: trigger_event_count
        number of consecutive overflow sample events needed to trigger an overflow adjustment
        **type**\: int
        **range:** 0..65535
        """

        # YANG module prefix and revision this binding was generated from.
        _prefix = 'mpls'
        _revision = '2015-11-05'

        def __init__(self):
            # Mirror of the sibling Config container (operational state leafs).
            # Attribute assignments below go through the overridden __setattr__.
            super(Mpls.Lsps.ConstrainedPath.Tunnel.Bandwidth.AutoBandwidth.Overflow.State, self).__init__()
            self.yang_name = "state"
            self.yang_parent_name = "overflow"
            self.is_top_level_class = False
            self.has_list_ancestor = True  # ancestor tunnel entry is a keyed list
            self.ylist_key_names = []      # container node, not a list: no keys
            self._child_container_classes = OrderedDict([])  # no child containers
            self._child_list_classes = OrderedDict([])       # no child lists
            # Python attr name -> YLeaf(YANG type, YANG leaf name) descriptors.
            self._leafs = OrderedDict([
                ('enabled', YLeaf(YType.boolean, 'enabled')),
                ('overflow_threshold', YLeaf(YType.uint8, 'overflow-threshold')),
                ('trigger_event_count', YLeaf(YType.uint16, 'trigger-event-count')),
            ])
            # Leaf values start unset; assignment is routed via _perform_setattr.
            self.enabled = None
            self.overflow_threshold = None
            self.trigger_event_count = None
            self._segment_path = lambda: "state"  # this node's XPath segment

        def __setattr__(self, name, value):
            # Route assignments through YDK so the listed leafs are validated/encoded.
            self._perform_setattr(Mpls.Lsps.ConstrainedPath.Tunnel.Bandwidth.AutoBandwidth.Overflow.State, ['enabled', 'overflow_threshold', 'trigger_event_count'], name, value)
class Underflow(Entity):
    """
    configuration of MPLS underflow bandwidth
    adjustment for the LSP
    .. attribute:: config
    	Config information for MPLS underflow bandwidth adjustment
    	**type**\: :py:class:`Config <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.ConstrainedPath.Tunnel.Bandwidth.AutoBandwidth.Underflow.Config>`
    .. attribute:: state
    	State information for MPLS underflow bandwidth adjustment
    	**type**\: :py:class:`State <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.ConstrainedPath.Tunnel.Bandwidth.AutoBandwidth.Underflow.State>`
    """
    _prefix = 'mpls'
    _revision = '2015-11-05'
    def __init__(self):
        super(Mpls.Lsps.ConstrainedPath.Tunnel.Bandwidth.AutoBandwidth.Underflow, self).__init__()

        # Schema placement metadata for this container node.
        self.yang_name = "underflow"
        self.yang_parent_name = "auto-bandwidth"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Two fixed child containers (config/state); no lists or leafs here.
        self._child_container_classes = OrderedDict([("config", ("config", Mpls.Lsps.ConstrainedPath.Tunnel.Bandwidth.AutoBandwidth.Underflow.Config)), ("state", ("state", Mpls.Lsps.ConstrainedPath.Tunnel.Bandwidth.AutoBandwidth.Underflow.State))])
        self._child_list_classes = OrderedDict([])
        self._leafs = OrderedDict()
        # Instantiate the child containers and wire them to this parent.
        self.config = Mpls.Lsps.ConstrainedPath.Tunnel.Bandwidth.AutoBandwidth.Underflow.Config()
        self.config.parent = self
        self._children_name_map["config"] = "config"
        self._children_yang_names.add("config")
        self.state = Mpls.Lsps.ConstrainedPath.Tunnel.Bandwidth.AutoBandwidth.Underflow.State()
        self.state.parent = self
        self._children_name_map["state"] = "state"
        self._children_yang_names.add("state")
        self._segment_path = lambda: "underflow"
    class Config(Entity):
        """
        Config information for MPLS underflow bandwidth
        adjustment
        .. attribute:: enabled
        	enables bandwidth underflow adjustment on the lsp
        	**type**\: bool
        	**default value**\: false
        .. attribute:: underflow_threshold
        	bandwidth percentage change to trigger an underflow event
        	**type**\: int
        	**range:** 0..100
        .. attribute:: trigger_event_count
        	number of consecutive underflow sample events needed to trigger an underflow adjustment
        	**type**\: int
        	**range:** 0..65535
        """
        _prefix = 'mpls'
        _revision = '2015-11-05'
        def __init__(self):
            super(Mpls.Lsps.ConstrainedPath.Tunnel.Bandwidth.AutoBandwidth.Underflow.Config, self).__init__()

            # Schema placement metadata for this container node.
            self.yang_name = "config"
            self.yang_parent_name = "underflow"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            # Leaf-only container: no child containers or lists.
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('enabled', YLeaf(YType.boolean, 'enabled')),
                ('underflow_threshold', YLeaf(YType.uint8, 'underflow-threshold')),
                ('trigger_event_count', YLeaf(YType.uint16, 'trigger-event-count')),
            ])
            # Leaf values start out unset.
            self.enabled = None
            self.underflow_threshold = None
            self.trigger_event_count = None
            self._segment_path = lambda: "config"
        def __setattr__(self, name, value):
            # Route writes through ydk so leaf assignments are tracked/validated.
            self._perform_setattr(Mpls.Lsps.ConstrainedPath.Tunnel.Bandwidth.AutoBandwidth.Underflow.Config, ['enabled', 'underflow_threshold', 'trigger_event_count'], name, value)
    class State(Entity):
        """
        State information for MPLS underflow bandwidth
        adjustment
        .. attribute:: enabled
        	enables bandwidth underflow adjustment on the lsp
        	**type**\: bool
        	**default value**\: false
        .. attribute:: underflow_threshold
        	bandwidth percentage change to trigger an underflow event
        	**type**\: int
        	**range:** 0..100
        .. attribute:: trigger_event_count
        	number of consecutive underflow sample events needed to trigger an underflow adjustment
        	**type**\: int
        	**range:** 0..65535
        """
        _prefix = 'mpls'
        _revision = '2015-11-05'
        def __init__(self):
            super(Mpls.Lsps.ConstrainedPath.Tunnel.Bandwidth.AutoBandwidth.Underflow.State, self).__init__()

            # Schema placement metadata for this container node.
            self.yang_name = "state"
            self.yang_parent_name = "underflow"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            # Leaf-only container: no child containers or lists.
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('enabled', YLeaf(YType.boolean, 'enabled')),
                ('underflow_threshold', YLeaf(YType.uint8, 'underflow-threshold')),
                ('trigger_event_count', YLeaf(YType.uint16, 'trigger-event-count')),
            ])
            # Leaf values start out unset.
            self.enabled = None
            self.underflow_threshold = None
            self.trigger_event_count = None
            self._segment_path = lambda: "state"
        def __setattr__(self, name, value):
            # Route writes through ydk so leaf assignments are tracked/validated.
            self._perform_setattr(Mpls.Lsps.ConstrainedPath.Tunnel.Bandwidth.AutoBandwidth.Underflow.State, ['enabled', 'underflow_threshold', 'trigger_event_count'], name, value)
class P2PTunnelAttributes(Entity):
"""
Parameters related to LSPs of type P2P
.. attribute:: config
Configuration parameters for P2P LSPs
**type**\: :py:class:`Config <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.Config>`
.. attribute:: state
State parameters for P2P LSPs
**type**\: :py:class:`State <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.State>`
.. attribute:: p2p_primary_paths
List of p2p primary paths for a tunnel
**type**\: list of :py:class:`P2PPrimaryPaths <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PPrimaryPaths>`
.. attribute:: p2p_secondary_paths
List of p2p primary paths for a tunnel
**type**\: list of :py:class:`P2PSecondaryPaths <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PSecondaryPaths>`
"""
_prefix = 'mpls'
_revision = '2015-11-05'
def __init__(self):
    super(Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes, self).__init__()

    # Schema placement metadata for this container node.
    self.yang_name = "p2p-tunnel-attributes"
    self.yang_parent_name = "tunnel"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # Fixed child containers (config/state) and keyed child lists (primary/secondary paths).
    self._child_container_classes = OrderedDict([("config", ("config", Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.Config)), ("state", ("state", Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.State))])
    self._child_list_classes = OrderedDict([("p2p-primary-paths", ("p2p_primary_paths", Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PPrimaryPaths)), ("p2p-secondary-paths", ("p2p_secondary_paths", Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PSecondaryPaths))])
    self._leafs = OrderedDict()  # no leafs directly on this node
    # Instantiate the child containers and wire them to this parent.
    self.config = Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.Config()
    self.config.parent = self
    self._children_name_map["config"] = "config"
    self._children_yang_names.add("config")
    self.state = Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.State()
    self.state.parent = self
    self._children_name_map["state"] = "state"
    self._children_yang_names.add("state")
    # The YANG lists start empty; entries are appended by callers or the decoder.
    self.p2p_primary_paths = YList(self)
    self.p2p_secondary_paths = YList(self)
    self._segment_path = lambda: "p2p-tunnel-attributes"
def __setattr__(self, name, value):
    # Route writes through ydk; the empty list reflects that this container
    # has no leafs of its own.
    self._perform_setattr(Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes, [], name, value)
class Config(Entity):
    """
    Configuration parameters for P2P LSPs

    .. attribute:: destination
        P2P tunnel destination address
        **type**\: union of the below types:
        **type**\: str
        **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
        **type**\: str
        **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
    """

    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        super(Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.Config, self).__init__()

        # Schema placement metadata for this container node.
        self.yang_name = "config"
        self.yang_parent_name = "p2p-tunnel-attributes"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []

        # Leaf-only container: no child containers or lists.
        self._child_container_classes = OrderedDict()
        self._child_list_classes = OrderedDict()

        # Single leaf: the tunnel destination address.
        leaf_map = OrderedDict()
        leaf_map['destination'] = YLeaf(YType.str, 'destination')
        self._leafs = leaf_map

        self.destination = None

        self._segment_path = lambda: "config"

    def __setattr__(self, name, value):
        # Route writes through ydk so leaf assignments are tracked/validated.
        self._perform_setattr(Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.Config, ['destination'], name, value)
class State(Entity):
    """
    State parameters for P2P LSPs
    .. attribute:: destination
    	P2P tunnel destination address
    	**type**\: union of the below types:
    	**type**\: str
    	**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
    	**type**\: str
    	**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
    """
    _prefix = 'mpls'
    _revision = '2015-11-05'
    def __init__(self):
        super(Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.State, self).__init__()

        # Schema placement metadata for this container node.
        self.yang_name = "state"
        self.yang_parent_name = "p2p-tunnel-attributes"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Leaf-only container: no child containers or lists.
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('destination', YLeaf(YType.str, 'destination')),
        ])
        # Leaf value starts out unset.
        self.destination = None
        self._segment_path = lambda: "state"
    def __setattr__(self, name, value):
        # Route writes through ydk so leaf assignments are tracked/validated.
        self._perform_setattr(Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.State, ['destination'], name, value)
class P2PPrimaryPaths(Entity):
"""
List of p2p primary paths for a tunnel
.. attribute:: name (key)
Path name
**type**\: str
**refers to**\: :py:class:`name <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PPrimaryPaths.Config>`
.. attribute:: config
Configuration parameters related to paths
**type**\: :py:class:`Config <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PPrimaryPaths.Config>`
.. attribute:: state
State parameters related to paths
**type**\: :py:class:`State <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PPrimaryPaths.State>`
.. attribute:: candidate_secondary_paths
The set of candidate secondary paths which may be used for this primary path. When secondary paths are specified in the list the path of the secondary LSP in use must be restricted to those path options referenced. The priority of the secondary paths is specified within the list. Higher priority values are less preferred \- that is to say that a path with priority 0 is the most preferred path. In the case that the list is empty, any secondary path option may be utilised when the current primary path is in use
**type**\: :py:class:`CandidateSecondaryPaths <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PPrimaryPaths.CandidateSecondaryPaths>`
.. attribute:: admin_groups
Top\-level container for include/exclude constraints for link affinities
**type**\: :py:class:`AdminGroups <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PPrimaryPaths.AdminGroups>`
"""
_prefix = 'mpls'
_revision = '2015-11-05'
def __init__(self):
    super(Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PPrimaryPaths, self).__init__()

    # Schema placement metadata for this list entry.
    self.yang_name = "p2p-primary-paths"
    self.yang_parent_name = "p2p-tunnel-attributes"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    # 'name' is the YANG list key for this entry.
    self.ylist_key_names = ['name']
    self._child_container_classes = OrderedDict([("config", ("config", Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PPrimaryPaths.Config)), ("state", ("state", Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PPrimaryPaths.State)), ("candidate-secondary-paths", ("candidate_secondary_paths", Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PPrimaryPaths.CandidateSecondaryPaths)), ("admin-groups", ("admin_groups", Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PPrimaryPaths.AdminGroups))])
    self._child_list_classes = OrderedDict([])
    self._leafs = OrderedDict([
        ('name', YLeaf(YType.str, 'name')),
    ])
    # Key leaf starts out unset.
    self.name = None
    # Instantiate the fixed child containers and wire them to this parent.
    self.config = Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PPrimaryPaths.Config()
    self.config.parent = self
    self._children_name_map["config"] = "config"
    self._children_yang_names.add("config")
    self.state = Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PPrimaryPaths.State()
    self.state.parent = self
    self._children_name_map["state"] = "state"
    self._children_yang_names.add("state")
    self.candidate_secondary_paths = Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PPrimaryPaths.CandidateSecondaryPaths()
    self.candidate_secondary_paths.parent = self
    self._children_name_map["candidate_secondary_paths"] = "candidate-secondary-paths"
    self._children_yang_names.add("candidate-secondary-paths")
    self.admin_groups = Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PPrimaryPaths.AdminGroups()
    self.admin_groups.parent = self
    self._children_name_map["admin_groups"] = "admin-groups"
    self._children_yang_names.add("admin-groups")
    # Segment path embeds the current key value, so it is evaluated lazily.
    self._segment_path = lambda: "p2p-primary-paths" + "[name='" + str(self.name) + "']"
def __setattr__(self, name, value):
    # Route writes through ydk; only the 'name' key leaf lives on this node.
    self._perform_setattr(Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PPrimaryPaths, ['name'], name, value)
class Config(Entity):
    """
    Configuration parameters related to paths
    .. attribute:: name
    	Path name
    	**type**\: str
    .. attribute:: path_computation_method
    	The method used for computing the path, either locally computed, queried from a server or not computed at all (explicitly configured)
    	**type**\: :py:class:`PathComputationMethod <ydk.models.openconfig.openconfig_mpls.PathComputationMethod>`
    	**default value**\: locally-computed
    .. attribute:: use_cspf
    	Flag to enable CSPF for locally computed LSPs
    	**type**\: bool
    .. attribute:: cspf_tiebreaker
    	Determine the tie\-breaking method to choose between equally desirable paths during CSFP computation
    	**type**\: :py:class:`CspfTieBreaking <ydk.models.openconfig.openconfig_mpls.CspfTieBreaking>`
    .. attribute:: path_computation_server
    	Address of the external path computation server
    	**type**\: union of the below types:
    	**type**\: str
    	**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
    	**type**\: str
    	**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
    .. attribute:: explicit_path_name
    	reference to a defined path
    	**type**\: str
    	**refers to**\: :py:class:`name <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.ConstrainedPath.NamedExplicitPaths.Config>`
    .. attribute:: preference
    	Specifies a preference for this path. The lower the number higher the preference
    	**type**\: int
    	**range:** 1..255
    .. attribute:: setup_priority
    	RSVP\-TE preemption priority during LSP setup, lower is higher priority; default 7 indicates that LSP will not preempt established LSPs during setup
    	**type**\: int
    	**range:** 0..7
    	**default value**\: 7
    .. attribute:: hold_priority
    	preemption priority once the LSP is established, lower is higher priority; default 0 indicates other LSPs will not preempt the LSPs once established
    	**type**\: int
    	**range:** 0..7
    	**default value**\: 0
    .. attribute:: retry_timer
    	sets the time between attempts to establish the LSP
    	**type**\: int
    	**range:** 1..600
    	**units**\: seconds
    """
    _prefix = 'mpls'
    _revision = '2015-11-05'
    def __init__(self):
        super(Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PPrimaryPaths.Config, self).__init__()

        # Schema placement metadata for this container node.
        self.yang_name = "config"
        self.yang_parent_name = "p2p-primary-paths"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Leaf-only container: no child containers or lists.
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        # Map each Python attribute to its YANG leaf type and name.
        self._leafs = OrderedDict([
            ('name', YLeaf(YType.str, 'name')),
            ('path_computation_method', YLeaf(YType.identityref, 'path-computation-method')),
            ('use_cspf', YLeaf(YType.boolean, 'use-cspf')),
            ('cspf_tiebreaker', YLeaf(YType.enumeration, 'cspf-tiebreaker')),
            ('path_computation_server', YLeaf(YType.str, 'path-computation-server')),
            ('explicit_path_name', YLeaf(YType.str, 'explicit-path-name')),
            ('preference', YLeaf(YType.uint8, 'preference')),
            ('setup_priority', YLeaf(YType.uint8, 'setup-priority')),
            ('hold_priority', YLeaf(YType.uint8, 'hold-priority')),
            ('retry_timer', YLeaf(YType.uint16, 'retry-timer')),
        ])
        # Leaf values start out unset.
        self.name = None
        self.path_computation_method = None
        self.use_cspf = None
        self.cspf_tiebreaker = None
        self.path_computation_server = None
        self.explicit_path_name = None
        self.preference = None
        self.setup_priority = None
        self.hold_priority = None
        self.retry_timer = None
        self._segment_path = lambda: "config"
    def __setattr__(self, name, value):
        # Route writes through ydk so leaf assignments are tracked/validated.
        self._perform_setattr(Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PPrimaryPaths.Config, ['name', 'path_computation_method', 'use_cspf', 'cspf_tiebreaker', 'path_computation_server', 'explicit_path_name', 'preference', 'setup_priority', 'hold_priority', 'retry_timer'], name, value)
class State(Entity):
    """
    State parameters related to paths
    .. attribute:: name
    	Path name
    	**type**\: str
    .. attribute:: path_computation_method
    	The method used for computing the path, either locally computed, queried from a server or not computed at all (explicitly configured)
    	**type**\: :py:class:`PathComputationMethod <ydk.models.openconfig.openconfig_mpls.PathComputationMethod>`
    	**default value**\: locally-computed
    .. attribute:: use_cspf
    	Flag to enable CSPF for locally computed LSPs
    	**type**\: bool
    .. attribute:: cspf_tiebreaker
    	Determine the tie\-breaking method to choose between equally desirable paths during CSFP computation
    	**type**\: :py:class:`CspfTieBreaking <ydk.models.openconfig.openconfig_mpls.CspfTieBreaking>`
    .. attribute:: path_computation_server
    	Address of the external path computation server
    	**type**\: union of the below types:
    	**type**\: str
    	**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
    	**type**\: str
    	**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
    .. attribute:: explicit_path_name
    	reference to a defined path
    	**type**\: str
    	**refers to**\: :py:class:`name <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.ConstrainedPath.NamedExplicitPaths.Config>`
    .. attribute:: preference
    	Specifies a preference for this path. The lower the number higher the preference
    	**type**\: int
    	**range:** 1..255
    .. attribute:: setup_priority
    	RSVP\-TE preemption priority during LSP setup, lower is higher priority; default 7 indicates that LSP will not preempt established LSPs during setup
    	**type**\: int
    	**range:** 0..7
    	**default value**\: 7
    .. attribute:: hold_priority
    	preemption priority once the LSP is established, lower is higher priority; default 0 indicates other LSPs will not preempt the LSPs once established
    	**type**\: int
    	**range:** 0..7
    	**default value**\: 0
    .. attribute:: retry_timer
    	sets the time between attempts to establish the LSP
    	**type**\: int
    	**range:** 1..600
    	**units**\: seconds
    """
    _prefix = 'mpls'
    _revision = '2015-11-05'
    def __init__(self):
        super(Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PPrimaryPaths.State, self).__init__()

        # Schema placement metadata for this container node.
        self.yang_name = "state"
        self.yang_parent_name = "p2p-primary-paths"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Leaf-only container: no child containers or lists.
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        # Map each Python attribute to its YANG leaf type and name.
        self._leafs = OrderedDict([
            ('name', YLeaf(YType.str, 'name')),
            ('path_computation_method', YLeaf(YType.identityref, 'path-computation-method')),
            ('use_cspf', YLeaf(YType.boolean, 'use-cspf')),
            ('cspf_tiebreaker', YLeaf(YType.enumeration, 'cspf-tiebreaker')),
            ('path_computation_server', YLeaf(YType.str, 'path-computation-server')),
            ('explicit_path_name', YLeaf(YType.str, 'explicit-path-name')),
            ('preference', YLeaf(YType.uint8, 'preference')),
            ('setup_priority', YLeaf(YType.uint8, 'setup-priority')),
            ('hold_priority', YLeaf(YType.uint8, 'hold-priority')),
            ('retry_timer', YLeaf(YType.uint16, 'retry-timer')),
        ])
        # Leaf values start out unset.
        self.name = None
        self.path_computation_method = None
        self.use_cspf = None
        self.cspf_tiebreaker = None
        self.path_computation_server = None
        self.explicit_path_name = None
        self.preference = None
        self.setup_priority = None
        self.hold_priority = None
        self.retry_timer = None
        self._segment_path = lambda: "state"
    def __setattr__(self, name, value):
        # Route writes through ydk so leaf assignments are tracked/validated.
        self._perform_setattr(Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PPrimaryPaths.State, ['name', 'path_computation_method', 'use_cspf', 'cspf_tiebreaker', 'path_computation_server', 'explicit_path_name', 'preference', 'setup_priority', 'hold_priority', 'retry_timer'], name, value)
class CandidateSecondaryPaths(Entity):
    """
    The set of candidate secondary paths which may be used
    for this primary path. When secondary paths are specified
    in the list the path of the secondary LSP in use must be
    restricted to those path options referenced. The
    priority of the secondary paths is specified within the
    list. Higher priority values are less preferred \- that is
    to say that a path with priority 0 is the most preferred
    path. In the case that the list is empty, any secondary
    path option may be utilised when the current primary path
    is in use.
    .. attribute:: candidate_secondary_path
    	List of secondary paths which may be utilised when the current primary path is in use
    	**type**\: list of :py:class:`CandidateSecondaryPath <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PPrimaryPaths.CandidateSecondaryPaths.CandidateSecondaryPath>`
    """
    _prefix = 'mpls'
    _revision = '2015-11-05'
    def __init__(self):
        super(Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PPrimaryPaths.CandidateSecondaryPaths, self).__init__()

        # Schema placement metadata for this container node.
        self.yang_name = "candidate-secondary-paths"
        self.yang_parent_name = "p2p-primary-paths"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        # One keyed child list; no leafs directly on this node.
        self._child_list_classes = OrderedDict([("candidate-secondary-path", ("candidate_secondary_path", Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PPrimaryPaths.CandidateSecondaryPaths.CandidateSecondaryPath))])
        self._leafs = OrderedDict()
        # The YANG list starts empty; entries are appended by callers or the decoder.
        self.candidate_secondary_path = YList(self)
        self._segment_path = lambda: "candidate-secondary-paths"
    def __setattr__(self, name, value):
        # Route writes through ydk; this container has no leafs of its own.
        self._perform_setattr(Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PPrimaryPaths.CandidateSecondaryPaths, [], name, value)
    class CandidateSecondaryPath(Entity):
        """
        List of secondary paths which may be utilised when the
        current primary path is in use
        .. attribute:: secondary_path (key)
        	A reference to the secondary path option reference which acts as the key of the candidate\-secondary\-path list
        	**type**\: str
        	**refers to**\: :py:class:`secondary_path <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PPrimaryPaths.CandidateSecondaryPaths.CandidateSecondaryPath.Config>`
        .. attribute:: config
        	Configuration parameters relating to the candidate secondary path
        	**type**\: :py:class:`Config <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PPrimaryPaths.CandidateSecondaryPaths.CandidateSecondaryPath.Config>`
        .. attribute:: state
        	Operational state parameters relating to the candidate secondary path
        	**type**\: :py:class:`State <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PPrimaryPaths.CandidateSecondaryPaths.CandidateSecondaryPath.State>`
        """
        _prefix = 'mpls'
        _revision = '2015-11-05'
        def __init__(self):
            super(Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PPrimaryPaths.CandidateSecondaryPaths.CandidateSecondaryPath, self).__init__()

            # Schema placement metadata for this list entry.
            self.yang_name = "candidate-secondary-path"
            self.yang_parent_name = "candidate-secondary-paths"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            # 'secondary_path' is the YANG list key for this entry.
            self.ylist_key_names = ['secondary_path']
            self._child_container_classes = OrderedDict([("config", ("config", Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PPrimaryPaths.CandidateSecondaryPaths.CandidateSecondaryPath.Config)), ("state", ("state", Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PPrimaryPaths.CandidateSecondaryPaths.CandidateSecondaryPath.State))])
            self._child_list_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('secondary_path', YLeaf(YType.str, 'secondary-path')),
            ])
            # Key leaf starts out unset.
            self.secondary_path = None
            # Instantiate the fixed child containers and wire them to this parent.
            self.config = Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PPrimaryPaths.CandidateSecondaryPaths.CandidateSecondaryPath.Config()
            self.config.parent = self
            self._children_name_map["config"] = "config"
            self._children_yang_names.add("config")
            self.state = Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PPrimaryPaths.CandidateSecondaryPaths.CandidateSecondaryPath.State()
            self.state.parent = self
            self._children_name_map["state"] = "state"
            self._children_yang_names.add("state")
            # Segment path embeds the current key value, so it is evaluated lazily.
            self._segment_path = lambda: "candidate-secondary-path" + "[secondary-path='" + str(self.secondary_path) + "']"
        def __setattr__(self, name, value):
            # Route writes through ydk; only the key leaf lives on this node.
            self._perform_setattr(Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PPrimaryPaths.CandidateSecondaryPaths.CandidateSecondaryPath, ['secondary_path'], name, value)
        class Config(Entity):
            """
            Configuration parameters relating to the candidate
            secondary path
            .. attribute:: secondary_path
            	A reference to the secondary path that should be utilised when the containing primary path option is in use
            	**type**\: str
            	**refers to**\: :py:class:`name <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PSecondaryPaths.Config>`
            .. attribute:: priority
            	The priority of the specified secondary path option. Higher priority options are less preferable \- such that a secondary path reference with a priority of 0 is the most preferred
            	**type**\: int
            	**range:** 0..65535
            """
            _prefix = 'mpls'
            _revision = '2015-11-05'
            def __init__(self):
                super(Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PPrimaryPaths.CandidateSecondaryPaths.CandidateSecondaryPath.Config, self).__init__()

                # Schema placement metadata for this container node.
                self.yang_name = "config"
                self.yang_parent_name = "candidate-secondary-path"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                # Leaf-only container: no child containers or lists.
                self._child_container_classes = OrderedDict([])
                self._child_list_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('secondary_path', YLeaf(YType.str, 'secondary-path')),
                    ('priority', YLeaf(YType.uint16, 'priority')),
                ])
                # Leaf values start out unset.
                self.secondary_path = None
                self.priority = None
                self._segment_path = lambda: "config"
            def __setattr__(self, name, value):
                # Route writes through ydk so leaf assignments are tracked/validated.
                self._perform_setattr(Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PPrimaryPaths.CandidateSecondaryPaths.CandidateSecondaryPath.Config, ['secondary_path', 'priority'], name, value)
        class State(Entity):
            """
            Operational state parameters relating to the candidate
            secondary path
            .. attribute:: secondary_path
            	A reference to the secondary path that should be utilised when the containing primary path option is in use
            	**type**\: str
            	**refers to**\: :py:class:`name <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PSecondaryPaths.Config>`
            .. attribute:: priority
            	The priority of the specified secondary path option. Higher priority options are less preferable \- such that a secondary path reference with a priority of 0 is the most preferred
            	**type**\: int
            	**range:** 0..65535
            .. attribute:: active
            	Indicates the current active path option that has been selected of the candidate secondary paths
            	**type**\: bool
            """
            _prefix = 'mpls'
            _revision = '2015-11-05'
            def __init__(self):
                super(Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PPrimaryPaths.CandidateSecondaryPaths.CandidateSecondaryPath.State, self).__init__()

                # Schema placement metadata for this container node.
                self.yang_name = "state"
                self.yang_parent_name = "candidate-secondary-path"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                # Leaf-only container: no child containers or lists.
                self._child_container_classes = OrderedDict([])
                self._child_list_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('secondary_path', YLeaf(YType.str, 'secondary-path')),
                    ('priority', YLeaf(YType.uint16, 'priority')),
                    ('active', YLeaf(YType.boolean, 'active')),
                ])
                # Leaf values start out unset.
                self.secondary_path = None
                self.priority = None
                self.active = None
                self._segment_path = lambda: "state"
            def __setattr__(self, name, value):
                # Route writes through ydk so leaf assignments are tracked/validated.
                self._perform_setattr(Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PPrimaryPaths.CandidateSecondaryPaths.CandidateSecondaryPath.State, ['secondary_path', 'priority', 'active'], name, value)
class AdminGroups(Entity):
"""
Top\-level container for include/exclude constraints for
link affinities
.. attribute:: config
Configuration data
**type**\: :py:class:`Config <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PPrimaryPaths.AdminGroups.Config>`
.. attribute:: state
Operational state data
**type**\: :py:class:`State <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PPrimaryPaths.AdminGroups.State>`
"""
_prefix = 'mpls'
_revision = '2015-11-05'
def __init__(self):
    super(Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PPrimaryPaths.AdminGroups, self).__init__()

    # Schema placement metadata for this container node.
    self.yang_name = "admin-groups"
    self.yang_parent_name = "p2p-primary-paths"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # Two fixed child containers (config/state); no lists or leafs here.
    self._child_container_classes = OrderedDict([("config", ("config", Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PPrimaryPaths.AdminGroups.Config)), ("state", ("state", Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PPrimaryPaths.AdminGroups.State))])
    self._child_list_classes = OrderedDict([])
    self._leafs = OrderedDict()
    # Instantiate the child containers and wire them to this parent.
    self.config = Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PPrimaryPaths.AdminGroups.Config()
    self.config.parent = self
    self._children_name_map["config"] = "config"
    self._children_yang_names.add("config")
    self.state = Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PPrimaryPaths.AdminGroups.State()
    self.state.parent = self
    self._children_name_map["state"] = "state"
    self._children_yang_names.add("state")
    self._segment_path = lambda: "admin-groups"
class Config(Entity):
    """
    Configuration data

    .. attribute:: exclude_group
        list of references to named admin\-groups to exclude in path calculation
        **type**\: list of str
        **refers to**\: :py:class:`admin_group_name <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.MplsAdminGroups.AdminGroup>`

    .. attribute:: include_all_group
        list of references to named admin\-groups of which all must be included
        **type**\: list of str
        **refers to**\: :py:class:`admin_group_name <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.MplsAdminGroups.AdminGroup>`

    .. attribute:: include_any_group
        list of references to named admin\-groups of which one must be included
        **type**\: list of str
        **refers to**\: :py:class:`admin_group_name <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.MplsAdminGroups.AdminGroup>`
    """

    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        super(Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PPrimaryPaths.AdminGroups.Config, self).__init__()

        # Schema placement metadata for this container node.
        self.yang_name = "config"
        self.yang_parent_name = "admin-groups"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []

        # Leaf-list-only container: no child containers or lists.
        self._child_container_classes = OrderedDict()
        self._child_list_classes = OrderedDict()

        # Register each leaf-list with its YANG type and on-the-wire name.
        leaf_map = OrderedDict()
        leaf_map['exclude_group'] = YLeafList(YType.str, 'exclude-group')
        leaf_map['include_all_group'] = YLeafList(YType.str, 'include-all-group')
        leaf_map['include_any_group'] = YLeafList(YType.str, 'include-any-group')
        self._leafs = leaf_map

        # Leaf-lists start out empty.
        self.exclude_group = []
        self.include_all_group = []
        self.include_any_group = []

        self._segment_path = lambda: "config"

    def __setattr__(self, name, value):
        # Route writes through ydk so leaf-list assignments are tracked/validated.
        self._perform_setattr(Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PPrimaryPaths.AdminGroups.Config, ['exclude_group', 'include_all_group', 'include_any_group'], name, value)
class State(Entity):
    """
    Operational state data

    .. attribute:: exclude_group
        list of references to named admin\-groups to exclude in path calculation
        **type**\: list of str
        **refers to**\: :py:class:`admin_group_name <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.MplsAdminGroups.AdminGroup>`
    .. attribute:: include_all_group
        list of references to named admin\-groups of which all must be included
        **type**\: list of str
        **refers to**\: :py:class:`admin_group_name <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.MplsAdminGroups.AdminGroup>`
    .. attribute:: include_any_group
        list of references to named admin\-groups of which one must be included
        **type**\: list of str
        **refers to**\: :py:class:`admin_group_name <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.MplsAdminGroups.AdminGroup>`
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        """Build the operational-state container for admin-group constraints."""
        super(Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PPrimaryPaths.AdminGroups.State, self).__init__()
        self.yang_name = "state"
        self.yang_parent_name = "admin-groups"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        # Register leaf-list metadata BEFORE assigning the leaf attributes below
        # (writes go through __setattr__/_perform_setattr) — keep this order.
        self._leafs = OrderedDict([
            ('exclude_group', YLeafList(YType.str, 'exclude-group')),
            ('include_all_group', YLeafList(YType.str, 'include-all-group')),
            ('include_any_group', YLeafList(YType.str, 'include-any-group')),
        ])
        self.exclude_group = []
        self.include_all_group = []
        self.include_any_group = []
        self._segment_path = lambda: "state"

    def __setattr__(self, name, value):
        # Route attribute writes through the Entity machinery.
        self._perform_setattr(Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PPrimaryPaths.AdminGroups.State, ['exclude_group', 'include_all_group', 'include_any_group'], name, value)
class P2PSecondaryPaths(Entity):
    """
    List of p2p primary paths for a tunnel

    .. attribute:: name (key)
        Path name
        **type**\: str
        **refers to**\: :py:class:`name <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PSecondaryPaths.Config>`
    .. attribute:: config
        Configuration parameters related to paths
        **type**\: :py:class:`Config <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PSecondaryPaths.Config>`
    .. attribute:: state
        State parameters related to paths
        **type**\: :py:class:`State <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PSecondaryPaths.State>`
    .. attribute:: admin_groups
        Top\-level container for include/exclude constraints for link affinities
        **type**\: :py:class:`AdminGroups <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PSecondaryPaths.AdminGroups>`
    """
    # NOTE(review): the summary line above says "primary paths" although this is
    # the p2p-secondary-paths list — the wording is inherited from the upstream
    # YANG description; verify against the openconfig-mpls model before changing.

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        """Build one entry of the keyed p2p-secondary-paths YANG list."""
        super(Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PSecondaryPaths, self).__init__()
        self.yang_name = "p2p-secondary-paths"
        self.yang_parent_name = "p2p-tunnel-attributes"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        # 'name' is the YANG list key and appears in the segment path below.
        self.ylist_key_names = ['name']
        self._child_container_classes = OrderedDict([("config", ("config", Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PSecondaryPaths.Config)), ("state", ("state", Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PSecondaryPaths.State)), ("admin-groups", ("admin_groups", Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PSecondaryPaths.AdminGroups))])
        self._child_list_classes = OrderedDict([])
        # Register leaf metadata BEFORE assigning the leaf attribute below
        # (writes go through __setattr__/_perform_setattr) — keep this order.
        self._leafs = OrderedDict([
            ('name', YLeaf(YType.str, 'name')),
        ])
        self.name = None
        # Instantiate and wire up the child containers.
        self.config = Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PSecondaryPaths.Config()
        self.config.parent = self
        self._children_name_map["config"] = "config"
        self._children_yang_names.add("config")
        self.state = Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PSecondaryPaths.State()
        self.state.parent = self
        self._children_name_map["state"] = "state"
        self._children_yang_names.add("state")
        self.admin_groups = Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PSecondaryPaths.AdminGroups()
        self.admin_groups.parent = self
        self._children_name_map["admin_groups"] = "admin-groups"
        self._children_yang_names.add("admin-groups")
        # List entries embed their key value in the XPath segment.
        self._segment_path = lambda: "p2p-secondary-paths" + "[name='" + str(self.name) + "']"

    def __setattr__(self, name, value):
        # Route attribute writes through the Entity machinery; 'name' is the
        # only leaf on this node.
        self._perform_setattr(Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PSecondaryPaths, ['name'], name, value)

    class Config(Entity):
        """
        Configuration parameters related to paths

        .. attribute:: name
            Path name
            **type**\: str
        .. attribute:: path_computation_method
            The method used for computing the path, either locally computed, queried from a server or not computed at all (explicitly configured)
            **type**\: :py:class:`PathComputationMethod <ydk.models.openconfig.openconfig_mpls.PathComputationMethod>`
            **default value**\: locally-computed
        .. attribute:: use_cspf
            Flag to enable CSPF for locally computed LSPs
            **type**\: bool
        .. attribute:: cspf_tiebreaker
            Determine the tie\-breaking method to choose between equally desirable paths during CSFP computation
            **type**\: :py:class:`CspfTieBreaking <ydk.models.openconfig.openconfig_mpls.CspfTieBreaking>`
        .. attribute:: path_computation_server
            Address of the external path computation server
            **type**\: union of the below types:
            **type**\: str
            **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
            **type**\: str
            **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
        .. attribute:: explicit_path_name
            reference to a defined path
            **type**\: str
            **refers to**\: :py:class:`name <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.ConstrainedPath.NamedExplicitPaths.Config>`
        .. attribute:: preference
            Specifies a preference for this path. The lower the number higher the preference
            **type**\: int
            **range:** 1..255
        .. attribute:: setup_priority
            RSVP\-TE preemption priority during LSP setup, lower is higher priority; default 7 indicates that LSP will not preempt established LSPs during setup
            **type**\: int
            **range:** 0..7
            **default value**\: 7
        .. attribute:: hold_priority
            preemption priority once the LSP is established, lower is higher priority; default 0 indicates other LSPs will not preempt the LSPs once established
            **type**\: int
            **range:** 0..7
            **default value**\: 0
        .. attribute:: retry_timer
            sets the time between attempts to establish the LSP
            **type**\: int
            **range:** 1..600
            **units**\: seconds
        """

        _prefix = 'mpls'
        _revision = '2015-11-05'

        def __init__(self):
            """Build the intended-config container for a secondary path."""
            super(Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PSecondaryPaths.Config, self).__init__()
            self.yang_name = "config"
            self.yang_parent_name = "p2p-secondary-paths"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([])
            # Register leaf metadata BEFORE assigning the leaf attributes below.
            self._leafs = OrderedDict([
                ('name', YLeaf(YType.str, 'name')),
                ('path_computation_method', YLeaf(YType.identityref, 'path-computation-method')),
                ('use_cspf', YLeaf(YType.boolean, 'use-cspf')),
                ('cspf_tiebreaker', YLeaf(YType.enumeration, 'cspf-tiebreaker')),
                ('path_computation_server', YLeaf(YType.str, 'path-computation-server')),
                ('explicit_path_name', YLeaf(YType.str, 'explicit-path-name')),
                ('preference', YLeaf(YType.uint8, 'preference')),
                ('setup_priority', YLeaf(YType.uint8, 'setup-priority')),
                ('hold_priority', YLeaf(YType.uint8, 'hold-priority')),
                ('retry_timer', YLeaf(YType.uint16, 'retry-timer')),
            ])
            # All leaves start unset (None); values are filled in by the user
            # or by decoding a server reply.
            self.name = None
            self.path_computation_method = None
            self.use_cspf = None
            self.cspf_tiebreaker = None
            self.path_computation_server = None
            self.explicit_path_name = None
            self.preference = None
            self.setup_priority = None
            self.hold_priority = None
            self.retry_timer = None
            self._segment_path = lambda: "config"

        def __setattr__(self, name, value):
            # Route attribute writes through the Entity machinery for the
            # listed leaf names.
            self._perform_setattr(Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PSecondaryPaths.Config, ['name', 'path_computation_method', 'use_cspf', 'cspf_tiebreaker', 'path_computation_server', 'explicit_path_name', 'preference', 'setup_priority', 'hold_priority', 'retry_timer'], name, value)

    class State(Entity):
        """
        State parameters related to paths

        .. attribute:: name
            Path name
            **type**\: str
        .. attribute:: path_computation_method
            The method used for computing the path, either locally computed, queried from a server or not computed at all (explicitly configured)
            **type**\: :py:class:`PathComputationMethod <ydk.models.openconfig.openconfig_mpls.PathComputationMethod>`
            **default value**\: locally-computed
        .. attribute:: use_cspf
            Flag to enable CSPF for locally computed LSPs
            **type**\: bool
        .. attribute:: cspf_tiebreaker
            Determine the tie\-breaking method to choose between equally desirable paths during CSFP computation
            **type**\: :py:class:`CspfTieBreaking <ydk.models.openconfig.openconfig_mpls.CspfTieBreaking>`
        .. attribute:: path_computation_server
            Address of the external path computation server
            **type**\: union of the below types:
            **type**\: str
            **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
            **type**\: str
            **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
        .. attribute:: explicit_path_name
            reference to a defined path
            **type**\: str
            **refers to**\: :py:class:`name <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.ConstrainedPath.NamedExplicitPaths.Config>`
        .. attribute:: preference
            Specifies a preference for this path. The lower the number higher the preference
            **type**\: int
            **range:** 1..255
        .. attribute:: setup_priority
            RSVP\-TE preemption priority during LSP setup, lower is higher priority; default 7 indicates that LSP will not preempt established LSPs during setup
            **type**\: int
            **range:** 0..7
            **default value**\: 7
        .. attribute:: hold_priority
            preemption priority once the LSP is established, lower is higher priority; default 0 indicates other LSPs will not preempt the LSPs once established
            **type**\: int
            **range:** 0..7
            **default value**\: 0
        .. attribute:: retry_timer
            sets the time between attempts to establish the LSP
            **type**\: int
            **range:** 1..600
            **units**\: seconds
        """

        _prefix = 'mpls'
        _revision = '2015-11-05'

        def __init__(self):
            """Build the operational-state container for a secondary path."""
            super(Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PSecondaryPaths.State, self).__init__()
            self.yang_name = "state"
            self.yang_parent_name = "p2p-secondary-paths"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([])
            # Register leaf metadata BEFORE assigning the leaf attributes below.
            self._leafs = OrderedDict([
                ('name', YLeaf(YType.str, 'name')),
                ('path_computation_method', YLeaf(YType.identityref, 'path-computation-method')),
                ('use_cspf', YLeaf(YType.boolean, 'use-cspf')),
                ('cspf_tiebreaker', YLeaf(YType.enumeration, 'cspf-tiebreaker')),
                ('path_computation_server', YLeaf(YType.str, 'path-computation-server')),
                ('explicit_path_name', YLeaf(YType.str, 'explicit-path-name')),
                ('preference', YLeaf(YType.uint8, 'preference')),
                ('setup_priority', YLeaf(YType.uint8, 'setup-priority')),
                ('hold_priority', YLeaf(YType.uint8, 'hold-priority')),
                ('retry_timer', YLeaf(YType.uint16, 'retry-timer')),
            ])
            # All leaves start unset (None).
            self.name = None
            self.path_computation_method = None
            self.use_cspf = None
            self.cspf_tiebreaker = None
            self.path_computation_server = None
            self.explicit_path_name = None
            self.preference = None
            self.setup_priority = None
            self.hold_priority = None
            self.retry_timer = None
            self._segment_path = lambda: "state"

        def __setattr__(self, name, value):
            # Route attribute writes through the Entity machinery.
            self._perform_setattr(Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PSecondaryPaths.State, ['name', 'path_computation_method', 'use_cspf', 'cspf_tiebreaker', 'path_computation_server', 'explicit_path_name', 'preference', 'setup_priority', 'hold_priority', 'retry_timer'], name, value)

    class AdminGroups(Entity):
        """
        Top\-level container for include/exclude constraints for
        link affinities

        .. attribute:: config
            Configuration data
            **type**\: :py:class:`Config <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PSecondaryPaths.AdminGroups.Config>`
        .. attribute:: state
            Operational state data
            **type**\: :py:class:`State <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PSecondaryPaths.AdminGroups.State>`
        """

        _prefix = 'mpls'
        _revision = '2015-11-05'

        def __init__(self):
            """Build the admin-groups container with its config/state children."""
            super(Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PSecondaryPaths.AdminGroups, self).__init__()
            self.yang_name = "admin-groups"
            self.yang_parent_name = "p2p-secondary-paths"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_container_classes = OrderedDict([("config", ("config", Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PSecondaryPaths.AdminGroups.Config)), ("state", ("state", Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PSecondaryPaths.AdminGroups.State))])
            self._child_list_classes = OrderedDict([])
            self._leafs = OrderedDict()  # no leaves on this container
            self.config = Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PSecondaryPaths.AdminGroups.Config()
            self.config.parent = self
            self._children_name_map["config"] = "config"
            self._children_yang_names.add("config")
            self.state = Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PSecondaryPaths.AdminGroups.State()
            self.state.parent = self
            self._children_name_map["state"] = "state"
            self._children_yang_names.add("state")
            self._segment_path = lambda: "admin-groups"

        class Config(Entity):
            """
            Configuration data

            .. attribute:: exclude_group
                list of references to named admin\-groups to exclude in path calculation
                **type**\: list of str
                **refers to**\: :py:class:`admin_group_name <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.MplsAdminGroups.AdminGroup>`
            .. attribute:: include_all_group
                list of references to named admin\-groups of which all must be included
                **type**\: list of str
                **refers to**\: :py:class:`admin_group_name <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.MplsAdminGroups.AdminGroup>`
            .. attribute:: include_any_group
                list of references to named admin\-groups of which one must be included
                **type**\: list of str
                **refers to**\: :py:class:`admin_group_name <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.MplsAdminGroups.AdminGroup>`
            """

            _prefix = 'mpls'
            _revision = '2015-11-05'

            def __init__(self):
                """Build the intended-config container for admin-group constraints."""
                super(Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PSecondaryPaths.AdminGroups.Config, self).__init__()
                self.yang_name = "config"
                self.yang_parent_name = "admin-groups"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_container_classes = OrderedDict([])
                self._child_list_classes = OrderedDict([])
                # Register leaf-list metadata BEFORE assigning the attributes below.
                self._leafs = OrderedDict([
                    ('exclude_group', YLeafList(YType.str, 'exclude-group')),
                    ('include_all_group', YLeafList(YType.str, 'include-all-group')),
                    ('include_any_group', YLeafList(YType.str, 'include-any-group')),
                ])
                self.exclude_group = []
                self.include_all_group = []
                self.include_any_group = []
                self._segment_path = lambda: "config"

            def __setattr__(self, name, value):
                # Route attribute writes through the Entity machinery.
                self._perform_setattr(Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PSecondaryPaths.AdminGroups.Config, ['exclude_group', 'include_all_group', 'include_any_group'], name, value)

        class State(Entity):
            """
            Operational state data

            .. attribute:: exclude_group
                list of references to named admin\-groups to exclude in path calculation
                **type**\: list of str
                **refers to**\: :py:class:`admin_group_name <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.MplsAdminGroups.AdminGroup>`
            .. attribute:: include_all_group
                list of references to named admin\-groups of which all must be included
                **type**\: list of str
                **refers to**\: :py:class:`admin_group_name <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.MplsAdminGroups.AdminGroup>`
            .. attribute:: include_any_group
                list of references to named admin\-groups of which one must be included
                **type**\: list of str
                **refers to**\: :py:class:`admin_group_name <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.MplsAdminGroups.AdminGroup>`
            """

            _prefix = 'mpls'
            _revision = '2015-11-05'

            def __init__(self):
                """Build the operational-state container for admin-group constraints."""
                super(Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PSecondaryPaths.AdminGroups.State, self).__init__()
                self.yang_name = "state"
                self.yang_parent_name = "admin-groups"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_container_classes = OrderedDict([])
                self._child_list_classes = OrderedDict([])
                # Register leaf-list metadata BEFORE assigning the attributes below.
                self._leafs = OrderedDict([
                    ('exclude_group', YLeafList(YType.str, 'exclude-group')),
                    ('include_all_group', YLeafList(YType.str, 'include-all-group')),
                    ('include_any_group', YLeafList(YType.str, 'include-any-group')),
                ])
                self.exclude_group = []
                self.include_all_group = []
                self.include_any_group = []
                self._segment_path = lambda: "state"

            def __setattr__(self, name, value):
                # Route attribute writes through the Entity machinery.
                self._perform_setattr(Mpls.Lsps.ConstrainedPath.Tunnel.P2PTunnelAttributes.P2PSecondaryPaths.AdminGroups.State, ['exclude_group', 'include_all_group', 'include_any_group'], name, value)
class UnconstrainedPath(Entity):
"""
LSPs that use the IGP\-determined path, i.e., non
traffic\-engineered, or non constrained\-path
.. attribute:: path_setup_protocol
select and configure the signaling method for the LSP
**type**\: :py:class:`PathSetupProtocol <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.UnconstrainedPath.PathSetupProtocol>`
"""
_prefix = 'mpls'
_revision = '2015-11-05'
def __init__(self):
    """Build the unconstrained-path container (IGP-congruent LSPs)."""
    super(Mpls.Lsps.UnconstrainedPath, self).__init__()
    self.yang_name = "unconstrained-path"
    self.yang_parent_name = "lsps"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = []
    self._child_container_classes = OrderedDict([("path-setup-protocol", ("path_setup_protocol", Mpls.Lsps.UnconstrainedPath.PathSetupProtocol))])
    self._child_list_classes = OrderedDict([])
    self._leafs = OrderedDict()  # no leaves on this container
    self.path_setup_protocol = Mpls.Lsps.UnconstrainedPath.PathSetupProtocol()
    self.path_setup_protocol.parent = self
    self._children_name_map["path_setup_protocol"] = "path-setup-protocol"
    self._children_yang_names.add("path-setup-protocol")
    self._segment_path = lambda: "unconstrained-path"
    # No list ancestor, so an absolute XPath from the module root is known.
    self._absolute_path = lambda: "openconfig-mpls:mpls/lsps/%s" % self._segment_path()
class PathSetupProtocol(Entity):
"""
select and configure the signaling method for
the LSP
.. attribute:: ldp
LDP signaling setup for IGP\-congruent LSPs
**type**\: :py:class:`Ldp <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.Ldp>`
**presence node**\: True
.. attribute:: segment_routing
segment routing signaling extensions for IGP\-confgruent LSPs
**type**\: :py:class:`SegmentRouting <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.SegmentRouting>`
**presence node**\: True
"""
_prefix = 'mpls'
_revision = '2015-11-05'
def __init__(self):
    """Build the path-setup-protocol container with its ldp/segment-routing choices."""
    super(Mpls.Lsps.UnconstrainedPath.PathSetupProtocol, self).__init__()
    self.yang_name = "path-setup-protocol"
    self.yang_parent_name = "unconstrained-path"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = []
    self._child_container_classes = OrderedDict([("ldp", ("ldp", Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.Ldp)), ("segment-routing", ("segment_routing", Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.SegmentRouting))])
    self._child_list_classes = OrderedDict([])
    self._leafs = OrderedDict()  # no leaves on this container
    # Both children are YANG presence containers: left as None (absent) until
    # the user explicitly instantiates one of them.
    self.ldp = None
    self._children_name_map["ldp"] = "ldp"
    self._children_yang_names.add("ldp")
    self.segment_routing = None
    self._children_name_map["segment_routing"] = "segment-routing"
    self._children_yang_names.add("segment-routing")
    self._segment_path = lambda: "path-setup-protocol"
    self._absolute_path = lambda: "openconfig-mpls:mpls/lsps/unconstrained-path/%s" % self._segment_path()
class Ldp(Entity):
    """
    LDP signaling setup for IGP\-congruent LSPs

    .. attribute:: tunnel
        contains configuration stanzas for different LSP tunnel types (P2P, P2MP, etc.)
        **type**\: :py:class:`Tunnel <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.Ldp.Tunnel>`

    This class is a :ref:`presence class<presence-class>`
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        """Build the ldp presence container and its tunnel child."""
        super(Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.Ldp, self).__init__()
        self.yang_name = "ldp"
        self.yang_parent_name = "path-setup-protocol"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([("tunnel", ("tunnel", Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.Ldp.Tunnel))])
        self._child_list_classes = OrderedDict([])
        # Presence container: its existence alone carries meaning in YANG.
        self.is_presence_container = True
        self._leafs = OrderedDict()  # no leaves on this container
        self.tunnel = Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.Ldp.Tunnel()
        self.tunnel.parent = self
        self._children_name_map["tunnel"] = "tunnel"
        self._children_yang_names.add("tunnel")
        self._segment_path = lambda: "ldp"
        self._absolute_path = lambda: "openconfig-mpls:mpls/lsps/unconstrained-path/path-setup-protocol/%s" % self._segment_path()

    class Tunnel(Entity):
        """
        contains configuration stanzas for different LSP
        tunnel types (P2P, P2MP, etc.)

        .. attribute:: tunnel_type
            specifies the type of LSP, e.g., P2P or P2MP
            **type**\: :py:class:`TunnelType_ <ydk.models.openconfig.openconfig_mpls_types.TunnelType_>`
        .. attribute:: ldp_type
            specify basic or targeted LDP LSP
            **type**\: :py:class:`LdpType <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.Ldp.Tunnel.LdpType>`
        .. attribute:: p2p_lsp
            properties of point\-to\-point tunnels
            **type**\: :py:class:`P2PLsp <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.Ldp.Tunnel.P2PLsp>`
        .. attribute:: p2mp_lsp
            properties of point\-to\-multipoint tunnels
            **type**\: :py:class:`P2MpLsp <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.Ldp.Tunnel.P2MpLsp>`
        .. attribute:: mp2mp_lsp
            properties of multipoint\-to\-multipoint tunnels
            **type**\: :py:class:`Mp2MpLsp <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.Ldp.Tunnel.Mp2MpLsp>`
        """

        _prefix = 'mpls'
        _revision = '2015-11-05'

        def __init__(self):
            """Build the tunnel container with its per-topology LSP children."""
            super(Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.Ldp.Tunnel, self).__init__()
            self.yang_name = "tunnel"
            self.yang_parent_name = "ldp"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_container_classes = OrderedDict([("p2p-lsp", ("p2p_lsp", Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.Ldp.Tunnel.P2PLsp)), ("p2mp-lsp", ("p2mp_lsp", Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.Ldp.Tunnel.P2MpLsp)), ("mp2mp-lsp", ("mp2mp_lsp", Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.Ldp.Tunnel.Mp2MpLsp))])
            self._child_list_classes = OrderedDict([])
            # Register leaf metadata BEFORE assigning the leaf attributes below
            # (writes go through __setattr__/_perform_setattr) — keep this order.
            self._leafs = OrderedDict([
                ('tunnel_type', YLeaf(YType.enumeration, 'tunnel-type')),
                ('ldp_type', YLeaf(YType.enumeration, 'ldp-type')),
            ])
            self.tunnel_type = None
            self.ldp_type = None
            self.p2p_lsp = Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.Ldp.Tunnel.P2PLsp()
            self.p2p_lsp.parent = self
            self._children_name_map["p2p_lsp"] = "p2p-lsp"
            self._children_yang_names.add("p2p-lsp")
            self.p2mp_lsp = Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.Ldp.Tunnel.P2MpLsp()
            self.p2mp_lsp.parent = self
            self._children_name_map["p2mp_lsp"] = "p2mp-lsp"
            self._children_yang_names.add("p2mp-lsp")
            self.mp2mp_lsp = Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.Ldp.Tunnel.Mp2MpLsp()
            self.mp2mp_lsp.parent = self
            self._children_name_map["mp2mp_lsp"] = "mp2mp-lsp"
            self._children_yang_names.add("mp2mp-lsp")
            self._segment_path = lambda: "tunnel"
            self._absolute_path = lambda: "openconfig-mpls:mpls/lsps/unconstrained-path/path-setup-protocol/ldp/%s" % self._segment_path()

        def __setattr__(self, name, value):
            # Route attribute writes through the Entity machinery for the two leaves.
            self._perform_setattr(Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.Ldp.Tunnel, ['tunnel_type', 'ldp_type'], name, value)

        class LdpType(Enum):
            """
            LdpType (Enum Class)

            specify basic or targeted LDP LSP

            .. data:: BASIC = 0
                basic hop-by-hop LSP
            .. data:: TARGETED = 1
                tLDP LSP
            """

            BASIC = Enum.YLeaf(0, "BASIC")
            TARGETED = Enum.YLeaf(1, "TARGETED")

        class P2PLsp(Entity):
            """
            properties of point\-to\-point tunnels

            .. attribute:: fec_address
                Address prefix for packets sharing the same forwarding equivalence class for the IGP\-based LSP
                **type**\: union of the below types:
                **type**\: list of str
                **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])/(([0\-9])\|([1\-2][0\-9])\|(3[0\-2]))
                **type**\: list of str
                **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(/(([0\-9])\|([0\-9]{2})\|(1[0\-1][0\-9])\|(12[0\-8])))
            """

            _prefix = 'mpls'
            _revision = '2015-11-05'

            def __init__(self):
                """Build the p2p-lsp container holding the fec-address leaf-list."""
                super(Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.Ldp.Tunnel.P2PLsp, self).__init__()
                self.yang_name = "p2p-lsp"
                self.yang_parent_name = "tunnel"
                self.is_top_level_class = False
                self.has_list_ancestor = False
                self.ylist_key_names = []
                self._child_container_classes = OrderedDict([])
                self._child_list_classes = OrderedDict([])
                # Register leaf-list metadata BEFORE assigning the attribute below.
                self._leafs = OrderedDict([
                    ('fec_address', YLeafList(YType.str, 'fec-address')),
                ])
                self.fec_address = []
                self._segment_path = lambda: "p2p-lsp"
                self._absolute_path = lambda: "openconfig-mpls:mpls/lsps/unconstrained-path/path-setup-protocol/ldp/tunnel/%s" % self._segment_path()

            def __setattr__(self, name, value):
                # Route attribute writes through the Entity machinery.
                self._perform_setattr(Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.Ldp.Tunnel.P2PLsp, ['fec_address'], name, value)

        class P2MpLsp(Entity):
            """
            properties of point\-to\-multipoint tunnels
            """

            _prefix = 'mpls'
            _revision = '2015-11-05'

            def __init__(self):
                """Build the (currently empty) p2mp-lsp container."""
                super(Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.Ldp.Tunnel.P2MpLsp, self).__init__()
                self.yang_name = "p2mp-lsp"
                self.yang_parent_name = "tunnel"
                self.is_top_level_class = False
                self.has_list_ancestor = False
                self.ylist_key_names = []
                self._child_container_classes = OrderedDict([])
                self._child_list_classes = OrderedDict([])
                self._leafs = OrderedDict()  # empty container in this model revision
                self._segment_path = lambda: "p2mp-lsp"
                self._absolute_path = lambda: "openconfig-mpls:mpls/lsps/unconstrained-path/path-setup-protocol/ldp/tunnel/%s" % self._segment_path()

        class Mp2MpLsp(Entity):
            """
            properties of multipoint\-to\-multipoint tunnels
            """

            _prefix = 'mpls'
            _revision = '2015-11-05'

            def __init__(self):
                """Build the (currently empty) mp2mp-lsp container."""
                super(Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.Ldp.Tunnel.Mp2MpLsp, self).__init__()
                self.yang_name = "mp2mp-lsp"
                self.yang_parent_name = "tunnel"
                self.is_top_level_class = False
                self.has_list_ancestor = False
                self.ylist_key_names = []
                self._child_container_classes = OrderedDict([])
                self._child_list_classes = OrderedDict([])
                self._leafs = OrderedDict()  # empty container in this model revision
                self._segment_path = lambda: "mp2mp-lsp"
                self._absolute_path = lambda: "openconfig-mpls:mpls/lsps/unconstrained-path/path-setup-protocol/ldp/tunnel/%s" % self._segment_path()
class SegmentRouting(Entity):
"""
segment routing signaling extensions for
IGP\-confgruent LSPs
.. attribute:: tunnel
contains configuration stanzas for different LSP tunnel types (P2P, P2MP, etc.)
**type**\: :py:class:`Tunnel <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.SegmentRouting.Tunnel>`
This class is a :ref:`presence class<presence-class>`
"""
_prefix = 'mpls'
_revision = '2015-11-05'
def __init__(self):
    """Build the segment-routing presence container and its tunnel child."""
    super(Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.SegmentRouting, self).__init__()
    self.yang_name = "segment-routing"
    self.yang_parent_name = "path-setup-protocol"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = []
    self._child_container_classes = OrderedDict([("tunnel", ("tunnel", Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.SegmentRouting.Tunnel))])
    self._child_list_classes = OrderedDict([])
    # Presence container: its existence alone carries meaning in YANG.
    self.is_presence_container = True
    self._leafs = OrderedDict()  # no leaves on this container
    self.tunnel = Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.SegmentRouting.Tunnel()
    self.tunnel.parent = self
    self._children_name_map["tunnel"] = "tunnel"
    self._children_yang_names.add("tunnel")
    self._segment_path = lambda: "segment-routing"
    self._absolute_path = lambda: "openconfig-mpls:mpls/lsps/unconstrained-path/path-setup-protocol/%s" % self._segment_path()
class Tunnel(Entity):
"""
contains configuration stanzas for different LSP
tunnel types (P2P, P2MP, etc.)
.. attribute:: tunnel_type
specifies the type of LSP, e.g., P2P or P2MP
**type**\: :py:class:`TunnelType_ <ydk.models.openconfig.openconfig_mpls_types.TunnelType_>`
.. attribute:: p2p_lsp
properties of point\-to\-point tunnels
**type**\: :py:class:`P2PLsp <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.SegmentRouting.Tunnel.P2PLsp>`
"""
_prefix = 'mpls'
_revision = '2015-11-05'
def __init__(self):
    """Build the segment-routing tunnel container and its p2p-lsp child."""
    super(Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.SegmentRouting.Tunnel, self).__init__()
    self.yang_name = "tunnel"
    self.yang_parent_name = "segment-routing"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = []
    self._child_container_classes = OrderedDict([("p2p-lsp", ("p2p_lsp", Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.SegmentRouting.Tunnel.P2PLsp))])
    self._child_list_classes = OrderedDict([])
    # Register leaf metadata BEFORE assigning the leaf attribute below
    # (writes go through __setattr__/_perform_setattr) — keep this order.
    self._leafs = OrderedDict([
        ('tunnel_type', YLeaf(YType.enumeration, 'tunnel-type')),
    ])
    self.tunnel_type = None
    self.p2p_lsp = Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.SegmentRouting.Tunnel.P2PLsp()
    self.p2p_lsp.parent = self
    self._children_name_map["p2p_lsp"] = "p2p-lsp"
    self._children_yang_names.add("p2p-lsp")
    self._segment_path = lambda: "tunnel"
    self._absolute_path = lambda: "openconfig-mpls:mpls/lsps/unconstrained-path/path-setup-protocol/segment-routing/%s" % self._segment_path()
def __setattr__(self, name, value):
    # Route attribute writes through the Entity machinery; 'tunnel_type' is
    # the only leaf on this node.
    self._perform_setattr(Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.SegmentRouting.Tunnel, ['tunnel_type'], name, value)
class P2PLsp(Entity):
    """
    properties of point\-to\-point tunnels
    .. attribute:: fec
    List of FECs that are to be originated as SR LSPs
    **type**\: list of :py:class:`Fec <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.SegmentRouting.Tunnel.P2PLsp.Fec>`
    """

    # YANG module metadata for this auto-generated (ydk-gen) binding.
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        super(Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.SegmentRouting.Tunnel.P2PLsp, self).__init__()
        self.yang_name = "p2p-lsp"
        self.yang_parent_name = "tunnel"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        # YANG list child: "fec" entries materialise as Fec objects inside self.fec.
        self._child_list_classes = OrderedDict([("fec", ("fec", Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.SegmentRouting.Tunnel.P2PLsp.Fec))])
        self._leafs = OrderedDict()
        self.fec = YList(self)
        self._segment_path = lambda: "p2p-lsp"
        self._absolute_path = lambda: "openconfig-mpls:mpls/lsps/unconstrained-path/path-setup-protocol/segment-routing/tunnel/%s" % self._segment_path()

    def __setattr__(self, name, value):
        # No settable leafs here; still route writes through YDK bookkeeping.
        self._perform_setattr(Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.SegmentRouting.Tunnel.P2PLsp, [], name, value)
class Fec(Entity):
    """
    List of FECs that are to be originated as SR LSPs
    .. attribute:: fec_address (key)
    FEC that is to be advertised as part of the Prefix\-SID
    **type**\: union of the below types:
    **type**\: str
    **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])/(([0\-9])\|([1\-2][0\-9])\|(3[0\-2]))
    **type**\: str
    **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(/(([0\-9])\|([0\-9]{2})\|(1[0\-1][0\-9])\|(12[0\-8])))
    .. attribute:: config
    Configuration parameters relating to the FEC to be advertised by SR
    **type**\: :py:class:`Config <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.SegmentRouting.Tunnel.P2PLsp.Fec.Config>`
    .. attribute:: state
    Operational state relating to a FEC advertised by SR
    **type**\: :py:class:`State <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.SegmentRouting.Tunnel.P2PLsp.Fec.State>`
    .. attribute:: prefix_sid
    Parameters relating to the Prefix\-SID used for the originated FEC
    **type**\: :py:class:`PrefixSid <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.SegmentRouting.Tunnel.P2PLsp.Fec.PrefixSid>`
    """

    # YANG module metadata for this auto-generated (ydk-gen) binding.
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        super(Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.SegmentRouting.Tunnel.P2PLsp.Fec, self).__init__()
        self.yang_name = "fec"
        self.yang_parent_name = "p2p-lsp"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        # 'fec_address' is the YANG list key for this entry.
        self.ylist_key_names = ['fec_address']
        self._child_container_classes = OrderedDict([("config", ("config", Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.SegmentRouting.Tunnel.P2PLsp.Fec.Config)), ("state", ("state", Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.SegmentRouting.Tunnel.P2PLsp.Fec.State)), ("prefix-sid", ("prefix_sid", Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.SegmentRouting.Tunnel.P2PLsp.Fec.PrefixSid))])
        self._child_list_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('fec_address', YLeaf(YType.str, 'fec-address')),
        ])
        self.fec_address = None
        self.config = Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.SegmentRouting.Tunnel.P2PLsp.Fec.Config()
        self.config.parent = self
        self._children_name_map["config"] = "config"
        self._children_yang_names.add("config")
        self.state = Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.SegmentRouting.Tunnel.P2PLsp.Fec.State()
        self.state.parent = self
        self._children_name_map["state"] = "state"
        self._children_yang_names.add("state")
        self.prefix_sid = Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.SegmentRouting.Tunnel.P2PLsp.Fec.PrefixSid()
        self.prefix_sid.parent = self
        self._children_name_map["prefix_sid"] = "prefix-sid"
        self._children_yang_names.add("prefix-sid")
        # List entries are addressed by their key value in the XPath segment.
        self._segment_path = lambda: "fec" + "[fec-address='" + str(self.fec_address) + "']"
        self._absolute_path = lambda: "openconfig-mpls:mpls/lsps/unconstrained-path/path-setup-protocol/segment-routing/tunnel/p2p-lsp/%s" % self._segment_path()

    def __setattr__(self, name, value):
        # Route attribute writes through YDK so leaf values are validated/tracked.
        self._perform_setattr(Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.SegmentRouting.Tunnel.P2PLsp.Fec, ['fec_address'], name, value)
class Config(Entity):
    """
    Configuration parameters relating to the FEC to be
    advertised by SR
    .. attribute:: fec_address
    FEC that is to be advertised as part of the Prefix\-SID
    **type**\: union of the below types:
    **type**\: str
    **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])/(([0\-9])\|([1\-2][0\-9])\|(3[0\-2]))
    **type**\: str
    **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(/(([0\-9])\|([0\-9]{2})\|(1[0\-1][0\-9])\|(12[0\-8])))
    """

    # YANG module metadata for this auto-generated (ydk-gen) binding.
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        super(Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.SegmentRouting.Tunnel.P2PLsp.Fec.Config, self).__init__()
        self.yang_name = "config"
        self.yang_parent_name = "fec"
        self.is_top_level_class = False
        # An ancestor ("fec") is a keyed YANG list, so no absolute path is generated.
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('fec_address', YLeaf(YType.str, 'fec-address')),
        ])
        self.fec_address = None
        self._segment_path = lambda: "config"

    def __setattr__(self, name, value):
        # Route attribute writes through YDK so leaf values are validated/tracked.
        self._perform_setattr(Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.SegmentRouting.Tunnel.P2PLsp.Fec.Config, ['fec_address'], name, value)
class State(Entity):
    """
    Operational state relating to a FEC advertised by SR
    .. attribute:: fec_address
    FEC that is to be advertised as part of the Prefix\-SID
    **type**\: union of the below types:
    **type**\: str
    **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])/(([0\-9])\|([1\-2][0\-9])\|(3[0\-2]))
    **type**\: str
    **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(/(([0\-9])\|([0\-9]{2})\|(1[0\-1][0\-9])\|(12[0\-8])))
    """

    # YANG module metadata for this auto-generated (ydk-gen) binding.
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        super(Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.SegmentRouting.Tunnel.P2PLsp.Fec.State, self).__init__()
        self.yang_name = "state"
        self.yang_parent_name = "fec"
        self.is_top_level_class = False
        # An ancestor ("fec") is a keyed YANG list, so no absolute path is generated.
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('fec_address', YLeaf(YType.str, 'fec-address')),
        ])
        self.fec_address = None
        self._segment_path = lambda: "state"

    def __setattr__(self, name, value):
        # Route attribute writes through YDK so leaf values are validated/tracked.
        self._perform_setattr(Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.SegmentRouting.Tunnel.P2PLsp.Fec.State, ['fec_address'], name, value)
class PrefixSid(Entity):
    """
    Parameters relating to the Prefix\-SID
    used for the originated FEC
    .. attribute:: config
    Configuration parameters relating to the Prefix\-SID used for the originated FEC
    **type**\: :py:class:`Config <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.SegmentRouting.Tunnel.P2PLsp.Fec.PrefixSid.Config>`
    .. attribute:: state
    Operational state parameters relating to the Prefix\-SID used for the originated FEC
    **type**\: :py:class:`State <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.SegmentRouting.Tunnel.P2PLsp.Fec.PrefixSid.State>`
    """

    # YANG module metadata for this auto-generated (ydk-gen) binding.
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        super(Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.SegmentRouting.Tunnel.P2PLsp.Fec.PrefixSid, self).__init__()
        self.yang_name = "prefix-sid"
        self.yang_parent_name = "fec"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([("config", ("config", Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.SegmentRouting.Tunnel.P2PLsp.Fec.PrefixSid.Config)), ("state", ("state", Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.SegmentRouting.Tunnel.P2PLsp.Fec.PrefixSid.State))])
        self._child_list_classes = OrderedDict([])
        # No leafs of its own, hence no __setattr__ override is generated for
        # this class (unlike its leaf-bearing siblings).
        self._leafs = OrderedDict()
        self.config = Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.SegmentRouting.Tunnel.P2PLsp.Fec.PrefixSid.Config()
        self.config.parent = self
        self._children_name_map["config"] = "config"
        self._children_yang_names.add("config")
        self.state = Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.SegmentRouting.Tunnel.P2PLsp.Fec.PrefixSid.State()
        self.state.parent = self
        self._children_name_map["state"] = "state"
        self._children_yang_names.add("state")
        self._segment_path = lambda: "prefix-sid"
class Config(Entity):
    """
    Configuration parameters relating to the Prefix\-SID
    used for the originated FEC
    .. attribute:: type
    Specifies how the value of the Prefix\-SID should be interpreted \- whether as an offset to the SRGB, or as an absolute value
    **type**\: :py:class:`Type <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.SegmentRouting.Tunnel.P2PLsp.Fec.PrefixSid.Config.Type>`
    **default value**\: INDEX
    .. attribute:: node_flag
    Specifies that the Prefix\-SID is to be treated as a Node\-SID by setting the N\-flag in the advertised Prefix\-SID TLV in the IGP
    **type**\: bool
    .. attribute:: last_hop_behavior
    Configuration relating to the LFIB actions for the Prefix\-SID to be used by the penultimate\-hop
    **type**\: :py:class:`LastHopBehavior <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.SegmentRouting.Tunnel.P2PLsp.Fec.PrefixSid.Config.LastHopBehavior>`
    """

    # YANG module metadata for this auto-generated (ydk-gen) binding.
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        super(Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.SegmentRouting.Tunnel.P2PLsp.Fec.PrefixSid.Config, self).__init__()
        self.yang_name = "config"
        self.yang_parent_name = "prefix-sid"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('type', YLeaf(YType.enumeration, 'type')),
            ('node_flag', YLeaf(YType.boolean, 'node-flag')),
            ('last_hop_behavior', YLeaf(YType.enumeration, 'last-hop-behavior')),
        ])
        self.type = None
        self.node_flag = None
        self.last_hop_behavior = None
        self._segment_path = lambda: "config"

    def __setattr__(self, name, value):
        # Route attribute writes through YDK so leaf values are validated/tracked.
        self._perform_setattr(Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.SegmentRouting.Tunnel.P2PLsp.Fec.PrefixSid.Config, ['type', 'node_flag', 'last_hop_behavior'], name, value)
class LastHopBehavior(Enum):
    """
    LastHopBehavior (Enum Class)
    Configuration relating to the LFIB actions for the
    Prefix\-SID to be used by the penultimate\-hop
    .. data:: EXPLICIT_NULL = 0
    Specifies that the explicit null label is to be used
    when the penultimate hop forwards a labelled packet to
    this Prefix-SID
    .. data:: UNCHANGED = 1
    Specifies that the Prefix-SID's label value is to be
    left in place when the penultimate hop forwards to this
    Prefix-SID
    .. data:: PHP = 2
    Specifies that the penultimate hop should pop the
    Prefix-SID label before forwarding to the eLER
    """

    # Members pair the numeric enum value with the YANG identifier string.
    EXPLICIT_NULL = Enum.YLeaf(0, "EXPLICIT-NULL")
    UNCHANGED = Enum.YLeaf(1, "UNCHANGED")
    PHP = Enum.YLeaf(2, "PHP")
class Type(Enum):
    """
    Type (Enum Class)
    Specifies how the value of the Prefix\-SID should be
    interpreted \- whether as an offset to the SRGB, or as an
    absolute value
    .. data:: INDEX = 0
    Set when the value of the prefix SID should be specified
    as an off-set from the SRGB's zero-value. When multiple
    SRGBs are specified, the zero-value is the minimum
    of their lower bounds
    .. data:: ABSOLUTE = 1
    Set when the value of a prefix SID is specified as the
    absolute value within an SRGB. It is an error to specify
    an absolute value outside of a specified SRGB
    """

    # Members pair the numeric enum value with the YANG identifier string.
    INDEX = Enum.YLeaf(0, "INDEX")
    ABSOLUTE = Enum.YLeaf(1, "ABSOLUTE")
class State(Entity):
    """
    Operational state parameters relating to the
    Prefix\-SID used for the originated FEC
    .. attribute:: type
    Specifies how the value of the Prefix\-SID should be interpreted \- whether as an offset to the SRGB, or as an absolute value
    **type**\: :py:class:`Type <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.SegmentRouting.Tunnel.P2PLsp.Fec.PrefixSid.State.Type>`
    **default value**\: INDEX
    .. attribute:: node_flag
    Specifies that the Prefix\-SID is to be treated as a Node\-SID by setting the N\-flag in the advertised Prefix\-SID TLV in the IGP
    **type**\: bool
    .. attribute:: last_hop_behavior
    Configuration relating to the LFIB actions for the Prefix\-SID to be used by the penultimate\-hop
    **type**\: :py:class:`LastHopBehavior <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.SegmentRouting.Tunnel.P2PLsp.Fec.PrefixSid.State.LastHopBehavior>`
    """

    # YANG module metadata for this auto-generated (ydk-gen) binding.
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        super(Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.SegmentRouting.Tunnel.P2PLsp.Fec.PrefixSid.State, self).__init__()
        self.yang_name = "state"
        self.yang_parent_name = "prefix-sid"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('type', YLeaf(YType.enumeration, 'type')),
            ('node_flag', YLeaf(YType.boolean, 'node-flag')),
            ('last_hop_behavior', YLeaf(YType.enumeration, 'last-hop-behavior')),
        ])
        self.type = None
        self.node_flag = None
        self.last_hop_behavior = None
        self._segment_path = lambda: "state"

    def __setattr__(self, name, value):
        # Route attribute writes through YDK so leaf values are validated/tracked.
        self._perform_setattr(Mpls.Lsps.UnconstrainedPath.PathSetupProtocol.SegmentRouting.Tunnel.P2PLsp.Fec.PrefixSid.State, ['type', 'node_flag', 'last_hop_behavior'], name, value)
class LastHopBehavior(Enum):
    """
    LastHopBehavior (Enum Class)
    Configuration relating to the LFIB actions for the
    Prefix\-SID to be used by the penultimate\-hop
    .. data:: EXPLICIT_NULL = 0
    Specifies that the explicit null label is to be used
    when the penultimate hop forwards a labelled packet to
    this Prefix-SID
    .. data:: UNCHANGED = 1
    Specifies that the Prefix-SID's label value is to be
    left in place when the penultimate hop forwards to this
    Prefix-SID
    .. data:: PHP = 2
    Specifies that the penultimate hop should pop the
    Prefix-SID label before forwarding to the eLER
    """

    # Members pair the numeric enum value with the YANG identifier string.
    EXPLICIT_NULL = Enum.YLeaf(0, "EXPLICIT-NULL")
    UNCHANGED = Enum.YLeaf(1, "UNCHANGED")
    PHP = Enum.YLeaf(2, "PHP")
class Type(Enum):
    """
    Type (Enum Class)
    Specifies how the value of the Prefix\-SID should be
    interpreted \- whether as an offset to the SRGB, or as an
    absolute value
    .. data:: INDEX = 0
    Set when the value of the prefix SID should be specified
    as an off-set from the SRGB's zero-value. When multiple
    SRGBs are specified, the zero-value is the minimum
    of their lower bounds
    .. data:: ABSOLUTE = 1
    Set when the value of a prefix SID is specified as the
    absolute value within an SRGB. It is an error to specify
    an absolute value outside of a specified SRGB
    """

    # Members pair the numeric enum value with the YANG identifier string.
    INDEX = Enum.YLeaf(0, "INDEX")
    ABSOLUTE = Enum.YLeaf(1, "ABSOLUTE")
class StaticLsps(Entity):
    """
    statically configured LSPs, without dynamic
    signaling
    .. attribute:: label_switched_path
    list of defined static LSPs
    **type**\: list of :py:class:`LabelSwitchedPath <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.StaticLsps.LabelSwitchedPath>`
    """

    # YANG module metadata for this auto-generated (ydk-gen) binding.
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        super(Mpls.Lsps.StaticLsps, self).__init__()
        self.yang_name = "static-lsps"
        self.yang_parent_name = "lsps"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        # YANG list child: "label-switched-path" entries live in self.label_switched_path.
        self._child_list_classes = OrderedDict([("label-switched-path", ("label_switched_path", Mpls.Lsps.StaticLsps.LabelSwitchedPath))])
        self._leafs = OrderedDict()
        self.label_switched_path = YList(self)
        self._segment_path = lambda: "static-lsps"
        self._absolute_path = lambda: "openconfig-mpls:mpls/lsps/%s" % self._segment_path()

    def __setattr__(self, name, value):
        # No settable leafs here; still route writes through YDK bookkeeping.
        self._perform_setattr(Mpls.Lsps.StaticLsps, [], name, value)
class LabelSwitchedPath(Entity):
    """
    list of defined static LSPs
    .. attribute:: name (key)
    name to identify the LSP
    **type**\: str
    .. attribute:: ingress
    Static LSPs for which the router is an ingress node
    **type**\: :py:class:`Ingress <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.StaticLsps.LabelSwitchedPath.Ingress>`
    .. attribute:: transit
    static LSPs for which the router is a transit node
    **type**\: :py:class:`Transit <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.StaticLsps.LabelSwitchedPath.Transit>`
    .. attribute:: egress
    static LSPs for which the router is a egress node
    **type**\: :py:class:`Egress <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.StaticLsps.LabelSwitchedPath.Egress>`
    """

    # YANG module metadata for this auto-generated (ydk-gen) binding.
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        super(Mpls.Lsps.StaticLsps.LabelSwitchedPath, self).__init__()
        self.yang_name = "label-switched-path"
        self.yang_parent_name = "static-lsps"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        # 'name' is the YANG list key for this entry.
        self.ylist_key_names = ['name']
        self._child_container_classes = OrderedDict([("ingress", ("ingress", Mpls.Lsps.StaticLsps.LabelSwitchedPath.Ingress)), ("transit", ("transit", Mpls.Lsps.StaticLsps.LabelSwitchedPath.Transit)), ("egress", ("egress", Mpls.Lsps.StaticLsps.LabelSwitchedPath.Egress))])
        self._child_list_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('name', YLeaf(YType.str, 'name')),
        ])
        self.name = None
        self.ingress = Mpls.Lsps.StaticLsps.LabelSwitchedPath.Ingress()
        self.ingress.parent = self
        self._children_name_map["ingress"] = "ingress"
        self._children_yang_names.add("ingress")
        self.transit = Mpls.Lsps.StaticLsps.LabelSwitchedPath.Transit()
        self.transit.parent = self
        self._children_name_map["transit"] = "transit"
        self._children_yang_names.add("transit")
        self.egress = Mpls.Lsps.StaticLsps.LabelSwitchedPath.Egress()
        self.egress.parent = self
        self._children_name_map["egress"] = "egress"
        self._children_yang_names.add("egress")
        # List entries are addressed by their key value in the XPath segment.
        self._segment_path = lambda: "label-switched-path" + "[name='" + str(self.name) + "']"
        self._absolute_path = lambda: "openconfig-mpls:mpls/lsps/static-lsps/%s" % self._segment_path()

    def __setattr__(self, name, value):
        # Route attribute writes through YDK so leaf values are validated/tracked.
        self._perform_setattr(Mpls.Lsps.StaticLsps.LabelSwitchedPath, ['name'], name, value)
class Ingress(Entity):
    """
    Static LSPs for which the router is an
    ingress node
    .. attribute:: next_hop
    next hop IP address for the LSP
    **type**\: union of the below types:
    **type**\: str
    **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
    **type**\: str
    **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
    .. attribute:: incoming_label
    label value on the incoming packet
    **type**\: union of the below types:
    **type**\: int
    **range:** 16..1048575
    **type**\: :py:class:`MplsLabel <ydk.models.openconfig.openconfig_segment_routing.MplsLabel>`
    .. attribute:: push_label
    label value to push at the current hop for the LSP
    **type**\: union of the below types:
    **type**\: int
    **range:** 16..1048575
    **type**\: :py:class:`MplsLabel <ydk.models.openconfig.openconfig_segment_routing.MplsLabel>`
    """

    # YANG module metadata for this auto-generated (ydk-gen) binding.
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        super(Mpls.Lsps.StaticLsps.LabelSwitchedPath.Ingress, self).__init__()
        self.yang_name = "ingress"
        self.yang_parent_name = "label-switched-path"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        # Union leafs (int-or-identity labels) are carried as YType.str.
        self._leafs = OrderedDict([
            ('next_hop', YLeaf(YType.str, 'next-hop')),
            ('incoming_label', YLeaf(YType.str, 'incoming-label')),
            ('push_label', YLeaf(YType.str, 'push-label')),
        ])
        self.next_hop = None
        self.incoming_label = None
        self.push_label = None
        self._segment_path = lambda: "ingress"

    def __setattr__(self, name, value):
        # Route attribute writes through YDK so leaf values are validated/tracked.
        self._perform_setattr(Mpls.Lsps.StaticLsps.LabelSwitchedPath.Ingress, ['next_hop', 'incoming_label', 'push_label'], name, value)
class Transit(Entity):
    """
    static LSPs for which the router is a
    transit node
    .. attribute:: next_hop
    next hop IP address for the LSP
    **type**\: union of the below types:
    **type**\: str
    **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
    **type**\: str
    **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
    .. attribute:: incoming_label
    label value on the incoming packet
    **type**\: union of the below types:
    **type**\: int
    **range:** 16..1048575
    **type**\: :py:class:`MplsLabel <ydk.models.openconfig.openconfig_segment_routing.MplsLabel>`
    .. attribute:: push_label
    label value to push at the current hop for the LSP
    **type**\: union of the below types:
    **type**\: int
    **range:** 16..1048575
    **type**\: :py:class:`MplsLabel <ydk.models.openconfig.openconfig_segment_routing.MplsLabel>`
    """

    # YANG module metadata for this auto-generated (ydk-gen) binding.
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        super(Mpls.Lsps.StaticLsps.LabelSwitchedPath.Transit, self).__init__()
        self.yang_name = "transit"
        self.yang_parent_name = "label-switched-path"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        # Union leafs (int-or-identity labels) are carried as YType.str.
        self._leafs = OrderedDict([
            ('next_hop', YLeaf(YType.str, 'next-hop')),
            ('incoming_label', YLeaf(YType.str, 'incoming-label')),
            ('push_label', YLeaf(YType.str, 'push-label')),
        ])
        self.next_hop = None
        self.incoming_label = None
        self.push_label = None
        self._segment_path = lambda: "transit"

    def __setattr__(self, name, value):
        # Route attribute writes through YDK so leaf values are validated/tracked.
        self._perform_setattr(Mpls.Lsps.StaticLsps.LabelSwitchedPath.Transit, ['next_hop', 'incoming_label', 'push_label'], name, value)
class Egress(Entity):
    """
    static LSPs for which the router is a
    egress node
    .. attribute:: next_hop
    next hop IP address for the LSP
    **type**\: union of the below types:
    **type**\: str
    **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
    **type**\: str
    **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
    .. attribute:: incoming_label
    label value on the incoming packet
    **type**\: union of the below types:
    **type**\: int
    **range:** 16..1048575
    **type**\: :py:class:`MplsLabel <ydk.models.openconfig.openconfig_segment_routing.MplsLabel>`
    .. attribute:: push_label
    label value to push at the current hop for the LSP
    **type**\: union of the below types:
    **type**\: int
    **range:** 16..1048575
    **type**\: :py:class:`MplsLabel <ydk.models.openconfig.openconfig_segment_routing.MplsLabel>`
    """

    # YANG module metadata for this auto-generated (ydk-gen) binding.
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        super(Mpls.Lsps.StaticLsps.LabelSwitchedPath.Egress, self).__init__()
        self.yang_name = "egress"
        self.yang_parent_name = "label-switched-path"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        # Union leafs (int-or-identity labels) are carried as YType.str.
        self._leafs = OrderedDict([
            ('next_hop', YLeaf(YType.str, 'next-hop')),
            ('incoming_label', YLeaf(YType.str, 'incoming-label')),
            ('push_label', YLeaf(YType.str, 'push-label')),
        ])
        self.next_hop = None
        self.incoming_label = None
        self.push_label = None
        self._segment_path = lambda: "egress"

    def __setattr__(self, name, value):
        # Route attribute writes through YDK so leaf values are validated/tracked.
        self._perform_setattr(Mpls.Lsps.StaticLsps.LabelSwitchedPath.Egress, ['next_hop', 'incoming_label', 'push_label'], name, value)
def clone_ptr(self):
    # Create a fresh top-level Mpls entity, remember it as this object's
    # top entity, and return it (YDK hook used when cloning object trees).
    self._top_entity = Mpls()
    return self._top_entity
class LocallyComputed(Identity):
    """
    indicates a constrained\-path LSP in which the
    path is computed by the local LER
    """

    # YANG module metadata for this auto-generated (ydk-gen) binding.
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        # Register this identity under its (namespace, module, identity-name) triple.
        super(LocallyComputed, self).__init__("http://openconfig.net/yang/mpls", "openconfig-mpls", "openconfig-mpls:locally-computed")
class ExternallyQueried(Identity):
    """
    constrained\-path LSP in which the path is
    obtained by querying an external source, such as a PCE server
    """

    # YANG module metadata for this auto-generated (ydk-gen) binding.
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        # Register this identity under its (namespace, module, identity-name) triple.
        super(ExternallyQueried, self).__init__("http://openconfig.net/yang/mpls", "openconfig-mpls", "openconfig-mpls:externally-queried")
class ExplicitlyDefined(Identity):
    """
    constrained\-path LSP in which the path is
    explicitly specified as a collection of strict or/and loose
    hops
    """

    # YANG module metadata for this auto-generated (ydk-gen) binding.
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        # Register this identity under its (namespace, module, identity-name) triple.
        super(ExplicitlyDefined, self).__init__("http://openconfig.net/yang/mpls", "openconfig-mpls", "openconfig-mpls:explicitly-defined")
|
p = 1
r = 1
t = 1
si = (p*r*t)/100
print("simple intrest is ",si) |
"""
Copyright 2020 Division of Medical Image Computing, German Cancer Research Center (DKFZ), Heidelberg, Germany
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from typing import Tuple
import torch
def clip_boxes_to_image_(boxes: torch.Tensor, img_shape: Tuple[int]):
    """
    Clip boxes to image dimensions inplace, dispatching on the
    number of box coordinates.

    Args:
        boxes (Tensor): tensor with boxes [N x (2*dim)]
            (x_min, y_min, x_max, y_max(, z_min, z_max))
        img_shape (Tuple[height, width(, depth)]): size of image

    Returns:
        Tensor: clipped boxes as tensor

    Raises:
        ValueError: boxes need to have 4(2D) or 6(3D) components
    """
    n_coords = boxes.shape[-1]
    if n_coords == 4:
        return clip_boxes_to_image_2d_(boxes, img_shape)
    if n_coords == 6:
        return clip_boxes_to_image_3d_(boxes, img_shape)
    raise ValueError(f"Boxes with {boxes.shape[-1]} are not supported.")
def clip_boxes_to_image(boxes: torch.Tensor, img_shape: Tuple[int]):
    """
    Clip boxes to image dimensions, dispatching on the number of
    box coordinates (out-of-place clamp variant).

    Args:
        boxes (Tensor): tensor with boxes [N x (2*dim)]
            (x_min, y_min, x_max, y_max(, z_min, z_max))
        img_shape (Tuple[height, width(, depth)]): size of image

    Returns:
        Tensor: clipped boxes as tensor

    Raises:
        ValueError: boxes need to have 4(2D) or 6(3D) components
    """
    n_coords = boxes.shape[-1]
    if n_coords == 4:
        return clip_boxes_to_image_2d(boxes, img_shape)
    if n_coords == 6:
        return clip_boxes_to_image_3d(boxes, img_shape)
    raise ValueError(f"Boxes with {boxes.shape[-1]} are not supported.")
def clip_boxes_to_image_2d_(boxes: torch.Tensor, img_shape: Tuple[int, int]):
    """
    Clamp 2D box coordinates into the image bounds, in place.

    Args:
        boxes (Tensor): tensor with boxes [N x 4] (x_min, y_min, x_max, y_max)
        img_shape (Tuple[x_max, y_max]): size of image

    Returns:
        Tensor: the same tensor, clipped in place
    """
    bound_x, bound_y = img_shape
    # Even offsets (0, 2) hold x coordinates, odd offsets (1, 3) hold y.
    for offset, bound in ((0, bound_x), (1, bound_y)):
        boxes[..., offset::2].clamp_(min=0, max=bound)
    return boxes
def clip_boxes_to_image_3d_(boxes: torch.Tensor, img_shape: Tuple[int, int, int]):
    """
    Clamp 3D box coordinates into the image bounds, in place.

    Args:
        boxes (Tensor): tensor with boxes [N x 6]
            (x_min, y_min, x_max, y_max, z_min, z_max)
        img_shape (Tuple[height, width, depth]): size of image

    Returns:
        Tensor: the same tensor, clipped in place
    """
    s0, s1, s2 = img_shape
    # Per-coordinate upper bounds: offsets 0/2 -> s0, 1/3 -> s1, 4/5 -> s2.
    bounds = (s0, s1, s0, s1, s2, s2)
    for offset, bound in enumerate(bounds):
        boxes[..., offset::6].clamp_(min=0, max=bound)
    return boxes
def clip_boxes_to_image_2d(boxes: torch.Tensor, img_shape: Tuple[int, int]):
    """
    Clamp 2D box coordinates into the image bounds using the
    out-of-place ``clamp`` (works for half cpu tensors where
    ``clamp_`` may not).

    Args:
        boxes (Tensor): tensor with boxes [N x 4] (x_min, y_min, x_max, y_max)
        img_shape (Tuple[x_max, y_max]): size of image

    Returns:
        Tensor: clipped boxes as tensor

    Notes:
        NOTE(review): despite the non-underscore name, the indexed
        assignments below write the clamped values back into ``boxes``,
        so the input tensor is modified as well -- confirm callers
        expect this.
    """
    bound_x, bound_y = img_shape
    # Even offsets hold x coordinates, odd offsets hold y.
    for offset, bound in ((0, bound_x), (1, bound_y)):
        boxes[..., offset::2] = boxes[..., offset::2].clamp(min=0, max=bound)
    return boxes
def clip_boxes_to_image_3d(boxes: torch.Tensor, img_shape: Tuple[int, int, int]):
    """
    Clamp 3D box coordinates into the image bounds using the
    out-of-place ``clamp`` (works for half cpu tensors where
    ``clamp_`` may not).

    Args:
        boxes (Tensor): tensor with boxes [N x 6]
            (x_min, y_min, x_max, y_max, z_min, z_max)
        img_shape (Tuple[height, width, depth]): size of image

    Returns:
        Tensor: clipped boxes as tensor

    Notes:
        NOTE(review): despite the non-underscore name, the indexed
        assignments below write the clamped values back into ``boxes``,
        so the input tensor is modified as well -- confirm callers
        expect this.
    """
    s0, s1, s2 = img_shape
    # Per-coordinate upper bounds: offsets 0/2 -> s0, 1/3 -> s1, 4/5 -> s2.
    bounds = (s0, s1, s0, s1, s2, s2)
    for offset, bound in enumerate(bounds):
        boxes[..., offset::6] = boxes[..., offset::6].clamp(min=0, max=bound)
    return boxes
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""upgrade
Revision ID: 71f8b4cf1dbf
Revises: 10c9668a816d
Create Date: 2018-01-29 09:09:32.297389
"""
# revision identifiers, used by Alembic.
revision = '71f8b4cf1dbf'        # id of this migration
down_revision = '10c9668a816d'   # migration this one applies on top of
branch_labels = None
depends_on = None
from alembic import op
from sqlalchemy.dialects import mysql
def upgrade():
    """Apply this schema revision.

    - capsule: enforce unique ``uuid`` values and drop the ``message`` column.
    - container_actions: add FK ``container_uuid`` -> ``container.uuid``.
    - pci_device: add FK ``compute_node_uuid`` -> ``compute_node.uuid``.
    - volume_mapping: make ``container_uuid`` nullable and add FK to
      ``container.uuid``.

    batch_alter_table is used so the changes also work on SQLite.
    """
    with op.batch_alter_table('capsule', schema=None) as batch_op:
        batch_op.create_unique_constraint('uniq_capsule0uuid', ['uuid'])
        batch_op.drop_column('message')
    with op.batch_alter_table('container_actions', schema=None) as batch_op:
        # None -> let the naming convention pick the constraint name.
        batch_op.create_foreign_key(
            None, 'container', ['container_uuid'], ['uuid'])
    with op.batch_alter_table('pci_device', schema=None) as batch_op:
        batch_op.create_foreign_key(
            None, 'compute_node', ['compute_node_uuid'], ['uuid'])
    with op.batch_alter_table('volume_mapping', schema=None) as batch_op:
        batch_op.alter_column('container_uuid',
                              existing_type=mysql.VARCHAR(length=36),
                              nullable=True)
        batch_op.create_foreign_key(
            None, 'container', ['container_uuid'], ['uuid'])
|
# derived from https://github.com/danthedeckie/OpenLP-To-ProPresenter5-Converter
import os
import re
from base64 import b64encode
from datetime import datetime
from uuid import uuid1
from utils import (load_scottish_psalter, load_sing_psalms, make_output_folder,
remove_folder, remove_markup, zip_folder)
__re_uni_x = re.compile(r'\\x..')  # Unicode \x form
__re_uni_u = re.compile(r'\\u....')  # Unicode \u form
# NOTE(review): the two patterns above are not referenced in this part of the
# file -- confirm they are used elsewhere before removing.
# Font named in the RTF font table of every generated slide blob.
DEFAULT_FONT = "Franklin Gothic Book"
def make_uuid():
    """Return a freshly generated time-based UUID as an upper-case string."""
    return str(uuid1()).upper()
def SuperScRTF(text):
    """Wrap verse numbers (e.g. ``12`` or ``12-14``) in RTF superscript
    commands.

    Numbers at the start of a stanza appear after the literal ``uc0 ``
    marker; numbers mid-stanza appear immediately after a newline.
    Returns the rewritten text.
    """
    # Superscript verse #s at start of stanza ("uc0 12" / "uc0 12-14").
    for match in re.findall(r'uc0 \d+-*\d*', text):
        # BUG FIX: the original used match.lstrip('uc0 '), which strips a
        # *character set* {u, c, 0, space} -- a number with leading zeros
        # (e.g. "012") lost them. Slice off the fixed 'uc0 ' prefix instead.
        num = match[len('uc0 '):]
        text = text.replace(match, 'uc0 \\super {' + num + '}\\nosupersub ')
    # Superscript verse #s in the middle of a stanza (right after a newline).
    for match in re.findall(r'\n\d+-*\d*', text):
        num = re.findall(r'\d+-*\d*', match)
        text = text.replace(match, '\n\\super {' + num[0] + '}\\nosupersub ')
    return text
def underline_slide(text):
    """Translate ``<underline>`` markup into RTF underline control words."""
    markup_to_rtf = (
        ('<underline>', r'{\lang2057\ul\ltrch '),
        ('</underline>', r'}'),
    )
    for markup, rtf in markup_to_rtf:
        text = text.replace(markup, rtf)
    return text
def convert_unicode_chars(text):
    """Rewrite every non-ASCII character as an RTF ``\\uN `` escape sequence."""
    return ''.join(
        ch if ord(ch) < 128 else rf'\u{ord(ch)} '
        for ch in text
    )
def MakeRTFBlob(text, font_colour, font_size):
    """Assemble one slide's RTF document and return it base64-encoded.

    Args:
        text: stanza text (leading newlines are stripped).
        font_colour: (red, green, blue) component strings for the colour table.
        font_size: point size; written to RTF in half-points (``\\fsN``).
    """
    red, green, blue = font_colour
    body = convert_unicode_chars(text.lstrip("\n"))
    rtf = ''.join([
        '{\\rtf1\\ansi\\ansicpg1252\\cocoartf1038\\cocoasubrtf360',
        '{\\fonttbl\\f0\\fswiss\\fcharset0 ', DEFAULT_FONT, ';}',
        '{\\colortbl;\\red', red, '\\green', green, '\\blue', blue, ';}',
        '\\pard\\tx560\\tx1120\\tx1680\\tx2240\\tx2800\\tx3360\\tx3920',
        '\\tx4480\\tx5040\\tx5600\\tx6160\\tx6720\\qc\\pardirnatural',
        '\\f0\\fs', str(font_size * 2),
        '\\fsmilli51200 \\cf1 \\expnd0\\expndtw0\\kerning0 \\uc0 ',
        body, '}',
    ])
    rtf = SuperScRTF(rtf)
    rtf = underline_slide(rtf)
    rtf = rtf.replace('\n', '\\\n')
    return b64encode(rtf.encode()).decode()
def SlideBlock(text, screen_size, font_colour, background_colour):
    """Return the <RVDisplaySlide> XML fragment for one slide.

    Args:
        text: stanza text (markup handled by MakeRTFBlob).
        screen_size: (height, width) strings, e.g. ("1080", "1920");
            1080p output gets a larger base font.
        font_colour: (r, g, b) component strings for the RTF text colour.
        background_colour: colour component strings joined into the slide's
            backgroundColor attribute.
    """
    # Larger base font for 1080p screens.
    if screen_size[0] == '1080':
        font_size = 90
    else:
        font_size = 72
    return '<RVDisplaySlide backgroundColor="' + \
        " ".join(background_colour) + \
        '" enabled="1" highlightColor="0 0 0 0" hotKey="" label="" notes="" slideType="1" sort_index="0" UUID="' + \
        make_uuid() + \
        '" drawingBackgroundColor="1" chordChartPath="" serialization-array-index="0"><cues containerClass="NSMutableArray"></cues><displayElements containerClass="NSMutableArray"><RVTextElement displayDelay="0" displayName="Default" locked="0" persistent="0" typeID="0" fromTemplate="0" bezelRadius="0" drawingFill="0" drawingShadow="0" drawingStroke="0" fillColor="1 1 1 1" rotation="0" source="" adjustsHeightToFit="1" verticalAlignment="0" RTFData="' + \
        MakeRTFBlob(text, font_colour, font_size) + \
        '" revealType="0" serialization-array-index="0"><_-RVRect3D-_position x="0" y="0" z="0" width="' + \
        screen_size[1] + \
        '" height="' + \
        screen_size[0] + \
        '"></_-RVRect3D-_position><_-D-_serializedShadow containerClass="NSMutableDictionary"><NSNumber serialization-native-value="4" serialization-dictionary-key="shadowBlurRadius"></NSNumber><NSColor serialization-native-value="0 0 0 1" serialization-dictionary-key="shadowColor"></NSColor><NSMutableString serialization-native-value="{2.82842969894409, -2.82843065261841}" serialization-dictionary-key="shadowOffset"></NSMutableString></_-D-_serializedShadow><stroke containerClass="NSMutableDictionary"><NSColor serialization-native-value="1 1 1 1" serialization-dictionary-key="RVShapeElementStrokeColorKey"></NSColor><NSNumber serialization-native-value="0" serialization-dictionary-key="RVShapeElementStrokeWidthKey"></NSNumber></stroke></RVTextElement></displayElements><_-RVProTransitionObject-_transitionObject transitionType="-1" transitionDuration="1" motionEnabled="0" motionDuration="20" motionSpeed="100"></_-RVProTransitionObject-_transitionObject></RVDisplaySlide>'
def HeaderBlock(Name='New Song',
                Authors='',
                Artist='',
                CCLICopyRightInfo='',
                CCLILicenceNumber='',
                Publisher='',
                Notes='',
                height="1080",
                width="1920",
                category=""):
    """Return the opening <RVPresentationDocument> XML for a .pro5 file.

    All arguments are written verbatim into document/CCLI attributes; the
    returned string is left open (it ends inside <groups>) and must be closed
    by the slide groupings plus FooterBlock.
    """
    return '<RVPresentationDocument height="' + \
        height + \
        '" width="' + \
        width + \
        '" versionNumber="500" docType="0" creatorCode="1349676880" lastDateUsed="' + \
        datetime.now().strftime('%Y-%m-%dT%H:%M:%S') + \
        '" usedCount="0" category="' + \
        category + \
        '" resourcesDirectory="" backgroundColor="0 0 0 1" drawingBackgroundColor="0" notes="' + \
        Notes + \
        '" artist="' + \
        Artist + \
        '" author="' + \
        Authors + \
        '" album="" CCLIDisplay="1" CCLIArtistCredits="" CCLISongTitle="' + \
        Name + \
        '" CCLIPublisher="' + \
        Publisher + \
        '" CCLICopyrightInfo="' + \
        CCLICopyRightInfo + \
        '" CCLILicenseNumber="' + \
        CCLILicenceNumber + \
        '" chordChartPath=""><timeline timeOffSet="0" selectedMediaTrackIndex="0" unitOfMeasure="60" duration="0" loop="0"><timeCues containerClass="NSMutableArray"></timeCues><mediaTracks containerClass="NSMutableArray"></mediaTracks></timeline><bibleReference containerClass="NSMutableDictionary"></bibleReference><_-RVProTransitionObject-_transitionObject transitionType="-1" transitionDuration="1" motionEnabled="0" motionDuration="20" motionSpeed="100"></_-RVProTransitionObject-_transitionObject><groups containerClass="NSMutableArray">'
FooterBlock = '</groups><arrangements containerClass="NSMutableArray"></arrangements></RVPresentationDocument>'
def write_prop(psalm, screen_size, font_colour, background_colour, underline, extra_slide, output_folder):
    """Write one psalm as a .pro5 (ProPresenter 5) file into output_folder.

    Args:
        psalm: dict with 'stanzas', 'name', 'metre', 'book' and 'file_name' keys.
        screen_size: (height, width) strings.
        font_colour / background_colour: colour component strings.
        underline: keep <underline> markup (converted later) when True;
            otherwise strip all markup from each stanza.
        extra_slide: prepend an empty slide when True.
        output_folder: destination directory for the .pro5 file.
    """
    to_write = ""
    # NOTE(review): this rebinds psalm['stanzas'], mutating the caller's dict
    # whenever extra_slide is set — confirm callers don't reuse the psalm dict.
    if extra_slide:
        psalm['stanzas'] = [""] + psalm['stanzas']
    for v in psalm['stanzas']:
        if not underline:
            v = remove_markup(v)
        to_write += SlideBlock(
            v,
            screen_size,
            font_colour,
            background_colour
        )
    # Sing Psalms material carries a Free Church of Scotland copyright field.
    if psalm['book'] == "Sing Psalms":
        copyright_field = "Free Church of Scotland"
    else:
        copyright_field = ""
    # Prepare Header Block to write:
    to_write_header = HeaderBlock(
        Name=psalm['name'],
        Artist='',
        CCLILicenceNumber='',
        Notes=psalm['metre'],
        CCLICopyRightInfo=copyright_field,
        Publisher='',
        Authors=psalm['book'],
        height=screen_size[0],
        width=screen_size[1],
        category=psalm['book'])
    # Wrap all slides in a single slide grouping between header and footer.
    to_write = to_write_header + \
        '<RVSlideGrouping name="' + \
        ' ' + \
        '" uuid="' + \
        make_uuid() + \
        '" color="' + \
        ' ' + \
        '" serialization-array-index="0"><slides containerClass="NSMutableArray">' + \
        to_write + \
        '</slides></RVSlideGrouping>' + \
        FooterBlock
    # Now actually write the thing.
    with open(os.path.join(output_folder, psalm['file_name'] + '.pro5'), 'w') as f:
        f.write(to_write)
def convert2propresenter(screen_size=("1080", "1920"), font_colour=('0', '0', '0'), background_colour=('1', '1', '1', '1'), colour_name='b_w', underline=False, extra_slide=False):
    """Convert Psalms to propresenter files.

    Writes both psalter collections into ProPresenter5/<ratio>_<colour>[_underlined][_stcs]/,
    then zips the parent folder and removes the working directory.

    Args:
        screen_size: (height, width) strings used for slide geometry.
        font_colour: (r, g, b) component strings for the text colour.
        background_colour: component strings for the slide background.
        colour_name: label for the colour scheme, used in the folder name.
        underline: keep <underline> markup when True.
        extra_slide: prepend a blank slide to each psalm when True.
    """
    ratio = "x".join(screen_size)
    folder_ids = [ratio, colour_name]
    if underline:
        folder_ids.append("underlined")
    if extra_slide:
        folder_ids.append("stcs")
    # sing psalms
    file_name = "Sing Psalms"
    output_folder = make_output_folder(["ProPresenter5", '_'.join(folder_ids), file_name])
    psalms = load_sing_psalms()
    for psalm in psalms:
        write_prop(psalm, screen_size, font_colour, background_colour, underline, extra_slide, output_folder)
    # scottish psalter
    file_name = "Scottish Psalter"
    output_folder = make_output_folder(["ProPresenter5", '_'.join(folder_ids), file_name])
    psalms = load_scottish_psalter()
    for psalm in psalms:
        write_prop(psalm, screen_size, font_colour, background_colour, underline, extra_slide, output_folder)
    # Archive the generated tree and clean up the uncompressed copy.
    zip_folder(os.path.dirname(output_folder))
    remove_folder(os.path.dirname(output_folder))
# Script entry point: run the conversion with the default settings.
if __name__ == '__main__':
    convert2propresenter()
|
from typer.testing import CliRunner
from unasync_cli.main import app
# Shared runner for invoking the Typer CLI app in-process.
runner = CliRunner()

def test_unasync_cli():
    # Smoke test: `--help` must render and exit cleanly.
    assert runner.invoke(app, ["--help"]).exit_code == 0
|
import imgaug as ia
import imgaug.augmenters as iaa
class MyAugmentor(object):
    """Image augmentation pipeline built on imgaug.

    Each ``sometimes(...)`` wrapper applies its augmenter with probability
    0.5; the composed pipeline is stored in ``self.seq``.
    """
    def __init__(self):
        sometimes = lambda aug: iaa.Sometimes(0.5, aug)
        self.seq = iaa.Sequential([
            # Brightness jitter (per-channel half the time).
            iaa.Multiply((0.8, 1.2), per_channel=0.5),
            # One of several dropout / salt corruptions.
            sometimes(
                iaa.OneOf([
                    iaa.CoarseDropout((0.01, 0.03), size_percent=(0.1, 0.3)),
                    iaa.CoarseDropout((0.01, 0.03), size_percent=(0.1, 0.3), per_channel=1.0),
                    iaa.Dropout((0.03,0.05)),
                    iaa.Salt((0.03,0.05))
                ])
            ),
            # Frequency-domain blend of brightness and contrast changes.
            sometimes(iaa.FrequencyNoiseAlpha(
                exponent=(-4, 0),
                first=iaa.Multiply((0.8, 1.2), per_channel=0.5),
                second=iaa.ContrastNormalization((0.8, 1.5))
            )
            ),
            # One of several blur types.
            sometimes(
                iaa.OneOf([
                    iaa.MotionBlur(k=(3,4),angle=(0, 360)),
                    iaa.GaussianBlur((0, 1.2)),
                    iaa.AverageBlur(k=(2, 3)),
                    iaa.MedianBlur(k=(3, 5))
                ])
            ),
            # Slight random crop/pad with constant-value padding.
            sometimes(
                iaa.CropAndPad(
                    percent=(-0.02, 0.02),
                    pad_mode='constant',
                    pad_cval=(0, 255)
                ),
            ),
            # Additive noise.
            sometimes(iaa.AdditiveGaussianNoise((0.02, 0.1))),
            sometimes(iaa.AdditivePoissonNoise((0.02,0.05))),
            # Invert pixel values half the time.
            iaa.Invert(p=0.5)
        ])
# Print the two greeting messages in order.
for message in ("Hello World", "Hello"):
    print(message)
|
def debug_trace():
    """Drop into a pdb session from within a PyQt5 application.

    pyqtRemoveInputHook() stops Qt's input hook from fighting with the pdb
    prompt; the debugger is then pointed at the caller's frame so the session
    starts where debug_trace() was invoked.
    """
    from PyQt5.QtCore import pyqtRemoveInputHook
    import pdb
    import sys
    pyqtRemoveInputHook()
    # set up the debugger
    debugger = pdb.Pdb()
    debugger.reset()
    # custom next to get outside of function scope
    debugger.do_next(None)  # run the next command
    users_frame = sys._getframe().f_back  # frame where the user invoked `pyqt_set_trace()`
    debugger.interaction(users_frame, None)
|
"""Tests for _metadata.py"""
import datetime
def test_metadata_properties(opulent_ds):
    """Getters reflect the fixture's values; every setter round-trips."""
    ds = opulent_ds
    # Values baked into the opulent_ds fixture.
    assert ds.pr.references == "doi:10.1012"
    assert ds.pr.rights == "Use however you want."
    assert ds.pr.contact == "lol_no_one_will_answer@example.com"
    assert ds.pr.title == "Completely invented GHG inventory data"
    assert ds.pr.comment == "GHG inventory data ..."
    assert ds.pr.institution == "PIK"
    assert ds.pr.history == (
        "2021-01-14 14:50 data invented\n" "2021-01-14 14:51 additional processing step"
    )
    assert ds.pr.entity_terminology == "primap2"
    assert ds.pr.publication_date == datetime.date(2099, 12, 31)
    # Each property setter should be readable back immediately.
    ds.pr.references = "references"
    assert ds.pr.references == "references"
    ds.pr.rights = "rights"
    assert ds.pr.rights == "rights"
    ds.pr.contact = "contact"
    assert ds.pr.contact == "contact"
    ds.pr.title = "title"
    assert ds.pr.title == "title"
    ds.pr.comment = "comment"
    assert ds.pr.comment == "comment"
    ds.pr.institution = "institution"
    assert ds.pr.institution == "institution"
    ds.pr.history = "history"
    assert ds.pr.history == "history"
    ds.pr.entity_terminology = "entity_terminology"
    assert ds.pr.entity_terminology == "entity_terminology"
    # Dates are stored as datetime.date objects.
    today = datetime.date.today()
    ds.pr.publication_date = today
    assert ds.pr.publication_date == today
|
import os

# SMTP credentials and endpoint; each value can be overridden through the
# corresponding LBC_* environment variable.
SMTP_USER = os.getenv('LBC_SMTP_USER', "user")
SMTP_PASS = os.getenv('LBC_SMTP_PASS', "pass")
SMTP_SERVER = os.getenv('LBC_SMTP_SERVER', "smtp.gmail.com:587")

# CSV files holding scheduled jobs and pending deletions.
JOB_FILE = os.getenv('LBC_JOB_FILE', "jobs.csv")
DELETE_FILE = os.getenv('LBC_DELETE_FILE', "delete.csv")
|
from abc import ABCMeta
import time
from .base_service import BaseService
from ..utils.generic_utils import log
from ..utils.generic_utils import class_name
class ServiceManager(metaclass=ABCMeta):
    """Manage the services in a centralized way.

    Services are attached to the manager as attributes (see `add`), so a
    service registered under name "foo" is reachable as `manager.foo`.
    """
    def __init__(self, *services):
        # Register every given service together with its dependency tree.
        for service in services:
            self.add_recursive(service)

    def add(self, service, name=None):
        """
        Adds a new service to the service manager, under certain name. The
        service is then accessible with `self.name`.

        # Arguments:
            service: an instance of BaseService or inherited class from it.
            name: a key to associate the service with. If None, the name is
                chosen automatically (class name plus first free ordinal).

        # Returns: the service itself

        # Raises:
            ValueError: if this manager already has a service with this name
        """
        if name is None:
            base_name = type(service).__name__
            i = 1
            ith_name = base_name + str(i)
            while hasattr(self, ith_name):
                i += 1
                ith_name = base_name + str(i)
            name = ith_name
        if hasattr(self, name):
            raise ValueError('Attribute "{}" already exists.'.format(name))
        setattr(self, name, service)
        self._register(name)
        return service

    def add_recursive(self, service):
        """
        Adds the specified service to the manager, and its whole dependency
        tree as well.

        # Arguments:
            service: an instance of BaseService or inherited class from it.

        # Returns: the service itself.
        """
        self.add(service)
        # NOTE(review): reaches into the service's private input adapter to
        # discover dependencies — confirm this stays in sync with BaseService.
        for _, child_service in service._input_adapter._input_services.items():
            self.add_recursive(child_service)
        return service

    def start(self):
        """Starts all services that are assigned to the manager at once.
        """
        services = self.enlist_services()
        for service in services:
            service.start()

    def stop(self, warning_interval=10, rage_resign_threshold=-1):
        """
        Sends stop signals to each thread. Beware that it is up to each
        service's implementation to ensure that it will be able to stop
        correctly.

        # Arguments:
            warning_interval: Integer. Display warning messages every warning
                interval if some services are not stopped yet.
            rage_resign_threshold: Integer. Stop waiting for threads to stop
                after this interval. Might be useful if you require a high
                level of responsiveness. If -1, never give up on waiting.

        # Returns:
            True if all services are successfully stopped. False otherwise.
        """
        services = self.enlist_services()
        begin = time.time()
        warning_threshold = warning_interval
        for service in services:
            service.stop()
        while True:
            # Names of services whose worker thread is still alive.
            not_stopped = [
                class_name(service)
                for service in services
                if service._thread is not None
            ]
            if not not_stopped:
                return True
            diff = time.time() - begin
            if diff > warning_threshold:
                log.warning(
                    'The following services are not responding after '
                    '{:d} seconds: {}'.format(
                        int(diff), ', '.join(not_stopped)))
                # Fix: advance the threshold so the warning is emitted once
                # per interval, as documented, instead of on every 10 ms poll.
                # Also report the actual elapsed time, not the interval.
                warning_threshold += warning_interval
            if 0 <= rage_resign_threshold < diff:
                # Fix: corrected typo in the original message ("wairning").
                log.warning('Resign on waiting for services to stop.')
                return False
            time.sleep(0.01)

    def enlist_services(self):
        """Returns a list of attributes that are instances of `BaseService`.
        """
        services = []
        for attr_name in dir(self):
            attr = getattr(self, attr_name)
            if isinstance(attr, BaseService):
                services.append(attr)
        return services

    def _register(self, name):
        """Sets the manager of a service with given name to self.

        # Arguments:
            name: the key assigned to the service for current manager.

        # Raises:
            KeyError: if a service with given key was not found.
            ValueError: if the attribute under the key is not a BaseService
        """
        if not hasattr(self, name):
            raise KeyError('No service with name {}'.format(name))
        if not isinstance(getattr(self, name), BaseService):
            raise ValueError('The attribute "{}" is not a service'
                             .format(name))
        service = getattr(self, name)
        service.manager = self
|
import nuke
import pyblish.api
class RepairWriteResolutionDifference(pyblish.api.Action):
    """Pyblish repair action: ensure a Reformat node feeds each failed write."""

    label = "Repair"
    icon = "wrench"
    on = "failed"  # only offered after the validation has failed

    def process(self, context, plugin):
        """Insert (or reuse) a Reformat with resize "none" before each failed write."""
        # Get the errored instances
        failed = []
        for result in context.data["results"]:
            if (result["error"] is not None and result["instance"] is not None
                    and result["instance"] not in failed):
                failed.append(result["instance"])
        # Apply pyblish.logic to get the instances for the plug-in
        instances = pyblish.api.instances_by_plugin(failed, plugin)
        for instance in instances:
            reformat = instance[0].dependencies()[0]
            # Create a new Reformat only when the write's input isn't one.
            if reformat.Class() != "Reformat":
                reformat = nuke.nodes.Reformat(inputs=[instance[0].input(0)])
                # Position the node just above the write in the node graph.
                xpos = instance[0].xpos()
                ypos = instance[0].ypos() - 26
                dependent_ypos = instance[0].dependencies()[0].ypos()
                # Nudge sideways when there is no vertical room.
                if (instance[0].ypos() - dependent_ypos) <= 51:
                    xpos += 110
                reformat.setXYpos(xpos, ypos)
                instance[0].setInput(0, reformat)
            reformat["resize"].setValue("none")
class ValidateOutputResolution(pyblish.api.InstancePlugin):
    """Validates Output Resolution.

    It is making sure the resolution of write's input is the same as
    Format definition of script in Root node.
    """

    order = pyblish.api.ValidatorOrder
    optional = True
    families = ["render", "render.local", "render.farm"]
    label = "Write Resolution"
    hosts = ["nuke"]
    actions = [RepairWriteResolutionDifference]

    def process(self, instance):
        # Skip the check if a crop node exists; a crop legitimately changes
        # the output region.
        if instance[0].dependencies()[0].Class() == "Crop":
            return

        # NOTE(review): the message mentions the bounding box while the check
        # compares the node format to the root resolution — confirm wording.
        msg = "Bounding box is outside the format."
        assert self.check_resolution(instance), msg

    def check_resolution(self, instance):
        """Return True when the write node's format matches the root resolution.

        Fix: the original returned None (instead of False) on mismatch;
        callers only truth-test the result, so returning a real bool is
        backward compatible and clearer.
        """
        node = instance[0]
        expected = (instance.data["resolutionWidth"],
                    instance.data["resolutionHeight"])
        actual = (node.format().width(), node.format().height())
        return actual == expected
|
import csv
import pandas as pd
import random
from sklearn.metrics import classification_report
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import LabelEncoder
from sklearn.svm import SVC
from sklearn.naive_bayes import GaussianNB, BernoulliNB
from sklearn.linear_model import SGDClassifier, LogisticRegressionCV
from sklearn.ensemble import RandomForestClassifier
from sklearn.ensemble import GradientBoostingClassifier
from sklearn.ensemble import AdaBoostClassifier
from sklearn.ensemble import VotingClassifier
from sklearn.neural_network import MLPClassifier
# Kaggle Titanic data: labelled training set and unlabelled test set.
raw_data = pd.read_csv('./data/train.csv')
test_data = pd.read_csv('./data/test.csv')

##########################
# Data preprocessing
##########################

# Feature columns fed to every model (commented-out columns are unused).
used_features = [
    'Pclass',
    # 'Name',
    'Sex',
    'Age',
    'SibSp',
    'Parch',
    # 'Ticket',
    'Fare',
    # 'Cabin',
    'Embarked'
]

label_encoder = LabelEncoder()
for dataset in [raw_data, test_data]:
    # Impute missing values with the column median.
    dataset['Age'].fillna(dataset['Age'].median(), inplace = True)
    dataset['Fare'].fillna(dataset['Fare'].median(), inplace = True)
    # Remove remaining rows with nulls
    dataset.dropna(subset=['Embarked'], axis=0, inplace=True)
    # Encode features to discrete values.
    # NOTE(review): fit_transform is re-fit separately on train and test; if
    # the two ever contain different category sets the integer codes would
    # disagree — consider fitting on train only and transforming both.
    dataset['Sex'] = label_encoder.fit_transform(dataset['Sex'])
    dataset['Embarked'] = label_encoder.fit_transform(dataset['Embarked'])
    # TODO: infer new features?

# Split the labelled data into an 80/20 train/validation split.
raw_y = raw_data.Survived
raw_X = raw_data[used_features]
X_train, X_test, y_train, y_test = train_test_split(
    raw_X,
    raw_y,
    test_size=0.2,
    random_state=1
)
##########################
# Model definition
##########################
def generate_svm_models(n_models=4):
    '''
    Create and return ``n_models`` SVM classifiers whose hyper-parameters
    (C, kernel, gamma) are drawn at random.
    '''
    kernels = ['linear', 'poly', 'rbf', 'sigmoid']
    gammas = ['scale', 'auto']
    return [
        SVC(
            C=random.uniform(0.9, 1.7),
            kernel=random.choice(kernels),
            gamma=random.choice(gammas)
        )
        for _ in range(n_models)
    ]
# Display names paired by position with the estimators in `models` below
# (the last three titles belong to the randomly-configured SVMs).
model_titles = [
    'Random Forest',
    'Gradient Boost',
    'Ada Boost',
    'Multi-layer Perceptron',
    'Gaussian NB',
    'Bernoulli NB',
    'Logistic Regression',
    'SGD Classification',
    'SVM 1',
    'SVM 2',
    'SVM 3',
]
models = [
    RandomForestClassifier(
        n_estimators=100,
        random_state=1
    ),
    GradientBoostingClassifier(
        n_estimators=100,
        random_state=1
    ),
    AdaBoostClassifier(
        n_estimators=50,
        random_state=1
    ),
    MLPClassifier(
        max_iter=300
    ),
    GaussianNB(),
    BernoulliNB(),
    LogisticRegressionCV(),
    SGDClassifier(),
]
# Append three randomly-configured SVMs to complete the title list above.
for svc_model in generate_svm_models(3):
    models.append(svc_model)

# Voting ensemble over all classifiers above.
models_ensemble = VotingClassifier(
    estimators=[tuple(pair) for pair in zip(model_titles, models)]
)
models_ensemble.fit(X_train, y_train)

##########################
# Model evaluation
##########################

# Report performance on the held-out 20% validation split.
y_pred = models_ensemble.predict(X_test)
print('Ensemble Model')
print(classification_report(y_test, y_pred))
print()
##########################
# Submission
##########################

# Fix: the original wrote the *validation-split* predictions (y_pred from
# X_test) against PassengerIds taken from the separate test file — wrong rows
# and a mismatched length. Predict on the actual test set instead.
submission_pred = models_ensemble.predict(test_data[used_features])
# newline='' is the csv-module convention and avoids blank lines on Windows.
with open('submission.csv', 'w', newline='') as submission_file:
    writer = csv.writer(submission_file)
    writer.writerow(['PassengerId', 'Survived'])
    for passenger_id, prediction in zip(test_data['PassengerId'], submission_pred):
        writer.writerow([passenger_id, prediction])
|
from django.contrib import admin
from .models import CSVData, CSVFiles
# Register your models here.
# Expose the CSV models in the Django admin site.
for model in (CSVData, CSVFiles):
    admin.site.register(model)
|
import logging

import numpy as np
import sklearn.metrics as skm

import utils.file_manager as fm
from utils.logger import logger
RECALL_LEVEL = 0.95
def compute_metrics(
    in_scores: np.ndarray,
    out_scores: np.ndarray,
    recall_level: float = RECALL_LEVEL,
    fpr_only=False,
    print_thr=False,
):
    """Compute evaluation metrics for a binary detection problem. The label to be detected is `1`.

    Args:
        in_scores (np.ndarray): Score for the correctly predicted samples or in-distribution.
        out_scores (np.ndarray): Score for the wrongly predicted samples or out-of-distribution.
        recall_level (float): recall level for calculating FPR at given TPR.
        fpr_only (bool): if True, return only the FPR at the given TPR.
        print_thr (bool): if True, print the score threshold whose TPR is
            closest to ``recall_level``.

    Returns:
        Tuple[float, float, float, float]: FPR, AUROC, AUPR, DETECTION
        (or just the FPR when ``fpr_only`` is set).
    """
    pos = np.ones(len(in_scores))  # configured to detect in-distribution samples
    neg = np.zeros(len(out_scores))
    y_true = np.concatenate([pos, neg]).reshape(-1)
    y_pred = np.concatenate([in_scores, out_scores]).reshape(-1)
    fprs, tprs, thresholds = skm.roc_curve(y_true, y_pred, pos_label=1)  # fpr: s > thr
    fpr_at_tpr = compute_fpr_tpr(tprs, fprs, recall_level)
    # Threshold on the ROC curve whose TPR is closest to the recall level.
    index = np.argmin([abs(recall_level - f) for f in tprs])
    thr = thresholds[index]
    if print_thr:
        print(thr)
    if fpr_only:
        return fpr_at_tpr
    auroc = compute_auroc(tprs, fprs)
    aupr = compute_aupr(y_true, y_pred)
    detection = detection_error(in_scores, out_scores)
    return (fpr_at_tpr, auroc, aupr, detection)
def confusion_matrix(scores, corr_pred, threshold, verbose=True):
    """Confusion matrix for a score-threshold detector.

    Args:
        scores: detection scores, one per sample.
        corr_pred: binary ground-truth labels (1 = in-distribution / correct).
        threshold: scores strictly above this are predicted as label 1.
        verbose: when False, raise the log level to WARNING to silence the
            debug output below.

    Returns:
        Tuple (tn, fp, fn, tp).
    """
    # tn -> ood data that was classified as ood (GOOD)
    # fp -> ood data that was classified as in-distribution (BAD)
    # fn -> in-distribution data that was classified as ood (TUNABLE ~5%)
    # tp -> in-distribution data that was classified as in-distribution (GOOD)
    if not verbose:
        # Fix: WARNING is defined on the logging module, not on the logger
        # instance — the original `logger.WARNING` raised AttributeError.
        logger.setLevel(logging.WARNING)
    binary_detector = scores > threshold
    c_mat = skm.confusion_matrix(corr_pred, binary_detector)
    tn, fp, fn, tp = c_mat.ravel()
    logger.debug(f"Amount of one scores data: {sum(corr_pred)} / {len(corr_pred)}")
    logger.debug(f"Threshold: {threshold}")
    logger.debug(
        f"Amount of one scores data detected: {sum(binary_detector[corr_pred])} / {sum(corr_pred)}"
    )
    logger.debug(
        f"""Confusion matrix:
        label/pred C.1 C.0
        1: {tp}, {fn} | {tp + fn}
        0: {fp}, {tn} | {fp + tn}
        """
    )
    return tn, fp, fn, tp
def compute_fpr_tpr(tprs, fprs, recall_level):
    """Linearly interpolate the FPR at the requested TPR (recall) level."""
    return np.interp(recall_level, xp=tprs, fp=fprs)
def compute_auroc(tprs, fprs):
    """Area under the ROC curve via trapezoidal integration of TPR over FPR."""
    area = np.trapz(tprs, x=fprs)
    return area
def compute_aupr(y_true, y_pred):
    """Area under the precision-recall curve (average precision)."""
    score = skm.average_precision_score(y_true, y_pred)
    return score
def get_measures(_pos, _neg, recall_level=RECALL_LEVEL):
    """Validate score shapes and return (AUROC, AUPR, FPR@TPR, detection error).

    Note: the return order differs from compute_metrics, which yields
    (FPR, AUROC, AUPR, detection).
    """
    # Only 1-D score arrays (or single-column 2-D arrays) are accepted.
    if (len(_pos.shape) == 2 and _pos.shape[1] > 1) or (
        len(_neg.shape) == 2 and _neg.shape[1] > 1
    ):
        raise ValueError("Scores with wrong dimensions.")
    logger.debug(f"recall level {recall_level}")
    # Scores are rounded to 7 decimals — presumably to stabilise threshold
    # comparisons across runs; confirm before changing.
    pos = np.array(_pos).reshape((-1, 1)).round(decimals=7)
    neg = np.array(_neg).reshape((-1, 1)).round(decimals=7)
    fpr, auroc, aupr, detection = compute_metrics(pos, neg, recall_level)
    return auroc, aupr, fpr, detection
def detection_error(S1, S2):
    """Smallest mean of the S1-below-threshold rate and the S2-at-or-above
    threshold rate, over candidate thresholds drawn from S2 (plus one value
    above its maximum); capped at 1.0."""
    n_pos = float(len(S1))
    n_neg = float(len(S2))
    candidates = np.unique(S2)
    thresholds = list(candidates) + [candidates.max() + 1]
    rates = [
        (np.sum(S1 < thr) / n_pos + np.sum(S2 >= thr) / n_neg) / 2.0
        for thr in thresholds
    ]
    return np.minimum(1.0, min(rates))
def false_positive_rate(tn, fp, fn, tp):
    """Fraction of actual negatives that were flagged positive."""
    return fp / (fp + tn)


def false_negative_rate(tn, fp, fn, tp):
    """Fraction of actual positives that were missed."""
    return fn / (tp + fn)


def true_negative_rate(tn, fp, fn, tp):
    """Specificity / selectivity: fraction of negatives correctly rejected."""
    return tn / (fp + tn)


def precision(tn, fp, fn, tp):
    """Positive predictive value; 1e-6 in the denominator avoids div-by-zero."""
    return tp / (tp + fp + 1e-6)


def recall(tn, fp, fn, tp):
    """Sensitivity / hit rate: fraction of positives correctly detected."""
    return tp / (tp + fn)


def true_positive_rate(tn, fp, fn, tp):
    """Alias for recall."""
    return recall(tn, fp, fn, tp)


def negative_predictive_value(tn, fp, fn, tp):
    """Fraction of negative predictions that are actually negative."""
    return tn / (tn + fn)


def f1_score(tn, fp, fn, tp):
    """Harmonic mean of precision and recall."""
    return 2 * tp / (2 * tp + fp + fn)


def accuracy_score(tn, fp, fn, tp):
    """Fraction of all predictions that are correct."""
    return (tp + tn) / (tp + tn + fp + fn)


def error_score(tn, fp, fn, tp):
    """Complement of accuracy."""
    return 1 - accuracy_score(tn, fp, fn, tp)


def threat_score(tn, fp, fn, tp):
    """Critical success index: tp over all cells except true negatives."""
    return tp / (tp + fn + fp)
def print_metrics_and_info(
    s_in,
    s_out,
    nn_name="",
    in_dataset_name="",
    out_dataset_name="",
    method_name="",
    header=True,
    save_flag=False,
    verbose=2,
):
    """Print (and optionally save) OOD-detection metrics for in/out scores.

    Args:
        s_in / s_out: in-distribution and out-of-distribution score arrays.
        nn_name, in_dataset_name, out_dataset_name, method_name: labels for
            the report header/body.
        header: print the header lines before the metric lines.
        save_flag: also persist the report via utils.file_manager.
        verbose: truthy values print the report; 2 (the default) and False
            additionally silence debug logging.

    Returns:
        (fpr_at_tpr_in, fpr_at_tpr_out, detection, auroc, aupr_in, aupr_out)
    """
    # NOTE(review): verbosity 2 *and* False both silence debug logging here —
    # confirm that is the intended meaning of verbose=2.
    if verbose in [2, False]:
        # Fix: WARNING lives on the logging module, not on the logger
        # instance — the original `logger.WARNING` raised AttributeError.
        logger.setLevel(logging.WARNING)
    # "In" treats in-distribution as positive; negating both score arrays
    # flips the roles so the out-distribution becomes the positive class.
    auroc, aupr_in, fpr_at_tpr_in, detection = get_measures(s_in, s_out)
    auroc, aupr_out, fpr_at_tpr_out, detection = get_measures(-s_out, -s_in)
    header_lines = [
        "{:31}{:>22}".format("Neural network architecture:", nn_name),
        "{:31}{:>22}".format("In-distribution dataset:", in_dataset_name),
        "{:31}{:>22}".format("Out-of-distribution dataset:", out_dataset_name),
    ]
    output_lines = [
        "{:>34}{:>19}".format("Method:", method_name),
        "{:21}{:13.2f}%".format("FPR at TPR 95% (In):", fpr_at_tpr_in * 100),
        "{:21}{:13.2f}%".format("FPR at TPR 95% (Out):", fpr_at_tpr_out * 100),
        "{:21}{:13.2f}%".format("Detection error:", detection * 100),
        "{:21}{:13.2f}%".format("AUROC:", auroc * 100),
        "{:21}{:13.2f}%".format("AUPR (In):", aupr_in * 100),
        "{:21}{:13.2f}%".format("AUPR (Out):", aupr_out * 100),
    ]
    if verbose:
        if header:
            for line in header_lines:
                print(line)
        for line in output_lines:
            print(line)
    if save_flag:
        f = fm.make_evaluation_metrics_file(
            nn_name, out_dataset_name, fm.clean_title(method_name)
        )
        fm.write_evaluation_metrics_file(f, header_lines, output_lines)
        f.close()
    return fpr_at_tpr_in, fpr_at_tpr_out, detection, auroc, aupr_in, aupr_out
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.1 on 2017-06-19 11:53
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Create UserProfile and link it 1:1 to both a PhD and the auth user."""

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('academicPhylogeny', '0012_auto_20170617_2222'),
    ]

    operations = [
        migrations.CreateModel(
            name='UserProfile',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('current_position', models.CharField(blank=True, max_length=100, null=True)),
                ('current_affiliation', models.CharField(blank=True, max_length=100, null=True)),
                # New profiles start with 10 reputation points.
                ('reputation_points', models.IntegerField(default=10)),
            ],
        ),
        # One-to-one links; deleting the PhD or user cascades to the profile.
        migrations.AddField(
            model_name='userprofile',
            name='associated_PhD',
            field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='academicPhylogeny.PhD'),
        ),
        migrations.AddField(
            model_name='userprofile',
            name='user',
            field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
        ),
    ]
|
import argparse
import json
import os
import sqlite3
from io import BytesIO
from tempfile import NamedTemporaryFile
from zipfile import ZipFile
from PIL import Image
from pydub import AudioSegment
from tqdm import tqdm
# Extensions routed to the image compressor (Pillow).
IMAGE_EXT = ("jpg", "jpeg", "png", "tif", "tiff", "gif", "webp")
# Extensions routed to the audio compressor (pydub/ffmpeg).
AUDIO_EXT = (
    "wav",
    "mp3",
    "ogg",
    "flac",
    "mp4",
    "swf",
    "mov",
    "mpeg",
    "mkv",
    "m4a",
    "3gp",
    "spx",
    "oga",
)
def update_db(conn, cur, filename, ext):
    """Rewrite references to ``filename`` in the notes table to use ``ext``.

    Finds every note whose fields or sort field mention the file and replaces
    the old filename with one carrying the new extension, then commits.
    """
    stem = ".".join(filename.split(".")[:-1])
    new_filename = f"{stem}.{ext}"
    like_pattern = f"%{filename}%"
    cur.execute(
        "SELECT id, flds, sfld FROM notes WHERE flds LIKE ? OR sfld LIKE ?",
        (like_pattern, like_pattern),
    )
    for note_id, flds, sfld in cur.fetchall():
        cur.execute(
            "UPDATE notes SET flds = ?, sfld = ? WHERE id = ?",
            (
                str(flds).replace(filename, new_filename),
                str(sfld).replace(filename, new_filename),
                note_id,
            ),
        )
    conn.commit()
def compress_image(ext, image_bytes, quality=50):
    """Re-encode image bytes into format ``ext`` at the given quality.

    Returns the re-encoded bytes, or None when Pillow cannot decode or
    encode the data (the caller then keeps the original bytes).
    """
    # PIL does not recognize .tif extension
    if ext == "tif":
        ext = "tiff"
    img_buf = BytesIO()
    img_buf.write(image_bytes)
    img_buf.seek(0)
    try:
        im = Image.open(img_buf)
        output_buf = BytesIO()
        # RGB conversion drops alpha/palette so any target format is accepted.
        im.convert("RGB").save(output_buf, format=ext, optimize=True, quality=quality)
        return output_buf.getvalue()
    except Exception:
        # Implicitly returns None on any decode/encode failure.
        pass
def compress_audio(ext, audio_bytes, bitrate="48k"):
    """Transcode ``audio_bytes`` (format ``ext``) to Ogg Vorbis at ``bitrate``.

    Returns the compressed bytes, or None when transcoding fails (the caller
    then keeps the original bytes).
    """
    # pydub/ffmpeg work on files, so round-trip through two temp files.
    in_tmp = NamedTemporaryFile(delete=False)
    out_tmp = NamedTemporaryFile(delete=False)
    in_tmp.write(audio_bytes)
    in_tmp.close()
    out_tmp.close()
    try:
        segment = AudioSegment.from_file(in_tmp.name, ext)
        segment.export(out_tmp.name, format="ogg", bitrate=bitrate)
        with open(out_tmp.name, "rb") as f:
            return f.read()
    except Exception:
        # Signal the caller to keep the original bytes.
        return None
    finally:
        # Fix: always delete the temp files — the original only removed them
        # on success, leaking both whenever pydub/ffmpeg raised.
        for path in (in_tmp.name, out_tmp.name):
            try:
                os.remove(path)
            except OSError:
                pass
def main():
    """CLI entry point: rewrite an Anki .apkg with compressed media.

    Opens the input archive, compresses images (to --image_type at --quality)
    and audio (to Ogg at --bitrate), rewrites media references inside the
    collection database, and writes everything into the output archive.
    """
    parser = argparse.ArgumentParser(description="Compress Anki .apkg file size")
    parser.add_argument(
        "-i",
        "--input",
        dest="input",
        required=True,
        help="Input .apkg file to compress",
    )
    parser.add_argument(
        "-o",
        "--output",
        dest="output",
        required=False,
        help="Output file to write, defaults to MIN_<INPUT>",
    )
    parser.add_argument(
        "-q",
        "--quality",
        dest="quality",
        default=50,
        type=int,
        help="Quality value for image compression (0-100), defaults to 50",
    )
    parser.add_argument(
        "-b",
        "--bitrate",
        dest="bitrate",
        default="48k",
        help="ffmpeg-compliant bitrate value for audio compression, defaults to 48k",
    )
    parser.add_argument(
        "-t",
        "--image_type",
        dest="image_type",
        default="jpeg",
        choices=IMAGE_EXT,
        help="Filetype for image compression, defaults to jpeg",
    )
    args = parser.parse_args()
    # Default the output name to MIN_<input> alongside the input file.
    output_file = args.output
    if output_file is None:
        output_file = os.path.join(
            os.path.dirname(args.input), "MIN_{}".format(os.path.basename(args.input))
        )
    if args.input == output_file:
        raise ValueError("Output file cannot have the same name as input")
    anki_zip = ZipFile(args.input)
    if "media" not in anki_zip.namelist():
        raise ValueError("{} does not contain a media file".format(args.input))
    # Create new zip, temp file for SQLite database
    compressed_zip = ZipFile(output_file, "w")
    db_tmp = NamedTemporaryFile(delete=False)
    db_tmp.write(anki_zip.read("collection.anki2"))
    db_tmp.seek(0)
    db_tmp.close()
    conn = sqlite3.connect(db_tmp.name)
    cur = conn.cursor()
    # Read media JSON, create new dict for updates
    media_json = json.loads(anki_zip.read("media").decode("utf-8"))
    media = {}
    for k, v in tqdm(media_json.items()):
        # Entries without an extension are copied through untouched.
        if len(v.split(".")) < 2:
            compressed_zip.writestr(k, anki_zip.read(k))
            continue
        ext = v.split(".")[-1].lower()
        contents = None
        if ext in IMAGE_EXT:
            contents = compress_image(
                args.image_type, anki_zip.read(k), quality=args.quality
            )
            if contents is not None:
                # Point the notes at the re-encoded filename.
                update_db(conn, cur, v, args.image_type)
                v = ".".join([".".join(v.split(".")[:-1]), args.image_type])
        elif ext in AUDIO_EXT:
            contents = compress_audio(ext, anki_zip.read(k), bitrate=args.bitrate)
            if contents is not None:
                update_db(conn, cur, v, "ogg")
                v = ".".join([".".join(v.split(".")[:-1]), "ogg"])
        if contents is None:
            # Compression failed or unknown type: keep the original bytes.
            contents = anki_zip.read(k)
        media[k] = v
        compressed_zip.writestr(k, contents)
    compressed_zip.writestr("media", json.dumps(media))
    conn.close()
    # The database was updated on disk; copy it into the new archive.
    with open(db_tmp.name, "rb") as db_file:
        compressed_zip.writestr("collection.anki2", db_file.read())
    os.remove(db_file.name)
    compressed_zip.close()
if __name__ == "__main__":
main()
|
from otree.api import (
models, widgets, BaseConstants, BaseSubsession, BaseGroup, BasePlayer,
Currency as c, currency_range
)
import random
doc = """
2 firms complete in a market by setting prices for homogenous goods.
See "Kruse, J. B., Rassenti, S., Reynolds, S. S., & Smith, V. L. (1994).
Bertrand-Edgeworth competition in experimental markets.
Econometrica: Journal of the Econometric Society, 343-371."
"""
class Constants(BaseConstants):
    # Two sellers per market (duopoly).
    players_per_group = 2
    name_in_url = 'bertrand'
    num_rounds = 1
    instructions_template = 'bertrand/Instructions.html'
    # Upper bound for the price a player may post.
    maximum_price = c(100)
class Subsession(BaseSubsession):
    # No subsession-level behaviour needed for this game.
    pass
class Group(BaseGroup):
    # Lowest price posted in the group — the market-clearing price.
    winning_price = models.CurrencyField()

    def set_payoffs(self):
        """Award the sale to the lowest-priced player, ties broken at random."""
        players = self.get_players()
        self.winning_price = min([p.price for p in players])
        winners = [p for p in players if p.price == self.winning_price]
        winner = random.choice(winners)  # uniform tie-break among lowest prices
        for p in players:
            if p == winner:
                p.is_winner = True
                p.payoff = p.price  # the winner earns exactly the price posted
            else:
                p.is_winner = False
                p.payoff = c(0)  # losers sell nothing
class Player(BasePlayer):
    # Posted ask price, constrained to [0, maximum_price].
    price = models.CurrencyField(
        min=0, max=Constants.maximum_price,
        doc="""Price player offers to sell product for"""
    )
    # True for the player whose offer won (after any tie-break).
    is_winner = models.BooleanField()
|
"""Montando o caso de uso GetUser"""
from mitmirror.infra.repository import UserRepository
from mitmirror.data.users import GetUser
from mitmirror.presenters.controllers.users import GetUserController
def get_user_composer():
    """Montagem do caso de uso GetUser.

    Wires repository -> use case -> controller and returns the controller.
    """
    return GetUserController(GetUser(UserRepository()))
|
#!/usr/bin/env python
"""Read in a small obo, print list of GO terms and plot."""
__copyright__ = "Copyright (C) 2016-2017, DV Klopfenstein, H Tang, All rights reserved."
__author__ = "DV Klopfenstein"
import os
import sys
from goatools.obo_parser import GODag
from goatools.gosubdag.gosubdag import GoSubDag
from goatools.gosubdag.plot.gosubdag_plot import GoSubDagPlot
def test_rpt(prt=sys.stdout):
    """Read in a small obo, print list of GO terms and plot."""
    png = "i86_godag.png"  # plot is written to the current directory
    gosubdag = _get_gosubdag()
    _prt_goids(gosubdag, prt)
    goobjplt = _get_goobjplt(gosubdag)
    goobjplt.plt_dag(png)
def _get_goobjplt(gosubdag):
    """STEP 3) Get a plotting object."""
    # Plot only the sub-DAG reachable from these two source GO terms.
    go_sources = set(["GO:0036476", "GO:0007516"])
    gopltdag = GoSubDag(go_sources, gosubdag.go2obj)
    return GoSubDagPlot(gopltdag)
def _prt_goids(gosubdag, prt):
    """STEP 2) Print a list of GO IDs in the GoSubDag."""
    # def instead of a lambda assignment (PEP 8 E731).
    # Sort key: namespace, then descendant count (descending), then depth.
    def sortby(nt):
        return [nt.NS, -1 * nt.dcnt, nt.depth]
    gosubdag.prt_goids(gosubdag.go_sources, sortby=sortby, prt=prt)
def _get_gosubdag():
    """STEP 1) Get GoSubDag containing a small test obo."""
    repo_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "../")
    obo_file = os.path.join(repo_dir, "data/i86.obo")
    dag = GODag(obo_file)
    return GoSubDag(None, dag)
if __name__ == '__main__':
test_rpt()
# Copyright (C) 2016-2017, DV Klopfenstein, H Tang, All rights reserved.
|
import sys
import threading
# import concurrent.futures as confu
import platform
class MyThread(threading.Thread):
    """Daemon thread running *target*, stoppable cooperatively via an Event.

    BUG FIX: the original overrode ``start()`` and invoked the callback
    directly, which ran it synchronously on the caller's thread and never
    spawned an OS thread.  Overriding ``run()`` instead lets the inherited
    ``Thread.start()`` create the thread and execute the callback there.
    """
    callback = None

    def __init__(self, target):
        super(MyThread, self).__init__()
        self.callback = target
        # Cooperative stop flag; the callback may poll self.stop_event.
        self.stop_event = threading.Event()
        # The 'daemon' property replaces the deprecated setDaemon().
        self.daemon = True

    def stop(self):
        """Signal the callback to stop (cooperative; does not kill the thread)."""
        self.stop_event.set()

    def run(self):
        # Executed on the new thread by Thread.start().
        self.callback()
class StrFormatter:
    """Render colored terminal messages via ANSI escape sequences."""

    # ANSI escape sequences; valid as-is for macOS and Unix terminals.
    terminalColors = {
        "red": "\u001b[31m",
        "yellow": "\u001b[33m",
        "clear": "\u001b[0m"
    }

    def start(self):
        '''
        Enable ANSI handling on Windows terminals.

        Kept out of __init__ so the Windows-only colorama import and init()
        run once when explicitly requested, not on every instantiation.
        '''
        os = platform.platform(terse=True)
        if "Windows" in os:
            # Mac and Unix may not have colorama installed, so import lazily
            # inside the Windows branch; init() makes the terminal honor ANSI.
            from colorama import init
            init()

    def get_colored_console_log(self, color, message):
        '''
        Show colored message like error or warning in terminal.
        Args:
            color (str): "red" or "yellow"
            message (str): Alert message
        Returns:
            (str): Colored text for terminal
        '''
        # Idiom fix: `color not in ...` instead of `not color in ...`.
        if color not in self.terminalColors:
            # NOTE(review): exiting the process on a bad argument is drastic;
            # kept for backward compatibility with existing callers.
            print("{0}Error: Invalid in Arg 'color'.\nYou can select from 'yellow' or 'red'.{1}".format(self.terminalColors["red"], self.terminalColors["clear"]))
            sys.exit()
        return "{0}{1}{2}".format(self.terminalColors[color], message, self.terminalColors["clear"])
|
import os
import gc
import torch
import torch.nn.functional as F
from torch_geometric.data import HeteroData
from torch_geometric.datasets import DBLP
import torch_geometric.transforms as T
from torch_geometric.nn import Linear
from models import HGTConv
from utils import get_device
def get_hetero_data():
    """Load DBLP and repackage it as a 2-node-type HeteroData graph.

    Keeps only 'author' nodes (with labels and train/val/test splits) and
    'paper' nodes, plus the paper->author edges; ToUndirected then adds the
    reverse edge type so message passing is bidirectional.
    """
    path = os.path.join(os.getcwd(), 'data/DBLP')
    dataset = DBLP(path)  # downloads to ./data/DBLP on first use
    data = dataset[0]
    hetero_data = HeteroData()
    hetero_data['author'].x = data['author'].x
    hetero_data['author'].y = data['author'].y
    # torch.full((data['author'].x.shape[0], ), True)
    hetero_data['author'].train_mask = data['author']['train_mask']
    hetero_data['author'].val_mask = data['author']['val_mask']
    hetero_data['author'].test_mask = data['author']['test_mask']
    hetero_data['paper'].x = data['paper'].x
    # Papers carry no labels; mark all of them "train" so none are masked out.
    hetero_data['paper'].train_mask = torch.full((data['paper'].x.shape[0], ), True)
    hetero_data['paper', 'to', 'author'].edge_index = data['paper', 'to', 'author']['edge_index']
    hetero_data = T.ToUndirected()(hetero_data)
    return hetero_data
# Materialize the dataset once at import time; used by train()/test() below.
hetero_data = get_hetero_data()
class HGT(torch.nn.Module):
    """Heterogeneous Graph Transformer: per-node-type input projection
    followed by a stack of HGTConv layers."""
    def __init__(
        self,
        hetero_data,
        hidden_channels,
        out_channels,  # NOTE(review): unused — no output head is built here; verify against caller
        num_heads,
        num_layers
    ):
        super().__init__()
        # One lazily-sized (in_channels=-1) Linear per node type.
        self.lin_dict = torch.nn.ModuleDict()
        for node_type in hetero_data.node_types:
            self.lin_dict[node_type] = Linear(-1, hidden_channels)
        self.convs = torch.nn.ModuleList()
        for _ in range(num_layers):
            conv = HGTConv(
                in_channels=hidden_channels,
                out_channels=hidden_channels,
                metadata=hetero_data.metadata(),
                heads=num_heads,
                group='sum')
            self.convs.append(conv)
    def forward(self, x_dict, edge_index_dict):
        """Project each node type, run the conv stack, return the embedding dict.

        Note: mutates the caller's x_dict in place (and relu_ is in-place too).
        """
        for node_type, x in x_dict.items():
            x_dict[node_type] = self.lin_dict[node_type](x).relu_()
        for conv in self.convs:
            x_dict = conv(x_dict, edge_index_dict)
        output = x_dict
        return output
# Build the model and move both model and data to the selected device.
device = get_device()
model = HGT(hetero_data=hetero_data, hidden_channels=64, out_channels=4, num_heads=2, num_layers=1)
device = torch.device(device)
hetero_data, model = hetero_data.to(device), model.to(device)
# Initialize lazy modules (the Linear(-1, ...) layers) with one dry-run forward pass.
with torch.no_grad():
    out = model(hetero_data.x_dict, hetero_data.edge_index_dict)
print(f"initialized HGT model with {sum([p.numel() for p in model.parameters()])} params")
optimizer = torch.optim.Adam(model.parameters(), lr=0.005, weight_decay=0.001)
def train():
    """Run one full-batch optimization step; return the training loss as float.

    Uses the module-level `model`, `optimizer` and `hetero_data`.
    """
    model.train()
    optimizer.zero_grad()
    out = model(hetero_data.x_dict, hetero_data.edge_index_dict)
    out = out['author']
    mask = hetero_data['author'].train_mask
    loss = F.cross_entropy(out[mask], hetero_data['author'].y[mask])
    loss.backward()
    optimizer.step()
    return float(loss)
@torch.no_grad()
def test():
    """Return [train_acc, val_acc, test_acc] for the author nodes."""
    model.eval()
    pred = model(hetero_data.x_dict, hetero_data.edge_index_dict)
    pred = pred['author']
    pred = pred.argmax(dim=-1)
    accs = []
    for split in ['train_mask', 'val_mask', 'test_mask']:
        mask = hetero_data['author'][split]
        # Fraction of correctly classified nodes within this split's mask.
        acc = (pred[mask] == hetero_data['author'].y[mask]).sum() / mask.sum()
        accs.append(float(acc))
    return accs
# Training loop: 100 full-batch epochs with per-epoch evaluation.
for epoch in range(1, 101):
    loss = train()
    train_acc, val_acc, test_acc = test()
    print(f'Epoch: {epoch:03d}, Loss: {loss:.4f}, Train: {train_acc:.4f}, '
          f'Val: {val_acc:.4f}, Test: {test_acc:.4f}')
# Inspect the learned skip-connection weights of the first conv layer.
params = {n: p for n, p in model.named_parameters()}
print(params['convs.0.skip.author'].detach().cpu().numpy()[0],
      params['convs.0.skip.paper'].detach().cpu().numpy()[0])
|
import torch
import torch.utils.data
import torchvision
from absl import logging
# Install the library below (pytorch-quantization, from NVIDIA's TensorRT tools).
from pytorch_quantization import nn as quant_nn
logging.set_verbosity(logging.FATAL)  # Disable logging as they are too noisy in notebook
from pytorch_quantization import quant_modules
# Call quant_modules.initialize() once to monkey-patch standard layers with
# quantized versions, then train exactly as usual ...
quant_modules.initialize()
model = torchvision.models.resnet50()
model.cuda()
# Quantization Aware Training is based on Straight Through Estimator (STE) derivative approximation.
# It is some time known as “quantization aware training”.
# We don’t use the name because it doesn’t reflect the underneath assumption.
# If anything, it makes training being “unaware” of quantization because of the STE approximation.
# After calibration is done, Quantization Aware Training is simply select a training schedule and continue training the calibrated model.
# Usually, it doesn’t need to fine tune very long. We usually use around 10% of the original training schedule,
# starting at 1% of the initial training learning rate,
# and a cosine annealing learning rate schedule that follows the decreasing half of a cosine period,
# down to 1% of the initial fine tuning learning rate (0.01% of the initial training learning rate).
# Quantization Aware Training (Essentially a discrete numerical optimization problem) is not a solved problem mathematically.
# Based on our experience, here are some recommendations:
# For STE approximation to work well, it is better to use small learning rate.
# Large learning rate is more likely to enlarge the variance introduced by STE approximation and destroy the trained network.
# Do not change quantization representation (scale) during training, at least not too frequently.
# Changing scale every step, it is effectively like changing data format (e8m7, e5m10, e3m4, et.al) every step,
# which will easily affect convergence.
# https://github.com/NVIDIA/TensorRT/blob/main/tools/pytorch-quantization/examples/finetune_quant_resnet50.ipynb
def export_onnx(model, onnx_filename, batch_onnx):
    """Export a fake-quantized model to ONNX for a fixed batch size.

    Assumes the model lives on CUDA and takes 224x224 RGB input; returns True.
    """
    model.eval()
    quant_nn.TensorQuantizer.use_fb_fake_quant = True  # We have to shift to pytorch's fake quant ops before exporting the model to ONNX
    opset_version = 13
    # Export ONNX for multiple batch sizes
    print("Creating ONNX file: " + onnx_filename)
    dummy_input = torch.randn(batch_onnx, 3, 224, 224, device='cuda') #TODO: switch input dims by model
    # NOTE(review): `enable_onnx_checker` was removed from torch.onnx.export in
    # newer PyTorch releases — confirm the pinned torch version still accepts it.
    torch.onnx.export(model, dummy_input, onnx_filename, verbose=False, opset_version=opset_version, enable_onnx_checker=False, do_constant_folding=True)
    return True
|
import unittest
from rl.episode import Episode
class EpisodeTest(unittest.TestCase):
    """Input-validation tests for rl.episode.Episode's constructor."""
    def test_validation_none_rejected(self):
        # None is not an acceptable substitute for a list of tuples.
        with self.assertRaises(AssertionError):
            Episode(experience_tuples=None)
    def test_validation_input_len_greater_equal_one(self):
        expected_regex = "Input 'experience_tuples' should have at least one " \
                         "element."
        with self.assertRaisesRegex(AssertionError, expected_regex):
            Episode(experience_tuples=[])
    def test_validation_input_are_not_experience_tuples(self):
        # Elements must be ExperienceTuple instances, not arbitrary values.
        expected_regex = "Input 'experience_tuples' should contain only " \
                         "ExperienceTuple, but contains: '1'."
        with self.assertRaisesRegex(AssertionError, expected_regex):
            Episode(experience_tuples=[1, 2, 3])
|
"""
==== joystick input ====
x = self.JoystickX
y = self.JoystickY
forward = self.A
jump = self.RightBumper
use_item = self.C_left
"""
from inputs import get_gamepad
import mss, time, math, threading
def _monitor_controller():
    """Poll the gamepad forever, printing each event's code and state."""
    while True:
        # get_gamepad() blocks until at least one event is available.
        events = get_gamepad()
        for event in events:
            print(event.code, event.state)
if __name__ == '__main__':
_monitor_controller() |
import argparse
import time
import logging
from urllib.parse import quote
import numpy as np
from bs4 import BeautifulSoup
from src.db import DB
from src.http import req
from src.model import tag_model, book_model
log = logging.getLogger(__name__)
db = DB()
def _get_book_links_from_tag_by_default(tag_data):
    # "t" selects douban's default sort order for the tag listing.
    _get_book_links_from_tag(tag_data, "t")
def _get_book_links_from_tag_by_publish_date(tag_data):
    # "r" selects the publish-date sort order.
    _get_book_links_from_tag(tag_data, "r")
def _get_book_links_from_tag_by_rate(tag_data):
    # "s" selects the rating sort order.
    _get_book_links_from_tag(tag_data, "s")
def _get_book_links_from_tag(tag_data, type_value):
    """Crawl book detail-page links for one tag under one sort order.

    Starts at tag_data['current_page_<type_value>'] and pages forward until
    the listing is exhausted or fetching fails max_attempts times.  Progress
    (and the exhausted flag) is persisted back to the tags table per page.

    Args:
        tag_data: dict row for the tag (name + per-sort crawl progress).
        type_value: douban sort key, one of "t" / "r" / "s".
    """
    tag = tag_data['name']
    page = tag_data[f'current_page_{type_value}']
    page_size = 20
    base_url = f"https://book.douban.com/tag/{quote(tag)}"
    attempts = 0
    max_attempts = 3
    while True:
        log.info(f"attempting on page {page} for tag {tag}")
        # Random politeness delay (0-5s) between requests.
        time.sleep(np.random.rand() * 5)
        url = f"{base_url}?start={page * page_size}&type={type_value.upper()}"
        source = None
        try:
            source, _ = req(url)
        except Exception as err:
            log.error(err)
            attempts += 1
        if source is None:
            if attempts < max_attempts:
                log.warning(f"failed to fetch page {page} for tag {tag} {type_value.upper()}, will retry")
                continue
            else:
                log.warning(f"failed to fetch page {page} for tag {tag} {type_value.upper()}, exhausted and abort")
                break
        soup = BeautifulSoup(source, "html.parser")
        book_list = soup.select("ul.subject-list > .subject-item")
        # NOTE(review): select() returns a (possibly empty) list, never None, so
        # the `is None` retry branch is effectively dead; the len <= 1 test below
        # is what actually ends the crawl. Kept as-is to preserve behavior.
        if book_list is None and attempts < max_attempts:
            log.warning(f"no books on page {page}, will retry")
            continue
        elif book_list is None or len(book_list) <= 1:
            log.warning(f"no books on page {page}, exhausted and abort")
            try:
                tag_data[f'current_page_{type_value}'] = page
                tag_data[f'exhausted_{type_value}'] = True
                db.update_tags([tag_model(tag_data)])
            except Exception as err:
                db.rollback()
                log.error(f"failed to update tag {tag} {type_value.upper()} to exhausted")
                log.error(err)
            break
        book_urls = list(map(lambda book_el: book_el.select('h2 > a')[0].get('href'), book_list))
        book_data = list(map(lambda link: book_model({'origin_url': link}), book_urls))
        try:
            db.insert_books(book_data)
            log.info(f"saved {len(book_data)} books for tag {tag} {type_value.upper()} on page {page}")
        except Exception as err:
            db.rollback()
            log.error(f"failed to save book links for tag {tag} {type_value.upper()} on page {page}")
            log.error(err)
        page += 1
        # Persist progress so a crash resumes from the next page.
        try:
            tag_data[f'current_page_{type_value}'] = page
            db.update_tags([tag_model(tag_data)])
        except Exception as err:
            db.rollback()
            log.error(f"failed to update tag {tag} {type_value.upper()}")
            log.error(err)
def _start():
    """Crawl every known tag under each of the three sort orders."""
    tags = db.get_tags()
    log.info(f"read {len(tags)} tags")
    crawlers = [
        ("default", _get_book_links_from_tag_by_default),
        ("publish date", _get_book_links_from_tag_by_publish_date),
        ("rate", _get_book_links_from_tag_by_rate),
    ]
    for tag in tags:
        for label, crawl in crawlers:
            log.info(f"getting links from tag {tag['name']} by {label} sorting...")
            crawl(tag)
    log.info("all tags exhausted")
def main(raw_args=None):
    """CLI entry point: parse arguments (none beyond --help), then crawl."""
    description = (
        "Crawl book links from douban.com from the given tags. "
        "You can generate the tags with 'get_tags' script."
    )
    parser = argparse.ArgumentParser(description=description)
    parser.parse_args(raw_args)
    _start()
if __name__ == "__main__":
main()
|
##########################################
###### This file was autogenerated. ######
######### DO NOT EDIT this file. #########
##########################################
### file to edit: dev_nb/imflash217__00_exports.ipynb ####
TEST = "test" |
from soluzione_it2.cliente import Cliente
from soluzione_it2.banca import Banca
from soluzione_it2.conto import Conto
# TEST CODE. Answer the questions written in the comments. #
cliente1 = Cliente('Davide', '3924663077')
cliente2 = Cliente('Simona', '3335688985')
cliente3 = Cliente('Marco', '3335688285')
banca_san_paolo = Banca('Banca San Paolo')
account = Conto('00001',cliente1)
# For these prints we expect the output produced by each class's __repr__ method.
print(account)
print(banca_san_paolo)
print(cliente1)
# ANSWER THE QUESTIONS WRITTEN IN THE COMMENTS *
#1. Comment out the @property-decorated method for the nome_cliente attribute
#   in the Cliente class.
# EXPECTED OUTPUT:
print( cliente1.nome_cliente )
#2. Uncomment the code from point #1. Now change the name of the
#   @property-decorated method for the nome_cliente attribute.
# EXPECTED OUTPUT:
print(account.cliente.nome_cliente)
#3. Comment out the setter-decorated method for the nome_cliente attribute
#   in the Cliente class.
# EXPECTED OUTPUT:
cliente1.nome_cliente = 'Giovanni'
print(account.cliente.nome_cliente)
# Deleting tuples
my_tuple = ('p', 'r', 'o', 'g', 'r', 'a', 'm', 'i', 'z')
# Tuples are immutable, so single items can't be deleted:
# TypeError: 'tuple' object doesn't support item deletion
# del my_tuple[3]
# Must delete an entire tuple
del my_tuple
# The name no longer exists, so the next line raises (intentionally):
# NameError: name 'my_tuple' is not defined
print(my_tuple)
|
# Generated by Django 2.0.5 on 2018-09-23 06:37
from django.db import migrations, models
import jumpstart.models
import jumpstart.validators
class Migration(migrations.Migration):
    # Auto-generated: adds three optional per-group challenge score fields and
    # attaches upload-path/extension validation to the charity shop photo field.
    dependencies = [
        ('jumpstart', '0006_auto_20180922_0031'),
    ]
    operations = [
        migrations.AddField(
            model_name='group',
            name='coding_challenge_score',
            field=models.IntegerField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='group',
            name='mitre_challenge_score',
            field=models.IntegerField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='group',
            name='stags_quiz_score',
            field=models.IntegerField(blank=True, null=True),
        ),
        migrations.AlterField(
            model_name='group',
            name='charity_shop_challenge_photo',
            field=models.ImageField(blank=True, null=True, upload_to=jumpstart.models.charity_shop_challenge_photo_file_name, validators=[jumpstart.validators.validate_photo_file_extension], verbose_name='Charity Shop Challenge photo'),
        ),
    ]
|
__author__ = "Jerry Overton"
__copyright__ = "Copyright (C) 2022 appliedAIstudio"
__version__ = "0.1"
from highcliff_sdk.laundry.laundry import LaundryScheduler, LaundryServiceAuthorization, \
LaundryService, ConfirmLaundryMaintained
|
#! /usr/bin/env python
import argparse
import gzip
import httplib
import urllib
import sys
class AppError(Exception):
    """Fatal application-level failure; reported with its class name and exit 1."""
    pass
class CompileError(Exception):
    """Raised when the Closure service reports errors/warnings for the input JS."""
    pass
# Maps the CLI choice -> Closure Compiler service 'compilation_level' value.
COMPILATION_LEVEL = {
    'whitespace': 'WHITESPACE_ONLY',
    'simple': 'SIMPLE_OPTIMIZATIONS',
    'advanced': 'ADVANCED_OPTIMIZATIONS',
}
# Maps the CLI choice -> service 'output_info' value.
OUTPUT_INFO = {
    'code': 'compiled_code',
    'warn': 'warnings',
    'errors': 'errors',
    'stats': 'statistics',
}
# Maps the CLI choice -> ECMAScript language identifier.
LANGUAGE = {
    'ecma3': 'ECMASCRIPT3',
    'ecma5': 'ECMASCRIPT5',
    'ecma6': 'ECMASCRIPT6',
}
def entrypoint():
    """Top-level CLI driver: parse args, run, and map failures to exit codes."""
    try:
        args = parse_command_line()
        _process_input(args, args.input)
    except KeyboardInterrupt:
        # Ctrl-C is a clean exit, not an error.
        sys.exit(0)
    except CompileError as ex:
        # Compiler output is already user-facing text; print it verbatim.
        sys.stderr.write(str(ex))
        sys.exit(1)
    except AppError as ex:
        message = '{}: {}\n'.format(ex.__class__.__name__, ex)
        sys.stderr.write(message)
        sys.stderr.flush()
        sys.exit(1)
def _process_input(args, files):
    """Concatenate the input JS files, compile via the Closure service, emit output.

    Raises CompileError when the service reports errors/warnings, or when
    compilation produced empty code.
    """
    # read input files, dropping ';;;'-prefixed directive lines
    tmp = []
    for f in files:
        for line in f:
            # ignore lines starting with ;;;
            if not line.strip().startswith(';;;'):
                tmp.append(line)
    js_code = ''.join(tmp)
    # gzip type is text for Closure Compiler service
    if args.output_format == 'gzip':
        output_format = 'text'
    else:
        output_format = args.output_format
    # query closure compiler service
    js_output = _query(
        js_code,
        LANGUAGE[args.language],
        output_format,
        OUTPUT_INFO[args.output_info],
        COMPILATION_LEVEL[args.comp_level],
        args.pretty,
    )
    # BUG FIX: the CLI choice is 'warn' (see OUTPUT_INFO keys), not 'warnings';
    # the old ('errors', 'warnings') check never matched for -o warn, so
    # warnings were silently printed instead of raised as CompileError.
    if args.output_info in ('errors', 'warn'):
        if len(js_output.strip()) > 0:
            raise CompileError(js_output)
        else:
            return
    # if compiled code is empty..
    if args.output_info == 'code' and len(js_output.strip()) == 0:
        # re-run querying for errors
        errors = _query(
            js_code,
            LANGUAGE[args.language],
            'text',
            'errors',
            COMPILATION_LEVEL[args.comp_level]
        )
        raise CompileError(errors)
    # compress gzip output
    if args.output_format == 'gzip':
        with gzip.open('output.js.gz', 'wb') as f:
            f.write(js_output)
        # Parenthesized print: valid in both Python 2 and 3.
        print('Wrote output.js.gz')
    else:
        sys.stdout.write(js_output)
def _query(js_code, lang, output_format, output_info, comp_level, pretty=False):
    """POST one compile request to the Closure Compiler web service.

    Returns the raw response body (compiled code, warnings, errors or stats,
    depending on output_info).

    NOTE(review): uses the Python 2-only httplib/urllib.urlencode API.
    """
    # build POST params from args
    params = [
        ('js_code', js_code),
        ('language', lang),
        ('output_format', output_format),
        ('output_info', output_info),
        ('compilation_level', comp_level),
    ]
    if pretty:
        params.append(('formatting', 'pretty_print'))
    # connect to Closure service
    conn = httplib.HTTPConnection('closure-compiler.appspot.com')
    conn.request(
        'POST',
        '/compile',
        urllib.urlencode(params),
        {'Content-type': 'application/x-www-form-urlencoded'}
    )
    response = conn.getresponse()
    js_output = response.read()
    conn.close()
    return js_output
def parse_command_line():
    """Build the argparse CLI and return the parsed arguments namespace."""
    parser = argparse.ArgumentParser(
        description="CLI for Google's Closure Compiler service"
    )
    parser.add_argument(
        'input', type=argparse.FileType('r'), nargs='+',
        help='File path or URL containing JS to process, also accepts STDIN')
    parser.add_argument(
        '-c', '--comp-level', choices=COMPILATION_LEVEL.keys(), default='simple',
        help='Compilation level: whitespace, simple or advanced. Defaults to simple.')
    parser.add_argument(
        '-o', '--output-info', choices=OUTPUT_INFO.keys(), default='code',
        help='Output info: code, warnings, errors or statistics.')
    parser.add_argument(
        '-f', '--output-format', choices=['text','gzip','xml','json'], default='text',
        help='Output format: text, gzip, XML or JSON. Defaults to text.')
    parser.add_argument(
        '-l', '--language', choices=LANGUAGE.keys(), default='ecma5',
        help='Language: ECMAScript language version to target')
    parser.add_argument(
        '-p', '--pretty', action='store_true',
        help='Pretty formatting for output javascript')
    # TODO output_file_name
    return parser.parse_args()
if __name__ == '__main__':
entrypoint()
|
import pygama.lh5 as lh5
def test_array():
    """Smoke-test lh5.Array construction and its datatype name."""
    # NOTE(review): shape=(1) is just the int 1, not a 1-tuple — probably
    # intended shape=(1,). Confirm against lh5.Array's signature.
    a = lh5.Array(shape=(1), dtype=float)
    # NOTE(review): 'dataype_name' looks like a typo, but it must match the
    # method actually defined on pygama's Array class — verify before renaming.
    assert a.dataype_name() == 'array'
|
#!/usr/bin/env python
from __future__ import print_function
import sys, imp, collections, itertools
# verify that same node isn't added to index multiple times
def _wrap_extend(extend):
def wrapped(self, files):
# files is a generator and not all items may be used before the program
# completes, but all items must be unique, so check all of them here
seen = self._checks_seen = getattr(self, "_checks_seen", {})
files, check = itertools.tee(files)
for node in check:
assert id(node) not in seen
seen[id(node)] = node
extend(self, files)
return wrapped
# Drop this wrapper's own name so argv[0] becomes the dedup script under test.
sys.argv.pop(0)
dedup = imp.load_source("dedup", sys.argv[0])
# Monkey-patch Index.extend with the uniqueness-checking wrapper, then run.
dedup.Index.extend = _wrap_extend(dedup.Index.extend)
dedup.main()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
This is a sample script.
"""
__author__ = "Lamjed Ben Jabeur"
__copyright__ = "Copyright 2019, Airbus"
__version__ = "0.9.0"
__maintainer__ = "Lamjed Ben Jabeur"
__email__ = "lamjed.la.ben-jabeur@airbus.com"
__status__ = "Prototype"
import argparse
import os
import sys
import time
from tqdm import tqdm
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
from templateproject.utils import setup_logger, get_configuration
def get_arguments():
    """Parse command line arguments.

    :return: (input_path, output_path, nrows) tuple
    """
    parser = argparse.ArgumentParser()
    # (short flag, long flag, add_argument keyword arguments)
    options = [
        ('-i', '--input', dict(help="The input CSV file", type=str, required=True)),
        ('-o', '--output', dict(help="The output CSV file", type=str, required=True)),
        ('-n', '--nrows', dict(default=5, help="The number of rows", type=int, required=False)),
    ]
    for short_flag, long_flag, kwargs in options:
        parser.add_argument(short_flag, long_flag, **kwargs)
    parsed = parser.parse_args()
    return parsed.input, parsed.output, parsed.nrows
if __name__ == '__main__':
    start = time.time()
    # SECTION: Setup logger
    logger = setup_logger()
    logger.info('START SCRIPT %s' % sys.argv[0])
    # SECTION:Config parameters
    raw_path = get_configuration('raw', 'path')
    processed_path = get_configuration('processed', 'path')
    logger.info('Configuration: row path=%s' % raw_path)
    logger.info('Configuration: processed path=%s' % processed_path)
    # SECTION: Argument parameters
    input_file, output_file, nrows = get_arguments()
    logger.info('Argument: input=%s' % input_file)
    logger.info('Argument: output=%s' % output_file)
    logger.info('Argument: nrows=%s' % nrows)
    logger.info('This is list of required packages')
    # SECTION: sample loop
    logger.info('Start processing')
    items = [i for i in range(0, 100)]
    # Demo work loop: tick the progress bar once per item (0.1s each).
    with tqdm(total=len(items)) as pbar:
        for item in items:
            time.sleep(0.1)
            pbar.update(1)
    # End script
    end = time.time()
    logger.info('END SCRIPT')
    logger.info('EXECUTION TIME %0.3f' % (end - start))
|
import cv2 as cv
import numpy as np
# Haar cascade is used only to LOCATE faces; recognition is done by LBPH below.
haar_cascade = cv.CascadeClassifier('Section3-Face/haar_face.xml')
# Label index -> person name; order must match the training script's labels.
people = ['Ben Afflek', 'Elton John', 'Jerry Seinfield', 'Madonna', 'Mindy Kaling']
# features = np.load('Section3-Face/features.npy', allow_pickle=True)
# labels = np.load('Section3-Face/labels.npy', allow_pickle=True)
face_recognizer = cv.face.LBPHFaceRecognizer_create()
face_recognizer.read('Section3-Face/face_trained.yml')
# img = cv.imread('Resources/Faces/val/jerry_seinfeld/3.jpg')
# img = cv.imread('Resources/Faces/val/madonna/1.jpg') #failed
# img = cv.imread('Resources/Faces/val/madonna/4.jpg')
# img = cv.imread('Resources/Faces/val/elton_john/5.jpg') #failed
img = cv.imread('Resources/Faces/val/elton_john/3.jpg')
# img = cv.imread('Resources/Faces/val/ben_afflek/5.jpg')
# img = cv.imread('Resources/Faces/val/mindy_kaling/4.jpg')
gray = cv.cvtColor(img, cv.COLOR_BGR2GRAY)
cv.imshow('Person', gray)
# Detect the face in img
faces_rect = haar_cascade.detectMultiScale(gray, 1.1, 4)
for (x,y,w,h) in faces_rect:
    # Predict on the grayscale face region only.
    faces_roi = gray[y:y+h,x:x+w]
    label, confidence = face_recognizer.predict(faces_roi)
    print(f'Label = {people[label]} with a confidence of {confidence}')
    cv.putText(img, str(people[label]), (20,20), cv.FONT_HERSHEY_COMPLEX, 1.0, (0,255,0), thickness=1)
    cv.rectangle(img, (x,y), (x+w,y+h), (0,255,0), thickness=2)
    cv.imshow(str(people[label]), img)
cv.waitKey(0)
# Copyright 2008-2015 Nokia Solutions and Networks
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
try:
from wx.lib.agw import flatnotebook as fnb
except ImportError:
from wx.lib import flatnotebook as fnb
from robotide.publish import RideNotebookTabChanging, RideNotebookTabChanged
class NoteBook(fnb.FlatNotebook):
    """Tab container for RIDE that publishes tab-change messages and supports
    tabs that must not be closed by the user."""
    def __init__(self, parent, app):
        self._app = app
        style = fnb.FNB_NODRAG|fnb.FNB_HIDE_ON_SINGLE_TAB|fnb.FNB_VC8
        fnb.FlatNotebook.__init__(self, parent, style=style)
        self.Bind(fnb.EVT_FLATNOTEBOOK_PAGE_CLOSING, self.OnTabClosing)
        self.Bind(fnb.EVT_FLATNOTEBOOK_PAGE_CHANGING, self.OnTabChanging)
        self.Bind(fnb.EVT_FLATNOTEBOOK_PAGE_CHANGED, self.OnTabChanged)
        # True while a close is in flight; used by _tab_changed() to suppress
        # the change event that closing a page generates.
        self._tab_closing = False
        # Tabs that the user must not be able to close.
        self._uncloseable = []
    def add_tab(self, tab, title, allow_closing=True):
        if not allow_closing:
            self._uncloseable.append(tab)
        self.AddPage(tab, title.strip())
    def show_tab(self, tab):
        """Shows the notebook page that contains the given tab."""
        if not self.tab_is_visible(tab):
            page = self.GetPageIndex(tab)
            if page >= 0:
                self.SetSelection(page)
    def delete_tab(self, tab):
        # Protected tabs can still be deleted programmatically.
        if tab in self._uncloseable:
            self._uncloseable.remove(tab)
        page = self.GetPageIndex(tab)
        self.DeletePage(page)
    def rename_tab(self, tab, new_name):
        self.SetPageText(self.GetPageIndex(tab), new_name)
    def allow_closing(self, tab):
        # Remove the tab from the protected list so the user may close it.
        if tab in self._uncloseable:
            self._uncloseable.remove(tab)
    def disallow_closing(self, tab):
        if tab not in self._uncloseable:
            self._uncloseable.append(tab)
    def tab_is_visible(self, tab):
        return tab == self.GetCurrentPage()
    @property
    def current_page_title(self):
        return self.GetPageText(self.GetSelection())
    def OnTabClosing(self, event):
        # Veto the close for protected tabs; otherwise remember that a close
        # is in progress so OnTabChanged can ignore the follow-up event.
        if self.GetPage(event.GetSelection()) in self._uncloseable:
            event.Veto()
            return
        self._tab_closing = True
    def OnTabChanging(self, event):
        if not self._tab_changed():
            return
        oldtitle = self.GetPageText(event.GetOldSelection())
        newindex = event.GetSelection()
        # The new index may be out of range while a tab is being removed.
        if newindex <= self.GetPageCount() - 1:
            newtitle = self.GetPageText(event.GetSelection())
            self.GetPage(event.GetSelection()).SetFocus()
        else:
            newtitle = None
        RideNotebookTabChanging(oldtab=oldtitle, newtab=newtitle).publish()
    def OnTabChanged(self, event):
        if not self._tab_changed():
            self._tab_closing = False
            return
        RideNotebookTabChanged().publish()
    def _tab_changed(self):
        """Change event is send even when no tab available or tab is closed"""
        if not self.GetPageCount() or self._tab_closing:
            return False
        return True
|
from vinge.filters import id, logline, tag
from parser import *
from semex import SensorSemex, ConcatSemex, DisjunctSemex, StarSemex, TrivialSemex
def _base_absyn(graph, node):
    """Build the semex for a base (leaf) AST node.

    ANYTHING maps to a TrivialSemex; LOGLINE/TAG/ID map to a SensorSemex with
    the corresponding node filter from vinge.filters.
    """
    bt = node.base_type
    if bt == BaseType.ANYTHING:
        return TrivialSemex(graph.number_of_nodes())
    if bt == BaseType.LOGLINE:
        f = logline
    elif bt == BaseType.TAG:
        f = tag
    elif bt == BaseType.ID:
        f = id
    else:
        # BUG FIX: previously an unknown base type fell through with `f`
        # unbound, crashing later with UnboundLocalError; fail loudly instead.
        raise ValueError("Unknown base type: {!r}".format(bt))
    return SensorSemex(graph.number_of_nodes(),
                       f, graph)
def _concat_absyn(graph, transition, transition_op, node):
    """Build a ConcatSemex from the node's two sub-regexes."""
    left = ast_to_semex(graph, transition, transition_op, node.regex1)
    right = ast_to_semex(graph, transition, transition_op, node.regex2)
    return ConcatSemex(transition, transition_op, left, right)
def _disjunct_absyn(graph, transition, transition_op, node):
    """Build a DisjunctSemex (alternation) from the node's two sub-regexes."""
    left = ast_to_semex(graph, transition, transition_op, node.regex1)
    right = ast_to_semex(graph, transition, transition_op, node.regex2)
    return DisjunctSemex(left, right)
def _star_absyn(graph, transition, transition_op, node):
    """Build a StarSemex (Kleene star) over the node's inner regex."""
    inner = ast_to_semex(graph, transition, transition_op, node.regex)
    # TODO(trevor) length, what to do with that?
    return StarSemex(transition, transition_op,
                     graph.number_of_nodes(),
                     inner, length=3.0)
def ast_to_semex(graph, transition, transition_op, ast):
    """
    Converts the given regex abstract syntax tree into an actual regex to
    be used with the given graph.
    Args:
        graph (networkx.graph)
        transition (scipy.sparse.base.spmatrix) Adjacency matrix of graph
        transition_op (scipy.sparse.linalg.LinearOperator) LinOp of transition
        ast (regex_parser.RegExAbsSyn)
    Returns:
        (semex.Semex)
    """
    # TODO(trevor) should redo the regex stuff to not have to pass around these
    # three args (graph, transition, transition_op).
    if isinstance(ast, BaseAbsyn):
        # Base nodes don't need the transition machinery.
        return _base_absyn(graph, ast)
    dispatch = (
        (ConcatAbsyn, _concat_absyn),
        (DisjunctAbsyn, _disjunct_absyn),
        (StarAbsyn, _star_absyn),
    )
    for absyn_cls, build in dispatch:
        if isinstance(ast, absyn_cls):
            return build(graph, transition, transition_op, ast)
    raise ValueError("Programmer error: This should never happen.")
|
from django.contrib import admin
from .models import School
# Register your models here.
class SchoolAdmin(admin.ModelAdmin):
    """Admin configuration for School: list and search by name."""
    # BUG FIX: 'school_display' is not a ModelAdmin option (and ('name') is a
    # plain string, not a tuple), so it had no effect; 'list_display' with a
    # 1-tuple is the intended setting.
    list_display = ('name',)
    search_fields = ['name']
admin.site.register(School, SchoolAdmin)
# module name must be the same as the directory name in the build file
class module_name:
    def __init__(self, config):
        '''
        reference to sources\quick_cmake\config.py for config instance.
        config contains following values:
        output: output for the module, config.Output.BINARY, STATIC_LIB, DYNAMIC_LIB
        configuration: build configuration. config.Configuration.DEBUG, RELEASE
        platform: config.Platform.WIN32, X64, ARM, ARM64
        system: config.System.WINDOWS, LINUX
        '''
        # required, output can be the BINARY, STATIC_LIB, DYNAMIC_LIB
        self.output = config.Output.BINARY
        # optional, dependencies contains the dependency of this module
        self.dependencies = ['other_module']
        # optional, define the third parties, must located at the folder third_parties/
        self.third_parties = ['third_party']
        # optional, define the system libs here, contains the system lib.
        self.system_libs = ['socket']
        # optional, only set it if the output value is BINARY and enable the unit test generation.
        self.main_file = 'main.cpp'
    # define the pre build event (runs before compilation)
    def pre_build(self):
        pass
    # define the post build event (runs after the module output is produced)
    def post_build(self):
        pass
|
from bc4py import __chain_version__
from bc4py.config import C, V, BlockChainError
from bc4py.chain.tx import TX
from bc4py.database.account import insert_movelog, read_address2userid
from bc4py.user import Balance, Accounting
from bc4py.user.txcreation.utils import *
from bc4py_extension import PyAddress
from time import time
async def send_many(sender,
                    send_pairs,
                    cur,
                    fee_coin_id=0,
                    gas_price=None,
                    msg_type=C.MSG_NONE,
                    msg_body=b'',
                    subtract_fee_from_amount=False,
                    retention=10800):
    """Build, fund and sign a TX_TRANSFER with multiple outputs.

    Args:
        sender: account user id paying for the transfer.
        send_pairs: iterable of (PyAddress, coin_id, amount) outputs.
        cur: open database cursor.
        fee_coin_id: coin used to pay the fee (0 = base coin).
        gas_price: fee price; defaults to V.COIN_MINIMUM_PRICE.
        msg_type: message type constant; msg_body: attached message bytes.
        subtract_fee_from_amount: deduct the fee from the sent amount
            (requires fee_coin_id == 0).
        retention: seconds until the tx deadline.

    Returns:
        the signed TX, with the balance movement already logged.
    """
    assert isinstance(sender, int), 'Sender is user id'
    assert 0 < len(send_pairs), 'Empty send_pairs'
    # send_pairs check
    movements = Accounting()
    send_coins = Balance()
    outputs = list()
    coins = Balance()
    for address, coin_id, amount in send_pairs:
        assert isinstance(address, PyAddress)
        assert isinstance(coin_id, int) and isinstance(amount, int), 'CoinID, amount is int'
        coins[coin_id] += amount
        outputs.append((address, coin_id, amount))
        user = await read_address2userid(address=address, cur=cur)
        if user is not None:
            movements[user][coin_id] += amount  # send to myself
    movements[sender] -= coins
    # movements[C.ANT_OUTSIDE] += coins
    # tx
    now = int(time() - V.BLOCK_GENESIS_TIME)
    tx = TX.from_dict(
        tx={
            'version': __chain_version__,
            'type': C.TX_TRANSFER,
            'time': now,
            'deadline': now + retention,
            'inputs': list(),
            'outputs': outputs,
            'gas_price': gas_price or V.COIN_MINIMUM_PRICE,
            'gas_amount': 1,
            'message_type': msg_type,
            'message': msg_body
        })
    # Gas covers the serialized size plus one signature.
    tx.gas_amount = tx.size + C.SIGNATURE_GAS
    # fill unspents
    input_address = await fill_inputs_outputs(tx=tx, cur=cur, fee_coin_id=fee_coin_id)
    # subtract fee from amount
    if subtract_fee_from_amount:
        if fee_coin_id != 0:
            raise BlockChainError('subtract_fee option require fee_coin_id=0')
        subtract_fee = subtract_fee_from_user_balance(tx)
        # fee returns to sender's balance
        movements[sender][0] += subtract_fee
        send_coins[0] -= subtract_fee
    fee_coins = Balance(coin_id=fee_coin_id, amount=tx.gas_price * tx.gas_amount)
    # check enough balance account have
    for address, coin_id, amount in send_pairs:
        send_coins[coin_id] += amount
    await check_enough_amount(sender=sender, send_coins=send_coins, fee_coins=fee_coins, cur=cur)
    # replace dummy address
    await replace_redeem_dummy_address(tx, cur)
    # setup signature
    tx.serialize()
    await add_sign_by_address(tx, input_address, cur)
    movements[sender] -= fee_coins
    # movements[C.ANT_OUTSIDE] += fee_coins
    await insert_movelog(movements, cur, tx.type, tx.time, tx.hash)
    return tx
async def send_from(
        sender,
        address,
        coins,
        cur,
        fee_coin_id=0,
        gas_price=None,
        msg_type=C.MSG_NONE,
        msg_body=b'',
        subtract_fee_amount=False,
        retention=10800):
    """Send every (coin_id, amount) in *coins* to a single address.

    Thin wrapper that expands the Balance into send_pairs and delegates to
    send_many with the same fee/message/retention options.
    """
    assert isinstance(coins, Balance)
    assert isinstance(address, PyAddress)
    send_pairs = [(address, coin_id, amount) for coin_id, amount in coins]
    return await send_many(sender=sender, send_pairs=send_pairs, cur=cur, fee_coin_id=fee_coin_id,
                           gas_price=gas_price, msg_type=msg_type, msg_body=msg_body,
                           subtract_fee_from_amount=subtract_fee_amount, retention=retention)
def subtract_fee_from_user_balance(tx: TX):
    """Move the tx fee out of the sender's coin-0 sending output.

    Adds the fee once to the dummy redeem output and subtracts it once from
    a regular sending output, mutating ``tx.outputs`` in place.  Raises
    BlockChainError when either adjustment could not be applied (e.g. no
    coin-0 output large enough to absorb the fee).  Returns the fee amount.
    """
    fee = tx.gas_amount * tx.gas_price
    added_to_redeem = False
    removed_from_send = False
    for idx, (address, coin_id, amount) in enumerate(tx.outputs):
        # only coin-0 outputs that can absorb the full fee are candidates
        if coin_id != 0 or amount < fee:
            continue
        if address == DUMMY_REDEEM_ADDRESS:
            if not added_to_redeem:
                # credit the used fee back onto the redeem output
                tx.outputs[idx] = (address, coin_id, amount + fee)
                added_to_redeem = True
        elif not removed_from_send:
            # debit the used fee from the sending output
            tx.outputs[idx] = (address, coin_id, amount - fee)
            removed_from_send = True
    # both adjustments must have happened exactly once
    if not (removed_from_send and added_to_redeem):
        raise BlockChainError('failed to subtract fee sub={} add={} fee={}'
                              .format(removed_from_send, added_to_redeem, fee))
    return fee
# Public API of this module
__all__ = ["send_from", "send_many"]
|
# The following codes are adapted from ab_analysis.py.
import sys
import pandas as pd
import numpy as np
from scipy import stats
from statsmodels.stats.multicomp import pairwise_tukeyhsd
def main():
    """Compare sorting-algorithm running times with ANOVA, Tukey's HSD and a mean ranking.

    Usage: python <script> <csv_file>

    The CSV must contain the columns qs1..qs5, merge1 and partition_sort,
    each holding per-trial running times of one sorting implementation.
    """
    searchdata_file = sys.argv[1]
    data = pd.read_csv(searchdata_file)

    # ANOVA / F test across all seven algorithms.
    # Adapted from https://towardsdatascience.com/anova-tukey-test-in-python-b3082b6e6bda .
    print("-----------------------ANOVA and F Test-----------------------------------------")
    fvalue, pvalue = stats.f_oneway(data['qs1'],
                                    data['qs2'],
                                    data['qs3'],
                                    data['qs4'],
                                    data['qs5'],
                                    data['merge1'],
                                    data['partition_sort'])
    print(f"Results of ANOVA test:\n The F-statistic is: {fvalue}\n The p-value is: {pvalue}")
    if pvalue < 0.05:
        print("Given the p-value is less than significance level, we have strong evidence against the null hypothesis of ANOVA test. Hence,there is difference in means.")
    else:
        print("Given the p-value is not less than significance level, we don't have strong evidence against the null hypothesis of ANOVA test. Hence,there is no difference in means.")
    print("--------------------------------------------------------------------------------")

    print("-----------------------Tukey's HSD Test-----------------------------------------")
    # Tukey's HSD pairwise comparison needs long-format (melted) data.
    # pd.melt: https://pandas.pydata.org/docs/reference/api/pandas.melt.html
    # Adapted from https://towardsdatascience.com/anova-tukey-test-in-python-b3082b6e6bda .
    horizontal_data = pd.melt(data)
    m_comp = pairwise_tukeyhsd(endog=horizontal_data['value'], groups=horizontal_data['variable'], alpha=0.05)
    print(m_comp)
    print("--------------------------------------------------------------------------------")

    print("------------------------------Ranking-------------------------------------------")
    # Rank the algorithms by mean running time (smaller mean -> better rank).
    mean_running = data.mean()
    row = ['qs1', 'qs2', 'qs3', 'qs4', 'qs5', 'merge1', 'partition_sort']
    # fixed user-facing typo: "Avaerage" -> "Average"
    col = ['Average Running Time']
    mean_table = pd.DataFrame(mean_running, index=row, columns=col)
    mean_table['Rank'] = mean_table['Average Running Time'].rank(ascending=True).astype('int')
    mean_table = mean_table.sort_values(['Average Running Time'], ascending=True)
    print(mean_table)
    print("--------------------------------------------------------------------------------")


if __name__ == '__main__':
    main()
|
"""
Tests for the MachColl sim client module
"""
import pytest
from bluebird.metrics import MetricsProviders
from tests.unit.sim_client.common.imports_test import sim_client_instantiation
# Name of the simulator module exercised by these tests
_MODULE_NAME = "MachColl"
# Skip this whole test module when the MachColl client package is not installed
mc_aircraft_controls = pytest.importorskip(
    "bluebird.sim_client.machcoll.machcoll_aircraft_controls"
)
def test_sim_client_instantiation():
    """Tests that the SimClient can be instantiated"""

    class _StubMetricsProvider:
        # Minimal stand-in for a metrics provider named "MachColl"
        # that exposes no metrics.
        def __str__(self):
            return "MachColl"

        @property
        def metrics(self):
            return []

    providers = MetricsProviders([_StubMetricsProvider()])
    sim_client_instantiation(_MODULE_NAME, providers, extra_methods={"mc_client"})
|
# Generated by Django 3.2.8 on 2022-01-07 00:34
import datetime
from django.db import migrations, models
import django.db.models.deletion
from django.utils.timezone import utc
class Migration(migrations.Migration):
    """Auto-generated schema migration for the ``amcm`` app (2022-01-07).

    Relaxes the ``tipoCondicion`` foreign key and refreshes the defaults of
    several date fields.  Historical migration: do not edit its operations.
    """

    dependencies = [
        ('amcm', '0042_auto_20220106_1522'),
    ]

    operations = [
        # Make the condition-type FK optional (nullable/blank), cascading on delete
        migrations.AlterField(
            model_name='condicionesevento',
            name='tipoCondicion',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='amcm.tipocondicion', verbose_name='Límite'),
        ),
        # NOTE(review): the datetime defaults below are fixed timestamps frozen
        # at the moment makemigrations ran, not evaluated per row — the model
        # probably intended a callable such as timezone.now; left unchanged
        # because historical migrations must stay byte-stable.
        migrations.AlterField(
            model_name='credito',
            name='fecha_pago',
            field=models.DateField(blank=True, default=datetime.datetime(2022, 1, 7, 0, 33, 59, 662849, tzinfo=utc), null=True, verbose_name='Fecha de pago'),
        ),
        migrations.AlterField(
            model_name='credito',
            name='fecha_registro',
            field=models.DateField(default=datetime.datetime(2022, 1, 7, 0, 33, 59, 662829, tzinfo=utc), verbose_name='Fecha de registro'),
        ),
        migrations.AlterField(
            model_name='cuentaspago',
            name='fecha_registro',
            field=models.DateField(default=datetime.datetime(2022, 1, 7, 0, 33, 59, 645449, tzinfo=utc), verbose_name='Fecha de Registro'),
        ),
        migrations.AlterField(
            model_name='elegible',
            name='fecha_registro',
            field=models.DateField(default=datetime.datetime(2022, 1, 7, 0, 33, 59, 645925, tzinfo=utc), verbose_name='Fecha de registro'),
        ),
        migrations.AlterField(
            model_name='pago',
            name='fechaPago',
            field=models.DateField(blank=True, default=datetime.datetime(2022, 1, 7, 0, 33, 59, 642554, tzinfo=utc), null=True, verbose_name='Fecha del Pago'),
        ),
        migrations.AlterField(
            model_name='pago',
            name='fechaRegistro',
            field=models.DateField(default=datetime.datetime(2022, 1, 7, 0, 33, 59, 642586, tzinfo=utc), verbose_name='Fecha de Registro'),
        ),
        migrations.AlterField(
            model_name='recibo',
            name='fecha_registro',
            field=models.DateField(default=datetime.datetime(2022, 1, 7, 0, 33, 59, 661866, tzinfo=utc), verbose_name='Fecha de registro'),
        ),
    ]
|
#!/usr/bin/env python
import vtk
def main():
    """Render a vtkEarthSource, save a PNG screenshot, then start interaction."""
    named_colors = vtk.vtkNamedColors()

    # Earth map geometry (vtkEarthSource, not a plain sphere)
    earth = vtk.vtkEarthSource()
    earth.OutlineOff()
    earth.Update()

    # Mapper/actor pair for the earth polydata
    earth_mapper = vtk.vtkPolyDataMapper()
    earth_mapper.SetInputConnection(earth.GetOutputPort())
    earth_actor = vtk.vtkActor()
    earth_actor.SetMapper(earth_mapper)

    # Rendering pipeline: renderer -> window -> interactor
    ren = vtk.vtkRenderer()
    win = vtk.vtkRenderWindow()
    win.AddRenderer(ren)
    interactor = vtk.vtkRenderWindowInteractor()
    interactor.SetRenderWindow(win)

    ren.AddActor(earth_actor)
    ren.SetBackground(named_colors.GetColor3d("green"))  # green background

    # First render so the window has contents to capture
    win.Render()

    # Capture the rendered window and write it out as a PNG
    window_to_image = vtk.vtkWindowToImageFilter()
    window_to_image.SetInput(win)
    window_to_image.Update()
    png_writer = vtk.vtkPNGWriter()
    png_writer.SetFileName("TestEarthSource.png")
    png_writer.SetInputConnection(window_to_image.GetOutputPort())
    png_writer.Write()

    # Hand control over to the interactor's event loop
    interactor.Start()


if __name__ == '__main__':
    main()
|
"""Filter backends for the ``know_me`` module.
"""
from django.db.models import Q
from django.shortcuts import get_object_or_404
from rest_framework import filters
from know_me import models
class KMUserAccessFilterBackend(filters.BaseFilterBackend):
    """
    Filter for listing items owned by a Know Me user.

    Access to items is only granted if one of the following is true:

    1. The user ID provided is that of the requesting user
    2. There is an accessor granting the requesting user access to the
       user whose ID is provided in the request.
    """

    def filter_queryset(self, request, queryset, view):
        """
        Filter items for a list action.

        Args:
            request:
                The request being made.
            queryset:
                A queryset containing the objects to filter.
            view:
                The view being accessed.

        Returns:
            The provided queryset filtered to only include items owned
            by the user specified in the provided views arguments.
        """
        # The requester holds an accepted accessor for the KMUser...
        accepted_accessor = (
            Q(km_user_accessor__user_with_access=request.user)
            & Q(km_user_accessor__is_accepted=True)
        )
        # ...or the requester is the KMUser themselves.
        is_owner = Q(user=request.user)

        # distinct(): the accessor join can yield duplicate rows in rare
        # cases — see #343 for details.
        candidates = models.KMUser.objects.filter(
            accepted_accessor | is_owner
        ).distinct()
        km_user = get_object_or_404(candidates, pk=view.kwargs.get("pk"))

        return queryset.filter(km_user=km_user)
|
import cv2 as cv
import numpy as np
def connected_component_demo(src):
    """Label connected components of a BGR image and display them in random colors.

    Blurs and thresholds *src*, labels the 8-connected foreground components,
    paints every component with a random color (background stays black), shows
    the recolored image in a window, and prints the component count.

    Args:
        src: BGR input image (as returned by cv.imread).
    """
    src = cv.GaussianBlur(src, (3, 3), 10)
    # Median filter removes small speckle noise before thresholding
    src = cv.medianBlur(src, 5)
    gray = cv.cvtColor(src, cv.COLOR_BGR2GRAY)
    # Binarize the image; connectedComponents expects a binary image with a
    # black background (the background gets label 0)
    _thr, binary = cv.threshold(gray, 150, 255, cv.THRESH_BINARY)
    # Label every pixel: connectivity=8 -> 8-connected neighborhoods,
    # ltype=cv.CV_32S -> signed 32-bit label image (the default)
    num_labels, labels = cv.connectedComponents(binary, connectivity=8, ltype=cv.CV_32S)
    # One random BGR color per label; label 0 (background) stays black
    palette = np.random.randint(0, 256, size=(num_labels, 3))
    palette[0] = (0, 0, 0)
    # Vectorized recoloring: index the palette with the label map in one NumPy
    # operation instead of a per-pixel double Python loop (was O(h*w) in the
    # interpreter)
    dst = palette[labels].astype(src.dtype)
    cv.imshow("dst", dst)
    # Subtract 1 so the black background is not counted as a component
    print("total connected component: " + str(num_labels - 1))
if __name__ == "__main__":
    # Load the demo image (hard-coded Windows path), show the original,
    # run the connected-component demo, then block until a key is pressed
    src = cv.imread("D:/Images/normal1.jpg")
    cv.namedWindow("src", cv.WINDOW_AUTOSIZE)
    cv.imshow("src", src)
    connected_component_demo(src)
    cv.waitKey(0)
    cv.destroyAllWindows()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.