#! /usr/bin/python3
import json
import os
import sys
from .color import Color
from .transport import Transport
class CMSync:
"""Sync directories and files based on the specifications of the json file
"""
def __init__(self, config_file='sync.json'):
"""
Loads the JSON file and executes the sync based on the manifest definition.
Args:
config_file (str, optional): Path to the JSON file containing the
manifest definition.
Examples:
JSON manifest file:
{
"manifest": [
{
"src": "/dir/to/copy",
"dest": "/tmp/"
},
{
"src": "/dir/file.txt",
"dest": "/tmp/xyz/newfile.txt"
}
]
}
"""
self._load_config(config_file)
self._now()
def _load_config(self, config_file):
"""Loads the configuration file
Args:
            config_file (str): Path to the JSON file containing the
manifest definition.
"""
        if not os.path.isfile(config_file):
            print(Color.ERROR + '{} not found.'.format(config_file))
            sys.exit(1)
        with open(config_file) as config_fh:
            self._config = json.load(config_fh)
def _now(self):
"""Starts synchronization"""
for entry in self._config['manifest']:
t = Transport(
entry['src'],
entry['dest']
)
t.send()
if 'run_after' in entry.keys():
print(Color.COMMAND + "Running `{}`".format(entry['run_after']))
os.system(entry['run_after'])
def main():
CMSync()
if __name__ == '__main__':
s = CMSync()
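# --- Hedged example: a manifest entry using the optional `run_after` hook ----
# `_now` runs each entry's `run_after` command via os.system after the copy;
# the paths and command below are illustrative, not part of this project.
#
# {
#     "manifest": [
#         {
#             "src": "/dir/to/copy",
#             "dest": "/tmp/",
#             "run_after": "ls -la /tmp/"
#         }
#     ]
# }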
|
import logging
import dateutil.parser
from udata.models import Dataset
from .utils import check_url, UnreachableLinkChecker
log = logging.getLogger(__name__)
class CroquemortLinkChecker(object):
"""Croquemort link checker implementation.
The main interface is the `check` method.
"""
def _format_response(self, response):
status = response.get('final-status-code')
try:
status = int(status)
except ValueError:
return {'check:error': 'Malformed check response'}
else:
check_date = response.get('updated')
if check_date:
check_date = dateutil.parser.parse(response.get('updated'))
result = {
'check:url': response.get('checked-url'),
'check:status': status,
'check:available': status and status >= 200 and status < 400,
'check:date': check_date,
}
for header in [
'content-type', 'content-length', 'content-md5', 'charset',
'content-disposition'
]:
value = str(response.get(header, '') or '')
if len(value) > 0:
try:
result[f"check:headers:{header}"] = int(value)
except ValueError:
result[f"check:headers:{header}"] = value
return result
def check(self, resource):
"""
Parameters
----------
resource : a uData resource instance to be checked
Returns
-------
dict or None
The formatted response from the linkchecker, like so:
{
'check:url': 'https://example.com',
'check:status': 200,
'check:available': True,
'check:date': datetime.datetime(2017, 9, 4, 11, 13, 8, 888288),
'check:headers:content-type': 'text/csv',
'check:headers:content-length': 245436,
'check:headers:content-md5': 'acbd18db4cc2f85cedef654fccc4a4d8',
'check:headers:charset': 'utf-8',
'check:headers:content-disposition': 'inline'
}
Or in case of failure (in udata-croquemort, not croquemort):
{
'check:error': 'Something went terribly wrong.'
}
Or in case of failure in croquemort:
None
"""
log.debug('Checking resource w/ URL %s', resource.url)
dataset = Dataset.objects(resources__id=resource.id).first()
if not dataset:
            message = 'No dataset found for resource %s' % resource.id
            log.error(message)
            return {'check:error': message}
try:
# do not check ftp(s) urls
if resource.url and resource.url.lower().startswith('ftp'):
return
check_response = check_url(resource.url, group=dataset.slug)
return self._format_response(check_response)
except UnreachableLinkChecker as e:
log.error('Unreachable croquemort for url %s: %s', resource.url, e)
return
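if __name__ == '__main__':
    # Hedged sketch: feed _format_response a hand-built payload mirroring the
    # keys it reads above. The values are illustrative, not a real croquemort
    # reply.
    sample = {
        'checked-url': 'https://example.com/data.csv',
        'final-status-code': '200',
        'updated': '2017-09-04T11:13:08',
        'content-type': 'text/csv',
        'content-length': '245436',
    }
    print(CroquemortLinkChecker()._format_response(sample))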
|
"""
If a "function" is in the plt it's a wrapper for something in the GOT.
Make that apparent.
"""
import vivisect
import envi
import envi.archs.i386 as e_i386
import envi.archs.i386.opcode86 as opcode86
def analyze(vw):
"""
Do simple linear disassembly of the .plt section if present.
"""
for sva,ssize,sname,sfname in vw.getSegments():
if sname != ".plt":
continue
nextva = sva + ssize
while sva < nextva:
vw.makeCode(sva)
ltup = vw.getLocation(sva)
sva += ltup[vivisect.L_SIZE]
def analyzeFunction(vw, funcva):
seg = vw.getSegment(funcva)
    if seg is None:
return
segva, segsize, segname, segfname = seg
if segname != ".plt":
return
#FIXME check for i386
op = vw.parseOpcode(funcva)
if op.opcode != opcode86.INS_BRANCH:
return
loctup = None
oper0 = op.opers[0]
if isinstance(oper0, e_i386.i386ImmMemOper):
loctup = vw.getLocation(oper0.getOperAddr(op))
elif isinstance(oper0, e_i386.i386RegMemOper):
# FIXME this is i386 elf only!
if oper0.reg != e_i386.REG_EBX:
print "UNKNOWN PLT CALL",hex(funcva)
got = vw.vaByName("%s._GLOBAL_OFFSET_TABLE_" % segfname)
#FIXME this totally sucks
        if got is None:
for va,size,name,fname in vw.getSegments():
if name == ".got.plt":
got = va
break
        if got is not None:
loctup = vw.getLocation(got+oper0.disp)
    if loctup is None:
return
if loctup[vivisect.L_LTYPE] != vivisect.LOC_IMPORT:
return
tinfo = loctup[vivisect.L_TINFO]
lname,fname = tinfo.split(".")
#vw.makeName(funcva, "plt_%s" % fname, filelocal=True)
vw.makeFunctionThunk(funcva, tinfo)
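# --- Hedged sketch: the i386 PLT stub shapes recognized above ----------------
# A lazy-binding PLT entry typically disassembles to one of:
#     jmp dword [0x804a010]      ; absolute GOT slot  -> i386ImmMemOper
#     jmp dword [ebx + 0x10]     ; PIC, GOT via ebx   -> i386RegMemOper
# analyzeFunction() resolves the GOT slot behind the jump; when that slot is
# a LOC_IMPORT such as "libc.printf", the stub becomes a function thunk.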
|
#!/usr/bin/env python3
"""
Module containing functions to calculate structural environment profiles of AAs
"""
# TODO Tests to make sure these are producing correct profiles
from collections import defaultdict
import numpy as np
from Bio.SeqUtils import seq1
from Bio.Alphabet.IUPAC import protein as protein_alphabet
AA_INDEX = {aa: i for i, aa in enumerate(protein_alphabet.letters)}
NUM_AAS = len(protein_alphabet.letters)
def k_nearest_residues(residues, k=10, distance_matrix=None):
"""
Yields chemical environments parameterised by the make up of the k nearest AAs.
Hetero atoms are included so must be dropped separately if desired.
residues: list of residues to consider
k: count the k nearest residues
distance_matrix: numpy matrix of distances between residues, with rows/columns in
that order. Calculated if not supplied
yields: chemical environment profiles (np.array)
"""
if k >= len(residues):
raise ValueError('k >= number of residues')
if distance_matrix is None:
distance_matrix = residue_distance_matrix(residues)
for res_index in range(len(residues)):
dists = distance_matrix[res_index,]
non_self = np.ones_like(dists, dtype=bool)
non_self[res_index] = False
        # map positions in the self-excluded distance array back to residue
        # indices (indices at or after res_index would otherwise be off by one)
        neighbour_idx = np.arange(len(residues))[non_self]
        nearest_k = [residues[i] for i in
                     neighbour_idx[np.argpartition(dists[non_self], k)[:k]]]
counts = defaultdict(lambda: 0)
for i in nearest_k:
counts[seq1(i.get_resname())] += 1
yield np.array([counts[aa] for aa in protein_alphabet.letters])
# 10A selected as default because used in Bagley & Altman 1995
def within_distance(residues, max_dist=10, distance_matrix=None):
"""
    Yields chemical environments parameterised as the residues within max_dist
angstroms. Hetero atoms are included so must be dropped separately if desired.
residues: list of residues to consider
max_dist: maximum distance to count within (in Angstroms)
distance_matrix: numpy matrix of distances between residues, with rows/columns in
that order. Calculated if not supplied
yields: chemical environment profiles (np.array)
"""
if distance_matrix is None:
distance_matrix = residue_distance_matrix(residues)
for res_index in range(len(residues)):
dists = distance_matrix[res_index,]
        res_within_dist = [residues[i] for i in np.argwhere(dists < max_dist)[:, 0]
                           if i != res_index]
counts = defaultdict(lambda: 0)
for i in res_within_dist:
counts[seq1(i.get_resname())] += 1
yield np.array([counts[aa] for aa in protein_alphabet.letters])
def distance_to_nearest(residues, distance_matrix=None):
"""
    Yields chemical environments parameterised as the distance to the nearest residue
of each type. Hetero atoms are included so must be dropped separately if desired.
residues: list of residues to consider
distance_matrix: numpy matrix of distances between residues, with rows/columns in
that order. Calculated if not supplied
yields: chemical environment profiles (np.array)
"""
if distance_matrix is None:
distance_matrix = residue_distance_matrix(residues)
residue_indices = [np.array([seq1(r.get_resname()) == aa for r in residues]) for
aa in protein_alphabet.letters]
for res_index in range(len(residues)):
dists = distance_matrix[res_index,]
non_self = np.ones_like(dists, dtype=bool)
non_self[res_index] = False
yield np.array([min(dists[aa & non_self]) if any(aa & non_self) else np.inf for
aa in residue_indices])
def residue_distance_matrix(residues, ref_atom='CA'):
"""
Generate a distance matrix from an iterable of Bio.PDB residues.
There is no checking whether the specified atom makes sense (i.e. using C-beta
with Gly will fail)
residues: iterable of Bio.PDB residues to get distances from
ref_atom: atom to measure distances from
returns: distance matrix, rows and columns in order of residues (np.array)
"""
dist = np.zeros((len(residues), len(residues)))
for i, res1 in enumerate(residues):
for j, res2 in enumerate(residues[i + 1:]):
dist[i, j + i + 1] = dist[j + i + 1, i] = res1[ref_atom] - res2[ref_atom]
return dist
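if __name__ == '__main__':
    # Hedged usage sketch: profile residues from a hypothetical PDB file.
    # '1abc.pdb' is a placeholder path, not a file shipped with this module.
    from Bio.PDB import PDBParser
    structure = PDBParser(QUIET=True).get_structure('x', '1abc.pdb')
    residues = list(structure.get_residues())
    dist_mat = residue_distance_matrix(residues)
    profiles = list(k_nearest_residues(residues, k=10, distance_matrix=dist_mat))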
|
# Copyright (c) Microsoft Corporation
# Licensed under the MIT License.
"""Manager for causal analysis."""
import pandas as pd
from econml.solutions.causal_analysis import CausalAnalysis
from pathlib import Path
from responsibleai._internal.constants import ManagerNames
from responsibleai._managers.base_manager import BaseManager
from responsibleai.exceptions import (
UserConfigValidationException)
from responsibleai._tools.causal.causal_constants import (
DefaultParams, ModelTypes, ResultAttributes, SerializationAttributes)
from responsibleai._tools.causal.causal_config import CausalConfig
from responsibleai._tools.causal.causal_result import CausalResult
from responsibleai.modelanalysis.constants import ModelTask
class CausalManager(BaseManager):
"""Manager for causal analysis."""
def __init__(self, train, test, target_column, task_type,
categorical_features):
"""Construct a CausalManager for generating causal analyses
from a dataset.
:param train: Dataset on which to compute global causal effects
(#samples x #features).
:type train: pandas.DataFrame
:param test: Dataset on which to compute local causal effects
(#samples x #features).
:type test: pandas.DataFrame
:param target_column: The name of the label column.
:type target_column: str
        :param task_type: Task type is either 'classification' or 'regression'.
:type task_type: str
:param categorical_features: All categorical feature names.
:type categorical_features: list
"""
self._train = train
self._test = test
self._target_column = target_column
self._task_type = task_type
self._categorical_features = categorical_features
self._results = []
def add(
self,
treatment_features,
heterogeneity_features=None,
nuisance_model=ModelTypes.LINEAR,
heterogeneity_model=ModelTypes.LINEAR,
alpha=DefaultParams.DEFAULT_ALPHA,
upper_bound_on_cat_expansion=DefaultParams.DEFAULT_MAX_CAT_EXPANSION,
treatment_cost=DefaultParams.DEFAULT_TREATMENT_COST,
min_tree_leaf_samples=DefaultParams.DEFAULT_MIN_TREE_LEAF_SAMPLES,
max_tree_depth=DefaultParams.DEFAULT_MAX_TREE_DEPTH,
skip_cat_limit_checks=DefaultParams.DEFAULT_SKIP_CAT_LIMIT_CHECKS,
categories=DefaultParams.DEFAULT_CATEGORIES,
n_jobs=DefaultParams.DEFAULT_N_JOBS,
verbose=DefaultParams.DEFAULT_VERBOSE,
random_state=DefaultParams.DEFAULT_RANDOM_STATE,
):
"""Compute causal insights.
:param treatment_features: Treatment feature names.
:type treatment_features: list
:param heterogeneity_features: Features that mediate the causal effect.
:type heterogeneity_features: list
:param nuisance_model: Model type to use for nuisance estimation.
:type nuisance_model: str
:param heterogeneity_model: Model type to use for
treatment effect heterogeneity.
:type heterogeneity_model: str
:param alpha: Confidence level of confidence intervals.
:type alpha: float
:param upper_bound_on_cat_expansion: Maximum expansion for
categorical features.
:type upper_bound_on_cat_expansion: int
:param treatment_cost: Cost to treat one individual or
per-individual costs as an array.
:type treatment_cost: float or array
:param min_tree_leaf_samples: Minimum number of samples per leaf
in policy tree.
:type min_tree_leaf_samples: int
:param max_tree_depth: Maximum depth of policy tree.
:type max_tree_depth: int
:param skip_cat_limit_checks: By default, categorical features need
to have several instances of each
category in order for a model to be
fit robustly. Setting this to True
will skip these checks.
:type skip_cat_limit_checks: bool
:param categories: 'auto' or list of category values, default 'auto'
What categories to use for the categorical columns.
If 'auto', then the categories will be inferred for all
categorical columns. Otherwise, this argument should have
as many entries as there are categorical columns.
Each entry should be either 'auto' to infer the values for
that column or the list of values for the column.
If explicit values are provided, the first value is treated
as the "control" value for that column against which other
values are compared.
:type categories: str or list
:param n_jobs: Degree of parallelism to use when training models
via joblib.Parallel
:type n_jobs: int
:param verbose: Controls the verbosity when fitting and predicting.
:type verbose: int
:param random_state: Controls the randomness of the estimator.
:type random_state: int or RandomState or None
"""
difference_set = set(treatment_features) - set(self._train.columns)
if len(difference_set) > 0:
message = ("Feature names in treatment_features do "
f"not exist in train data: {list(difference_set)}")
raise UserConfigValidationException(message)
if nuisance_model not in [ModelTypes.AUTOML,
ModelTypes.LINEAR]:
message = (f"nuisance_model should be one of "
f"['{ModelTypes.AUTOML}', "
f"'{ModelTypes.LINEAR}'], "
f"got {nuisance_model}")
raise UserConfigValidationException(message)
# Update X and y to contain both train and test
# This solves issues with categorical features missing some
# categories in the test set and causing transformers to fail
X = pd.concat([self._train, self._test], ignore_index=True)\
.drop([self._target_column], axis=1)
y = pd.concat([self._train, self._test], ignore_index=True)[
self._target_column].values.ravel()
categoricals = self._categorical_features
if categoricals is None:
categoricals = []
is_classification = self._task_type == ModelTask.CLASSIFICATION
analysis = CausalAnalysis(
treatment_features,
categoricals,
heterogeneity_inds=heterogeneity_features,
classification=is_classification,
nuisance_models=nuisance_model,
heterogeneity_model=heterogeneity_model,
upper_bound_on_cat_expansion=upper_bound_on_cat_expansion,
skip_cat_limit_checks=skip_cat_limit_checks,
n_jobs=n_jobs,
categories=categories,
verbose=verbose,
random_state=random_state,
)
self._fit_causal_analysis(analysis, X, y,
upper_bound_on_cat_expansion)
result = CausalResult()
result.config = CausalConfig(
treatment_features=treatment_features,
heterogeneity_features=heterogeneity_features,
nuisance_model=nuisance_model,
heterogeneity_model=heterogeneity_model,
alpha=alpha,
upper_bound_on_cat_expansion=upper_bound_on_cat_expansion,
treatment_cost=treatment_cost,
min_tree_leaf_samples=min_tree_leaf_samples,
max_tree_depth=max_tree_depth,
skip_cat_limit_checks=skip_cat_limit_checks,
n_jobs=n_jobs,
categories=categories,
verbose=verbose,
random_state=random_state,
)
result.causal_analysis = analysis
X_test = self._test.drop([self._target_column], axis=1)
result.global_effects = analysis.global_causal_effect(
alpha=alpha, keep_all_levels=True)
result.local_effects = analysis.local_causal_effect(
X_test, alpha=alpha, keep_all_levels=True)
result.policies = []
for treatment_feature in treatment_features:
policy = self._create_policy(
analysis, X_test,
treatment_feature, treatment_cost,
alpha, max_tree_depth, min_tree_leaf_samples)
result.policies.append(policy)
self._results.append(result)
def _fit_causal_analysis(
self,
causal_analysis,
X,
y,
max_cat_expansion
):
try:
causal_analysis.fit(X, y)
except ValueError as e:
message = str(e)
expected = "increase the upper_bound_on_cat_expansion"
clarification = (
" Increase the value {} in model_analysis.causal.add("
"upper_bound_on_cat_expansion={})."
).format(max_cat_expansion, max_cat_expansion)
            if expected in message:
                raise ValueError(message + clarification)
            raise
def _create_policy(
self,
causal_analysis,
X_test,
treatment_feature,
treatment_cost,
alpha,
max_tree_depth,
min_tree_leaf_samples,
):
local_policies = causal_analysis.individualized_policy(
X_test, treatment_feature,
treatment_costs=treatment_cost,
alpha=alpha)
tree = causal_analysis._policy_tree_output(
X_test, treatment_feature,
treatment_costs=treatment_cost,
max_depth=max_tree_depth,
min_samples_leaf=min_tree_leaf_samples,
alpha=alpha)
return {
ResultAttributes.TREATMENT_FEATURE: treatment_feature,
ResultAttributes.CONTROL_TREATMENT: tree.control_name,
ResultAttributes.LOCAL_POLICIES: local_policies,
ResultAttributes.POLICY_GAINS: {
ResultAttributes.RECOMMENDED_POLICY_GAINS:
tree.policy_value,
ResultAttributes.TREATMENT_GAINS: tree.always_treat,
},
ResultAttributes.POLICY_TREE: tree.tree_dictionary
}
def _whatif(self, id, X, X_feature_new, feature_name, y, alpha=0.1):
"""Get what-if data."""
filtered = [r for r in self.get() if r.id == id]
if len(filtered) == 0:
raise ValueError(f"Failed to find causal result with ID: {id}")
result = filtered[0]
return result._whatif(X, X_feature_new, feature_name,
y, alpha=alpha).to_dict(orient="records")
def compute(self):
"""No-op function to comply with model analysis design."""
pass
def get(self):
"""Get the computed causal insights."""
return self._results
    def list(self):
        """No-op function to comply with model analysis design."""
def get_data(self):
"""Get causal data
:return: List of CausalData objects.
:rtype: List[CausalData]
"""
return [result._get_dashboard_object() for result in self._results]
@property
def name(self):
"""Get the name of the causal manager.
:return: The name of the causal manager.
:rtype: str
"""
return ManagerNames.CAUSAL
def _save(self, path):
"""Save the CausalManager to the given path.
:param path: The directory path to save the CausalManager to.
:type path: str
"""
causal_dir = Path(path)
causal_dir.mkdir(parents=True, exist_ok=True)
# Save results to disk
results_path = causal_dir / SerializationAttributes.RESULTS
results_path.mkdir(parents=True, exist_ok=True)
for result in self._results:
result_path = results_path / result.id
result.save(result_path)
@classmethod
def _load(cls, path, model_analysis):
"""Load the CausalManager from the given path.
:param path: The directory path to load the CausalManager from.
:type path: str
:param model_analysis: The loaded parent ModelAnalysis.
:type model_analysis: ModelAnalysis
"""
this = cls.__new__(cls)
causal_dir = Path(path)
# Rehydrate results
results_path = causal_dir / SerializationAttributes.RESULTS
paths = results_path.resolve().glob('*')
this.__dict__['_results'] = [CausalResult.load(p) for p in paths]
# Rehydrate model analysis data
this.__dict__['_train'] = model_analysis.train
this.__dict__['_test'] = model_analysis.test
this.__dict__['_target_column'] = model_analysis.target_column
this.__dict__['_task_type'] = model_analysis.task_type
this.__dict__['_categorical_features'] = \
model_analysis.categorical_features
return this
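# --- Hedged usage sketch (column names and data are hypothetical) ------------
# manager = CausalManager(train_df, test_df, target_column='outcome',
#                         task_type='regression',
#                         categorical_features=['country'])
# manager.add(treatment_features=['treatment'])
# results = manager.get()        # list of CausalResult objects
# dashboard = manager.get_data() # list of CausalData objects for the UI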
|
# Copyright (c) OpenMMLab. All rights reserved.
import numpy as np
import torch
def get_edge_indices(img_metas,
downsample_ratio,
step=1,
pad_mode='default',
dtype=np.float32,
device='cpu'):
"""Function to filter the objects label outside the image.
The edge_indices are generated using numpy on cpu rather
than on CUDA due to the latency issue. When batch size = 8,
this function with numpy array is ~8 times faster than that
with CUDA tensor (0.09s and 0.72s in 100 runs).
Args:
img_metas (list[dict]): Meta information of each image, e.g.,
image size, scaling factor, etc.
        downsample_ratio (int): Downsample ratio of the output feature map.
        step (int, optional): Step size used for generating
            edge indices. Default: 1.
        pad_mode (str, optional): Padding mode during the data pipeline.
            Default: 'default'.
        dtype (numpy dtype, optional): Dtype of the edge indices tensor.
            Default: np.float32.
        device (str, optional): Device of the edge indices tensor.
            Default: 'cpu'.
device (str, optional): Device of edge indices tensor.
Default: 'cpu'.
Returns:
list[Tensor]: Edge indices for each image in batch data.
"""
edge_indices_list = []
for i in range(len(img_metas)):
img_shape = img_metas[i]['img_shape']
pad_shape = img_metas[i]['pad_shape']
h, w = img_shape[:2]
pad_h, pad_w = pad_shape
edge_indices = []
if pad_mode == 'default':
x_min = 0
y_min = 0
x_max = (w - 1) // downsample_ratio
y_max = (h - 1) // downsample_ratio
elif pad_mode == 'center':
x_min = np.ceil((pad_w - w) / 2 * downsample_ratio)
y_min = np.ceil((pad_h - h) / 2 * downsample_ratio)
x_max = x_min + w // downsample_ratio
y_max = y_min + h // downsample_ratio
else:
raise NotImplementedError
# left
y = np.arange(y_min, y_max, step, dtype=dtype)
x = np.ones(len(y)) * x_min
edge_indices_edge = np.stack((x, y), axis=1)
edge_indices.append(edge_indices_edge)
# bottom
x = np.arange(x_min, x_max, step, dtype=dtype)
y = np.ones(len(x)) * y_max
edge_indices_edge = np.stack((x, y), axis=1)
edge_indices.append(edge_indices_edge)
# right
y = np.arange(y_max, y_min, -step, dtype=dtype)
x = np.ones(len(y)) * x_max
edge_indices_edge = np.stack((x, y), axis=1)
edge_indices.append(edge_indices_edge)
# top
x = np.arange(x_max, x_min, -step, dtype=dtype)
y = np.ones(len(x)) * y_min
edge_indices_edge = np.stack((x, y), axis=1)
edge_indices.append(edge_indices_edge)
edge_indices = \
np.concatenate([index for index in edge_indices], axis=0)
edge_indices = torch.from_numpy(edge_indices).to(device).long()
edge_indices_list.append(edge_indices)
return edge_indices_list
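if __name__ == '__main__':
    # Hedged sketch: edge indices for a single fake image meta dict. The
    # shapes are illustrative; real metas come from the mmdet3d data pipeline.
    img_metas = [{'img_shape': (96, 128, 3), 'pad_shape': (96, 128)}]
    edges = get_edge_indices(img_metas, downsample_ratio=4)
    print(edges[0].shape)  # (N, 2) integer (x, y) points tracing the border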
|
from collections import OrderedDict
import numpy as np
import pytest
from rest_framework.test import APIClient
from standardizing_api.transformer import Transformer
@pytest.fixture(scope='module')
def transformer():
transformer = Transformer()
return transformer
@pytest.fixture
def validated_data():
validated_data = (OrderedDict({"sensor_1": [5.44, 3.22, 6.55, 8.54, 1.24],
"sensor_2": [5444.44, 33.22, 622.55, 812.54, 1233.24],
"sensor_3": [0.44, 0.22, 0.55, 0.54, 0.24]}))
return validated_data
@pytest.fixture
def combined_list():
combined_list = [[5.44, 3.22, 6.55, 8.54, 1.24],
[5444.44, 33.22, 622.55, 812.54, 1233.24],
[0.44, 0.22, 0.55, 0.54, 0.24]]
return combined_list
@pytest.fixture
def transformed_list():
transformed_list = np.array([[0.17354382, -0.69810162, 0.60936654, 1.39070637, -1.47551512],
[1.96026147, -0.82000937, -0.51721314, -0.41959677, -0.2034422],
[0.29452117, -1.24820879, 1.06588616, 0.99576207, -1.10796061]])
return transformed_list
@pytest.fixture(scope="session")
def client():
client = APIClient()
return client
@pytest.fixture(scope="session")
def json_payload():
json_payload = {"sensor_1": [5.44, 3.22, 6.55, 8.54, 1.24],
"sensor_2": [5444.44, 33.22, 622.55, 812.54, 1233.24],
"sensor_3": [0.44, 0.22, 0.55, 0.54, 0.24]}
return json_payload
@pytest.fixture(scope="session")
def incorrect_json_payload(request):
incorrect_json_payload = {"sensor_1": request.param,
"sensor_2": [5444.44, 33.22, 622.55, 812.54, 1233.24],
"sensor_3": [0.44, 0.22, 0.54, 0.24]}
return incorrect_json_payload
@pytest.fixture(scope="session")
def successful_json_response():
successful_json_response = {"success": True,
"result": {
"sensor1": [0.17354382, -0.69810162, 0.60936654, 1.39070637, -1.47551512],
"sensor2": [1.96026147, -0.82000937, -0.51721314, -0.41959677, -0.2034422],
"sensor3": [0.29452117, -1.24820879, 1.06588616, 0.99576207, -1.10796061]
}}
return successful_json_response
@pytest.fixture(scope="session")
def endpoint():
endpoint = '/api/v1/standardize'
return endpoint
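# --- Hedged sketch: a test that would consume these fixtures ------------------
# def test_standardize_endpoint(client, endpoint, json_payload,
#                               successful_json_response):
#     response = client.post(endpoint, json_payload, format='json')
#     assert response.status_code == 200
#     assert response.json() == successful_json_response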
|
import random
class EventSpawner(object):
def __init__(self, asteroidController, powerupController, stats):
self.asteroidController = asteroidController
self.powerupController = powerupController
self.gameStats = stats
self.events = {self.asteroidController.spawnBasicAsteroid: 100,
self.asteroidController.spawnHomingAsteroid: 20,
self.powerupController.spawnRandomPowerup: 2,
self.noEvent: 2000}
def updateEventChances(self):
curLevel = self.gameStats['level']
        # clamp so the weight stays non-negative at high levels; otherwise the
        # weighted selection in getRandomEvent breaks past level 11
        self.events[self.noEvent] = max(0, 2000 - 200 * (curLevel - 1))
def spawnRandomEvent(self):
self.updateEventChances()
event = self.getRandomEvent()
event()
def getRandomEvent(self):
totalChances = sum(self.events.values())
randomChance = random.randint(1, totalChances)
for event, eventChance in self.events.items():
if randomChance <= eventChance:
return event
else:
randomChance -= eventChance
        raise Exception('Not supposed to reach here.')
def noEvent(self):
pass
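# --- Hedged note: how getRandomEvent walks the weight table -------------------
# With weights {basic: 100, homing: 20, powerup: 2, noEvent: 2000}, a roll of
# 115 fails basic (115 > 100), is reduced to 15, then selects homing
# (15 <= 20). Each event is thus chosen with probability weight / total.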
|
# coding: utf-8
# created by tongshiwei on 2019/7/2
from .train_valid_test import train_valid_test
from .download_data import get_data
|
# -*- coding: utf-8 -*-
from hangulize import *
class Azerbaijani(Language):
"""For transcribing Azerbaijani."""
__iso639__ = {1: 'az', 2: 'aze', 3: 'aze'}
__tmp__ = ',;'
vowels = 'aAeIioOuU'
cs = 'bcCdfgGhjklmnNpqrsStvxz'
vl = 'CfhkKpsStx'
notation = Notation([
('-', '/'),
('ə', 'A'),
('ç', 'C'),
('ğ', 'G'),
('ı', 'I'),
('ö', 'O'),
('ş', 'S'),
('ü', 'U'),
('^y{@}', 'Y'),
('iyy', 'iy'),
('yy', 'iy'),
('{@}y{@}', 'Y'),
('iy', 'i'),
('y', 'i'),
('rl', 'l'),
('{p|t|k}h', None),
('w', 'v'),
('bb', 'b'),
('cc', 'c'),
('CC', 'C'),
('dd', 'd'),
('dc', 'c'),
('ff', 'f'),
('gg', 'g'),
('GG', 'G'),
('hh', 'h'),
('jj', 'j'),
('kk', 'k'),
('ll', 'l'),
('mm', 'm,m'),
('nn', 'n,n'),
('pp', 'p'),
('qq', 'q'),
('rr', 'r'),
('ss', 's'),
('SS', 'S'),
('tt', 't'),
('tC', 'C'),
('vv', 'v'),
('xx', 'x'),
('zz', 'z'),
('aa', 'a'),
('AA', 'A'),
('ee', 'e'),
('ii', 'i'),
('II', 'I'),
('oo', 'o'),
('OO', 'O'),
('uu', 'u'),
('UU', 'U'),
('b{<vl>}', 'p'),
('b$', 'p'),
('c{<vl>}', 'C'),
('c$', 'C'),
('d{<vl>}', 't'),
('d$', 't'),
('g{<vl>}', 'k'),
('g$', 'k'),
('G{<vl>}', 'x'),
('G$', 'x'),
('j{<vl>}', 'S'),
('j$', 'S'),
('q{<vl>}', 'Q'),
('q$', 'Q'),
('v{<vl>}', 'f'),
('v$', 'f'),
('z{<vl>}', 's'),
('z$', 's'),
('{@}k{<vl>}', 'k,'),
('{@}p{<vl>}', 'p,'),
('{@}Q{<vl>}', 'k,'),
('c{<cs>}', 'ci'),
('c$', 'ci'),
('C{<cs>}', 'Ci'),
('C$', 'Ci'),
('j{<cs>}', 'ju'),
('j$', 'ju'),
('S{@}', 'sY'),
('S', 'sYu'),
('^l', 'l;'),
('^m', 'm;'),
('^n', 'n;'),
('n{G|q|Q|x}', 'N'),
('l$', 'l,'),
('m$', 'm,'),
('n$', 'n,'),
('l{@|m,|n,|N}', 'l;'),
('{,}l', 'l;'),
('m{@}', 'm;'),
('n{@}', 'n;'),
('l', 'l,'),
('m', 'm,'),
('n', 'n,'),
(',,', ','),
(',;', None),
(',l,', 'l,'),
(',m,', 'm,'),
(',n,', 'n,'),
('l{m;|n;}', 'l,'),
(';', None),
('b', Choseong(B)),
('c', Choseong(J)),
('C', Choseong(C)),
('d', Choseong(D)),
('f', Choseong(P)),
('g', Choseong(G)),
('G', Choseong(G)),
('h', Choseong(H)),
('j', Choseong(J)),
('k,', Jongseong(G)),
('k', Choseong(K)),
('^l', Choseong(L)),
('{,}l', Choseong(L)),
('l,', Jongseong(L)),
('l', Jongseong(L), Choseong(L)),
('m,', Jongseong(M)),
('m', Choseong(M)),
('n,', Jongseong(N)),
('n', Choseong(N)),
('N', Jongseong(NG)),
('p,', Jongseong(B)),
('p', Choseong(P)),
('q', Choseong(G)),
('Q', Choseong(K)),
('r', Choseong(L)),
('s', Choseong(S)),
('t', Choseong(T)),
('v', Choseong(B)),
('x', Choseong(H)),
('z', Choseong(J)),
('Ya', Jungseong(YA)),
('YA', Jungseong(YE)),
('Ye', Jungseong(YE)),
('YI', Jungseong(I), Jungseong(EU)),
('Yi', Jungseong(I)),
('Yo', Jungseong(YO)),
('YO', Jungseong(OE)),
('Yu', Jungseong(YU)),
('YU', Jungseong(WI)),
('a', Jungseong(A)),
('A', Jungseong(E)),
('e', Jungseong(E)),
('I', Jungseong(EU)),
('i', Jungseong(I)),
('o', Jungseong(O)),
('O', Jungseong(OE)),
('u', Jungseong(U)),
('U', Jungseong(WI)),
])
def normalize(self, string):
return normalize_roman(string, {
'Ə': 'ə', 'Ç': 'ç', 'Ğ': 'ğ', 'I': 'ı', 'İ': 'i',
'Ö': 'ö', 'Ş': 'ş', 'Ü': 'ü', 'А': 'a', 'а': 'a',
'Ә': 'ə', 'ә': 'ə', 'Б': 'b', 'б': 'b', 'Ҹ': 'c',
'ҹ': 'c', 'Ч': 'ç', 'ч': 'ç', 'Д': 'd', 'д': 'd',
'Е': 'e', 'е': 'e', 'Ф': 'f', 'ф': 'f', 'Ҝ': 'g',
'ҝ': 'g', 'Ғ': 'ğ', 'ғ': 'ğ', 'Һ': 'h', 'һ': 'h',
'Х': 'x', 'х': 'x', 'Ы': 'ı', 'ы': 'ı', 'И': 'i',
'и': 'i', 'Ж': 'j', 'ж': 'j', 'К': 'k', 'к': 'k',
'Г': 'q', 'г': 'q', 'Л': 'l', 'л': 'l', 'М': 'm',
'м': 'm', 'Н': 'n', 'н': 'n', 'О': 'o', 'о': 'o',
'Ө': 'ö', 'ө': 'ö', 'П': 'p', 'п': 'p', 'Р': 'r',
'р': 'r', 'С': 's', 'с': 's', 'Ш': 'ş', 'ш': 'ş',
'Т': 't', 'т': 't', 'У': 'u', 'у': 'u', 'Ү': 'ü',
'ү': 'ü', 'В': 'v', 'в': 'v', 'Ј': 'y', 'ј': 'y',
'З': 'z', 'з': 'z'
})
__lang__ = Azerbaijani
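# --- Hedged usage sketch -------------------------------------------------------
# from hangulize import hangulize
# hangulize('Azərbaycan', 'aze')  # transcribes into Hangul using the notation
#                                 # above; the sample word is illustrative.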
|
from django.core.management.base import BaseCommand
from audit_log.tasks import clear_audit_log_entries
class Command(BaseCommand):
help = "Clear old sent audit log entries from database"
def handle(self, *args, **kwargs):
clear_audit_log_entries()
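# Hedged usage sketch: run via Django's management interface, e.g.
#   python manage.py <command_module_name>
# where the command name is this file's name under management/commands/.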
|
import typing
from pycspr.types import cl_types
from pycspr.types import CL_TypeKey
def encode(entity: cl_types.CL_Type) -> typing.Union[str, dict]:
"""Encodes a CL type as a JSON compatible string or dictionary.
:param entity: A CL type to be encoded.
:returns: A JSON compatible string or dictionary.
"""
try:
encoder = _ENCODERS_COMPLEX[entity.type_key]
except KeyError:
try:
return _ENCODERS_SIMPLE[entity.type_key]
except KeyError:
raise ValueError("Invalid CL type")
else:
return encoder(entity)
def _encode_byte_array(entity: cl_types.CL_Type_ByteArray):
return {
"ByteArray": entity.size
}
def _encode_list(entity: cl_types.CL_Type_List):
return {
"List": encode(entity.inner_type)
}
def _encode_map(entity: cl_types.CL_Type_Map):
return {
"Map": {
"key": encode(entity.key_type),
"value": encode(entity.value_type)
}
}
def _encode_option(entity: cl_types.CL_Type_Option):
return {
"Option": encode(entity.inner_type)
}
def _encode_tuple_1(entity: cl_types.CL_Type_Tuple1):
return {
"Tuple1": encode(entity.t0_type)
}
def _encode_tuple_2(entity: cl_types.CL_Type_Tuple2):
return {
"Tuple2": [
encode(entity.t0_type),
encode(entity.t1_type),
]
}
def _encode_tuple_3(entity: cl_types.CL_Type_Tuple3):
return {
"Tuple3": [
encode(entity.t0_type),
encode(entity.t1_type),
encode(entity.t2_type)
]
}
_ENCODERS_COMPLEX: dict = {
CL_TypeKey.BYTE_ARRAY: _encode_byte_array,
CL_TypeKey.LIST: _encode_list,
CL_TypeKey.MAP: _encode_map,
CL_TypeKey.OPTION: _encode_option,
CL_TypeKey.TUPLE_1: _encode_tuple_1,
CL_TypeKey.TUPLE_2: _encode_tuple_2,
CL_TypeKey.TUPLE_3: _encode_tuple_3,
}
_ENCODERS_SIMPLE: dict = {
CL_TypeKey.ANY: "Any",
CL_TypeKey.BOOL: "Bool",
CL_TypeKey.I32: "I32",
CL_TypeKey.I64: "I64",
CL_TypeKey.KEY: "Key",
CL_TypeKey.PUBLIC_KEY: "PublicKey",
CL_TypeKey.RESULT: "Result",
CL_TypeKey.STRING: "String",
CL_TypeKey.U8: "U8",
CL_TypeKey.U32: "U32",
CL_TypeKey.U64: "U64",
CL_TypeKey.U128: "U128",
CL_TypeKey.U256: "U256",
CL_TypeKey.U512: "U512",
CL_TypeKey.UNIT: "Unit",
CL_TypeKey.UREF: "URef",
}
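# --- Hedged usage sketch -------------------------------------------------------
# encode(cl_types.CL_Type_U64())                           -> "U64"
# encode(cl_types.CL_Type_Option(cl_types.CL_Type_U64()))  -> {"Option": "U64"}
# Constructor names mirror the cl_types module; exact signatures may differ.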
|
"""This module defines a Pulumi component resource for building ECS Fargate Services.
This module can accept a load balancer, if the caller chooses, but it is not required.
Included:
- ECS Cluster
- ECS Service
- ECS Task Definition - Single image task or multiple images (sidecar)
- EC2 Security Group(s)
Optional:
- EC2 Load Balancer
- EC2 Listener
- EC2 Target Group
- Route53 Zone
- ACM Certificate
Required On Input:
- VPC
- Subnets (Implicit)
"""
import json
from enum import Enum, unique
from typing import List, Optional, Union
import pulumi
from pulumi_aws.ec2 import SecurityGroup, get_subnet_ids
from pulumi_aws.ecs import (
Cluster,
Service,
ServiceDeploymentCircuitBreakerArgs,
ServiceDeploymentControllerArgs,
ServiceLoadBalancerArgs,
ServiceNetworkConfigurationArgs,
TaskDefinition,
)
from pulumi_aws.iam import ManagedPolicy, Role, RolePolicyAttachment
from pydantic import PositiveInt
from ol_infrastructure.lib.aws.ecs.task_definition_config import (
OLFargateTaskDefinitionConfig,
)
from ol_infrastructure.lib.ol_types import AWSBase
@unique
class DeploymentControllerTypes(str, Enum): # noqa: WPS600
ecs = "ECS"
code_deploy = "CODE_DEPLOY"
external = "EXTERNAL"
@unique
class LaunchTypes(str, Enum): # noqa: WPS600
fargate = "FARGATE"
ec2 = "EC2"
external = "EXTERNAL"
class OLFargateServiceConfig(AWSBase):
"""Configuration for constructing an ECS Fargate Service."""
# base name for all resources
service_name: str
# ECS cluster that will be parent of ECS Service. Will be created if not provided
cluster: Optional[Cluster]
# Determines whether ECS Fargate service will have public IP or not.
assign_public_ip: bool = True
# IAM Role for ECS Service to use for Load Balancer communication
service_role: Optional[Role]
# Desired count for number of tasks on ECS Service
desired_count: PositiveInt = PositiveInt(1)
# Max amount, as percentage, of running tasks that can run during a deployment
deployment_max_percent: PositiveInt = PositiveInt(100)
# Minimum amount, as percentage, of running and healthy tasks required during a
# deployment
deployment_min_percent: PositiveInt = PositiveInt(50) # noqa: WPS432
# Seconds to ignore failing load balancer health checks on newly created tasks. Only
# applies when LB exists
health_check_grace_period_seconds: PositiveInt = PositiveInt(60)
# If enabled, circuit breaker will automatically roll Service back to last
# successful deployment, if error occurs during deployment
deployment_circuit_breaker_enabled: bool = False
# If enabled, ECS Managed Tags will be enabled for tasks within service
enable_ecs_managed_tags: bool = False
# VPC service will be deployed into. Service and tasks will be deployed into public
# subnets, from this VPC
vpc_id: Union[pulumi.Output[str], str]
# Security groups associated with the service and tasks
security_groups: List[SecurityGroup]
    # Force a new task deployment of the service
force_new_deployment: bool = False
# Task Definition(s) to be used with ECS Service
task_definition_config: OLFargateTaskDefinitionConfig
# Type of Deployment Controller used for service and tasks. Only ECS supported
_deployment_controller: DeploymentControllerTypes = DeploymentControllerTypes.ecs
    # The latest Fargate version will always be used
_fargate_platform_version: str = "LATEST"
# Launch type for service and tasks. Only FARGATE is supported
_launch_type: LaunchTypes = LaunchTypes.fargate
# Load balancer configuration that will be used to attach containers to target
# groups
load_balancer_configuration: Optional[List[ServiceLoadBalancerArgs]] = None
# Retrieve all subnets from the provided VPC (vpc id). NOTE: No filtering is made
# upon subnets
def get_service_network_configuration(self) -> ServiceNetworkConfigurationArgs:
pulumi.log.debug(f"retrieving all subnets from VPC '{self.vpc_id}'")
subnets = get_subnet_ids(
vpc_id=self.vpc_id,
)
pulumi.log.debug(
f"assign public IP addresses is set to {self.assign_public_ip}"
)
return ServiceNetworkConfigurationArgs(
subnets=subnets.ids,
assign_public_ip=self.assign_public_ip,
security_groups=[group.id for group in self.security_groups],
)
def get_deployment_controller( # noqa: WPS615
self,
) -> ServiceDeploymentControllerArgs:
pulumi.log.debug(
f"ECS deployment controller type is {self._deployment_controller}"
)
return ServiceDeploymentControllerArgs(type=self._deployment_controller)
class Config: # noqa: WPS431, D106
arbitrary_types_allowed = True
class OLFargateService(pulumi.ComponentResource):
def __init__(
self, config: OLFargateServiceConfig, opts: pulumi.ResourceOptions = None
):
super().__init__(
"ol:infrastructure:aws:ecs:OLFargateService",
config.service_name,
None,
opts,
)
self.resource_options = pulumi.ResourceOptions(parent=self).merge(
opts
) # type: ignore
if config.cluster:
pulumi.log.debug(
"using existing ECS Cluster '{}' provided in arguments".format(
config.cluster.id
)
)
self.cluster = config.cluster
else:
pulumi.log.debug("creating new ECS cluster")
self.cluster = Cluster(
f"{config.service_name}_cluster",
tags=config.tags,
opts=self.resource_options,
)
        # We'll enable rollback, as well as the circuit breaker, if the caller opts in
circuit_breaker = None
if config.deployment_circuit_breaker_enabled:
pulumi.log.debug("ecs service deployment service breaker enabled")
circuit_breaker = ServiceDeploymentCircuitBreakerArgs(
enable=True, rollback=True
)
task_config = config.task_definition_config
container_definition = self.build_container_definition(config)
pulumi.log.debug("container definitions constructed")
self.task_definition = TaskDefinition(
f"{config.service_name}_task_def",
family=task_config.task_def_name,
cpu=task_config.cpu,
execution_role_arn=self.get_execution_role_arn(config),
memory=task_config.memory_mib,
tags=config.tags,
task_role_arn=task_config.task_execution_role_arn,
network_mode="awsvpc",
requires_compatibilities=["FARGATE"],
container_definitions=container_definition,
opts=self.resource_options,
)
service_role_arn = ""
if config.service_role:
pulumi.log.debug(f"Attaching existing service role {config.service_role}")
service_role_arn = config.service_role.arn
health_check_grace_period = None
if config.load_balancer_configuration:
pulumi.log.debug(
"Setting health check grace period to "
f"{config.health_check_grace_period_seconds} seconds"
)
health_check_grace_period = config.health_check_grace_period_seconds
self.service = Service(
f"{config.service_name}_service",
name=f"{config.service_name}_service",
cluster=self.cluster.id,
desired_count=config.desired_count,
iam_role=service_role_arn,
deployment_maximum_percent=config.deployment_max_percent,
deployment_minimum_healthy_percent=config.deployment_min_percent,
deployment_controller=config.get_deployment_controller(),
deployment_circuit_breaker=circuit_breaker,
health_check_grace_period_seconds=health_check_grace_period,
launch_type=config._launch_type, # noqa: WPS437
network_configuration=config.get_service_network_configuration(),
load_balancers=config.load_balancer_configuration,
task_definition=self.task_definition.arn,
platform_version=config._fargate_platform_version, # noqa: WPS437
force_new_deployment=config.force_new_deployment,
enable_ecs_managed_tags=config.enable_ecs_managed_tags,
tags=config.tags,
opts=self.resource_options,
)
component_outputs = {
"cluster": self.cluster,
"service": self.service,
"task_definition": self.task_definition,
}
self.register_outputs(component_outputs)
def build_container_definition(self, config: OLFargateServiceConfig) -> str:
"""Create task defition string from provided arguments.
:param config: Configuration object for parameterizing deployment of Fargate
services
:type config: OLFargateServiceConfig
:raises ValueError: If not configured with a task or container definition
configuration
:returns: An AWS ECS container definition object encoded as a string for
including in a task definition.
:rtype: str
"""
if ( # noqa: WPS337
not config.task_definition_config
or not config.task_definition_config.container_definition_configs
):
raise ValueError("At least one container definition must be defined")
pulumi.log.debug("Creating container task definitions")
outputs = []
for container in config.task_definition_config.container_definition_configs:
log_config = None
if container.log_configuration:
log_config = {
"logDriver": container.log_configuration.log_driver,
"options": container.log_configuration.options,
"secretOptions": container.log_configuration.secret_options,
}
environment = []
if container.environment:
for key in container.environment.keys():
environment.append(
{"name": key, "value": container.environment[key]}
)
outputs.append(
{
"name": container.container_name,
"image": container.image,
"portMappings": [
{
"containerPort": container.container_port,
"containerName": container.container_name,
"protocol": "tcp",
}
],
"memory": container.memory,
"command": container.command,
"cpu": container.cpu,
"environment": environment,
"essential": container.is_essential,
"logConfiguration": log_config,
}
)
pulumi.log.debug(f"container definitions: {outputs}")
return json.dumps(outputs)
def get_execution_role_arn(self, config: OLFargateServiceConfig) -> str:
"""Build an an execution role arn with base ECS Managed Policy.
:param config: Configuration object for parameterizing deployment of Fargate
services
:type config: OLFargateServiceConfig
:returns: The ARN of an execution role to be used by the ECS service.
:rtype: str
"""
if config.task_definition_config.execution_role_arn:
pulumi.log.debug(
"using task definition execution role arn provided by caller"
)
return config.task_definition_config.execution_role_arn
pulumi.log.debug(
"creating new task definition execution role with "
"AmazonEcsTaskExecutionRolePolicy attached"
)
role = Role(
f"{config.task_definition_config.task_def_name}-role", # noqa: WPS237
assume_role_policy=json.dumps(
{
"Version": "2012-10-17",
"Statement": [
{
"Sid": "",
"Effect": "Allow",
"Principal": {"Service": "ecs-tasks.amazonaws.com"},
"Action": "sts:AssumeRole",
}
],
}
),
tags=config.tags,
opts=self.resource_options,
)
RolePolicyAttachment(
f"{config.task_definition_config.task_def_name}-policy-attachment", # noqa: WPS237, E501
role=role.name,
policy_arn=ManagedPolicy.AMAZON_ECS_TASK_EXECUTION_ROLE_POLICY,
opts=self.resource_options,
)
return role.arn
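# --- Hedged usage sketch (names and values are hypothetical) -------------------
# config = OLFargateServiceConfig(
#     service_name='web',
#     vpc_id=vpc.id,
#     security_groups=[web_sg],
#     task_definition_config=task_def_config,
#     tags={'OU': 'operations'},
# )
# service = OLFargateService(config)
# pulumi.export('service_name', service.service.name)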
|
import time
from gradient_utils import metrics
class Settings(object):
start_time = None
tensorboard_watchers = []
class Init(object):
def __init__(self):
self._settings = Settings()
def init(self, sync_tensorboard):
self._settings.start_time = time.time()
if sync_tensorboard and len(metrics.patched["tensorboard"]) == 0:
metrics.tensorboard.patch(self._settings)
def finish(self):
for watcher in self._settings.tensorboard_watchers:
watcher.finish()
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.finish()
return exc_type is None
def init(sync_tensorboard=False):
i = Init()
i.init(sync_tensorboard=sync_tensorboard)
return i
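# --- Hedged usage sketch --------------------------------------------------------
# with init(sync_tensorboard=True) as run:
#     ...  # training code that writes TensorBoard logs
# # __exit__ calls finish(), stopping any patched TensorBoard watchers.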
|
def main():
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
import sys, os
sys.path.append("../../..")
import procedural_city_generation
from procedural_city_generation.roadmap.config_functions.Watertools import Watertools
import numpy as np
    # standard ITU-R 601 luma weights (the original 0.144 blue weight was a typo)
    img = np.dot(mpimg.imread(os.getcwd() + "/resources/manybodies.png")[..., :3], [0.299, 0.587, 0.114])
w=Watertools(img)
plt.imshow(img, cmap="gray")
plt.show()
f=w.flood(0.95, np.array([80, 2]))
plt.imshow(f, cmap="gray")
plt.show()
if __name__ == '__main__':
main()
|
"""SOMClustering class.
Copyright (c) 2019-2021 Felix M. Riese.
All rights reserved.
"""
import itertools
from typing import List, Optional, Sequence, Tuple
import numpy as np
import scipy.spatial.distance as dist
from joblib import Parallel, delayed, effective_n_jobs
from sklearn.decomposition import PCA
from sklearn.preprocessing import binarize
from sklearn.utils.validation import check_array
from tqdm import tqdm
from .SOMUtils import decreasing_rate, modify_weight_matrix_online
class SOMClustering:
"""Unsupervised self-organizing map for clustering.
Parameters
----------
n_rows : int, optional (default=10)
Number of rows for the SOM grid
n_columns : int, optional (default=10)
Number of columns for the SOM grid
init_mode_unsupervised : str, optional (default="random")
Initialization mode of the unsupervised SOM
n_iter_unsupervised : int, optional (default=1000)
Number of iterations for the unsupervised SOM
train_mode_unsupervised : str, optional (default="online")
Training mode of the unsupervised SOM
neighborhood_mode_unsupervised : str, optional (default="linear")
Neighborhood mode of the unsupervised SOM
learn_mode_unsupervised : str, optional (default="min")
Learning mode of the unsupervised SOM
distance_metric : str, optional (default="euclidean")
Distance metric to compare on feature level (not SOM grid).
Possible metrics: {"euclidean", "manhattan", "mahalanobis",
"tanimoto", "spectralangle"}. Note that "tanimoto" tends to be slow.
.. versionadded:: 1.1.1
Spectral angle metric.
learning_rate_start : float, optional (default=0.5)
Learning rate start value
learning_rate_end : float, optional (default=0.05)
Learning rate end value (only needed for some lr definitions)
nbh_dist_weight_mode : str, optional (default="pseudo-gaussian")
Formula of the neighborhood distance weight. Possible formulas
are: {"pseudo-gaussian", "mexican-hat"}.
n_jobs : int or None, optional (default=None)
The number of jobs to run in parallel.
random_state : int, RandomState instance or None, optional (default=None)
If int, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used
by `np.random`.
verbose : int, optional (default=0)
Controls the verbosity.
Attributes
----------
node_list_ : np.ndarray of (int, int) tuples
List of 2-dimensional coordinates of SOM nodes
radius_max_ : float, int
Maximum radius of the neighborhood function
radius_min_ : float, int
Minimum radius of the neighborhood function
unsuper_som_ : np.ndarray
Weight vectors of the unsupervised SOM
shape = (self.n_rows, self.n_columns, X.shape[1])
X_ : np.ndarray
Input data
fitted_ : boolean
States if estimator is fitted to X
max_iterations_ : int
Maximum number of iterations for the current training
bmus_ : list of (int, int) tuples
List of best matching units (BMUs) of the dataset X
variances_ : array of float
Standard deviations of every feature
"""
def __init__(
self,
n_rows: int = 10,
n_columns: int = 10,
*,
init_mode_unsupervised: str = "random",
n_iter_unsupervised: int = 1000,
train_mode_unsupervised: str = "online",
neighborhood_mode_unsupervised: str = "linear",
learn_mode_unsupervised: str = "min",
distance_metric: str = "euclidean",
learning_rate_start: float = 0.5,
learning_rate_end: float = 0.05,
nbh_dist_weight_mode: str = "pseudo-gaussian",
n_jobs: Optional[int] = None,
random_state=None,
verbose: Optional[int] = 0,
) -> None:
"""Initialize SOMClustering object."""
self.n_rows = n_rows
self.n_columns = n_columns
self.init_mode_unsupervised = init_mode_unsupervised
self.n_iter_unsupervised = n_iter_unsupervised
self.train_mode_unsupervised = train_mode_unsupervised
self.neighborhood_mode_unsupervised = neighborhood_mode_unsupervised
self.learn_mode_unsupervised = learn_mode_unsupervised
self.distance_metric = distance_metric
self.learning_rate_start = learning_rate_start
self.learning_rate_end = learning_rate_end
self.nbh_dist_weight_mode = nbh_dist_weight_mode
self.n_jobs = n_jobs
self.random_state = random_state
self.verbose = verbose
def _init_unsuper_som(self) -> None:
"""Initialize map."""
# init node list
self.node_list_ = np.array(
list(itertools.product(range(self.n_rows), range(self.n_columns))),
dtype=int,
)
self.max_iterations_ = self.n_iter_unsupervised
# init radius parameter
self.radius_max_ = max(self.n_rows, self.n_columns) / 2
self.radius_min_ = 1
        # tqdm parameters
self.tqdm_params_ = {"disable": not bool(self.verbose), "ncols": 100}
# init unsupervised SOM in the feature space
if self.init_mode_unsupervised == "random":
som = np.random.rand(self.n_rows, self.n_columns, self.X_.shape[1])
elif self.init_mode_unsupervised == "random_data":
indices = np.random.randint(
low=0, high=self.X_.shape[0], size=self.n_rows * self.n_columns
)
som_list = self.X_[indices]
som = som_list.reshape(
self.n_rows, self.n_columns, self.X_.shape[1]
)
elif self.init_mode_unsupervised == "pca":
# fixed number of components
pca = PCA(n_components=2, random_state=self.random_state)
pca_comp = pca.fit(self.X_).components_
a_row = np.linspace(-1.0, 1.0, self.n_rows)
a_col = np.linspace(-1.0, 1.0, self.n_columns)
som = np.zeros(
shape=(self.n_rows, self.n_columns, self.X_.shape[1])
)
for node in self.node_list_:
som[node[0], node[1], :] = np.add(
np.multiply(a_row[node[0]], pca_comp[0]),
np.multiply(a_col[node[1]], pca_comp[1]),
)
else:
raise ValueError(
f"Invalid init_mode_unsupervised: {self.init_mode_unsupervised}."
)
self.unsuper_som_ = som
def fit(self, X: Sequence, y: Optional[Sequence] = None):
"""Fit unsupervised SOM to input data.
Parameters
----------
X : array-like matrix of shape = [n_samples, n_features]
The training input samples.
y : None
Not used in this class.
Returns
-------
self : object
Examples
--------
Load the SOM and fit it to your input data `X` with:
>>> import susi
>>> som = susi.SOMClustering()
>>> som.fit(X)
"""
np.random.seed(seed=self.random_state)
self.X_ = check_array(X, dtype=np.float64) # TODO accept_sparse
self.sample_weights_ = np.full(fill_value=1.0, shape=(len(self.X_), 1))
self._train_unsupervised_som()
self.fitted_ = True
return self
def _train_unsupervised_som(self) -> None:
"""Train unsupervised SOM."""
self._init_unsuper_som()
if self.train_mode_unsupervised == "online":
for it in tqdm(
range(self.n_iter_unsupervised),
desc="unsuper",
**self.tqdm_params_,
):
# select one input vector & calculate best matching unit (BMU)
dp = np.random.randint(low=0, high=len(self.X_))
bmu_pos = self.get_bmu(self.X_[dp], self.unsuper_som_)
# calculate learning rate and neighborhood function
learning_rate = self._calc_learning_rate(
curr_it=it, mode=self.learn_mode_unsupervised
)
nbh_func = self._calc_neighborhood_func(
curr_it=it, mode=self.neighborhood_mode_unsupervised
)
# calculate distance weight matrix and update weights
dist_weight_matrix = self._get_nbh_distance_weight_matrix(
nbh_func, bmu_pos
)
self.unsuper_som_ = modify_weight_matrix_online(
som_array=self.unsuper_som_,
dist_weight_matrix=dist_weight_matrix,
true_vector=self.X_[dp],
learning_rate=learning_rate * self.sample_weights_[dp],
)
elif self.train_mode_unsupervised == "batch":
for it in tqdm(
range(self.n_iter_unsupervised),
desc="unsuper",
**self.tqdm_params_,
):
# calculate BMUs
bmus = self.get_bmus(self.X_)
# calculate neighborhood function
nbh_func = self._calc_neighborhood_func(
curr_it=it, mode=self.neighborhood_mode_unsupervised
)
# calculate distance weight matrix for all datapoints
dist_weight_block = self._get_nbh_distance_weight_block(
nbh_func, bmus
)
# update weights
self.unsuper_som_ = self._modify_weight_matrix_batch(
self.unsuper_som_, dist_weight_block, self.X_
)
else:
raise NotImplementedError(
"Unsupervised mode not implemented:",
self.train_mode_unsupervised,
)
self._set_bmus(self.X_)
def _calc_learning_rate(self, curr_it: int, mode: str) -> float:
"""Calculate learning rate alpha with 0 <= alpha <= 1.
Parameters
----------
curr_it : int
Current iteration count
mode : str
Mode of the learning rate (min, exp, expsquare)
Returns
-------
float
Learning rate
"""
return decreasing_rate(
self.learning_rate_start,
self.learning_rate_end,
iteration_max=self.max_iterations_,
iteration=curr_it,
mode=mode,
)
def _calc_neighborhood_func(self, curr_it: int, mode: str) -> float:
"""Calculate neighborhood function (= radius).
Parameters
----------
curr_it : int
Current number of iterations
mode : str
Mode of the decreasing rate
Returns
-------
float
Neighborhood function (= radius)
"""
return decreasing_rate(
self.radius_max_,
self.radius_min_,
iteration_max=self.max_iterations_,
iteration=curr_it,
mode=mode,
)
def get_bmu(
self, datapoint: np.ndarray, som_array: np.ndarray
) -> Tuple[int, int]:
"""Get best matching unit (BMU) for datapoint.
Parameters
----------
datapoint : np.ndarray, shape=shape[1]
Datapoint = one row of the dataset X
som_array : np.ndarray
Weight vectors of the SOM
shape = (self.n_rows, self.n_columns, X.shape[1])
Returns
-------
tuple, shape = (int, int)
Position of best matching unit (row, column)
"""
a = self._get_node_distance_matrix(
datapoint.astype(np.float64), som_array
)
return np.argwhere(a == np.min(a))[0]
def get_bmus(
self, X: np.ndarray, som_array: Optional[np.array] = None
) -> Optional[List[Tuple[int, int]]]:
"""Get Best Matching Units for big datalist.
Parameters
----------
X : np.ndarray
List of datapoints
som_array : np.ndarray, optional (default=`None`)
Weight vectors of the SOM
shape = (self.n_rows, self.n_columns, X.shape[1])
Returns
-------
bmus : list of (int, int) tuples
Position of best matching units (row, column) for each datapoint
Examples
--------
Load the SOM, fit it to your input data `X` and transform your input
data with:
>>> import susi
>>> import matplotlib.pyplot as plt
>>> som = susi.SOMClustering()
>>> som.fit(X)
>>> bmu_list = som.get_bmus(X)
>>> plt.hist2d([x[0] for x in bmu_list], [x[1] for x in bmu_list]
"""
if som_array is None:
som_array = self.unsuper_som_
bmus = None
if self.n_jobs == 1:
bmus = [tuple(self.get_bmu(dp, som_array)) for dp in X]
else:
n_jobs, _, _ = self._partition_bmus(X)
bmus = Parallel(n_jobs=n_jobs, verbose=self.verbose)(
delayed(self.get_bmu)(dp, som_array) for dp in X
)
return bmus
def _partition_bmus(
self, X: np.ndarray
) -> Tuple[float, List[int], List[int]]:
"""Private function used to partition bmus between jobs.
Parameters
----------
X : np.ndarray
List of datapoints
Returns
-------
n_jobs : int
Number of jobs
list of int
List of number of datapoints per job
list of int
List of start values for every job list
"""
n_datapoints = len(X)
n_jobs = min(effective_n_jobs(self.n_jobs), n_datapoints)
n_datapoints_per_job = np.full(
n_jobs, n_datapoints // n_jobs, dtype=int
)
n_datapoints_per_job[: n_datapoints % n_jobs] += 1
starts = np.cumsum(n_datapoints_per_job)
return n_jobs, n_datapoints_per_job.tolist(), [0] + starts.tolist()
def _set_bmus(
self, X: np.ndarray, som_array: Optional[np.array] = None
) -> None:
"""Set BMUs in the current SOM object.
Parameters
----------
X : array-like matrix of shape = [n_samples, n_features]
The input samples.
som_array : np.ndarray
Weight vectors of the SOM
shape = (self.n_rows, self.n_columns, X.shape[1])
"""
self.bmus_ = self.get_bmus(X=X, som_array=som_array)
def _get_node_distance_matrix(
self, datapoint: np.ndarray, som_array: np.ndarray
) -> np.ndarray:
"""Get distance of datapoint and node using Euclidean distance.
Parameters
----------
datapoint : np.ndarray, shape=(X.shape[1])
Datapoint = one row of the dataset `X`
som_array : np.ndarray
Weight vectors of the SOM,
shape = (self.n_rows, self.n_columns, X.shape[1])
Returns
-------
distmat : np.ndarray of float
Distance between datapoint and each SOM node
"""
# algorithms on the full matrix
if self.distance_metric == "euclidean":
return np.linalg.norm(som_array - datapoint, axis=2)
# node-by-node algorithms
distmat = np.zeros((self.n_rows, self.n_columns))
if self.distance_metric == "manhattan":
for node in self.node_list_:
distmat[node] = dist.cityblock(
som_array[node[0], node[1]], datapoint
)
elif self.distance_metric == "mahalanobis":
for node in self.node_list_:
som_node = som_array[node[0], node[1]]
cov = np.cov(
np.stack((datapoint, som_node), axis=0), rowvar=False
)
cov_pinv = np.linalg.pinv(cov) # pseudo-inverse
distmat[node] = dist.mahalanobis(datapoint, som_node, cov_pinv)
elif self.distance_metric == "tanimoto":
# Note that this is a binary distance measure.
# Therefore, the vectors have to be converted.
# Source: Melssen 2006, Supervised Kohonen networks for
# classification problems
# VERY SLOW ALGORITHM!!!
threshold = 0.5
for node in self.node_list_:
som_node = som_array[node[0], node[1]]
distmat[node] = dist.rogerstanimoto(
binarize(
datapoint.reshape(1, -1),
threshold=threshold,
copy=True,
),
binarize(
som_node.reshape(1, -1), threshold=threshold, copy=True
),
)
elif self.distance_metric == "spectralangle":
            # use the norm of the node vector (not the whole SOM array) so the
            # result is the spectral angle between the datapoint and each node
            for node in self.node_list_:
                som_node = som_array[node[0], node[1]]
                distmat[node] = np.arccos(
                    np.divide(
                        np.dot(som_node, datapoint),
                        np.multiply(
                            np.linalg.norm(som_node),
                            np.linalg.norm(datapoint),
                        ),
                    )
                )
return distmat
def _get_nbh_distance_weight_matrix(
self, neighborhood_func: float, bmu_pos: Tuple[int, int]
) -> np.ndarray:
"""Calculate neighborhood distance weight.
Parameters
----------
neighborhood_func : float
Current neighborhood function
bmu_pos : tuple, shape=(int, int)
Position of calculated BMU of the current datapoint
Returns
-------
np.array of float, shape=(n_rows, n_columns)
Neighborhood distance weight matrix between SOM and BMU
"""
dist_mat = np.linalg.norm(self.node_list_ - bmu_pos, axis=1)
pseudogaussian = np.exp(
-np.divide(
np.power(dist_mat, 2), (2 * np.power(neighborhood_func, 2))
)
)
if self.nbh_dist_weight_mode == "pseudo-gaussian":
return pseudogaussian.reshape((self.n_rows, self.n_columns, 1))
if self.nbh_dist_weight_mode == "mexican-hat":
mexicanhat = np.multiply(
pseudogaussian,
np.subtract(
1,
np.divide(
np.power(dist_mat, 2), np.power(neighborhood_func, 2)
),
),
)
return mexicanhat.reshape((self.n_rows, self.n_columns, 1))
raise ValueError(
"Invalid nbh_dist_weight_mode: " + str(self.nbh_dist_weight_mode)
)
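    # Hedged note: with d = grid distance to the BMU and r = neighborhood_func,
    # the "pseudo-gaussian" weight is exp(-d**2 / (2 * r**2)); "mexican-hat"
    # multiplies that by (1 - d**2 / r**2), turning negative beyond d = r.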
def _get_nbh_distance_weight_block(
self, nbh_func: float, bmus: List[Tuple[int, int]]
) -> np.ndarray:
"""Calculate distance weight matrix for all datapoints.
The combination of several distance weight matrices is called
"block" in the following.
Parameters
----------
        nbh_func : float
Current neighborhood function
bmus : list of tuple (int, int)
Positions of calculated BMUs of the datapoints
Returns
-------
dist_weight_block : np.ndarray of float, shape=(n_rows, n_columns)
Neighborhood distance weight block between SOM and BMUs
"""
dist_weight_block = np.zeros((len(bmus), self.n_rows, self.n_columns))
for i, bmu_pos in enumerate(bmus):
dist_weight_block[i] = self._get_nbh_distance_weight_matrix(
nbh_func, bmu_pos
).reshape((self.n_rows, self.n_columns))
return dist_weight_block
def _modify_weight_matrix_batch(
self,
som_array: np.ndarray,
dist_weight_matrix: np.ndarray,
data: np.ndarray,
) -> np.ndarray:
"""Modify weight matrix of the SOM for the online algorithm.
Parameters
----------
som_array : np.ndarray
Weight vectors of the SOM
shape = (self.n_rows, self.n_columns, X.shape[1])
        dist_weight_matrix : np.ndarray of float
            Neighborhood distance weights, one matrix per datapoint
        data : np.ndarray
            True vector(s)
Returns
-------
        np.ndarray
            Weight matrix of the SOM after the modification
"""
# calculate numerator and divisor for the batch formula
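        # Batch update: each node weight becomes the neighborhood-weighted
        # mean of the data: w_j = sum_i h(j, bmu_i) * x_i / sum_i h(j, bmu_i)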
numerator = np.sum(
[
np.multiply(
data[i],
dist_weight_matrix[i].reshape(
(self.n_rows, self.n_columns, 1)
),
)
for i in range(len(data))
],
axis=0,
)
divisor = np.sum(dist_weight_matrix, axis=0).reshape(
(self.n_rows, self.n_columns, 1)
)
# update weights
old_som = np.copy(som_array)
new_som = np.divide(
numerator,
divisor,
out=np.full_like(numerator, np.nan),
where=(divisor != 0),
)
# overwrite new nans with old entries
new_som[np.isnan(new_som)] = old_som[np.isnan(new_som)]
return new_som
def transform(
self, X: Sequence, y: Optional[Sequence] = None
) -> np.ndarray:
"""Transform input data.
Parameters
----------
X : array-like matrix of shape = [n_samples, n_features]
The prediction input samples.
y : None, optional
Ignored.
Returns
-------
np.array of tuples (int, int)
Predictions including the BMUs of each datapoint
Examples
--------
Load the SOM, fit it to your input data `X` and transform your input
data with:
>>> import susi
>>> som = susi.SOMClustering()
>>> som.fit(X)
>>> X_transformed = som.transform(X)
"""
# assert(self.fitted_ is True)
self.X_ = check_array(X, dtype=np.float64)
return np.array(self.get_bmus(self.X_))
def fit_transform(
self, X: Sequence, y: Optional[Sequence] = None
) -> np.ndarray:
"""Fit to the input data and transform it.
Parameters
----------
X : array-like matrix of shape = [n_samples, n_features]
The training and prediction input samples.
y : None, optional
Ignored.
Returns
-------
np.array of tuples (int, int)
Predictions including the BMUs of each datapoint
Examples
--------
Load the SOM, fit it to your input data `X` and transform your input
data with:
>>> import susi
>>> som = susi.SOMClustering()
>>> X_transformed = som.fit_transform(X)
"""
self.fit(X)
# assert(self.fitted_ is True)
self.X_ = check_array(X, dtype=np.float64)
return self.transform(X, y)
def get_datapoints_from_node(self, node: Tuple[int, int]) -> List[int]:
"""Get all datapoints of one node.
Parameters
----------
node : tuple, shape (int, int)
Node for which the linked datapoints are calculated
Returns
-------
datapoints : list of int
List of indices of the datapoints that are linked to `node`
"""
datapoints = []
for i in range(len(self.bmus_)):
if np.array_equal(self.bmus_[i], node):
datapoints.append(i)
return datapoints
def get_clusters(self, X: np.ndarray) -> Optional[List[Tuple[int, int]]]:
"""Calculate the SOM nodes on the unsupervised SOM grid per datapoint.
Parameters
----------
X : np.ndarray
Input data
Returns
-------
list of tuples (int, int)
List of SOM nodes, one for each input datapoint
"""
return self.get_bmus(X)
def get_u_matrix(self, mode: str = "mean") -> np.ndarray:
"""Calculate unified distance matrix (u-matrix).
Parameters
----------
mode : str, optional (default="mean)
Choice of the averaging algorithm
Returns
-------
u_matrix : np.ndarray
U-matrix containing the distances between all nodes of the
unsupervised SOM. Shape = (n_rows*2-1, n_columns*2-1)
Examples
--------
Fit your SOM to input data `X` and then calculate the u-matrix with
`get_u_matrix()`. You can plot the u-matrix then with e.g.
`pyplot.imshow()`.
>>> import susi
>>> import numpy as np
>>> import matplotlib.pyplot as plt
>>> som = susi.SOMClustering()
>>> som.fit(X)
>>> umat = som.get_u_matrix()
>>> plt.imshow(np.squeeze(umat))
"""
self.u_mean_mode_ = mode
self.u_matrix = np.zeros(
shape=(self.n_rows * 2 - 1, self.n_columns * 2 - 1, 1), dtype=float
)
# step 1: fill values between SOM nodes
self._calc_u_matrix_distances()
# step 2: fill values at SOM nodes and on diagonals
self._calc_u_matrix_means()
return self.u_matrix
def _calc_u_matrix_distances(self) -> None:
"""Calculate the Eucl. distances between all neighbored SOM nodes."""
for u_node in itertools.product(
range(self.n_rows * 2 - 1), range(self.n_columns * 2 - 1)
):
# neighbor vector
nb = (0, 0)
if not (u_node[0] % 2) and (u_node[1] % 2):
# mean horizontally
nb = (0, 1)
elif (u_node[0] % 2) and not (u_node[1] % 2):
# mean vertically
nb = (1, 0)
self.u_matrix[u_node] = np.linalg.norm(
self.unsuper_som_[u_node[0] // 2][u_node[1] // 2]
- self.unsuper_som_[u_node[0] // 2 + nb[0]][
u_node[1] // 2 + nb[1]
],
axis=0,
)
def _calc_u_matrix_means(self) -> None:
"""Calculate the missing parts of the u-matrix.
After `_calc_u_matrix_distances()`, there are two kinds of entries
missing: the entries at the positions of the actual SOM nodes and the
entries in between the distance nodes. Both types of entries are
calculated in this function.
"""
for u_node in itertools.product(
range(self.n_rows * 2 - 1), range(self.n_columns * 2 - 1)
):
if not (u_node[0] % 2) and not (u_node[1] % 2):
# SOM nodes -> mean over 2-4 values
nodelist = []
if u_node[0] > 0:
nodelist.append((u_node[0] - 1, u_node[1]))
if u_node[0] < self.n_rows * 2 - 2:
nodelist.append((u_node[0] + 1, u_node[1]))
if u_node[1] > 0:
nodelist.append((u_node[0], u_node[1] - 1))
if u_node[1] < self.n_columns * 2 - 2:
nodelist.append((u_node[0], u_node[1] + 1))
self.u_matrix[u_node] = self._get_u_mean(nodelist)
elif (u_node[0] % 2) and (u_node[1] % 2):
# mean over four
self.u_matrix[u_node] = self._get_u_mean(
[
(u_node[0] - 1, u_node[1]),
(u_node[0] + 1, u_node[1]),
(u_node[0], u_node[1] - 1),
(u_node[0], u_node[1] + 1),
]
)
def _get_u_mean(self, nodelist: List[Tuple[int, int]]) -> Optional[float]:
"""Calculate a mean value of the node entries in `nodelist`.
Parameters
----------
nodelist : list of tuple (int, int)
List of nodes on the u-matrix containing distance values
Returns
-------
u_mean : float
Mean value
"""
meanlist = [self.u_matrix[u_node] for u_node in nodelist]
u_mean = None
if self.u_mean_mode_ == "mean":
u_mean = np.mean(meanlist)
elif self.u_mean_mode_ == "median":
u_mean = np.median(meanlist)
elif self.u_mean_mode_ == "min":
u_mean = np.min(meanlist)
elif self.u_mean_mode_ == "max":
u_mean = np.max(meanlist)
return u_mean
def _get_node_neighbors(
self, node: Tuple[int, int], radius: int = 1
) -> List[Tuple[int, int]]:
"""Get neighboring nodes (grid parameters) of `node`.
.. versionadded:: 1.1.3
Parameters
----------
node : Tuple[int, int]
Node position on the SOM grid.
radius : int, optional (default=1)
            Neighborhood radius on the SOM grid. Currently set to 1; this may
            become configurable in future versions.
        Returns
        -------
        list of tuple (int, int)
            Positions of the neighboring nodes, including `node` itself.
"""
row_range = range(
max(node[0] - radius, 0),
min(node[0] + radius, self.n_rows - 1) + 1,
)
column_range = range(
max(node[1] - radius, 0),
min(node[1] + radius, self.n_columns - 1) + 1,
)
return list(itertools.product(row_range, column_range))
def get_quantization_error(self, X: Optional[Sequence] = None) -> float:
"""Get quantization error for `X` (or the training data).
Parameters
----------
        X : array-like matrix, optional (default=None)
Samples of shape = [n_samples, n_features]. If `None`, the training
data is used for the calculation.
Returns
-------
float
Mean quantization error over all datapoints.
Raises
------
RuntimeError
Raised if the SOM is not fitted yet.
"""
if not self.fitted_:
raise RuntimeError("SOM is not fitted!")
if X is None:
X = self.X_
weights_per_datapoint = [
self.unsuper_som_[bmu[0], bmu[1]] for bmu in self.get_bmus(X)
]
        quantization_errors = np.linalg.norm(
            np.subtract(weights_per_datapoint, X), axis=1
        )
return np.mean(quantization_errors)
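if __name__ == "__main__":
    # Minimal usage sketch (an assumption: this module implements susi's
    # SOMClustering, as the docstrings above suggest; grid size and data are
    # illustrative, not taken from the original source).
    import numpy as np
    import susi  # assumes the susi package is installed
    X_demo = np.random.default_rng(0).random((50, 3))
    som_demo = susi.SOMClustering(n_rows=5, n_columns=5)
    som_demo.fit(X_demo)
    print("BMUs (first 3):", som_demo.transform(X_demo)[:3])
    print("u-matrix shape:", som_demo.get_u_matrix().shape)
    print("quantization error:", som_demo.get_quantization_error())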
|
import itertools
from copy import deepcopy
# import plotting as cyplot
# from biplot import biplot
# import cycluster as cy
# from glm_compare import compare_lr_test
# from write2Excel import writeDF2Excel
# from dillReadWrite import writeDf2Dill
import matplotlib.pyplot as plt
import numpy as np
import palettable
import pandas as pd
import seaborn as sns
import statsmodels.api as sm
from statsmodels.tools.sm_exceptions import PerfectSeparationError
sns.set(style='darkgrid', palette='muted', font_scale=2.0)
####----------------- helpers --------------####
def GLMResults(df, outcome, predictors, adj=[], logistic=True):
if logistic:
family = sm.families.Binomial()
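        # np.exp converts logistic-regression coefficients (log-odds) into odds ratios (OR).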
coefFunc = np.exp
cols = ['OR', 'LL', 'UL', 'pvalue', 'Diff', 'N']
else:
family = sm.families.Gaussian()
coefFunc = lambda x: x
cols = ['Coef', 'LL', 'UL', 'pvalue', 'Diff', 'N']
k = len(predictors)
assoc = np.zeros((k, 6))
params = []
pvalues = []
resObj = []
for i, predc in enumerate(predictors):
exogVars = list(set([predc] + adj))
tmp = df[[outcome] + exogVars].dropna()
model = sm.GLM(endog=tmp[outcome].astype(float), exog=sm.add_constant(tmp[exogVars].astype(float)),
family=family)
try:
res = model.fit()
assoc[i, 0] = coefFunc(res.params[predc])
assoc[i, 3] = res.pvalues[predc]
assoc[i, 1:3] = coefFunc(res.conf_int().loc[predc])
assoc[i, 4] = tmp[predc].loc[tmp[outcome] == 1].mean() - tmp[predc].loc[tmp[outcome] == 0].mean()
params.append(res.params.to_dict())
pvalues.append(res.pvalues.to_dict())
resObj.append(res)
        except PerfectSeparationError:
assoc[i, 0] = np.nan
assoc[i, 3] = 0
assoc[i, 1:3] = [np.nan, np.nan]
assoc[i, 4] = tmp[predc].loc[tmp[outcome] == 1].mean() - tmp[predc].loc[tmp[outcome] == 0].mean()
params.append({k: np.nan for k in [predc] + adj})
pvalues.append({k: np.nan for k in [predc] + adj})
resObj.append(None)
print('PerfectSeparationError: %s with %s' % (predc, outcome))
assoc[i, 5] = tmp.shape[0]
outDf = pd.DataFrame(assoc[:, :6], index=predictors, columns=cols)
outDf['params'] = params
outDf['pvalues'] = pvalues
outDf['res'] = resObj
return outDf
def outcomeAnalysis(cytomod_obj, patient_data,
analyzeModules=True,
outcomeVars=[],
adjustmentVars=[],
standardize=True):
"""Do these FLU-positive clusters correlate with outcome,
with/without adjustment for bacterial coinfection?"""
modStr = 'Module' if analyzeModules else 'Analyte'
resL = []
for outcome in outcomeVars:
"""Logistic regression on outcome"""
if analyzeModules:
dataDf = cytomod_obj.modDf
else:
dataDf = cytomod_obj.cyDf
if standardize: # standardize cytokine values
standardizeFunc = lambda col: (col - np.nanmean(col)) / np.nanstd(col)
dataDf = dataDf.apply(standardizeFunc)
predictors = dataDf.columns
data_outcome_Df = patient_data[outcomeVars + adjustmentVars].join(dataDf)
tmpres = GLMResults(data_outcome_Df, outcome, predictors, adj=adjustmentVars, logistic=True)
tmpres['Outcome'] = outcome
tmpres['Compartment'] = cytomod_obj.sampleStr
tmpres['Adjusted'] = 'Yes' if cytomod_obj.adjusted else 'No'
tmpres['Fold-diff'] = np.exp(tmpres['Diff'])
tmpres[modStr] = predictors
resL.append(tmpres)
resDf = pd.concat(resL, axis=0, ignore_index=True)
return resDf
####### outcome #######
def mapColors2Labels(labels, setStr='MapSet', cmap=None):
"""Return pd.Series of colors based on labels"""
if cmap is None:
N = max(3, min(12, len(np.unique(labels))))
cmap = palettable.colorbrewer.get_map(setStr, 'Qualitative', N).mpl_colors
"""Use B+W colormap"""
cmapLookup = {k:col for k, col in zip(sorted(np.unique(labels)), itertools.cycle(cmap))}
return labels.map(cmapLookup.get)
def adjust_pvals(res_df):
res_df = deepcopy(res_df)
res_df.loc[:, 'FWER'] = sm.stats.multipletests(res_df.pvalue.values, method='holm')[1]
res_df.loc[:, 'FDR'] = sm.stats.multipletests(res_df.pvalue.values, method='fdr_bh')[1]
res_df.loc[:, 'Bonferroni'] = sm.stats.multipletests(res_df.pvalue.values, method='bonferroni')[1]
return res_df
def plotResultSummary(cytomod_obj,
mod_res_df,
cy_res_df,
outcomeVars,
fdr_thresh_plot=0.2,
compartmentName='BS',
showScalebar=True,
figsize=(6,9),
save_fig_path=None):
mod_res_df = mod_res_df.copy()
cy_res_df = cy_res_df.copy()
mod_res_df.loc[:, 'Name'] = mod_res_df['Module']
cy_res_df.loc[:, 'Name'] = cy_res_df['Analyte']
cy_res_df = adjust_pvals(cy_res_df)
mod_res_df = adjust_pvals(mod_res_df)
name2mod = lambda a: '%s%1.0f' % (compartmentName, cytomod_obj.labels[a])
cy_res_df.loc[:, 'Module'] = cy_res_df['Analyte'].map(name2mod)
cols = ['Outcome', 'Name', 'Module', 'Fold-diff', 'OR', 'N', 'FWER', 'FDR']
hDf = pd.concat((mod_res_df[cols], cy_res_df[cols]), axis=0)
hDf.loc[:, 'isAnalyte'] = (hDf['Module'] != hDf['Name'])
order = hDf[['Module', 'Name', 'isAnalyte']].drop_duplicates().sort_values(by=['Module', 'isAnalyte', 'Name'])
fdrH = hDf.pivot(index='Name', columns='Outcome', values='FDR').loc[order.Name, outcomeVars]
fdrH = fdrH.fillna(1)
fwerH = hDf.pivot(index='Name', columns='Outcome', values='FWER').loc[order.Name, outcomeVars]
fwerH = fwerH.fillna(1)
foldH = hDf.pivot(index='Name', columns='Outcome', values='Fold-diff').loc[order.Name, outcomeVars]
censorInd = fdrH.values > fdr_thresh_plot
fdrH.values[censorInd] = 1.
foldH.values[censorInd] = 1.
foldH = foldH.fillna(1)
cmap = palettable.colorbrewer.diverging.PuOr_9_r.mpl_colormap
vals = np.log(foldH.values)
pcParams = dict(vmin=-1, vmax=1, cmap=cmap)
    scaleLabel = 'Odds Ratio'
ytl = np.array(['1/2.5', '1/2', '1/1.5', 1, 1.5, 2, 2.5])
yt = np.log([1 / 2.5, 1 / 2, 1 / 1.5, 1, 1.5, 2, 2.5])
plt.figure(figsize=figsize)
figh = plt.gcf()
plt.clf()
axh = figh.add_subplot(plt.GridSpec(1, 1, left=0.6, bottom=0.05, right=0.95, top=0.85)[0, 0])
axh.grid(None)
pcolOut = plt.pcolormesh(vals, **pcParams)
plt.yticks(())
plt.xticks(np.arange(fdrH.shape[1]) + 0.5, fdrH.columns, size=11, rotation=90)
axh.xaxis.set_ticks_position('top')
plt.xlim((0, fdrH.shape[1]))
plt.ylim((0, fdrH.shape[0]))
axh.invert_yaxis()
for cyi, cy in enumerate(foldH.index):
for outi, out in enumerate(foldH.columns):
if fwerH.loc[cy, out] < 0.0005:
ann = '***'
elif fwerH.loc[cy, out] < 0.005:
ann = '**'
elif fwerH.loc[cy, out] < 0.05:
ann = '*'
else:
ann = ''
if not ann == '':
plt.annotate(ann, xy=(outi + 0.5, cyi + 0.75), weight='bold', size=14, ha='center', va='center')
"""Colorbar showing module membership: Add labels, make B+W"""
cbAxh = figh.add_subplot(plt.GridSpec(1, 1, left=0.5, bottom=0.05, right=0.59, top=0.85)[0, 0])
cbAxh.grid(None)
cmap = [(0.3, 0.3, 0.3),
(0.7, 0.7, 0.7)]
cbS = mapColors2Labels(order.set_index('Name')['Module'], cmap=cmap)
_ = cbAxh.imshow([[x] for x in cbS.values], interpolation='nearest', aspect='auto', origin='lower')
plt.ylim((0, fdrH.shape[0]))
plt.yticks(np.arange(fdrH.shape[0]), fdrH.index, size=11)
plt.xlim((0, 0.5))
plt.ylim((-0.5, fdrH.shape[0] - 0.5))
plt.xticks(())
cbAxh.invert_yaxis()
for lab in order['Module'].unique():
y = np.mean(np.arange(order.shape[0])[np.nonzero(order['Module'] == lab)]) - 0.5
plt.annotate(lab, xy=(0.25, y), ha='center', va='center', rotation=90, color='white', size=12)
"""Scale colorbar"""
if showScalebar:
scaleAxh = figh.add_subplot(plt.GridSpec(1, 1, left=0.1, bottom=0.87, right=0.2, top=0.98)[0, 0])
cb = figh.colorbar(pcolOut, cax=scaleAxh, ticks=yt)
cb.set_label(scaleLabel, size=9)
cb.ax.set_yticklabels(ytl, fontsize=8)
if save_fig_path is None:
plt.show()
else:
plt.savefig(save_fig_path)
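if __name__ == "__main__":
    # Hedged demo of GLMResults/adjust_pvals on synthetic data; every name and
    # value below is illustrative and not part of the original analysis.
    rng = np.random.default_rng(0)
    demo_df = pd.DataFrame({
        "outcome": rng.integers(0, 2, size=200),
        "x1": rng.normal(size=200),
        "x2": rng.normal(size=200),
    })
    res = GLMResults(demo_df, "outcome", ["x1", "x2"], logistic=True)
    print(adjust_pvals(res)[["OR", "pvalue", "FWER", "FDR"]])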
|
# Exit the system
import sys
# Game development module
import pygame
# Import the configuration file
from settings import Settings
# Import the ship
from ship import Ship
# Import the game_function module
import game_function as gf
# Import the sprite Group (used for the bullet group)
from pygame.sprite import Group
# Import the game stats
from game_stats import GameStats
def run_game():
    # Initialize the game and create a screen object
    pygame.init()
    ai_settings = Settings()
    screen = pygame.display.set_mode(
        (ai_settings.screen_width, ai_settings.screen_height))  # window size
    pygame.display.set_caption(ai_settings.alien_caption)  # window title
    # Create an instance for storing game statistics
    stats = GameStats(ai_settings)
    # Create the ship
    ship = Ship(screen, ai_settings)
    # Create a group for the bullets
    bullets = Group()
    # Create a group for the aliens
    aliens = Group()
    # Create the alien fleet
    gf.create_fleet(ai_settings, screen, aliens, ship)
    # Start the game's main loop
    while True:
        # Listen for keyboard and mouse events
        gf.check_events(ai_settings, screen, ship, bullets)
        # If the game is active
        if stats.game_active:
            # Update the ship
            ship.update()
            # Update the bullets
            gf.update_bullets(ai_settings, screen, ship, bullets, aliens)
            # Move the aliens
            gf.update_aliens(ai_settings, aliens, ship, stats, screen, bullets)
        # Redraw the screen
        gf.update_screen(ai_settings, screen, ship, bullets, aliens)
if __name__ == '__main__':
    run_game()
|
#******************************#
# Project: Dictionary practice
#
# Version: 1.0
# Author: Bruce Stull
# Date: December 6, 2021
#******************************#
import random
# Initialize dictionaries. This isn't strictly necessary, since each run of the script starts with a fresh namespace ('car' and 'rps' don't exist yet), but it makes the intent explicit.
car = {}
rps = {}
# Create a dictionary for a car.
car = {
'brand': "Ford",
'model': "Mustang",
'year': 1964
}
# Alternative way to create a dictionary. The multi-line format above can be a little easier to read.
# car = {'brand': "Ford",'model': "Mustang",'year': 1964}
# print(car) # {'brand': 'Ford', 'model': 'Mustang', 'year': 1964}
# Add a 'color' entry to the 'car' dictionary.
car['color'] = ['red', 'white', 'blue']
# Print out the entire dictionary. The 'color' entry (list) now shows up in 'car'.
# print(car) # {'brand': 'Ford', 'model': 'Mustang', 'year': 1964, 'color': ['red', 'white', 'blue']}
# Print out the value of the 'brand' entry. Select the key 'brand' for the brand of 'car'.
# print(car['brand']) # Ford
# Create rock paper scissors dictionary.
rps = {
'rock': ['paper', 'scissors'],
'paper': ['scissors', 'rock'],
'scissors': ['rock', 'paper']
}
# What is the size of each dictionary?
# print(len(car)) # 4
# print(len(rps)) # 3
# What is the type of class for a dictionary?
# print(type(car)) #<class 'dict'>
# Specify some variables.
model_of_car = car['model'] # or car.get('model')
color_of_car = car['color'] # or car.get('color')
# Print some variables.
# print(model_of_car) # Mustang
# print(color_of_car) # ['red', 'white', 'blue']
# What are the keys of the 'car' dictionary?
keys_of_car = car.keys()
# print(keys_of_car) # dict_keys(['brand', 'model', 'year', 'color'])
# What are the values of the 'car' dictionary?
values_of_car = car.values()
# print(values_of_car) # dict_values(['Ford', 'Mustang', 1964, ['red', 'white', 'blue']])
# Dictionary views:
x = car.values() # https://www.w3schools.com/python/python_dictionaries_access.asp notes that car.values() returns a 'view', so 'x' stays in sync with the dictionary: later changes to 'car' show up in 'x' without re-assigning it.
# print(x) # dict_values(['Ford', 'Mustang', 1964, ['red', 'white', 'blue']])
car['year'] = 2020
# print(x) # dict_values(['Ford', 'Mustang', 2020, ['red', 'white', 'blue']]) # The value of 'x' changes, even though we didn't have to re-assign it to car.values().
# print(car.items()) # dict_items([('brand', 'Ford'), ('model', 'Mustang'), ('year', 2020), ('color', ['red', 'white', 'blue'])]) # Returns each item in the dictionary as tuples in a list.
# Check if key 'model' exists for 'car'.
# if 'model' in car:
# print("Yes, 'model' is a key for 'car'.")
# Change a value of the dictionary.
car['year'] = 1970
# print(car['year'])
car.update({'year': 2015})
# print(car['year'])
# Add an item to dictionary.
car['seats'] = 5
# print(car.items())
car.update({'wheels': 4})
# print(car.items()) # dict_items([('brand', 'Ford'), ('model', 'Mustang'), ('year', 2015), ('color', ['red', 'white', 'blue']), ('seats', 5), ('wheels', 4)])
# Remove an item from dictionary.
car.pop('wheels')
# print(car.items()) # dict_items([('brand', 'Ford'), ('model', 'Mustang'), ('year', 2015), ('color', ['red', 'white', 'blue']), ('seats', 5)])
# Add wheels again.
car.update({'wheels': 4})
# print(car.items()) # dict_items([('brand', 'Ford'), ('model', 'Mustang'), ('year', 2015), ('color', ['red', 'white', 'blue']), ('seats', 5), ('wheels', 4)])
# Remove the last item inserted.
car.popitem() # Removes the last item inserted.
# print(car.items()) # dict_items([('brand', 'Ford'), ('model', 'Mustang'), ('year', 2015), ('color', ['red', 'white', 'blue']), ('seats', 5)])
car.update({'wheels': 4})
# Remove item of key 'model'.
del car['model']
# print(car.items())
car.update({'model': 'Junker'})
# print(car.items())
# Delete whole dictionary.
# del car
# print(car) # Will result in error since dictionary doesn't exist anymore.
# Clear the dictionary.
# car.clear()
# print(car)
# # clear()
# thisdict = {
# "brand": "Ford",
# "model": "Mustang",
# "year": 1964
# }
# print(thisdict)
# thisdict.clear()
# print(thisdict)
# Print all key names in 'car'.
# for item in car:
# print(item)
# Print all the values in 'car'.
# for item in car:
# print(car[item])
# List all the values of the 'car'.
# for item in car.values():
# print(item)
# List all the keys of 'car'.
# for item in car.keys():
# print(item)
# Loop through both keys and values.
# for key, value in car.items():
# print(key, value)
# Copy a dictionary.
copy_of_car = car.copy() # Makes a copy of 'car' and names it 'copy_of_car'.
pointer_to_car = car # Only creates a reference to 'car'.
# print(copy_of_car)
# print(pointer_to_car)
# print(car)
car.update({'seats': 4}) # Affects both 'car' and 'pointer_to_car'.
# print(copy_of_car)
# print(pointer_to_car)
# print(car)
# Copy a dictionary, using dict().
second_car_copy = dict(car)
# print(car)
# print(second_car_copy)
# Nested dictionaries.
family = {
"child1" : {
"name" : "Emil",
"year" : 2004
},
"child2" : {
"name" : "Tobias",
"year" : 2007
},
"child3" : {
"name" : "Linus",
"year" : 2011
}
}
# OR
child1 = {
"name" : "Emil",
"year" : 2004
}
child2 = {
"name" : "Tobias",
"year" : 2007
}
child3 = {
"name" : "Linus",
"year" : 2011
}
family = {
"child1" : child1,
"child2" : child2,
"child3" : child3
}
# print(family) # {'child1': {'name': 'Emil', 'year': 2004}, 'child2': {'name': 'Tobias', 'year': 2007}, 'child3': {'name': 'Linus', 'year': 2011}}
# print(family['child1']) # {'name': 'Emil', 'year': 2004}
# print(family['child1']['name']) # Emil
# print(family[0]) # KeyError: 0
# print(car) # {'brand': 'Ford', 'year': 2015, 'color': ['red', 'white', 'blue'], 'seats': 4, 'wheels': 4, 'model': 'Junker'}
# print(car['color'][0], car['model']) # red # This works since the key 'color' has a value which is a list so we can access the list elements with position numbers.
# List of some Python Dictionary Methods: https://www.w3schools.com/python/python_dictionaries_methods.asp
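# Hedged bonus example (not part of the original tutorial): play one round of
# rock-paper-scissors with the 'rps' dictionary above, assuming value[0] is the
# move that beats the key and value[1] is the move the key beats.
player_move = random.choice(list(rps))
computer_move = random.choice(list(rps))
if computer_move == player_move:
    rps_result = 'draw'
elif computer_move == rps[player_move][0]:
    rps_result = 'computer wins'
else:
    rps_result = 'player wins'
print('player: {}, computer: {} -> {}'.format(player_move, computer_move, rps_result))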
|
import os
from flask import Flask
from flask_session import Session
from flask_sqlalchemy import SQLAlchemy
ENVIRONMENT = "production"
app = Flask(__name__)
app.secret_key = str(os.urandom(16))
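# Note: regenerating the secret key from os.urandom() on startup invalidates
# all existing sessions whenever the app restarts; a fixed key loaded from the
# environment would persist them. str() of the raw bytes works as a key, but
# os.urandom(16).hex() would be more conventional.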
app.config["SEND_FILE_MAX_AGE_DEFAULT"] = 0
# Changes to the HTML files are reflected on the website without having to restart the Flask app.
app.jinja_env.auto_reload = True
if ENVIRONMENT == "production":
database_uri = "postgresql://postgres:test1@localhost/website"
else:
database_uri = "sqlite:///SQLite_database.db"
app.config["SQLALCHEMY_DATABASE_URI"] = database_uri
app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False
db = SQLAlchemy(app)
SESSION_TYPE = "filesystem"
app.config.from_object(__name__)
Session(app)
from flask_app import routes
|
from docx.table import Table
from sane_doc_reports.conf import SHOULD_HAVE_12_GRID
from sane_doc_reports.populate.Report import Report
from tests import utils
from tests.utils import _transform
def test_pie_chart_in_report():
report = Report(*_transform('elements/pie_chart.json'))
report.populate_report()
d = report.document
table = next(utils.iter_block_items(d))
assert isinstance(table, Table)
if SHOULD_HAVE_12_GRID:
assert len(table.columns) == 12
assert len(table.rows) == 1
else:
assert len(table.columns) == 12
assert len(table.rows) == 5
    # Check that the images were indeed inserted (three pictures expected)
assert len(d.element.xpath('//pic:pic')) == 3
|
"""Unit tests of authorization records."""
import pytest
from ..utilities.general import is_never_authz, is_no_authz, uses_cataloging, uses_filesystem_only
@pytest.mark.usefixtures("authorization_record_class_fixture", "authorization_record_test_fixture")
class TestAuthorizationRecord(object):
"""Tests for AuthorizationRecord"""
@pytest.mark.usefixtures("authorization_query_record_class_fixture", "authorization_query_record_test_fixture")
class TestAuthorizationQueryRecord(object):
"""Tests for AuthorizationQueryRecord"""
@pytest.mark.usefixtures("authorization_form_record_class_fixture", "authorization_form_record_test_fixture")
class TestAuthorizationFormRecord(object):
"""Tests for AuthorizationFormRecord"""
@pytest.mark.usefixtures("authorization_search_record_class_fixture", "authorization_search_record_test_fixture")
class TestAuthorizationSearchRecord(object):
"""Tests for AuthorizationSearchRecord"""
@pytest.mark.usefixtures("vault_record_class_fixture", "vault_record_test_fixture")
class TestVaultRecord(object):
"""Tests for VaultRecord"""
@pytest.mark.usefixtures("vault_query_record_class_fixture", "vault_query_record_test_fixture")
class TestVaultQueryRecord(object):
"""Tests for VaultQueryRecord"""
@pytest.mark.usefixtures("vault_form_record_class_fixture", "vault_form_record_test_fixture")
class TestVaultFormRecord(object):
"""Tests for VaultFormRecord"""
@pytest.mark.usefixtures("vault_search_record_class_fixture", "vault_search_record_test_fixture")
class TestVaultSearchRecord(object):
"""Tests for VaultSearchRecord"""
|
from typing import Iterable
_sentry = object()
class DoublyLinkedListNode(Iterable):
__slots__ = ("prev", "next", "key", "result")
def __init__(self, key=_sentry, result=None):
self.prev = self
self.next = self
self.key = key
self.result = result
def __iter__(self):
if self.prev.key is _sentry:
return
yield self.prev
yield from self.prev
def __hash__(self):
return hash(self.key)
def __len__(self):
return sum(map(lambda _: 1, self))
def __repr__(self):
return repr(self.result)
def __str__(self):
return str(self.result)
def remove(self):
self.prev.next = self.next
self.next.prev = self.prev
def append_to_tail(self, node: "DoublyLinkedListNode"):
last = self.prev
last.next = self.prev = node
node.prev = last
node.next = self
def mark_to_root(self):
self.key = _sentry
self.result = _sentry
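if __name__ == "__main__":
    # Hedged usage sketch (not part of the original module): a sentinel root
    # node with two appended entries, as an LRU-style cache might use it.
    root = DoublyLinkedListNode()            # sentinel: key is _sentry
    a = DoublyLinkedListNode("a", result=1)
    b = DoublyLinkedListNode("b", result=2)
    root.append_to_tail(a)
    root.append_to_tail(b)
    print(len(root))                         # 2
    print([str(node) for node in root])      # iterates tail-to-head: ['2', '1']
    a.remove()
    print(len(root))                         # 1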
|
from engine.map_tile import *
from engine.map import Map
import os
def local_path(path):
return os.path.abspath(os.path.join(__file__, os.pardir, path))
# Map xcf location
xcf = ''
# Map tile layer 0
l0 = [
]
# Map tile layer 1
l1 = [
]
# Warp data
warp = {
None: {
None: (False, None),
}
}
def get_map():
map_ = Map(
local_path(xcf),
spawn=(0, 0),
warp_data=warp
)
map_.tiles[0] += l0
map_.tiles[1] += l1
return map_
|
import os
from c2cgeoform.routes import register_models, table_pregenerator
def includeme(config):
config.add_static_view('node_modules_for_insider', 'c2cgeoportal_admin:node_modules')
config.add_static_view(
'node_modules_for_outsider',
'{}:node_modules'.format(config.root_package.__name__))
path = None
for path_ in [
os.path.join(os.path.dirname(__file__), '..', '..', 'admin', 'node_modules'),
os.path.join(os.path.dirname(__file__), '..', '..', 'node_modules'),
os.path.join(os.path.dirname(__file__), '..', 'admin', 'node_modules'),
os.path.join(os.path.dirname(__file__), '..', 'node_modules'),
'/usr/lib/node_modules/GeoMapFish-Admin/node_modules/',
'/usr/lib/node_modules/',
]:
if os.path.exists(path_):
path = path_
break
if path is None:
raise Exception("Unable to find the node_module from path '{}'.".format(os.path.dirname(__file__)))
config.override_asset(
to_override='c2cgeoportal_admin:node_modules/',
override_with=path
)
config.override_asset(
to_override='{}:node_modules/'.format(config.root_package.__name__),
override_with=path
)
config.add_static_view('static', 'c2cgeoportal_admin:static', cache_max_age=3600)
config.add_route('home', '/')
config.add_route('layertree', '/layertree')
config.add_route('layertree_children', '/layertree/children')
config.add_route('layertree_ordering', '/layertree/ordering')
config.add_route('layertree_unlink', '/layertree/unlink/{group_id}/{item_id}')
config.add_route('layertree_delete', '/layertree/delete/{item_id}')
config.add_route('convert_to_wms',
'/{table:layers_wmts}/{id}/convert_to_wms',
pregenerator=table_pregenerator)
config.add_route('convert_to_wmts',
'/{table:layers_wms}/{id}/convert_to_wmts',
pregenerator=table_pregenerator)
from c2cgeoportal_commons.models.main import (
Role, LayerWMS, LayerWMTS, Theme, LayerGroup, LayerV1, Interface, OGCServer,
Functionality, RestrictionArea)
from c2cgeoportal_commons.models.static import User
register_models(config, (
('themes', Theme),
('layer_groups', LayerGroup),
('layers_wms', LayerWMS),
('layers_wmts', LayerWMTS),
('layers_v1', LayerV1),
('ogc_servers', OGCServer),
('restriction_areas', RestrictionArea),
('users', User),
('roles', Role),
('functionalities', Functionality),
('interfaces', Interface),
))
|
# -*- coding: utf-8 -*-
#
# || ____ _ __
# +------+ / __ )(_) /_______________ _____ ___
# | 0xBC | / __ / / __/ ___/ ___/ __ `/_ / / _ \
# +------+ / /_/ / / /_/ /__/ / / /_/ / / /_/ __/
# || || /_____/_/\__/\___/_/ \__,_/ /___/\___/
#
# Copyright (C) 2015 Bitcraze AB
#
# Crazyflie Nano Quadcopter Client
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
"""
ZMQ server test application. Use CTRL-C to end the application.
NOTE! If connected to a Crazyflie this will power on the motors!
"""
from __future__ import print_function
from threading import Thread
import signal
import time
import sys
try:
import zmq
except ImportError as e:
raise Exception("ZMQ library probably not installed ({})".format(e))
class _LogThread(Thread):
def __init__(self, socket, *args):
super(_LogThread, self).__init__(*args)
self._socket = socket
def run(self):
while True:
log = self._socket.recv_json()
if log["event"] == "data":
print(log)
if log["event"] == "created":
print("Created block {}".format(log["name"]))
if log["event"] == "started":
print("Started block {}".format(log["name"]))
if log["event"] == "stopped":
print("Stopped block {}".format(log["name"]))
if log["event"] == "deleted":
print("Deleted block {}".format(log["name"]))
class _ParamThread(Thread):
def __init__(self, socket, *args):
super(_ParamThread, self).__init__(*args)
self._socket = socket
def run(self):
while True:
param = self._socket.recv_json()
print(param)
class _ConnThread(Thread):
def __init__(self, socket, *args):
super(_ConnThread, self).__init__(*args)
self._socket = socket
def run(self):
while True:
msg = self._socket.recv_json()
print(msg)
class _CtrlThread(Thread):
def __init__(self, socket, *args):
super(_CtrlThread, self).__init__(*args)
self._socket = socket
self._thrust_max = 30000
self._thrust_min = 20000
self._thrust = self._thrust_min
self._thrust_step = 100
self._cmd = {
"version": 1,
"roll": 0.0,
"pitch": 0.0,
"yaw": 0.0,
"thrust": 0.0
}
def run(self):
print("Starting to send control commands!")
while True:
time.sleep(0.01)
self._thrust += self._thrust_step
if (self._thrust >= self._thrust_max or
self._thrust <= self._thrust_min):
self._thrust_step *= -1
self._cmd["thrust"] = self._thrust
self._socket.send_json(self._cmd)
signal.signal(signal.SIGINT, signal.SIG_DFL)
SRV_ADDR = "tcp://127.0.0.1"
CF_URI = "radio://0/10/250K"
context = zmq.Context()
client_conn = context.socket(zmq.REQ)
client_conn.connect("{}:2000".format(SRV_ADDR))
log_conn = context.socket(zmq.SUB)
log_conn.connect("{}:2001".format(SRV_ADDR))
log_conn.setsockopt_string(zmq.SUBSCRIBE, u"")
param_conn = context.socket(zmq.SUB)
param_conn.connect("{}:2002".format(SRV_ADDR))
param_conn.setsockopt_string(zmq.SUBSCRIBE, u"")
conn_conn = context.socket(zmq.SUB)
conn_conn.connect("{}:2003".format(SRV_ADDR))
conn_conn.setsockopt_string(zmq.SUBSCRIBE, u"")
ctrl_conn = context.socket(zmq.PUSH)
ctrl_conn.connect("{}:2004".format(SRV_ADDR))
# Start async threads
log_thread = _LogThread(log_conn)
log_thread.start()
param_thread = _ParamThread(param_conn)
param_thread.start()
conn_thread = _ConnThread(conn_conn)
conn_thread.start()
print("Trying unknown command ...", end=' ')
scan_cmd = {
"version": 1,
"cmd": "blah"
}
client_conn.send_json(scan_cmd)
resp = client_conn.recv_json()
if resp["status"] != 0:
print("fail! {}".format(resp["msg"]))
else:
print("done!")
print("Scanning for Crazyflies ...", end=' ')
scan_cmd = {
"version": 1,
"cmd": "scan"
}
client_conn.send_json(scan_cmd)
resp = client_conn.recv_json()
print("done!")
for i in resp["interfaces"]:
print("\t{} - {}".format(i["uri"], i["info"]))
connect_cmd = {
"version": 1,
"cmd": "connect",
"uri": "{}".format(CF_URI)
}
print("Connecting to {} ...".format(connect_cmd["uri"]), end=' ')
client_conn.send_json(connect_cmd)
resp = client_conn.recv_json()
if resp["status"] != 0:
print("fail! {}".format(resp["msg"]))
sys.exit(1)
print("done!")
# Do logging
print("Loggable variables")
for group in resp["log"]:
print("\t{}".format(group))
for name in resp["log"][group]:
print("\t {} ({})".format(name,
resp["log"][group][name]["type"]))
print("Parameter variables")
for group in resp["param"]:
print("\t{}".format(group))
for name in resp["param"][group]:
print("\t {} ({}, {})= {}".format(
name, resp["param"][group][name]["type"],
resp["param"][group][name]["access"],
resp["param"][group][name]["value"]))
log_cmd = {
"version": 1,
"cmd": "log",
"action": "create",
"name": "Test log block",
"period": 1000,
"variables": [
"pm.vbat",
"stabilizer.roll"
]
}
print("Creating logging {} ...".format(log_cmd["name"]), end=' ')
client_conn.send_json(log_cmd)
resp = client_conn.recv_json()
if resp["status"] == 0:
print("done!")
else:
print("fail! {}".format(resp["msg"]))
log_cmd = {
"version": 1,
"cmd": "log",
"action": "start",
"name": "Test log block"
}
print("Starting logging {} ...".format(log_cmd["name"]), end=' ')
client_conn.send_json(log_cmd)
resp = client_conn.recv_json()
if resp["status"] == 0:
print("done!")
else:
print("fail!")
param_cmd = {
"version": 1,
"cmd": "param",
"name": "system.selftestPassed",
"value": True
}
print("Setting param {} to {}...".format(param_cmd["name"],
param_cmd["value"]), end=' ')
client_conn.send_json(param_cmd)
resp = client_conn.recv_json()
if resp["status"] == 0:
print("done!")
else:
print("fail! {}".format(resp["msg"]))
param_cmd = {
"version": 1,
"cmd": "param",
"name": "flightctrl.xmode",
"value": True
}
print("Setting param {} to {}...".format(param_cmd["name"],
param_cmd["value"]), end=' ')
client_conn.send_json(param_cmd)
resp = client_conn.recv_json()
if resp["status"] == 0:
print("done!")
else:
print("fail! {}".format(resp["msg"]))
# Start sending control commands
ctrl = _CtrlThread(ctrl_conn)
ctrl.start()
# Wait a bit, then stop the logging
time.sleep(5)
log_cmd = {
"version": 1,
"cmd": "log",
"action": "stop",
"name": "No name",
}
print("Stopping logging {} ...".format(log_cmd["name"]), end=' ')
client_conn.send_json(log_cmd)
resp = client_conn.recv_json()
if resp["status"] == 0:
print("done!")
else:
print("fail! {}".format(resp["msg"]))
log_cmd = {
"version": 1,
"cmd": "log",
"action": "stop",
"name": "Test log block",
}
print("Stopping logging {} ...".format(log_cmd["name"]), end=' ')
client_conn.send_json(log_cmd)
resp = client_conn.recv_json()
if resp["status"] == 0:
print("done!")
else:
print("fail!")
log_cmd = {
"version": 1,
"cmd": "log",
"action": "delete",
"name": "Test log block",
}
print("Deleting logging {} ...".format(log_cmd["name"]), end=' ')
client_conn.send_json(log_cmd)
resp = client_conn.recv_json()
if resp["status"] == 0:
print("done!")
else:
print("fail!")
# Wait a bit, then disconnect
time.sleep(5)
connect_cmd = {
"version": 1,
"cmd": "disconnect",
"uri": "{}".format(CF_URI)
}
print("Disconnecting from {} ...".format(connect_cmd["uri"]), end=' ')
client_conn.send_json(connect_cmd)
resp = client_conn.recv_json()
if resp["status"] != 0:
print("fail! {}".format(resp["msg"]))
sys.exit(1)
print("done!")
|
from flask import Blueprint, jsonify
from api.helper import *
from api.exceptions import *
smogonapi = Blueprint('smogonapi', __name__)
@smogonapi.route('/set/<generation>/<pokemon>')
def getDataNoForm(generation, pokemon):
    try:
        if pokemon.isdigit():
            dictval = extractData(int(pokemon), int(generation))
        else:
            dictval = extractDataFromString(pokemon, int(generation))
        return jsonify(dictval)
    except Exception:
        # The exception was previously instantiated but never raised, so the
        # route silently returned None; raise it so Flask can handle it.
        raise APIError("Report to @thecommondude")
@smogonapi.route('/set/<generation>/<pokemon>/<form>')
def getDataForm(generation, pokemon, form):
    try:
        # Encode the form into the id, e.g. pokemon 25, form 1 -> 25.1
        formattedpkm = (int(form) + int(pokemon) * 10) / 10
        dictval = extractData(formattedpkm, int(generation))
        return jsonify(dictval)
    except Exception:
        raise APIError("Report to @thecommondude")
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class Review(Model):
"""The Review object.
:param review_id: Id of the review.
:type review_id: str
:param sub_team: Name of the subteam.
:type sub_team: str
:param status: The status string (<Pending, Complete>).
:type status: str
:param reviewer_result_tags: Array of KeyValue with Reviewer set Tags.
:type reviewer_result_tags:
list[~azure.cognitiveservices.vision.contentmoderator.models.KeyValuePair]
:param created_by: The reviewer name.
:type created_by: str
:param metadata: Array of KeyValue.
:type metadata:
list[~azure.cognitiveservices.vision.contentmoderator.models.KeyValuePair]
:param type: The type of content.
:type type: str
:param content: The content value.
:type content: str
:param content_id: Id of the content.
:type content_id: str
:param callback_endpoint: The callback endpoint.
:type callback_endpoint: str
"""
_attribute_map = {
'review_id': {'key': 'ReviewId', 'type': 'str'},
'sub_team': {'key': 'SubTeam', 'type': 'str'},
'status': {'key': 'Status', 'type': 'str'},
'reviewer_result_tags': {'key': 'ReviewerResultTags', 'type': '[KeyValuePair]'},
'created_by': {'key': 'CreatedBy', 'type': 'str'},
'metadata': {'key': 'Metadata', 'type': '[KeyValuePair]'},
'type': {'key': 'Type', 'type': 'str'},
'content': {'key': 'Content', 'type': 'str'},
'content_id': {'key': 'ContentId', 'type': 'str'},
'callback_endpoint': {'key': 'CallbackEndpoint', 'type': 'str'},
}
def __init__(self, review_id=None, sub_team=None, status=None, reviewer_result_tags=None, created_by=None, metadata=None, type=None, content=None, content_id=None, callback_endpoint=None):
super(Review, self).__init__()
self.review_id = review_id
self.sub_team = sub_team
self.status = status
self.reviewer_result_tags = reviewer_result_tags
self.created_by = created_by
self.metadata = metadata
self.type = type
self.content = content
self.content_id = content_id
self.callback_endpoint = callback_endpoint
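if __name__ == '__main__':
    # Hedged sketch with illustrative values only; this file is AutoRest-
    # generated, so the demo is for reading convenience, not part of the SDK.
    review = Review(review_id='r-001', status='Pending', created_by='moderator')
    print(review.review_id, review.status)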
|
orders = []
def add_order(name, flavor, observation=None):
order = {}
order['name'] = name
order['flavor'] = flavor
order['observation'] = observation
return order
orders.append(add_order('Mario', 'Pepperoni'))
orders.append(add_order('Pirao', 'Rola', 'Prefiro pegar o Ordonha'))
for order in orders:
if order['observation']:
template = 'Name: {name}\nFlavor: {flavor}\nObservation: {observation}'
else:
template = 'Name: {name}\nFlavor: {flavor}'
print(template.format(**order))
print('-'*30)
|
# -*- coding: utf-8 -*-
import os
import warnings
from setuptools import setup
from setuptools import find_packages
requirements = [
'setuptools',
'networkx',
'jpype1-py3',
'konlpy',
]
if os.name == 'nt':
warnings.warn("See http://konlpy.org/en/latest/install/#id2 to properly install KoNLPy.", RuntimeWarning)
setup(
name='textrankr',
version='0.4',
license='MIT',
author='Jamie Seol',
author_email='theeluwin@gmail.com',
url='https://github.com/theeluwin/textrankr',
description='TextRank for Korean',
packages=find_packages(),
include_package_data=True,
install_requires=requirements,
classifiers=[]
)
|
# pylint: disable=unused-import
# flake8: noqa: F401
try:
# Python 3.8+
from typing import Protocol
except ImportError:
Protocol = object # type: ignore
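# Hedged usage sketch: with this shim, protocol classes stay importable on
# Python < 3.8, where Protocol degrades to a plain base class without
# structural checks, e.g.:
#     class SupportsClose(Protocol):  # illustrative name
#         def close(self) -> None: ...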
|
# coding: utf-8
import smtplib
import urllib.request
from email.mime.image import MIMEImage
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
import common.lark_common.model.common_model as common_model
import common.lark_common.utils as lark_utils
from jinja2 import Template
from BareMetalControllerBackend.conf import env
class ModelEmailNotification:
def __init__(self):
self.env_config = env.EnvConfig()
self.from_address = None
self.to_address = []
self.cc_address = []
self.bcc_address = []
self.subject = None
self.email_template_uri = None
self.image_uri = {}
self.parameter = {}
self.smtp_server = self.env_config.notification_smtp_server
self.smtp_ssl_port = self.env_config.notification_smtp_ssl_port
self.smtp_login_required = self.env_config.notification_smtp_login_required
self.smtp_ssl_enable = self.env_config.notification_smtp_ssl_enable
self.smtp_username = self.env_config.notification_smtp_username
self.smtp_password = self.env_config.notification_smtp_password
class ProviderEmailNotification(object):
def __init__(self, model_notification=None):
self.model_notification = model_notification
self.env_config = env.EnvConfig()
def __generate_body(self):
"""
        Generate the email body from the template content and parameters of the model notification.
        :return: the rendered email body
"""
response = common_model.ResponseObj()
try:
email_template_stream = urllib.request.urlopen(self.model_notification.email_template_uri).read().decode('utf-8')
# email_template_stream = urllib.request.install_opener("static/reset_apssword.html").read().decode('utf-8')
email_template = Template(email_template_stream)
parameter_dict = self.model_notification.parameter
if not isinstance(self.model_notification.parameter, dict):
parameter_dict = lark_utils.JsonUtils.convert_object_to_dict(self.model_notification.parameter)
email_body = email_template.render(parameter_dict)
response.is_ok = True
response.content = email_body
except Exception as ex:
# self.env_config.logger.error(ex)
response.is_ok = False
response.message = u"email template is emtpy or can not be downloaded."
return response
return response
def __generate_message(self):
response = common_model.ResponseObj()
msg = MIMEMultipart('related')
response_body = self.__generate_body()
if not response_body.is_ok:
return response_body
content = MIMEText(response_body.content, 'html', 'utf-8')
msg.attach(content)
msg['Subject'] = self.model_notification.subject
msg['From'] = self.model_notification.from_address
msg['To'] = ','.join(self.model_notification.to_address)
msg['Cc'] = ','.join(self.model_notification.cc_address)
msg['Bcc'] = ','.join(self.model_notification.bcc_address)
try:
image_uri_dict = self.model_notification.image_uri
if image_uri_dict and len(image_uri_dict) > 0:
for (image_key, image_uri) in image_uri_dict.items():
image_content = urllib.request.urlopen(image_uri).read()
mime_image = MIMEImage(image_content)
mime_image.add_header('Content-ID', image_key)
msg.attach(mime_image)
        except Exception as ex:
            # self.env_config.logger.error("can not download and read image. " + str(ex))
            response.is_ok = False
            response.message = u"can not download and read image. " + str(ex)
            return response
response.content = msg.as_string()
response.is_ok = True
return response
def send(self):
response_obj = common_model.ResponseObj()
try:
            # Build the email message (subject and body)
response_message = self.__generate_message()
if not response_message.is_ok:
return response_message
            # Full recipient list (To + Cc + Bcc)
receiver = self.model_notification.to_address + self.model_notification.cc_address + self.model_notification.bcc_address
            # Connect to the SMTP server
if not self.model_notification.smtp_ssl_enable:
smtp_server = smtplib.SMTP(self.model_notification.smtp_server)
else:
smtp_server = smtplib.SMTP_SSL(self.model_notification.smtp_server, self.model_notification.smtp_ssl_port)
            # Log in to the SMTP server if required
if self.model_notification.smtp_login_required:
smtp_server.login(self.model_notification.smtp_username, self.model_notification.smtp_password)
            # Send the email
smtp_server.sendmail(self.model_notification.from_address, receiver, msg=response_message.content)
smtp_server.quit()
except Exception as ex:
response_obj.message = "Fail to send email! Error : %s " % str(ex)
response_obj.is_ok = False
response_obj.no = 500
return response_obj
response_obj.is_ok = True
response_obj.content = "email send success!"
return response_obj
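if __name__ == "__main__":
    # Hedged usage sketch: all values below are placeholders, and running this
    # requires the project's env configuration plus a reachable SMTP server.
    model = ModelEmailNotification()
    model.from_address = "noreply@example.com"
    model.to_address = ["user@example.com"]
    model.subject = "Test notification"
    model.email_template_uri = "http://example.com/template.html"  # placeholder URI
    model.parameter = {"user_name": "demo"}
    result = ProviderEmailNotification(model).send()
    print(result.is_ok, getattr(result, "message", None) or getattr(result, "content", None))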
|
# Copyright 2021 eprbell
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import unittest
from dateutil.tz import tzutc
from rp2.configuration import Configuration
from rp2.entry_types import TransactionType
from rp2.in_transaction import InTransaction
from rp2.out_transaction import OutTransaction
from rp2.plugin.country.us import US
from rp2.rp2_decimal import RP2Decimal
from rp2.rp2_error import RP2TypeError, RP2ValueError
class TestInTransaction(unittest.TestCase):
_configuration: Configuration
@classmethod
def setUpClass(cls) -> None:
TestInTransaction._configuration = Configuration("./config/test_data.config", US())
def setUp(self) -> None:
self.maxDiff = None # pylint: disable=invalid-name
def test_transaction_type(self) -> None:
self.assertEqual(TransactionType.AIRDROP, TransactionType.type_check_from_string("transaction_type", "airdrop"))
self.assertEqual(TransactionType.BUY, TransactionType.type_check_from_string("transaction_type", "buy"))
self.assertEqual(TransactionType.DONATE, TransactionType.type_check_from_string("transaction_type", "dOnAtE"))
self.assertEqual(TransactionType.GIFT, TransactionType.type_check_from_string("transaction_type", "GIFT"))
self.assertEqual(TransactionType.HARDFORK, TransactionType.type_check_from_string("transaction_type", "HardFork"))
self.assertEqual(TransactionType.INTEREST, TransactionType.type_check_from_string("transaction_type", "Interest"))
self.assertEqual(TransactionType.MINING, TransactionType.type_check_from_string("transaction_type", "MiNiNg"))
self.assertEqual(TransactionType.MOVE, TransactionType.type_check_from_string("transaction_type", "MoVe"))
self.assertEqual(TransactionType.SELL, TransactionType.type_check_from_string("transaction_type", "sell"))
self.assertEqual(TransactionType.STAKING, TransactionType.type_check_from_string("transaction_type", "sTaKING"))
self.assertEqual(TransactionType.WAGES, TransactionType.type_check_from_string("transaction_type", "WageS"))
with self.assertRaisesRegex(RP2TypeError, "Parameter name is not a string: .*"):
TransactionType.type_check_from_string(12, "buy") # type: ignore
with self.assertRaisesRegex(RP2TypeError, "Parameter 'transaction_type' has non-string value .*"):
TransactionType.type_check_from_string("transaction_type", 34.6) # type: ignore
with self.assertRaisesRegex(RP2TypeError, "Parameter 'transaction_type' has non-string value .*"):
TransactionType.type_check_from_string("transaction_type", None) # type: ignore
with self.assertRaisesRegex(RP2ValueError, "Parameter 'transaction_type' has invalid transaction type value: .*"):
TransactionType.type_check_from_string("transaction_type", "Cook")
def test_taxable_in_transaction(self) -> None:
in_transaction: InTransaction = InTransaction(
self._configuration,
"2021-01-02T08:42:43.882Z",
"B1",
"BlockFi",
"Bob",
"inTerest",
RP2Decimal("1000.0"),
RP2Decimal("2.0002"),
RP2Decimal("0"),
fiat_in_no_fee=RP2Decimal("2000.2"),
fiat_in_with_fee=RP2Decimal("2000.2"),
internal_id=19,
)
InTransaction.type_check("my_instance", in_transaction)
self.assertTrue(in_transaction.is_taxable())
self.assertEqual(RP2Decimal("2000.2"), in_transaction.fiat_taxable_amount)
self.assertEqual("19", in_transaction.internal_id)
self.assertEqual(2021, in_transaction.timestamp.year)
self.assertEqual(1, in_transaction.timestamp.month)
self.assertEqual(2, in_transaction.timestamp.day)
self.assertEqual(8, in_transaction.timestamp.hour)
self.assertEqual(42, in_transaction.timestamp.minute)
self.assertEqual(43, in_transaction.timestamp.second)
self.assertEqual(882000, in_transaction.timestamp.microsecond)
self.assertEqual(tzutc(), in_transaction.timestamp.tzinfo)
self.assertEqual("B1", in_transaction.asset)
self.assertEqual("BlockFi", in_transaction.exchange)
self.assertEqual("Bob", in_transaction.holder)
self.assertEqual(TransactionType.INTEREST, in_transaction.transaction_type)
self.assertEqual(RP2Decimal("1000"), in_transaction.spot_price)
self.assertEqual(RP2Decimal("2.0002"), in_transaction.crypto_in)
self.assertEqual(RP2Decimal("2000.2"), in_transaction.fiat_in_no_fee)
self.assertEqual(RP2Decimal("2000.2"), in_transaction.fiat_in_with_fee)
self.assertEqual(RP2Decimal("0"), in_transaction.fiat_fee)
self.assertEqual(RP2Decimal("2.0002"), in_transaction.crypto_balance_change)
self.assertEqual(RP2Decimal("2000.2"), in_transaction.fiat_balance_change)
self.assertEqual(
str(in_transaction),
"""InTransaction:
id=19
timestamp=2021-01-02 08:42:43.882000 +0000
asset=B1
exchange=BlockFi
holder=Bob
transaction_type=TransactionType.INTEREST
spot_price=1000.0000
crypto_in=2.00020000
fiat_fee=0.0000
fiat_in_no_fee=2000.2000
fiat_in_with_fee=2000.2000
unique_id=
is_taxable=True
fiat_taxable_amount=2000.2000""",
)
self.assertEqual(
in_transaction.to_string(2, repr_format=False, extra_data=["foobar", "qwerty"]),
""" InTransaction:
id=19
timestamp=2021-01-02 08:42:43.882000 +0000
asset=B1
exchange=BlockFi
holder=Bob
transaction_type=TransactionType.INTEREST
spot_price=1000.0000
crypto_in=2.00020000
fiat_fee=0.0000
fiat_in_no_fee=2000.2000
fiat_in_with_fee=2000.2000
unique_id=
is_taxable=True
fiat_taxable_amount=2000.2000
foobar
qwerty""",
)
self.assertEqual(
in_transaction.to_string(2, repr_format=True, extra_data=["foobar", "qwerty"]),
(
" InTransaction("
"id='19', "
"timestamp='2021-01-02 08:42:43.882000 +0000', "
"asset='B1', "
"exchange='BlockFi', "
"holder='Bob', "
"transaction_type=<TransactionType.INTEREST: 'interest'>, "
"spot_price=1000.0000, "
"crypto_in=2.00020000, "
"fiat_fee=0.0000, "
"fiat_in_no_fee=2000.2000, "
"fiat_in_with_fee=2000.2000, "
"unique_id=, "
"is_taxable=True, "
"fiat_taxable_amount=2000.2000, "
"foobar, "
"qwerty)"
),
)
def test_non_taxable_in_transaction(self) -> None:
in_transaction = InTransaction(
self._configuration,
"1841-01-02T15:22:03Z",
"B2",
"Coinbase",
"Alice",
"BuY",
RP2Decimal("1000"),
RP2Decimal("2.0002"),
RP2Decimal("20"),
internal_id=19,
)
self.assertFalse(in_transaction.is_taxable())
self.assertEqual(RP2Decimal("0"), in_transaction.fiat_taxable_amount)
self.assertEqual("B2", in_transaction.asset)
self.assertEqual(TransactionType.BUY, in_transaction.transaction_type)
self.assertEqual(RP2Decimal("2.0002"), in_transaction.crypto_balance_change)
self.assertEqual(RP2Decimal("2020.2"), in_transaction.fiat_balance_change)
self.assertEqual(
str(in_transaction),
"""InTransaction:
id=19
timestamp=1841-01-02 15:22:03.000000 +0000
asset=B2
exchange=Coinbase
holder=Alice
transaction_type=TransactionType.BUY
spot_price=1000.0000
crypto_in=2.00020000
fiat_fee=20.0000
fiat_in_no_fee=2000.2000
fiat_in_with_fee=2020.2000
unique_id=
is_taxable=False
fiat_taxable_amount=0.0000""",
)
def test_in_transaction_equality_and_hashing(self) -> None:
in_transaction: InTransaction = InTransaction(
self._configuration,
"2021-01-02T08:42:43.882Z",
"B1",
"BlockFi",
"Bob",
"iNtErEsT",
RP2Decimal("1000.0"),
RP2Decimal("2.0002"),
RP2Decimal("0"),
fiat_in_no_fee=RP2Decimal("2000.2"),
fiat_in_with_fee=RP2Decimal("2000.2"),
internal_id=19,
)
in_transaction2: InTransaction = InTransaction(
self._configuration,
"2021-01-02T08:42:43.882Z",
"B1",
"BlockFi",
"Bob",
"INTEReST",
RP2Decimal("1000.0"),
RP2Decimal("2.0002"),
RP2Decimal("0"),
fiat_in_no_fee=RP2Decimal("2000.2"),
fiat_in_with_fee=RP2Decimal("2000.2"),
internal_id=19,
)
in_transaction3: InTransaction = InTransaction(
self._configuration,
"2021-01-02T08:42:43.882Z",
"B1",
"BlockFi",
"Bob",
"interest",
RP2Decimal("1000.0"),
RP2Decimal("2.0002"),
RP2Decimal("0"),
fiat_in_no_fee=RP2Decimal("2000.2"),
fiat_in_with_fee=RP2Decimal("2000.2"),
internal_id=20,
)
self.assertEqual(in_transaction, in_transaction)
self.assertEqual(in_transaction, in_transaction2)
self.assertNotEqual(in_transaction, in_transaction3)
self.assertEqual(hash(in_transaction), hash(in_transaction))
self.assertEqual(hash(in_transaction), hash(in_transaction2))
        # These hashes would only be equal in case of hash collision (possible but very unlikely)
self.assertNotEqual(hash(in_transaction), hash(in_transaction3))
def test_bad_to_string(self) -> None:
in_transaction: InTransaction = InTransaction(
self._configuration,
"2021-01-02T08:42:43.882Z",
"B1",
"BlockFi",
"Bob",
"INteREst",
RP2Decimal("1000.0"),
RP2Decimal("2.0002"),
RP2Decimal("0"),
fiat_in_no_fee=RP2Decimal("2000.2"),
fiat_in_with_fee=RP2Decimal("2000.2"),
internal_id=19,
)
with self.assertRaisesRegex(RP2TypeError, "Parameter 'indent' has non-integer value"):
in_transaction.to_string(None, repr_format=False, extra_data=["foobar", "qwerty"]) # type: ignore
with self.assertRaisesRegex(RP2ValueError, "Parameter 'indent' has non-positive value.*"):
in_transaction.to_string(-1, repr_format=False, extra_data=["foobar", "qwerty"])
with self.assertRaisesRegex(RP2TypeError, "Parameter 'repr_format' has non-bool value .*"):
in_transaction.to_string(1, repr_format="False", extra_data=["foobar", "qwerty"]) # type: ignore
with self.assertRaisesRegex(RP2TypeError, "Parameter 'extra_data' is not of type List"):
in_transaction.to_string(1, repr_format=False, extra_data="foobar") # type: ignore
def test_bad_in_transaction(self) -> None:
with self.assertRaisesRegex(RP2TypeError, "Parameter name is not a string:.*"):
InTransaction.type_check(None, None) # type: ignore
with self.assertRaisesRegex(RP2TypeError, "Parameter 'my_instance' is not of type InTransaction:.*"):
InTransaction.type_check("my_instance", None) # type: ignore
with self.assertRaisesRegex(RP2TypeError, "Parameter 'my_instance' is not of type InTransaction: OutTransaction"):
InTransaction.type_check(
"my_instance",
OutTransaction(
self._configuration,
"2021-01-12T11:51:38Z",
"B1",
"BlockFi",
"Bob",
"SELL",
RP2Decimal("10000"),
RP2Decimal("1"),
RP2Decimal("0"),
internal_id=45,
),
)
with self.assertRaisesRegex(RP2TypeError, "Parameter 'configuration' is not of type Configuration: .*"):
# Bad configuration
InTransaction(
None, # type: ignore
"2021-01-02T08:42:43.882Z",
"B1",
"BlockFi",
"Bob",
"interest",
RP2Decimal("1000"),
RP2Decimal("2.0002"),
RP2Decimal("20"),
fiat_in_no_fee=RP2Decimal("2000.2"),
fiat_in_with_fee=RP2Decimal("2020.2"),
internal_id=19,
)
with self.assertRaisesRegex(RP2TypeError, "Parameter 'configuration' is not of type Configuration: .*"):
# Bad configuration
InTransaction(
"config", # type: ignore
"2021-01-02T08:42:43.882Z",
"B1",
"BlockFi",
"Bob",
"INTEREST",
RP2Decimal("1000"),
RP2Decimal("2.0002"),
RP2Decimal("20"),
fiat_in_no_fee=RP2Decimal("2000.2"),
fiat_in_with_fee=RP2Decimal("2020.2"),
internal_id=19,
)
with self.assertRaisesRegex(RP2ValueError, "Parameter 'internal_id' has non-positive value .*"):
# Bad internal_id
InTransaction(
self._configuration,
"2021-01-02T08:42:43.882Z",
"B1",
"BlockFi",
"Bob",
"Interest",
RP2Decimal("1000"),
RP2Decimal("2.0002"),
RP2Decimal("20"),
fiat_in_no_fee=RP2Decimal("2000.2"),
fiat_in_with_fee=RP2Decimal("2020.2"),
internal_id=-19,
)
with self.assertRaisesRegex(RP2TypeError, "Parameter 'internal_id' has non-integer .*"):
# Bad internal_id
InTransaction(
self._configuration,
"2021-01-02T08:42:43.882Z",
"B1",
"BlockFi",
"Bob",
"buy",
RP2Decimal("1000"),
RP2Decimal("2.0002"),
RP2Decimal("20"),
fiat_in_no_fee=RP2Decimal("2000.2"),
fiat_in_with_fee=RP2Decimal("2020.2"),
internal_id="19", # type: ignore
)
with self.assertRaisesRegex(RP2ValueError, "Error parsing parameter 'timestamp': Unknown string format: .*"):
# Bad timestamp
InTransaction(
self._configuration,
"abcdefg",
"B1",
"BlockFi",
"Bob",
"BUY",
RP2Decimal("1000"),
RP2Decimal("2.0002"),
RP2Decimal("20"),
fiat_in_no_fee=RP2Decimal("2000.2"),
fiat_in_with_fee=RP2Decimal("2020.2"),
internal_id=19,
)
with self.assertRaisesRegex(RP2ValueError, "Parameter 'timestamp' value has no timezone info: .*"):
# Bad timestamp
InTransaction(
self._configuration,
"2021-01-02T08:42:43",
"B1",
"BlockFi",
"Bob",
"intErest",
RP2Decimal("1000"),
RP2Decimal("2.0002"),
RP2Decimal("20"),
fiat_in_no_fee=RP2Decimal("2000.2"),
fiat_in_with_fee=RP2Decimal("2020.2"),
internal_id=19,
)
with self.assertRaisesRegex(RP2TypeError, "Parameter 'timestamp' has non-string value .*"):
# Bad timestamp
InTransaction(
self._configuration,
1111, # type: ignore
"B1",
"BlockFi",
"Bob",
"intERest",
RP2Decimal("1000"),
RP2Decimal("2.0002"),
RP2Decimal("20"),
fiat_in_no_fee=RP2Decimal("2000.2"),
fiat_in_with_fee=RP2Decimal("2020.2"),
internal_id=19,
)
with self.assertRaisesRegex(RP2ValueError, "Parameter 'asset' value is not known: .*"):
# Bad asset
InTransaction(
self._configuration,
"2021-01-02T08:42:43.882Z",
"yyy",
"BlockFi",
"Bob",
"intEResT",
RP2Decimal("1000"),
RP2Decimal("2.0002"),
RP2Decimal("20"),
fiat_in_no_fee=RP2Decimal("2000.2"),
fiat_in_with_fee=RP2Decimal("2020.2"),
internal_id=19,
)
with self.assertRaisesRegex(RP2TypeError, "Parameter 'asset' has non-string value .*"):
# Bad asset
InTransaction(
self._configuration,
"2021-01-02T08:42:43.882Z",
1111, # type: ignore
"BlockFi",
"Bob",
"IntEResT",
RP2Decimal("1000.0"),
RP2Decimal("2.0002"),
RP2Decimal("20"),
fiat_in_no_fee=RP2Decimal("2000.2"),
fiat_in_with_fee=RP2Decimal("2020.2"),
internal_id=19,
)
with self.assertRaisesRegex(RP2ValueError, "Parameter 'exchange' value is not known: .*"):
# Bad exchange
InTransaction(
self._configuration,
"2021-01-02T08:42:43.882Z",
"B1",
"blockfi",
"Bob",
"INtEResT",
RP2Decimal("1000"),
RP2Decimal("2.0002"),
RP2Decimal("20"),
fiat_in_no_fee=RP2Decimal("2000.2"),
fiat_in_with_fee=RP2Decimal("2020.2"),
internal_id=19,
)
with self.assertRaisesRegex(RP2TypeError, "Parameter 'exchange' has non-string value .*"):
# Bad exchange
InTransaction(
self._configuration,
"2021-01-02T08:42:43.882Z",
"B1",
1111, # type: ignore
"Bob",
"INtEresT",
RP2Decimal("1000"),
RP2Decimal("2.0002"),
RP2Decimal("20"),
fiat_in_no_fee=RP2Decimal("2000.2"),
fiat_in_with_fee=RP2Decimal("2020.2"),
internal_id=19,
)
with self.assertRaisesRegex(RP2ValueError, "Parameter 'holder' value is not known: .*"):
# Bad holder
InTransaction(
self._configuration,
"2021-01-02T08:42:43.882Z",
"B1",
"BlockFi",
"qwerty",
"INTEresT",
RP2Decimal("1000"),
RP2Decimal("2.0002"),
RP2Decimal("20"),
fiat_in_no_fee=RP2Decimal("2000.2"),
fiat_in_with_fee=RP2Decimal("2020.2"),
internal_id=19,
)
with self.assertRaisesRegex(RP2TypeError, "Parameter 'holder' has non-string value .*"):
# Bad holder
InTransaction(
self._configuration,
"2021-01-02T08:42:43.882Z",
"B1",
"BlockFi",
1111, # type: ignore
"iNTEresT",
RP2Decimal("1000"),
RP2Decimal("2.0002"),
RP2Decimal("20"),
fiat_in_no_fee=RP2Decimal("2000.2"),
fiat_in_with_fee=RP2Decimal("2020.2"),
internal_id=19,
)
with self.assertRaisesRegex(RP2ValueError, ".*InTransaction .*, id.*invalid transaction type.*"):
# Bad transaction type
InTransaction(
self._configuration,
"2021-01-02T08:42:43.882Z",
"B1",
"BlockFi",
"Bob",
"seLl",
RP2Decimal("1000"),
RP2Decimal("2.0002"),
RP2Decimal("20"),
fiat_in_no_fee=RP2Decimal("2000.2"),
fiat_in_with_fee=RP2Decimal("2020.2"),
internal_id=19,
)
with self.assertRaisesRegex(RP2ValueError, "Parameter 'transaction_type' has invalid transaction type value: .*"):
# Bad transaction type
InTransaction(
self._configuration,
"2021-01-02T08:42:43.882Z",
"B1",
"BlockFi",
"Bob",
"",
RP2Decimal("1000"),
RP2Decimal("2.0002"),
RP2Decimal("20"),
fiat_in_no_fee=RP2Decimal("2000.2"),
fiat_in_with_fee=RP2Decimal("2020.2"),
internal_id=19,
)
with self.assertRaisesRegex(RP2ValueError, "Parameter .* has invalid transaction type value: .*"):
# Bad transaction type
InTransaction(
self._configuration,
"2021-01-02T08:42:43.882Z",
"B1",
"BlockFi",
"Bob",
"cook",
RP2Decimal("1000"),
RP2Decimal("2.0002"),
RP2Decimal("20"),
fiat_in_no_fee=RP2Decimal("2000.2"),
fiat_in_with_fee=RP2Decimal("2020.2"),
internal_id=19,
)
with self.assertRaisesRegex(RP2TypeError, "Parameter .* has non-string value .*"):
# Bad transaction type
InTransaction(
self._configuration,
"2021-01-02T08:42:43.882Z",
"B1",
"BlockFi",
"Bob",
1111, # type: ignore
RP2Decimal("1000"),
RP2Decimal("2.0002"),
RP2Decimal("20"),
fiat_in_no_fee=RP2Decimal("2000.2"),
fiat_in_with_fee=RP2Decimal("2020.2"),
internal_id=19,
)
with self.assertRaisesRegex(RP2ValueError, ".*InTransaction .*, id.*parameter 'spot_price' cannot be 0"):
# Bad spot price
InTransaction(
self._configuration,
"2021-01-02T08:42:43.882Z",
"B1",
"BlockFi",
"Bob",
"iNTErest",
RP2Decimal("0"),
RP2Decimal("2.0002"),
RP2Decimal("20"),
fiat_in_no_fee=RP2Decimal("2000.2"),
fiat_in_with_fee=RP2Decimal("2020.2"),
internal_id=19,
)
with self.assertRaisesRegex(RP2ValueError, ".*InTransaction .*, id.*parameter 'spot_price' cannot be 0"):
# Bad spot price
InTransaction(
self._configuration,
"2021-01-02T08:42:43.882Z",
"B1",
"BlockFi",
"Bob",
"iNTerest",
RP2Decimal("0.00000000000001"),
RP2Decimal("2.0002"),
RP2Decimal("20"),
fiat_in_no_fee=RP2Decimal("2000.2"),
fiat_in_with_fee=RP2Decimal("2020.2"),
internal_id=19,
)
with self.assertRaisesRegex(RP2ValueError, "Parameter 'spot_price' has non-positive value .*"):
# Bad spot price
InTransaction(
self._configuration,
"2021-01-02T08:42:43.882Z",
"B1",
"BlockFi",
"Bob",
"iNterest",
RP2Decimal("-1000"),
RP2Decimal("2.0002"),
RP2Decimal("20"),
fiat_in_no_fee=RP2Decimal("2000.2"),
fiat_in_with_fee=RP2Decimal("2020.2"),
internal_id=19,
)
with self.assertRaisesRegex(RP2TypeError, "Parameter 'spot_price' has non-RP2Decimal value .*"):
# Bad spot price
InTransaction(
self._configuration,
"2021-01-02T08:42:43.882Z",
"B1",
"BlockFi",
"Bob",
"iNtereSt",
"1000", # type: ignore
RP2Decimal("2.0002"),
RP2Decimal("20"),
fiat_in_no_fee=RP2Decimal("2000.2"),
fiat_in_with_fee=RP2Decimal("2020.2"),
internal_id=19,
)
with self.assertRaisesRegex(RP2ValueError, "Parameter 'crypto_in' has zero value"):
# Bad crypto in
InTransaction(
self._configuration,
"2021-01-02T08:42:43.882Z",
"B1",
"BlockFi",
"Bob",
"intereSt",
RP2Decimal("1000"),
RP2Decimal("0"),
RP2Decimal("20"),
fiat_in_no_fee=RP2Decimal("2000.2"),
fiat_in_with_fee=RP2Decimal("2020.2"),
internal_id=19,
)
with self.assertRaisesRegex(RP2ValueError, "Parameter 'crypto_in' has non-positive value .*"):
# Bad crypto in
InTransaction(
self._configuration,
"2021-01-02T08:42:43.882Z",
"B1",
"BlockFi",
"Bob",
"interESt",
RP2Decimal("1000"),
RP2Decimal("-2.0002"),
RP2Decimal("20"),
fiat_in_no_fee=RP2Decimal("2000.2"),
fiat_in_with_fee=RP2Decimal("2020.2"),
internal_id=19,
)
with self.assertRaisesRegex(RP2TypeError, "Parameter 'crypto_in' has non-RP2Decimal value .*"):
# Bad crypto in
InTransaction(
self._configuration,
"2021-01-02T08:42:43.882Z",
"B1",
"BlockFi",
"Bob",
"interEst",
RP2Decimal("1000.0"),
"2.0002", # type: ignore
RP2Decimal("20"),
fiat_in_no_fee=RP2Decimal("2000.2"),
fiat_in_with_fee=RP2Decimal("2020.2"),
internal_id=19,
)
with self.assertRaisesRegex(RP2ValueError, "Parameter 'fiat_fee' has non-positive value .*"):
# Bad fiat fee
InTransaction(
self._configuration,
"2021-01-02T08:42:43.882Z",
"B1",
"BlockFi",
"Bob",
"inteREst",
RP2Decimal("1000"),
RP2Decimal("2.0002"),
RP2Decimal("-20"),
fiat_in_no_fee=RP2Decimal("2000.2"),
fiat_in_with_fee=RP2Decimal("2020.2"),
internal_id=19,
)
with self.assertRaisesRegex(RP2TypeError, "Parameter 'fiat_fee' has non-RP2Decimal value .*"):
# Bad fiat fee
InTransaction(
self._configuration,
"2021-01-02T08:42:43.882Z",
"B1",
"BlockFi",
"Bob",
"intEREst",
RP2Decimal("1000"),
RP2Decimal("2.0002"),
"20", # type: ignore
fiat_in_no_fee=RP2Decimal("2000.2"),
fiat_in_with_fee=RP2Decimal("2020.2"),
internal_id=19,
)
with self.assertRaisesRegex(RP2ValueError, "Parameter 'fiat_in_no_fee' has non-positive value .*"):
# Bad fiat in no fee
InTransaction(
self._configuration,
"2021-01-02T08:42:43.882Z",
"B1",
"BlockFi",
"Bob",
"inTEREst",
RP2Decimal("1000"),
RP2Decimal("2.0002"),
RP2Decimal("20"),
fiat_in_no_fee=RP2Decimal("-2000.2"),
fiat_in_with_fee=RP2Decimal("2020.2"),
internal_id=19,
)
with self.assertRaisesRegex(RP2TypeError, "Parameter 'fiat_in_no_fee' has non-RP2Decimal value .*"):
# Bad fiat in no fee
InTransaction(
self._configuration,
"2021-01-02T08:42:43.882Z",
"B1",
"BlockFi",
"Bob",
"inTERESt",
RP2Decimal("1000.0"),
RP2Decimal("2.0002"),
RP2Decimal("20"),
fiat_in_no_fee="2000.2", # type: ignore
fiat_in_with_fee=RP2Decimal("2020.2"),
internal_id=19,
)
with self.assertRaisesRegex(RP2ValueError, "Parameter 'fiat_in_with_fee' has non-positive value .*"):
# Bad fiat in with fee
InTransaction(
self._configuration,
"2021-01-02T08:42:43.882Z",
"B1",
"BlockFi",
"Bob",
"iNTERESt",
RP2Decimal("1000"),
RP2Decimal("2.0002"),
RP2Decimal("20"),
fiat_in_no_fee=RP2Decimal("2000.2"),
fiat_in_with_fee=RP2Decimal("-2020.2"),
internal_id=19,
)
with self.assertRaisesRegex(RP2TypeError, "Parameter 'fiat_in_with_fee' has non-RP2Decimal value .*"):
# Bad fiat in with fee
InTransaction(
self._configuration,
"2021-01-02T08:42:43.882Z",
"B1",
"BlockFi",
"Bob",
"INTERESt",
RP2Decimal("1000"),
RP2Decimal("2.0002"),
RP2Decimal("20"),
fiat_in_no_fee=RP2Decimal("2000.2"),
fiat_in_with_fee=(1, 2, 3), # type: ignore
internal_id=19,
)
with self.assertRaisesRegex(RP2TypeError, "Parameter 'notes' has non-string value .*"):
# Bad notes
InTransaction(
self._configuration,
"2021-01-02T08:42:43.882Z",
"B1",
"BlockFi",
"Bob",
"INTEREST",
RP2Decimal("1000.0"),
RP2Decimal("2.0002"),
RP2Decimal("20"),
fiat_in_no_fee=RP2Decimal("2000.2"),
fiat_in_with_fee=RP2Decimal("2020.2"),
internal_id=19,
notes=35.6, # type: ignore
)
with self.assertRaisesRegex(RP2TypeError, "Parameter 'notes' has non-string value .*"):
# Bad notes
InTransaction(
self._configuration,
"2021-01-02T08:42:43.882Z",
"B1",
"BlockFi",
"Bob",
"INTeREST",
RP2Decimal("1000.0"),
RP2Decimal("2.0002"),
RP2Decimal("20"),
fiat_in_no_fee=RP2Decimal("2000.2"),
fiat_in_with_fee=RP2Decimal("2020.2"),
internal_id=19,
notes=[1, 2, 3], # type: ignore
)
with self.assertLogs(level="WARNING") as log:
# Crypto in * spot price != fiat in (without fee)
InTransaction(
self._configuration,
"2021-01-02T08:42:43.882Z",
"B1",
"BlockFi",
"Bob",
"INTerEST",
RP2Decimal("1000"),
RP2Decimal("2.0002"),
RP2Decimal("1000"),
fiat_in_no_fee=RP2Decimal("1900.2"),
fiat_in_with_fee=RP2Decimal("2000.2"),
internal_id=19,
)
self.assertTrue(re.search(".* InTransaction .*, id.*crypto_in.*spot_price != fiat_in_no_fee:.*", log.output[0])) # type: ignore
with self.assertLogs(level="WARNING") as log:
# fiat in (with fee) != fiat in (without fee) + fiat fee
InTransaction(
self._configuration,
"2021-01-02T08:42:43.882Z",
"B1",
"BlockFi",
"Bob",
"INTerESt",
RP2Decimal("1000"),
RP2Decimal("2.0002"),
RP2Decimal("18"),
fiat_in_no_fee=RP2Decimal("2000.2"),
fiat_in_with_fee=RP2Decimal("2020.2"),
internal_id=19,
)
self.assertTrue(re.search(".* InTransaction .*, id.*fiat_in_with_fee != fiat_in_no_fee.*fiat_fee:.*", log.output[0])) # type: ignore
if __name__ == "__main__":
unittest.main()
|
#!/usr/bin/python3
"""Platform for light integration."""
import logging
from abc import ABC
from datetime import timedelta
from typing import Callable, List, Any
import homeassistant.components.lock
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ATTR_ATTRIBUTION
from homeassistant.core import HomeAssistant, callback
from wyzeapy import Wyzeapy, LockService
from wyzeapy.services.lock_service import Lock
from wyzeapy.types import DeviceTypes
from .token_manager import token_exception_handler
from .const import DOMAIN, CONF_CLIENT
_LOGGER = logging.getLogger(__name__)
ATTRIBUTION = "Data provided by Wyze"
SCAN_INTERVAL = timedelta(seconds=10)
MAX_OUT_OF_SYNC_COUNT = 5
@token_exception_handler
async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry,
async_add_entities: Callable[[List[Any], bool], None]) -> None:
"""
This function sets up the config_entry
:param hass: Home Assistant instance
:param config_entry: The current config_entry
:param async_add_entities: This function adds entities to the config_entry
:return:
"""
_LOGGER.debug("""Creating new WyzeApi lock component""")
client: Wyzeapy = hass.data[DOMAIN][config_entry.entry_id][CONF_CLIENT]
lock_service = await client.lock_service
locks = [WyzeLock(lock_service, lock) for lock in await lock_service.get_locks()]
async_add_entities(locks, True)
class WyzeLock(homeassistant.components.lock.LockEntity, ABC):
"""Representation of a Wyze Lock."""
def __init__(self, lock_service: LockService, lock: Lock):
"""Initialize a Wyze lock."""
self._lock = lock
if self._lock.type not in [
DeviceTypes.LOCK
]:
raise AttributeError("Device type not supported")
self._lock_service = lock_service
self._out_of_sync_count = 0
@property
def device_info(self):
return {
"identifiers": {
(DOMAIN, self._lock.mac)
},
"name": self.name,
"manufacturer": "WyzeLabs",
"model": self._lock.product_model
}
def lock(self, **kwargs):
raise NotImplementedError
def unlock(self, **kwargs):
raise NotImplementedError
@property
def should_poll(self) -> bool:
return False
@token_exception_handler
async def async_lock(self, **kwargs):
_LOGGER.debug("Turning on lock")
await self._lock_service.lock(self._lock)
self._lock.unlocked = False
self.async_schedule_update_ha_state()
@token_exception_handler
async def async_unlock(self, **kwargs):
await self._lock_service.unlock(self._lock)
self._lock.unlocked = True
self.async_schedule_update_ha_state()
@property
def is_locked(self):
return not self._lock.unlocked
@property
def name(self):
"""Return the display name of this lock."""
return self._lock.nickname
@property
def unique_id(self):
return self._lock.mac
@property
def available(self):
"""Return the connection status of this lock"""
return self._lock.available
@property
def device_state_attributes(self):
"""Return device attributes of the entity."""
dev_info = {
ATTR_ATTRIBUTION: ATTRIBUTION,
"state": self.state,
"available": self.available,
"door_open": self._lock.door_open,
"device_model": self._lock.product_model,
"mac": self.unique_id
}
# Add the lock battery value if it exists
if self._lock.raw_dict.get("power"):
dev_info["lock battery"] = str(self._lock.raw_dict.get("power")) + "%"
# Add the keypad's battery value if it exists
if self._lock.raw_dict.get("keypad", {}).get("power"):
dev_info["keypad battery"] = str(self._lock.raw_dict.get("keypad", {}).get("power")) + "%"
return dev_info
@property
def supported_features(self):
return None
@token_exception_handler
async def async_update(self):
"""
This function updates the entity
"""
lock = await self._lock_service.update(self._lock)
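        # Accept the polled state only when it agrees with our optimistic
        # local state, or after MAX_OUT_OF_SYNC_COUNT consecutive
        # disagreements (then trust the device and reset the counter).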
if lock.unlocked == self._lock.unlocked or self._out_of_sync_count >= MAX_OUT_OF_SYNC_COUNT:
self._lock = lock
self._out_of_sync_count = 0
else:
self._out_of_sync_count += 1
@callback
def async_update_callback(self, lock: Lock):
"""Update the switch's state."""
self._lock = lock
self.async_schedule_update_ha_state()
async def async_added_to_hass(self) -> None:
"""Subscribe to update events."""
self._lock.callback_function = self.async_update_callback
self._lock_service.register_updater(self._lock, 10)
await self._lock_service.start_update_manager()
return await super().async_added_to_hass()
async def async_will_remove_from_hass(self) -> None:
self._lock_service.unregister_updater()
|
from flask_restx import Namespace, fields
product_ns = Namespace(
"Products",
description="Products related operations",
# # path="/products",
# path="/api/v1/products"
path="/api/v1"
)
products = product_ns.model("products", {
"id": fields.Integer(readonly=True),
"product_name": fields.String(required=True, description="The product product_name", example='Layer H3n'),
"product_category": fields.String(required=True, description="The product product_category", example='Raw Chicken'),
"user_id": fields.String(required=True, description="The product user_id"),
"created_at": fields.String(required=True, description="The product creation date")
})
post_products = product_ns.model(
"post_products",
{
"product_name": fields.String(
required=True,
description="products product_name",
example='This is my first product.'),
"product_category": fields.String(
required=True,
description="products product_category",
example='This is my first category.')})
product_mod = product_ns.model('product model', {
# 'product_name': fields.String(required=True, description='products Name', example='Layer H3n'),
'inventory': fields.Integer(required=True, description='Products inventory', example=4),
'min_quantity': fields.Integer(required=True, description='Minimum Inventory Quantity Allowed', example=0),
'category': fields.String(required=True, description='Category of product', example='Raw Chicken'),
'price': fields.Integer(required=True, description='Price of each product', example=1000),
})
# flask_restx keys registered models by name, so reusing 'product model'
# here would silently overwrite the definition above.
product_update_resp = product_ns.model('product update model', {
'product_name': fields.String(required=True, description='products Name', example='Layer H3n'),
'inventory': fields.Integer(required=True, description='Products inventory', example=4),
'min_quantity': fields.Integer(required=True, description='Minimum Inventory Quantity Allowed', example=0),
'category': fields.String(required=True, description='Category of product', example='Raw Chicken'),
'price': fields.Integer(required=True, description='Price of each product', example=1000),
})
product_resp = product_ns.model('Expected response for finding by id', {
'product_name': fields.String(required=True, description='products Name', example='Layer H3n'),
'inventory': fields.Integer(required=True, description='Products inventory', example=4),
'min_quantity': fields.Integer(required=True, description='Minimum Inventory Quantity Allowed', example=0),
'category': fields.String(required=True, description='Category of product', example='Raw Chicken'),
'price': fields.Integer(required=True, description='Price of each product', example=1000),
# 'product_id': fields.Integer(description='Unique Identification for products'),
# 'date_created': fields.DateTime(dt_format='rfc822', description='Date product was created'),
# 'date_modified': fields.DateTime(dt_format='rfc822', description='Date product was modified'),
})
# product_update_resp = product_ns.model('Expected response for finding by id', {
# 'inventory': fields.Integer(required=True, description='Products inventory', example=4),
# 'min_quantity': fields.Integer(required=True, description='Minimum Inventory Quantity Allowed', example=0),
# 'category': fields.String(required=True, description='Category of product', example='Raw Chicken'),
# 'price': fields.Integer(required=True, description='Price of each product', example=1000),
# # 'date_modified': fields.DateTime(dt_format='rfc822', description='Date product was modified'),
# })
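# Illustrative sketch (an assumption, not part of the original API): one way
# these schemas could be attached to a flask_restx Resource. The route path,
# class name, and handler below are hypothetical, for demonstration only.
from flask_restx import Resource
@product_ns.route("/products/example")
class ProductExample(Resource):
    @product_ns.expect(post_products, validate=True)
    @product_ns.marshal_with(products, code=201)
    def post(self):
        # Echo the validated payload back; a real handler would persist it.
        return product_ns.payload, 201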
|
'''
Original code:
https://github.com/idgmatrix/pygame-physics/blob/master/pygame_bouncing_ball.py
@author: kaswan
Modified:
@author: mandaw2014
'''
from mandaw import *
mandaw = Mandaw("Bouncing Ball!")
ball = GameObject(mandaw, "ellipse")
ball.center()
ball.ballx, ball.bally = mandaw.width / 2, mandaw.height / 2
ball.vx, ball.vy = 300, 300
@mandaw.draw
def draw():
ball.draw()
@mandaw.update
def update(dt):
ball.ballx += ball.vx * dt
ball.bally += ball.vy * dt
if ball.ballx < 0 or ball.ballx > mandaw.width - 20:
ball.vx = -ball.vx
if ball.bally < 0 or ball.bally > mandaw.height - 20:
ball.vy = -ball.vy
ball.x = ball.ballx
ball.y = ball.bally
mandaw.loop()
|
#!/usr/bin/env python
# example dragndrop.py
import pygtk
pygtk.require('2.0')
import gtk
import string, time
import gtkxpm
class DragNDropExample:
HEIGHT = 600
WIDTH = 600
TARGET_TYPE_TEXT = 80
TARGET_TYPE_PIXMAP = 81
fromImage = [ ( "text/plain", 0, TARGET_TYPE_TEXT ),
( "image/x-xpixmap", 0, TARGET_TYPE_PIXMAP ) ]
toButton = [ ( "text/plain", 0, TARGET_TYPE_TEXT ) ]
toCanvas = [ ( "image/x-xpixmap", 0, TARGET_TYPE_PIXMAP ) ]
def layout_resize(self, widget, event):
x, y, width, height = widget.get_allocation()
if width > self.lwidth or height > self.lheight:
self.lwidth = max(width, self.lwidth)
self.lheight = max(height, self.lheight)
widget.set_size(self.lwidth, self.lheight)
def makeLayout(self):
self.lwidth = self.WIDTH
self.lheight = self.HEIGHT
box = gtk.VBox(False,0)
box.show()
table = gtk.Table(2, 2, False)
table.show()
box.pack_start(table, True, True, 0)
layout = gtk.Layout()
self.layout = layout
layout.set_size(self.lwidth, self.lheight)
layout.connect("size-allocate", self.layout_resize)
layout.show()
table.attach(layout, 0, 1, 0, 1, gtk.FILL|gtk.EXPAND,
gtk.FILL|gtk.EXPAND, 0, 0)
# create the scrollbars and pack into the table
vScrollbar = gtk.VScrollbar(None)
vScrollbar.show()
table.attach(vScrollbar, 1, 2, 0, 1, gtk.FILL|gtk.SHRINK,
gtk.FILL|gtk.SHRINK, 0, 0)
hScrollbar = gtk.HScrollbar(None)
hScrollbar.show()
table.attach(hScrollbar, 0, 1, 1, 2, gtk.FILL|gtk.SHRINK,
gtk.FILL|gtk.SHRINK,
0, 0)
# tell the scrollbars to use the layout widget's adjustments
vAdjust = layout.get_vadjustment()
vScrollbar.set_adjustment(vAdjust)
hAdjust = layout.get_hadjustment()
hScrollbar.set_adjustment(hAdjust)
layout.connect("drag_data_received", self.receiveCallback)
layout.drag_dest_set(gtk.DEST_DEFAULT_MOTION |
gtk.DEST_DEFAULT_HIGHLIGHT |
gtk.DEST_DEFAULT_DROP,
self.toCanvas, gtk.gdk.ACTION_COPY)
self.addImage(gtkxpm.gtk_xpm, 0, 0)
button = gtk.Button("Text Target")
button.show()
button.connect("drag_data_received", self.receiveCallback)
button.drag_dest_set(gtk.DEST_DEFAULT_MOTION |
gtk.DEST_DEFAULT_HIGHLIGHT |
gtk.DEST_DEFAULT_DROP,
self.toButton, gtk.gdk.ACTION_COPY)
box.pack_start(button, False, False, 0)
return box
def addImage(self, xpm, xd, yd):
hadj = self.layout.get_hadjustment()
vadj = self.layout.get_vadjustment()
style = self.window.get_style()
pixmap, mask = gtk.gdk.pixmap_create_from_xpm_d(
self.window.window, style.bg[gtk.STATE_NORMAL], xpm)
image = gtk.Image()
image.set_from_pixmap(pixmap, mask)
button = gtk.Button()
button.add(image)
button.connect("drag_data_get", self.sendCallback)
button.drag_source_set(gtk.gdk.BUTTON1_MASK, self.fromImage,
gtk.gdk.ACTION_COPY)
button.show_all()
# have to adjust for the scrolling of the layout - event location
# is relative to the viewable not the layout size
self.layout.put(button, int(xd+hadj.value), int(yd+vadj.value))
return
def sendCallback(self, widget, context, selection, targetType, eventTime):
if targetType == self.TARGET_TYPE_TEXT:
            now = time.time()
            text = time.ctime(now)  # avoid shadowing the built-in str
            selection.set(selection.target, 8, text)
elif targetType == self.TARGET_TYPE_PIXMAP:
selection.set(selection.target, 8,
string.join(gtkxpm.gtk_xpm, '\n'))
def receiveCallback(self, widget, context, x, y, selection, targetType,
time):
if targetType == self.TARGET_TYPE_TEXT:
label = widget.get_children()[0]
label.set_text(selection.data)
elif targetType == self.TARGET_TYPE_PIXMAP:
self.addImage(string.split(selection.data, '\n'), x, y)
def __init__(self):
self.window = gtk.Window(gtk.WINDOW_TOPLEVEL)
self.window.set_default_size(300, 300)
self.window.connect("destroy", lambda w: gtk.main_quit())
self.window.show()
layout = self.makeLayout()
self.window.add(layout)
def main():
gtk.main()
if __name__ == "__main__":
DragNDropExample()
main()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import configparser
import os
with open(os.path.join(os.path.dirname(__file__), "pygetpapers", "config.ini")) as f:
config_file = f.read()
config = configparser.RawConfigParser(allow_no_value=True)
config.read_string(config_file)
version = config.get("pygetpapers", "version")
with open('README.md') as readme_file:
readme = readme_file.read()
requirements = ['requests', 'pandas',
'lxml', 'xmltodict', 'configargparse', 'habanero', 'arxiv', 'dict2xml', 'tqdm', 'coloredlogs']
setup(
name='pygetpapers',
version=f"{version}",
description='Automated Download of Research Papers from various scientific repositories',
long_description=readme,
author='Ayush Garg',
author_email='ayush@science.org.in',
url='https://github.com/petermr/pygetpapers',
packages=[
'pygetpapers',
],
package_dir={'pygetpapers':
'pygetpapers'},
include_package_data=True,
install_requires=requirements,
license='Apache License',
zip_safe=False,
keywords='research automation',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Natural Language :: English',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
],
entry_points={
'console_scripts': [
'pygetpapers=pygetpapers.pygetpapers:main',
],
},
)
|
from typing import Any, Optional
from automation.models import Automation
from celery import shared_task
from automation.actions.action_mqtt_publish import mqtt_publish
from automation.actions import Triggers
from automation.dataclasses import OnMotionData
from devices.models import Device
def _run_automations(trigger_name: Triggers, data: Optional[Any] = None) -> None:
automations = Automation.objects.filter(trigger_name__contains=[trigger_name.name])
for automation in automations:
actions = automation.actions_mqtt_publish.all()
mqtt_publish(actions, data)
def _motion_data(device_id: str) -> OnMotionData:
device = Device.objects.get(device_id=device_id)
return OnMotionData(device=device)
@shared_task()
def on_motion_detected(*_args, device_id: str) -> None:
d = _motion_data(device_id)
_run_automations(Triggers.ON_MOTION_DETECTED, d)
@shared_task()
def on_motion_left(*_args, device_id: str) -> None:
d = _motion_data(device_id)
_run_automations(Triggers.ON_MOTION_LEFT, d)
@shared_task()
def on_alarm_status_changed(status: bool, device_id: str) -> None:
d = _motion_data(device_id)
trigger = Triggers.ON_ALARM_ON if status else Triggers.ON_ALARM_OFF
_run_automations(trigger, d)
|
# Generated by Django 2.2.2 on 2019-11-27 16:54
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Verification',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('code', models.CharField(max_length=6)),
('username_signature', models.TextField(null=True)),
('code_signature', models.TextField(null=True)),
('verified', models.BooleanField(default=False)),
('recovery', models.BooleanField(default=True)),
('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]
|
from django.contrib import admin
from django.conf import settings
from django.conf.urls.static import static
# from django.contrib.auth import views as auth_views
from django.urls import path, include
from .views import IndexView
urlpatterns = [
path('api/', include('api.urls', namespace='api')),
path('forms/', include('forms.urls', namespace='forms')),
path('inbox/', include('inputs.urls', namespace='inbox')),
path('routes/', include('routes.urls', namespace='routes')),
path('admin/', admin.site.urls),
path('accounts/', include('django.contrib.auth.urls')),
path('', IndexView.as_view()),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from pathlib import Path
import pickle
import numpy as np
import pandas
from paddle.io import Dataset, DataLoader
from parakeet.data.batch import batch_spec, batch_wav
from parakeet.data import dataset
from parakeet.audio import AudioProcessor
class LJSpeech(Dataset):
"""A simple dataset adaptor for the processed ljspeech dataset."""
def __init__(self, root):
self.root = Path(root).expanduser()
meta_data = pandas.read_csv(
str(self.root / "metadata.csv"),
sep="\t",
header=None,
names=["fname", "frames", "samples"])
records = []
for row in meta_data.itertuples():
mel_path = str(self.root / "mel" / (row.fname + ".npy"))
wav_path = str(self.root / "wav" / (row.fname + ".npy"))
records.append((mel_path, wav_path))
self.records = records
def __getitem__(self, i):
mel_name, wav_name = self.records[i]
mel = np.load(mel_name)
wav = np.load(wav_name)
return mel, wav
def __len__(self):
return len(self.records)
class LJSpeechCollector(object):
"""A simple callable to batch LJSpeech examples."""
def __init__(self, padding_value=0.):
self.padding_value = padding_value
def __call__(self, examples):
mels = [example[0] for example in examples]
wavs = [example[1] for example in examples]
mels = batch_spec(mels, pad_value=self.padding_value)
wavs = batch_wav(wavs, pad_value=self.padding_value)
return mels, wavs
class LJSpeechClipCollector(object):
def __init__(self, clip_frames=65, hop_length=256):
self.clip_frames = clip_frames
self.hop_length = hop_length
def __call__(self, examples):
mels = []
wavs = []
for example in examples:
mel_clip, wav_clip = self.clip(example)
mels.append(mel_clip)
wavs.append(wav_clip)
mels = np.stack(mels)
wavs = np.stack(wavs)
return mels, wavs
def clip(self, example):
mel, wav = example
frames = mel.shape[-1]
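        # Choose a random window of clip_frames mel frames; the matching wav
        # slice is the same frame range scaled by hop_length samples/frame.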
start = np.random.randint(0, frames - self.clip_frames)
mel_clip = mel[:, start:start + self.clip_frames]
wav_clip = wav[start * self.hop_length:(start + self.clip_frames) *
self.hop_length]
return mel_clip, wav_clip
|
from typing import Any, Sequence
from click import style
def list2str(seq: Sequence[Any]) -> str:
# Source: https://stackoverflow.com/a/53981846
# seq = [str(s) for s in seq]
seq = [style(str(s), underline=True) for s in seq]
if len(seq) < 3:
return " and ".join(seq)
return ", ".join(seq[:-1]) + ", and " + seq[-1]
|
#!/usr/bin/env python3
# to run this, run the following command from the top level directory output by benchmark
# find . -name "synth-*.log" -print0 | sort -z | xargs -0 tail -n 1 | python dump.py "x" > out.csv
# the regular expression assumes that relevant outputs are in "x-[machine].log"
# where x should be specified as an argument to the script
import re
import sys
import math
# use cases and their directory names
tests = [
"IS", "GD", "CD", "CL", "CH", "SA",
"SJ", "LJ", "CRT-i", "CRT-s", "SYS", "IRQ", "TS", "TS-e", "TS-s", "TS-l", "TS-c"
]
testnames = {
"IS" : "initial_stack",
"GD" : "get_disp",
"CD" : "disp_disabled",
"CL" : "check_low",
"CH" : "check_high",
"SA" : "set_area",
"SJ" : "setjmp-full",
"LJ" : "longjmp-full",
"CRT-i" : "crt0_initregs",
"CRT-s" : "crt0_savevals",
"SYS" : "syscalls_loadnum",
"IRQ" : "cpu_irqoff",
"TS" : "switch_all",
"TS-e" : "switch_enter",
"TS-s" : "switch_save",
"TS-l" : "switch_load",
"TS-c" : "switch_leave"
}
# architectures and their directory names
archs = ["ARM", "MIPS", "x86-64"]
archnames = {
"ARM" : "arm32",
"MIPS" : "mips",
"x86-64" : "amd64"
}
lengths = {}
lengths["ARM"] = {
"IS" : 2,
"GD" : 3,
"CD" : 2,
"CL" : 3,
"CH" : 4,
"SA" : 3,
"SJ" : 12,
"CRT-i" : 0,
"CRT-s" : 4,
"SYS" : 1,
"IRQ" : 1,
"TS-e" : 1,
"TS-s" : 1,
"TS-l" : 1,
"TS-c" : 1
}
lengths["MIPS"] = {
"IS" : 2,
"GD" : 3,
"CD" : 2,
"CL" : 3,
"CH" : 2,
"SA" : 3,
"SJ" : 12,
"CRT-i" : 1,
"CRT-s" : 2,
"SYS" : 1,
"IRQ" : 3,
"TS-e" : 1,
"TS-s" : 1,
"TS-l" : 1,
"TS-c" : 1
}
lengths["x86-64"] = {
"IS" : 1,
"GD" : 2,
"CD" : 1,
"CL" : 1,
"CH" : 1,
"SA" : 4,
"SJ" : 9,
"CRT-i" : 0,
"CRT-s" : 2,
"SYS" : 1,
"IRQ" : 1,
"TS-e" : 1,
"TS-s" : 1,
"TS-l" : 1,
"TS-c" : 1
}
# table to collect measurements
allinfo = {}
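# Each log file contributes a three-line group on stdin from `tail`:
# an "==> path <==" header, the "Execution Time: ...s" line, and a blank
# separator (the blank is absent after the final group).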
while True:
try:
line = input() # header
line2 = input() # time
except EOFError:
break
try:
_ = input() # blank (not present for last case)
except EOFError:
pass
# parse it
    info = re.findall(r"==> ./benchmark-(\d*)/usecase_\w*/(.*)/" + sys.argv[1] + r"-(\w*).log", line)
time = re.findall("Execution Time: ([0-9.]*)s", line2)
if len(info) > 0 and len(time) > 0:
info = info[0]
time = time[0]
# NOTE trim "full"
if info[2].endswith("full"):
info = (info[0], info[1], info[2][:-4])
# stick it in the table
allinfo[info[1]] = allinfo.get(info[1], {})
allinfo[info[1]][info[2]] = allinfo[info[1]].get(info[2], []) + [float(time)]
# print a CSV
def show_csv():
for test in tests:
for arch in archs:
testname = testnames[test]
archname = archnames[arch]
if testname in allinfo and archname in allinfo[testname]:
times = allinfo[testname][archname]
print(test + "," + arch + "," + str(sum(times) / len(times)))
def sig_figs(x, n):
# from stackoverflow
return round(x, -int(math.floor(math.log10(x))) + (n - 1))
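# e.g. sig_figs(1234.5, 2) == 1200.0 and sig_figs(0.04567, 3) == 0.0457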
# print a TeX table
def show_tex(which_tests):
# header
print("\\begin{tabular}{c|" + ("r@{.}l@{\\,}l" * len(which_tests)) + "}")
for test in which_tests:
print(" & \\multicolumn{3}{c}{" + test + "}", end="")
print(" \\\\ \\hline")
for arch in archs:
print(arch, end="")
for test in which_tests:
testname = testnames[test]
archname = archnames[arch]
if testname in allinfo and archname in allinfo[testname]:
times = allinfo[testname][archname]
time = sum(times) / len(times)
if time < 1:
print(" & 0 & %d" % int(time * 100 + 0.5), end="")
elif time < 10:
ipart = int(time)
dpart = time - int(time)
print(" & %d & %d" % (ipart, int(dpart * 10 + 0.5)), end="")
elif time < 100:
print(" & %d &" % int(time + 0.5), end="")
elif time < 1000:
print(" & %d &" % (int(time / 10. + 0.5) * 10), end="")
elif time < 10000:
print(" & %d &" % (int(time / 100. + 0.5) * 100), end="")
else:
exit(0)
print(" & (%d)" % lengths[arch][test], end="")
else:
print(" & \\multicolumn{3}{c}{---}", end="")
print(" \\\\")
print("\\end{tabular}")
show_csv()
show_tex(["IS", "GD", "CD", "CL", "CH"])
show_tex(["SA", "SJ", "LJ", "CRT-i", "CRT-s", "SYS"])
show_tex(["IRQ", "TS", "TS-e", "TS-s", "TS-l", "TS-c"])
|
from biothings.web import connections
import logging
logging.basicConfig(level='DEBUG')
def test_es_1():
client = connections.es.get_client('localhost:9200')
print(client.info())
def test_es_2(): # see if the client is reused
client1 = connections.es.get_client('localhost:9200')
client2 = connections.es.get_client('localhost:9200', timeout=20)
client3 = connections.es.get_client('localhost:9200', timeout=20)
print(id(client1))
print(id(client2))
print(id(client3))
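    # With connection caching, client2 and client3 (identical host and
    # kwargs) should share an id; client1 may differ since it was created
    # without the timeout kwarg.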
def test_es_3(): # async
import asyncio
connections.es.get_async_client('localhost:9200')
loop = asyncio.get_event_loop()
loop.run_forever()
def test_es_4():
import asyncio
connections.es.get_async_client('localhost:9200') # es7
connections.es.get_async_client('localhost:9201') # es6
loop = asyncio.get_event_loop()
loop.run_forever()
def test_mongo():
client = connections.mongo.get_client("mongodb://su05:27017/genedoc")
collection = client["mygene_allspecies_20210510_yqynv8db"]
print(next(collection.find()))
def test_sql():
client = connections.sql.get_client("mysql+pymysql://<USER>:<PASSWORD>@localhost/album")
result = client.execute("SELECT * FROM track")
print(result.all())
if __name__ == '__main__':
test_es_4()
|
*** Error at (7,9): can not reference a non-static field 'i' from static method 'main'
*** Error at (9,24): can not reference a non-static field 'i' from static method 'main'
*** Error at (13,24): can not reference a non-static field 'i' from static method 'main'
|
"""
Cosmology.py
Author: Jordan Mirocha
Affiliation: University of Colorado at Boulder
Created on 2010-03-01.
Description:
"""
import os
import numpy as np
from scipy.misc import derivative
from scipy.optimize import fsolve
from scipy.integrate import quad, ode
from ..util.Math import interp1d
from ..util.ParameterFile import ParameterFile
from .InitialConditions import InitialConditions
from .Constants import c, G, km_per_mpc, m_H, m_He, sigma_SB, g_per_msun, \
cm_per_mpc, cm_per_kpc, k_B, m_p
_ares_to_planck = \
{
'omega_m_0': 'omegam*',
'omega_b_0': 'omegabh2',
'hubble_0': 'H_0',
'omega_l_0': 'omegal*',
'sigma_8': 'sigma8',
'primordial_index': 'ns',
}
class Cosmology(InitialConditions):
def __init__(self, pf=None, **kwargs):
if pf is not None:
self.pf = pf
else:
self.pf = ParameterFile(**kwargs)
# Load "raw" cosmological parameters
########################################################################
if self.pf['cosmology_name'] != 'user':
self._load_cosmology()
else:
self.omega_m_0 = self.pf['omega_m_0']
self.omega_b_0 = self.pf['omega_b_0']
self.hubble_0 = self.pf['hubble_0'] * 100. / km_per_mpc
self.omega_l_0 = self.pf['omega_l_0']
self.sigma_8 = self.sigma8 = self.pf['sigma_8']
self.omega_cdm_0 = self.omega_m_0 - self.omega_b_0
self.h70 = self.pf['hubble_0']
self.helium_by_mass = self.Y = self.pf['helium_by_mass']
####################################################################
# Everything beyond this point is a derived quantity of some sort.
self.cmb_temp_0 = self.pf['cmb_temp_0']
self.approx_highz = self.pf['approx_highz']
self.approx_lowz = False
self.primordial_index = self.pf['primordial_index']
self.CriticalDensityNow = self.rho_crit_0 = \
(3. * self.hubble_0**2) / (8. * np.pi * G)
self.mean_density0 = self.omega_m_0 * self.rho_crit_0 \
* cm_per_mpc**3 / g_per_msun
self.helium_by_number = self.y = 1. / (1. / self.Y - 1.) / 4.
self.X = 1. - self.Y
self.g_per_baryon = self.g_per_b = m_H / (1. - self.Y) / (1. + self.y)
self.b_per_g = 1. / self.g_per_baryon
self.baryon_per_Msun = self.b_per_msun = g_per_msun / self.g_per_baryon
# Decoupling (gas from CMB) redshift
self.zdec = 150. * (self.omega_b_0 * self.h70**2 / 0.023)**0.4 - 1.
# Matter/Lambda equality
#if self.omega_l_0 > 0:
self.a_eq = (self.omega_m_0 / self.omega_l_0)**(1./3.)
self.z_eq = 1. / self.a_eq - 1.
# Common
self.Omh2 = self.omega_b_0 * self.h70**2
# Hydrogen, helium, electron, and baryon densities today (z = 0)
self.rho_b_z0 = self.MeanBaryonDensity(0)
self.rho_m_z0 = self.MeanMatterDensity(0)
self.rho_cdm_z0 = self.rho_m_z0 - self.rho_b_z0
self.nH0 = (1. - self.Y) * self.rho_b_z0 / m_H
self.nHe0 = self.y * self.nH0
self.ne0 = self.nH0 + 2. * self.nHe0
#self.n0 = self.nH0 + self.nHe0 + self.ne0
self.delta_c0 = 1.686
self.TcmbNow = self.cmb_temp_0
self.fbaryon = self.omega_b_0 / self.omega_m_0
self.fcdm = self.omega_cdm_0 / self.omega_m_0
self.fbar_over_fcdm = self.fbaryon / self.fcdm
# Used in hmf
#self.pars_for_hmf = {'omega_lambda':self.omega_l_0,
# 'omega_b':self.omega_b_0,
# 'omega_M':self.omega_m_0,
# 'sigma_8':self.sigma8,
# 'n': self.primordial_index}
def nH(self, z):
return self.nH0 * (1. + z)**3
def nHe(self, z):
return self.nHe0 * (1. + z)**3
@property
def path_ARES(self):
if not hasattr(self, '_path_ARES'):
self._path_ARES = os.environ.get('ARES')
return self._path_ARES
@property
def path_Planck(self):
if not hasattr(self, '_path_Planck'):
name = self.pf['cosmology_name'].replace('planck_', '')
self._path_Planck = self.path_ARES \
+ '/input/planck/base/plikHM_{}'.format(name)
return self._path_Planck
def _load_cosmology(self):
if self.pf['cosmology_name'].startswith('planck'):
self._load_planck()
else:
            raise NotImplementedError('Only know how to read Planck cosmologies!')
def _load_planck(self):
name = self.pf['cosmology_name'].replace('planck_', '')
path = self.path_Planck
prefix = 'base_plikHM_{}'.format(name)
if self.pf['cosmology_id'] == 'best':
data = {}
with open('{}/{}.minimum'.format(path, prefix), 'r') as f:
for i, line in enumerate(f):
if i < 2:
continue
if not line.strip():
continue
if line.startswith(' -log(Like)'):
break
# Parse line
_line = line.split()
row = int(_line[0])
val = float(_line[1])
name = _line[2]
name_latex = str(_line[3:])
data[name] = val
self._planck_raw = data
self.h70 = data['H0'] / 100.
self.omega_b_0 = data['omegabh2'] / self.h70**2
self.omega_cdm_0 = data['omegach2'] / self.h70**2
self.hubble_0 = data['H0'] / km_per_mpc
self.omega_l_0 = data['omegal']
self.omega_k_0 = data['omegak']
self.omega_m_0 = 1. - self.omega_l_0 - self.omega_k_0
self.sigma_8 = self.sigma8 = data['sigma8']
self.helium_by_mass = self.Y = data['yhe']
else:
num = self.pf['cosmology_id']
assert type(num) in [int, np.int32, np.int64]
##
# Load chains as one long concatenated super-array
data = []
for filenum in range(1, 5):
chain_fn = '{}/{}_{}.txt'.format(path, prefix, filenum)
data.append(np.loadtxt(chain_fn, unpack=True))
data = np.concatenate(data, axis=1)
##
# Load parameter names
pars = []
for line in open('{}/{}.paramnames'.format(path, prefix)):
if not line.strip():
continue
chunks = line.split()
pars.append(chunks[0].strip().replace('*', ''))
pars_in = {}
for line in open('{}/{}.inputparams'.format(path, prefix)):
if not line.strip():
continue
if not line.startswith('param['):
continue
_pre, _post = line.split('=')
pre = _pre.strip()
post = _post.split()
pars_in[pre.replace('param', '')[1:-1]] = \
np.array([float(elem) for elem in post])
##
# Just need to map to right array element. Remember that first
# two rows in Planck chains are (weights -loglike).
self.h70 = data[pars.index('H0')+2,num] / 100.
self.omega_b_0 = data[pars.index('omegabh2')+2,num] / self.h70**2
self.omega_cdm_0 = data[pars.index('omegach2')+2,num] / self.h70**2
self.hubble_0 = data[pars.index('H0')+2,num] / km_per_mpc
self.omega_l_0 = data[pars.index('omegal')+2,num]
# In another file
if 'omegak' not in pars:
self.omega_k_0 = pars_in['omegak'][0]
else:
self.omega_k_0 = data[pars.index('omegak')+2]
self.omega_m_0 = 1. - self.omega_l_0 - self.omega_k_0
self.sigma_8 = self.sigma8 = data[pars.index('sigma8')+2,num]
if 'yhe' not in pars:
self.helium_by_mass = self.Y = pars_in['yhe'][0]
else:
self.helium_by_mass = data[pars.index('yhe')+2,num]
if self.pf['verbose']:
s = "# Set cosmological parameters to values in {}th element of".format(num)
s += " concatenated array made from the following files:"
print(s)
path_str = path.replace(self.path_ARES, '$ARES')
print("# {}_{}_?.txt".format(path_str, prefix))
return
# Can override cosmological parameters using specified cosmologies.
# Cosmology names:
# 'plikHM_TTTEEE_lowl_lowE_lensing/base_plikHM_TTTEEE_lowl_lowE_lensing_4'
# Cosmology numbers are the row numbers in the cosmo file.
# Checks if a folder/cosmology name is provided
if self.pf['cosmology_name'] is not None:
# Checks if a MCMC row number is provided
if self.pf['cosmology_number'] is not None:
self.pf['cosmology_number']=int(self.pf['cosmology_number'])
# If a MCMC row number is provided, saves the cosmology
# in a variable pb as a string of the form "name-number"
pb = '{}-{}'.format(self.pf['cosmology_name'],
str(self.pf['cosmology_number']).zfill(5))
else:
self.pf['cosmology_number'] = 0
# If no row number is provided, uses the first row as default"
pb = '{}-{}'.format(self.pf['cosmology_name'],
str(self.pf['cosmology_number']).zfill(5))
self.cosmology_prefix = pb
else:
if self.pf['cosmology_number'] is not None:
print('No cosmology name provided')
self.cosmology_prefix = None
# If a hmf table is specified and has the words Cosmology
# and Number in it, it creates a corresponding cosmology prefix.
cosmology_marker = None
number_marker = None
if self.pf['hmf_table'] is not None:
for i in range(len(self.pf['hmf_table'])):
if self.pf['hmf_table'][i:i+9] == 'Cosmology':
cosmology_marker = i
if self.pf['hmf_table'][i:i+6] == 'Number':
number_marker = i
if cosmology_marker is not None and number_marker is not None:
cosmology_name, cosmology_number =\
self.pf['hmf_table'][cosmology_marker + 10:number_marker - 1],\
self.pf['hmf_table'][number_marker + 7:number_marker + 12]
self.cosmology_prefix =\
cosmology_name + '-' + cosmology_number
print('Cosmology recognized from the hmf table')
else:
if self.pf['cosmology_name'] is None:
print('Cosmology not recognized from hmf table name')
# Creates the path variable for the MCMC chains
#ARES = self.ARES = os.environ.get('ARES')
cosmo_path = (self.path_ARES + '/input/cosmo_params/COM_CosmoParams_base-'
+ 'plikHM-TTTEEE-lowl-lowE_R3.00/base/')
# If no hmf table is specified but a matching table exists
if self.pf['cosmology_name'] is not None:
if self.pf['hmf_table'] is None:
if self.pf['hmf_cosmology_location'] is not None:
self.pf['hmf_table'] = (self.pf['hmf_cosmology_location']
+ '/{}.hdf5'.format(self.pf['cosmology_number']))
if self.cosmology_prefix:
cosmo_file = (cosmo_path
+ self.cosmology_prefix[:-6]
+ '.txt')
# Finds the specific cosmological row
# The first two rows are not MCMC chains
# Reset the `pf` elements. Even though they won't reach other
# ARES objects, the run_CosmoRec script needs the pf to
# be accurate.
cosmo_rows = np.loadtxt(cosmo_file)[:,2:]
row = cosmo_rows[self.pf['cosmology_number']]
self.omega_m_0 = self.pf['omega_m_0'] = row[29]
self.omega_l_0 = self.pf['omega_l_0'] = row[28]
self.sigma_8 = self.sigma8 = self.pf['sigma_8'] = row[33]
self.hubble_0 = row[27] / km_per_mpc
self.pf['hubble_0'] = row[27] / 100.
self.omega_b_0 = self.pf['omega_b_0'] = row[0]*(row[27]/100.)**-2.
self.omega_cdm_0 = self.omega_m_0 - self.omega_b_0
else:
self.omega_m_0 = self.pf['omega_m_0']
self.omega_b_0 = self.pf['omega_b_0']
self.hubble_0 = self.pf['hubble_0'] * 100. / km_per_mpc
self.omega_l_0 = self.pf['omega_l_0']
self.sigma_8 = self.sigma8 = self.pf['sigma_8']
self.omega_cdm_0 = self.omega_m_0 - self.omega_b_0
def get_prefix(self):
name = self.pf['cosmology_name']
if self.pf['cosmology_id'] is None:
pass
elif type(self.pf['cosmology_id']) == str:
name += '_' + self.pf['cosmology_id']
else:
assert type(self.pf['cosmology_id']) in [int, np.int32, np.int64]
name += '_{}'.format(str(self.pf['cosmology_id']).zfill(5))
return name
@property
def inits(self):
if not hasattr(self, '_inits'):
self._inits = self.get_inits_rec()
return self._inits
def TimeToRedshiftConverter(self, t_i, t_f, z_i):
"""
        Convert an elapsed time to a redshift using the high-redshift (matter-dominated) approximation.
"""
return ((1. + z_i)**-1.5 + (3. * self.hubble_0 *
np.sqrt(self.omega_m_0) * (t_f - t_i) / 2.))**(-2. / 3.) - 1.
def LookbackTime(self, z_i, z_f):
"""
Returns lookback time from z_i to z_f in seconds, where z_i < z_f.
"""
return self.t_of_z(z_i) - self.t_of_z(z_f)
def TCMB(self, z):
return self.cmb_temp_0 * (1. + z)
def UCMB(self, z):
""" CMB energy density. """
return 4.0 * sigma_SB * self.TCMB(z)**4 / c
def t_of_z(self, z):
"""
Time-redshift relation for a matter + lambda Universe.
References
----------
Ryden, Equation 6.28
Returns
-------
Time since Big Bang in seconds.
"""
#if self.approx_highz:
# pass
#elif self.approx_lowz:
# pass
# Full calculation
a = 1. / (1. + z)
t = (2. / 3. / np.sqrt(1. - self.omega_m_0)) \
* np.log((a / self.a_eq)**1.5 + np.sqrt(1. + (a / self.a_eq)**3.)) \
/ self.hubble_0
return t
def z_of_t(self, t):
C = np.exp(1.5 * self.hubble_0 * t * np.sqrt(1. - self.omega_m_0))
a = self.a_eq * (C**2 - 1.)**(2./3.) / (2. * C)**(2./3.)
return (1. / a) - 1.
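    # Illustrative check: z_of_t analytically inverts t_of_z, so
    # z_of_t(t_of_z(z)) should recover z to floating-point precision.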
def _Tgas_CosmoRec(self, z):
if not hasattr(self, '_Tgas_CosmoRec_'):
self._Tgas_CosmoRec_ = interp1d(self.inits['z'], self.inits['Tk'],
kind='cubic', bounds_error=False)
return self._Tgas_CosmoRec_(z)
def Tgas(self, z):
"""
Gas kinetic temperature at z in the absence of heat sources.
"""
if self.pf['approx_thermal_history'] == 'piecewise':
if type(z) == np.ndarray:
T = np.zeros_like(z)
hiz = z >= self.zdec
loz = z < self.zdec
T[hiz] = self.TCMB(z[hiz])
T[loz] = self.TCMB(self.zdec) * (1. + z[loz])**2 \
/ (1. + self.zdec)**2
return T
if z >= self.zdec:
return self.TCMB(z)
else:
return self.TCMB(self.zdec) * (1. + z)**2 / (1. + self.zdec)**2
elif self.pf['approx_thermal_history']:
return np.interp(z, self.thermal_history['z'],
self.thermal_history['Tk']) * 1.
#if not hasattr(self, '_Tgas'):
# self._Tgas = interp1d(self.thermal_history['z'],
# self.thermal_history['Tk'], kind='cubic',
# bounds_error=False)
#
#return self._Tgas(z)
elif not self.pf['approx_thermal_history']:
if not hasattr(self, '_Tgas'):
self._Tgas = interp1d(self.inits['z'], self.inits['Tk'],
kind='cubic', bounds_error=False)
# Make sure this is a float
return self._Tgas(z) * 1.
@property
def thermal_history(self):
if not hasattr(self, '_thermal_history'):
if not self.pf['approx_thermal_history']:
self._thermal_history = self.inits
return self._thermal_history
z0 = self.inits['z'][-2]
solver = ode(self.cooling_rate)
solver.set_integrator('vode', method='bdf')
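            # Integrate dTk/dz backward from z0 down toward zf = 2 with a
            # stiff (BDF) scheme, recording (z, Tk) at each step below.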
solver.set_initial_value([np.interp(z0, self.inits['z'],
self.inits['Tk'])], z0)
dz = self.pf['inits_Tk_dz']
zf = final_redshift = 2.
zall = []; Tall = []
while solver.successful() and solver.t > zf:
#print(solver.t, solver.y[0])
if solver.t-dz < 0:
break
zall.append(solver.t)
Tall.append(solver.y[0])
solver.integrate(solver.t-dz)
self._thermal_history = {}
self._thermal_history['z'] = np.array(zall)[-1::-1]
self._thermal_history['Tk'] = np.array(Tall)[-1::-1]
self._thermal_history['xe'] = 2e-4 * np.ones_like(zall)
return self._thermal_history
@property
def cooling_pars(self):
if not hasattr(self, '_cooling_pars'):
self._cooling_pars = [self.pf['inits_Tk_p{}'.format(i)] for i in range(6)]
return self._cooling_pars
def cooling_rate(self, z, T=None):
"""
This is dTk/dz.
"""
if self.pf['approx_thermal_history'] in ['exp', 'tanh', 'exp+gauss', 'exp+pl']:
# This shouldn't happen! Argh.
if type(z) is np.ndarray:
assert np.all(np.isfinite(z))
else:
if z < 0:
return np.nan
if T is None:
T = self.Tgas(z)
t = self.t_of_z(z)
dtdz = self.dtdz(z)
return (T / t) * self.log_cooling_rate(z) * -1. * dtdz
elif self.pf['approx_thermal_history'] in ['propto_xe']:
#raise NotImplemented('help')
if type(z) is np.ndarray:
assert np.all(np.isfinite(z))
else:
if z < 0:
return np.nan
# Start from CosmoRec
##
# Need to do this self-consistently?
##s
#func = lambda zz: np.interp(zz, self.inits['z'], self.inits['Tk'])
dTdz = derivative(self._Tgas_CosmoRec, z, dx=1e-2)
xe = np.interp(z, self.inits['z'], self.inits['xe'])
#raise ValueError('help')
pars = self.cooling_pars
xe_cool = np.maximum(1. - xe, 0.0)
mult = pars[0] * ((1. + z) / pars[1])**pars[2]
#print(z, T, self.dtdz(z), self.HubbleParameter(z))
dtdz = self.dtdz(z)
if T is None:
T = self.Tgas(z)
hubble = 2. * self.HubbleParameter(z) * T * dtdz
return dTdz + xe_cool * mult
else:
return derivative(self.Tgas, z)
def log_cooling_rate(self, z):
if self.pf['approx_thermal_history'] == 'exp':
pars = self.cooling_pars
norm = -(2. + pars[2]) # Must be set so high-z limit -> -2/3
return norm * (1. - np.exp(-(z / pars[0])**pars[1])) / 3. \
+ pars[2] / 3.
elif self.pf['approx_thermal_history'] == 'exp+gauss':
pars = self.cooling_pars
return 2. * (1. - np.exp(-(z / pars[0])**pars[1])) / 3. \
- (4./3.) * (1. + pars[2] * np.exp(-((z - pars[3]) / pars[4])**2))
elif self.pf['approx_thermal_history'] == 'tanh':
pars = self.cooling_pars
return (-2./3.) - (2./3.) * 0.5 * (np.tanh((pars[0] - z) / pars[1]) + 1.)
elif self.pf['approx_thermal_history'] == 'exp+pl':
pars = self.cooling_pars
norm = -(2. + pars[2]) # Must be set so high-z limit -> -2/3
exp = norm * (1. - np.exp(-(z / pars[0])**pars[1])) / 3. \
+ pars[2] / 3.
pl = pars[4] * ((1. + z) / pars[0])**pars[5]
            total = exp + pl
            # Above z ~ 1100 the gas is still thermally coupled to the CMB,
            # so pin the logarithmic cooling rate to the adiabatic value -2/3.
            if type(total) is np.ndarray:
                total[z >= 1100] = -2./3.
            elif z >= 1100:
                total = -2. / 3.
            return total
else:
return -1. * self.cooling_rate(z, self.Tgas(z)) \
* (self.t_of_z(z) / self.Tgas(z)) / self.dtdz(z)
@property
def z_dec(self):
if not hasattr(self, '_z_dec'):
to_min = lambda zz: np.abs(self.log_cooling_rate(zz) + 1.)
self._z_dec = fsolve(to_min, 150.)[0]
return self._z_dec
@property
def Tk_dec(self):
return self.Tgas(self.z_dec)
def EvolutionFunction(self, z):
return self.omega_m_0 * (1.0 + z)**3 + self.omega_l_0
def HubbleParameter(self, z):
if self.approx_highz:
return self.hubble_0 * np.sqrt(self.omega_m_0) * (1. + z)**1.5
return self.hubble_0 * np.sqrt(self.EvolutionFunction(z))
def HubbleLength(self, z):
return c / self.HubbleParameter(z)
def HubbleTime(self, z):
return 1. / self.HubbleParameter(z)
def OmegaMatter(self, z):
if self.approx_highz:
return 1.0
return self.omega_m_0 * (1. + z)**3 / self.EvolutionFunction(z)
def OmegaLambda(self, z):
if self.approx_highz:
return 0.0
return self.omega_l_0 / self.EvolutionFunction(z)
def MeanMatterDensity(self, z):
return self.OmegaMatter(z) * self.CriticalDensity(z)
def MeanBaryonDensity(self, z):
return (self.omega_b_0 / self.omega_m_0) * self.MeanMatterDensity(z)
def MeanHydrogenNumberDensity(self, z):
return (1. - self.Y) * self.MeanBaryonDensity(z) / m_H
def MeanHeliumNumberDensity(self, z):
return self.Y * self.MeanBaryonDensity(z) / m_He
def MeanBaryonNumberDensity(self, z):
return self.MeanBaryonDensity(z) / (m_H * self.MeanHydrogenNumberDensity(z) +
4. * m_H * self.y * self.MeanHeliumNumberDensity(z))
def CriticalDensity(self, z):
return (3.0 * self.HubbleParameter(z)**2) / (8.0 * np.pi * G)
def dtdz(self, z):
return 1. / self.HubbleParameter(z) / (1. + z)
def LuminosityDistance(self, z):
"""
Returns luminosity distance in cm. Assumes we mean distance from
us (z = 0).
"""
integr = quad(lambda z: self.hubble_0 / self.HubbleParameter(z),
0.0, z)[0]
return integr * c * (1. + z) / self.hubble_0
def DifferentialRedshiftElement(self, z, dl):
"""
Given a redshift and a LOS distance, return the corresponding dz.
Parameters
----------
z0 : int, float
Redshift.
dl : int, float
Distance in Mpc.
"""
if not self.approx_highz:
            raise NotImplementedError('sorry!')
dz = ((1. + z)**-0.5 \
- dl * cm_per_mpc * self.hubble_0 * np.sqrt(self.omega_m_0) / 2. / c)**-2 \
- (1. + z)
return dz
def DeltaZed(self, z0, dR):
f = lambda z2: self.ComovingRadialDistance(z0, z2) / cm_per_mpc - dR
return fsolve(f, x0=z0+0.1)[0] - z0
def ComovingRadialDistance(self, z0, z):
"""
Return comoving distance between redshift z0 and z, z0 < z.
"""
if self.approx_highz:
return 2. * c * ((1. + z0)**-0.5 - (1. + z)**-0.5) \
/ self.hubble_0 / np.sqrt(self.omega_m_0)
# Otherwise, do the integral - normalize to H0 for numerical reasons
integrand = lambda z: self.hubble_0 / self.HubbleParameter(z)
return c * quad(integrand, z0, z)[0] / self.hubble_0
def ProperRadialDistance(self, z0, z):
return self.ComovingRadialDistance(z0, z) / (1. + z0)
def ComovingLineElement(self, z):
"""
Comoving differential line element at redshift z.
"""
return c / self.HubbleParameter(z)
def ProperLineElement(self, z):
"""
Proper differential line element at redshift z (i.e. dl/dz).
"""
return self.ComovingLineElement(z) / (1. + z)
def dldz(self, z):
""" Proper differential line element. """
return self.ProperLineElement(z)
def CriticalDensityForCollapse(self, z):
"""
Generally denoted (in LaTeX format) \Delta_c, fit from
Bryan & Norman (1998).
"""
d = self.OmegaMatter(z) - 1.
return 18. * np.pi**2 + 82. * d - 39. * d**2
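    # Limiting case: at high z, OmegaMatter -> 1 so d -> 0 and
    # Delta_c -> 18*pi^2 ~= 178, the Einstein-de Sitter value.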
def ProjectedVolume(self, z, angle, dz=1.):
"""
        Compute the co-moving volume of a spherical shell subtended by
        `angle` and of thickness `dz`.
        Parameters
        ----------
        z : int, float
            Redshift of shell center.
        angle : int, float
            Angular scale in degrees.
dz : int, float
Shell thickness in differential redshift element.
Returns
-------
Volume in comoving Mpc^3.
"""
d_cm = self.ComovingRadialDistance(0., z)
angle_rad = (np.pi / 180.) * angle
dA = angle_rad * d_cm
dldz = quad(self.ComovingLineElement, z-0.5*dz, z+0.5*dz)[0]
return dA**2 * dldz / cm_per_mpc**3
def JeansMass(self, z, Tgas=None, mu=0.6):
if Tgas is None:
Tgas = self.Tgas(z)
k_J = (2. * k_B * Tgas / 3. / mu / m_p)**-0.5 \
* np.sqrt(self.OmegaMatter(z)) * self.hubble_0
l_J = 2. * np.pi / k_J
return 4. * np.pi * (l_J / 2)**3 * self.rho_b_z0 / 3. / g_per_msun
def ComovingLengthToAngle(self, z, R):
"""
Convert a length scale (co-moving) to an observed angle [arcmin].
"""
f = lambda ang: self.AngleToComovingLength(z, ang) - R
return fsolve(f, x0=0.1)[0]
def AngleToComovingLength(self, z, angle):
return self.AngleToProperLength(z, angle) * (1. + z)
def AngleToProperLength(self, z, angle):
"""
        Convert an angle to a proper length-scale at the observed redshift.
Parameters
----------
z : int, float
Redshift of interest
angle : int, float
Angle in arcminutes.
Returns
-------
Length scale in Mpc.
"""
        d = self.LuminosityDistance(z) / (1. + z)**2  # cm; angular diameter distance
in_rad = (angle / 60.) * np.pi / 180.
x = np.tan(in_rad) * d / cm_per_mpc
return x
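# --- Usage sketch (not part of the original module) ---
# The methods above belong to a cosmology class defined earlier in this file;
# `cosm` below stands for a hypothetical instance of it.
#   dL = cosm.LuminosityDistance(2.0)             # luminosity distance in cm
#   dC = cosm.ComovingRadialDistance(0., 2.0)     # comoving distance in cm
#   dC_Mpc = dC / cm_per_mpc                      # Mpc, via the module constant
#   theta = cosm.ComovingLengthToAngle(2.0, 10.)  # arcmin subtended by 10 cMpc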
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import re
import sys
import argparse
import ipaddress
import syslog
import json
import time
from enum import Enum
from swsssdk import ConfigDBConnector
os.environ['PYTHONUNBUFFERED']='True'
PREFIX_SEPARATOR = '/'
IPV6_SEPARATOR = ':'
MIN_SCAN_INTERVAL = 10 # Every 10 seconds
MAX_SCAN_INTERVAL = 3600 # An hour
class Level(Enum):
ERR = 'ERR'
INFO = 'INFO'
DEBUG = 'DEBUG'
def __str__(self):
return self.value
report_level = syslog.LOG_ERR
def set_level(lvl):
global report_level
if (lvl == Level.INFO):
report_level = syslog.LOG_INFO
if (lvl == Level.DEBUG):
report_level = syslog.LOG_DEBUG
def print_message(lvl, *args):
if (lvl <= report_level):
msg = ""
for arg in args:
msg += " " + str(arg)
print(msg)
syslog.syslog(lvl, msg)
def add_prefix(ip):
if ip.find(IPV6_SEPARATOR) == -1:
ip = ip + PREFIX_SEPARATOR + "32"
else:
ip = ip + PREFIX_SEPARATOR + "128"
return ip
def add_prefix_ifnot(ip):
return ip if ip.find(PREFIX_SEPARATOR) != -1 else add_prefix(ip)
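# Quick illustration of the two helpers above:
#   add_prefix_ifnot("10.0.0.1")    -> "10.0.0.1/32"
#   add_prefix_ifnot("fc00::1")     -> "fc00::1/128"
#   add_prefix_ifnot("10.0.0.0/24") -> "10.0.0.0/24"  (already has a prefix)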
def is_local(ip):
t = ipaddress.ip_address(ip.split("/")[0].decode('utf-8'))
return t.is_link_local
def is_default_route(ip):
t = ipaddress.ip_address(ip.split("/")[0].decode('utf-8'))
return t.is_unspecified and ip.split("/")[1] == "0"
def cmps(s1, s2):
if (s1 == s2):
return 0
if (s1 < s2):
return -1
return 1
def do_diff(t1, t2):
t1_x = t2_x = 0
t1_miss = []
t2_miss = []
    t1_len = len(t1)
    t2_len = len(t2)
while t1_x < t1_len and t2_x < t2_len:
d = cmps(t1[t1_x], t2[t2_x])
if (d == 0):
t1_x += 1
t2_x += 1
elif (d < 0):
t1_miss.append(t1[t1_x])
t1_x += 1
else:
t2_miss.append(t2[t2_x])
t2_x += 1
while t1_x < t1_len:
t1_miss.append(t1[t1_x])
t1_x += 1
while t2_x < t2_len:
t2_miss.append(t2[t2_x])
t2_x += 1
return t1_miss, t2_miss
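# Illustration: do_diff expects both inputs pre-sorted (as returned by the
# get_* helpers below) and returns the entries unique to each side, e.g.
#   do_diff(["10.0.0.0/24", "10.0.1.0/24"], ["10.0.1.0/24", "10.0.2.0/24"])
#   -> (["10.0.0.0/24"], ["10.0.2.0/24"])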
def get_routes():
db = ConfigDBConnector()
db.db_connect('APPL_DB')
print_message(syslog.LOG_DEBUG, "APPL DB connected for routes")
keys = db.get_keys('ROUTE_TABLE')
valid_rt = []
for k in keys:
if not is_local(k):
valid_rt.append(add_prefix_ifnot(k.lower()))
print_message(syslog.LOG_DEBUG, json.dumps({"ROUTE_TABLE": sorted(valid_rt)}, indent=4))
return sorted(valid_rt)
def get_route_entries():
db = ConfigDBConnector()
db.db_connect('ASIC_DB')
print_message(syslog.LOG_DEBUG, "ASIC DB connected")
keys = db.get_keys('ASIC_STATE:SAI_OBJECT_TYPE_ROUTE_ENTRY', False)
rt = []
for k in keys:
e = k.lower().split("\"", -1)[3]
if not is_local(e):
rt.append(e)
print_message(syslog.LOG_DEBUG, json.dumps({"ASIC_ROUTE_ENTRY": sorted(rt)}, indent=4))
return sorted(rt)
def get_interfaces():
db = ConfigDBConnector()
db.db_connect('APPL_DB')
print_message(syslog.LOG_DEBUG, "APPL DB connected for interfaces")
intf = []
keys = db.get_keys('INTF_TABLE')
for k in keys:
lst = re.split(':', k.lower(), maxsplit=1)
if len(lst) == 1:
# No IP address in key; ignore
continue
ip = add_prefix(lst[1].split("/", -1)[0])
if not is_local(ip):
intf.append(ip)
print_message(syslog.LOG_DEBUG, json.dumps({"APPL_DB_INTF": sorted(intf)}, indent=4))
return sorted(intf)
def filter_out_local_interfaces(keys):
rt = []
local_if = set(['eth0', 'lo', 'docker0'])
db = ConfigDBConnector()
db.db_connect('APPL_DB')
for k in keys:
e = db.get_entry('ROUTE_TABLE', k)
if not e:
# Prefix might have been added. So try w/o it.
e = db.get_entry('ROUTE_TABLE', k.split("/")[0])
if not e or (e['ifname'] not in local_if):
rt.append(k)
return rt
def filter_out_default_routes(lst):
upd = []
for rt in lst:
if not is_default_route(rt):
upd.append(rt)
return upd
def check_routes():
intf_appl_miss = []
rt_appl_miss = []
rt_asic_miss = []
results = {}
err_present = False
rt_appl = get_routes()
rt_asic = get_route_entries()
intf_appl = get_interfaces()
# Diff APPL-DB routes & ASIC-DB routes
rt_appl_miss, rt_asic_miss = do_diff(rt_appl, rt_asic)
# Check missed ASIC routes against APPL-DB INTF_TABLE
_, rt_asic_miss = do_diff(intf_appl, rt_asic_miss)
rt_asic_miss = filter_out_default_routes(rt_asic_miss)
# Check APPL-DB INTF_TABLE with ASIC table route entries
intf_appl_miss, _ = do_diff(intf_appl, rt_asic)
if (len(rt_appl_miss) != 0):
rt_appl_miss = filter_out_local_interfaces(rt_appl_miss)
if (len(rt_appl_miss) != 0):
results["missed_ROUTE_TABLE_routes"] = rt_appl_miss
err_present = True
if (len(intf_appl_miss) != 0):
results["missed_INTF_TABLE_entries"] = intf_appl_miss
err_present = True
if (len(rt_asic_miss) != 0):
results["Unaccounted_ROUTE_ENTRY_TABLE_entries"] = rt_asic_miss
err_present = True
if err_present:
print_message(syslog.LOG_ERR, "results: {", json.dumps(results, indent=4), "}")
print_message(syslog.LOG_ERR, "Failed. Look at reported mismatches above")
return -1
else:
print_message(syslog.LOG_INFO, "All good!")
return 0
def main(argv):
interval = 0
parser=argparse.ArgumentParser(description="Verify routes between APPL-DB & ASIC-DB are in sync")
parser.add_argument('-m', "--mode", type=Level, choices=list(Level), default='ERR')
parser.add_argument("-i", "--interval", type=int, default=0, help="Scan interval in seconds")
args = parser.parse_args()
set_level(args.mode)
if args.interval:
if (args.interval < MIN_SCAN_INTERVAL):
interval = MIN_SCAN_INTERVAL
elif (args.interval > MAX_SCAN_INTERVAL):
interval = MAX_SCAN_INTERVAL
else:
interval = args.interval
while True:
ret = check_routes()
if interval:
time.sleep(interval)
else:
sys.exit(ret)
if __name__ == "__main__":
main(sys.argv[1:])
|
import time
import os
import requests
import unittest
import numpy as np
import json
import sys
import numba
import uproot
import hepaccelerate
import hepaccelerate.backend_cpu as backend_cpu
import hepaccelerate.kernels as kernels
from hepaccelerate.utils import Results, Dataset, Histogram, choose_backend, LumiMask
USE_CUDA = int(os.environ.get("HEPACCELERATE_CUDA", 0)) == 1
def download_file(filename, url):
"""
Download an URL to a file
"""
print("downloading {0}".format(url))
with open(filename, "wb") as fout:
response = requests.get(url, stream=True, verify=False)
response.raise_for_status()
# Write response data to file
iblock = 0
for block in response.iter_content(4096):
if iblock % 10000 == 0:
sys.stdout.write(".")
sys.stdout.flush()
iblock += 1
fout.write(block)
def download_if_not_exists(filename, url):
"""
Download a URL to a file if the file
does not exist already.
Returns
-------
True if the file was downloaded,
False if it already existed
"""
if not os.path.exists(filename):
download_file(filename, url)
return True
return False
def load_dataset(numpy_lib, num_iter=1):
print("loading dataset")
download_if_not_exists(
"data/nanoaod_test.root",
"https://jpata.web.cern.ch/jpata/opendata_files/DY2JetsToLL-merged/1.root",
)
datastructures = {
"Muon": [
("Muon_pt", "float32"),
("Muon_eta", "float32"),
("Muon_phi", "float32"),
("Muon_mass", "float32"),
("Muon_charge", "int32"),
("Muon_pfRelIso03_all", "float32"),
("Muon_tightId", "bool"),
],
"Electron": [
("Electron_pt", "float32"),
("Electron_eta", "float32"),
("Electron_phi", "float32"),
("Electron_mass", "float32"),
("Electron_charge", "int32"),
("Electron_pfRelIso03_all", "float32"),
("Electron_pfId", "bool"),
],
"Jet": [
("Jet_pt", "float32"),
("Jet_eta", "float32"),
("Jet_phi", "float32"),
("Jet_mass", "float32"),
("Jet_btag", "float32"),
("Jet_puId", "bool"),
],
"EventVariables": [
("HLT_IsoMu24", "bool"),
("MET_pt", "float32"),
("MET_phi", "float32"),
("MET_sumet", "float32"),
("MET_significance", "float32"),
("MET_CovXX", "float32"),
("MET_CovXY", "float32"),
("MET_CovYY", "float32"),
],
}
dataset = Dataset(
"nanoaod",
num_iter * ["./data/nanoaod_test.root"],
datastructures,
treename="Events",
datapath="",
)
dataset.load_root(verbose=True)
dataset.merge_inplace(verbose=True)
print(
"dataset has {0} events, {1:.2f} MB".format(
dataset.numevents(), dataset.memsize() / 1000 / 1000
)
)
dataset.move_to_device(numpy_lib, verbose=True)
return dataset
@numba.njit
def verify_set_in_offsets(offsets_np, inds_np, arr_np, target_np):
for iev in range(len(offsets_np) - 1):
nmu = 0
for imu in range(offsets_np[iev], offsets_np[iev + 1]):
if nmu == inds_np[iev]:
if arr_np[imu] != target_np[imu]:
print("Mismatch detected in iev,imu", iev, imu)
return False
nmu += 1
return True
@numba.njit
def verify_get_in_offsets(offsets_np, inds_np, arr_np, target_np, z_np):
for iev in range(len(offsets_np) - 1):
nmu = 0
# Event that had no muons
if offsets_np[iev] == offsets_np[iev + 1]:
if z_np[iev] != 0:
print("Mismatch detected", iev)
return False
for imu in range(offsets_np[iev], offsets_np[iev + 1]):
if nmu == inds_np[iev]:
a = target_np[iev] != z_np[iev]
b = z_np[iev] != arr_np[imu]
if a or b:
print("Mismatch detected", iev, imu)
return False
nmu += 1
return True
@numba.njit
def verify_broadcast(offsets_np, vals_ev_np, vals_obj_np):
for iev in range(len(offsets_np) - 1):
for iobj in range(offsets_np[iev], offsets_np[iev + 1]):
if vals_obj_np[iobj] != vals_ev_np[iev]:
print("Mismatch detected in ", iev, iobj)
return False
return True
class TestKernels(unittest.TestCase):
@classmethod
def setUpClass(self):
self.NUMPY_LIB, self.ha = choose_backend(use_cuda=USE_CUDA)
self.use_cuda = USE_CUDA
self.dataset = load_dataset(self.NUMPY_LIB)
# def time_kernel(self, test_kernel):
# test_kernel()
#
# t0 = time.time()
# for i in range(5):
# n = test_kernel()
# t1 = time.time()
#
# dt = (t1 - t0) / 5.0
# speed = float(n)/dt
# return speed
def test_kernel_sum_in_offsets(self):
dataset = self.dataset
muons = dataset.structs["Muon"][0]
sel_ev = self.NUMPY_LIB.ones(muons.numevents(), dtype=self.NUMPY_LIB.bool)
sel_mu = self.NUMPY_LIB.ones(muons.numobjects(), dtype=self.NUMPY_LIB.bool)
z = kernels.sum_in_offsets(
self.ha,
muons.offsets,
muons.pt,
sel_ev,
sel_mu,
dtype=self.NUMPY_LIB.float32,
)
return muons.numevents()
def test_kernel_max_in_offsets(self):
dataset = self.dataset
muons = dataset.structs["Muon"][0]
sel_ev = self.NUMPY_LIB.ones(muons.numevents(), dtype=self.NUMPY_LIB.bool)
sel_mu = self.NUMPY_LIB.ones(muons.numobjects(), dtype=self.NUMPY_LIB.bool)
z = kernels.max_in_offsets(self.ha, muons.offsets, muons.pt, sel_ev, sel_mu)
return muons.numevents()
def test_kernel_get_in_offsets(self):
dataset = self.dataset
muons = dataset.structs["Muon"][0]
sel_ev = self.NUMPY_LIB.ones(muons.numevents(), dtype=self.NUMPY_LIB.bool)
sel_mu = self.NUMPY_LIB.ones(muons.numobjects(), dtype=self.NUMPY_LIB.bool)
inds = self.NUMPY_LIB.zeros(muons.numevents(), dtype=self.NUMPY_LIB.int8)
inds[:] = 0
z = kernels.get_in_offsets(
self.ha, muons.offsets, muons.pt, inds, sel_ev, sel_mu
)
return muons.numevents()
def test_kernel_set_get_in_offsets(self):
print("kernel_set_get_in_offsets")
dataset = self.dataset
muons = dataset.structs["Muon"][0]
arr = muons.pt.copy()
sel_ev = self.NUMPY_LIB.ones(muons.numevents(), dtype=self.NUMPY_LIB.bool)
sel_mu = self.NUMPY_LIB.ones(muons.numobjects(), dtype=self.NUMPY_LIB.bool)
inds = self.NUMPY_LIB.zeros(muons.numevents(), dtype=self.NUMPY_LIB.int8)
# set the pt of the first muon in each event to 1
inds[:] = 0
target = self.NUMPY_LIB.ones(muons.numevents(), dtype=muons.pt.dtype)
kernels.set_in_offsets(
self.ha, muons.offsets, arr, inds, target, sel_ev, sel_mu
)
print("checking set_in_offsets")
asnp = self.NUMPY_LIB.asnumpy
self.assertTrue(
verify_set_in_offsets(
asnp(muons.offsets), asnp(inds), asnp(arr), asnp(target)
)
)
print("checking get_in_offsets")
z = kernels.get_in_offsets(self.ha, muons.offsets, arr, inds, sel_ev, sel_mu)
self.assertTrue(
verify_get_in_offsets(
asnp(muons.offsets), asnp(inds), asnp(arr), asnp(target), asnp(z)
)
)
return muons.numevents()
def test_kernel_simple_cut(self):
print("kernel_simple_cut")
dataset = self.dataset
muons = dataset.structs["Muon"][0]
sel_mu = muons.pt > 30.0
return muons.numevents()
def test_kernel_broadcast(self):
print("kernel_broadcast")
dataset = self.dataset
muons = dataset.structs["Muon"][0]
met_pt = dataset.eventvars[0]["MET_pt"]
met_pt_permuon = self.NUMPY_LIB.zeros(
muons.numobjects(), dtype=self.NUMPY_LIB.float32
)
kernels.broadcast(self.ha, muons.offsets, met_pt, met_pt_permuon)
self.assertTrue(
verify_broadcast(
self.NUMPY_LIB.asnumpy(muons.offsets),
self.NUMPY_LIB.asnumpy(met_pt),
self.NUMPY_LIB.asnumpy(met_pt_permuon),
)
)
return muons.numevents()
def test_kernel_mask_deltar_first(self):
print("kernel_mask_deltar_first")
dataset = self.dataset
muons = dataset.structs["Muon"][0]
jet = dataset.structs["Jet"][0]
sel_ev = self.NUMPY_LIB.ones(muons.numevents(), dtype=self.NUMPY_LIB.bool)
sel_mu = self.NUMPY_LIB.ones(muons.numobjects(), dtype=self.NUMPY_LIB.bool)
sel_jet = jet.pt > 10
muons_matched_to_jet = kernels.mask_deltar_first(
self.ha,
{"offsets": muons.offsets, "eta": muons.eta, "phi": muons.phi},
sel_mu,
{"offsets": jet.offsets, "eta": jet.eta, "phi": jet.phi},
sel_jet,
0.3,
)
self.assertEqual(len(muons_matched_to_jet), muons.numobjects())
self.assertEqual(muons_matched_to_jet.sum(), 1698542)
return muons.numevents()
def test_kernel_select_opposite_sign(self):
print("kernel_select_opposite_sign")
dataset = self.dataset
muons = dataset.structs["Muon"][0]
sel_ev = self.NUMPY_LIB.ones(muons.numevents(), dtype=self.NUMPY_LIB.bool)
sel_mu = self.NUMPY_LIB.ones(muons.numobjects(), dtype=self.NUMPY_LIB.bool)
muons_passing_os = kernels.select_opposite_sign(
self.ha, muons.offsets, muons.charge, sel_mu
)
return muons.numevents()
def test_kernel_histogram_from_vector(self):
print("kernel_histogram_from_vector")
dataset = self.dataset
muons = dataset.structs["Muon"][0]
weights = 2 * self.NUMPY_LIB.ones(
muons.numobjects(), dtype=self.NUMPY_LIB.float32
)
ret = kernels.histogram_from_vector(
self.ha,
muons.pt,
weights,
self.NUMPY_LIB.linspace(0, 200, 100, dtype=self.NUMPY_LIB.float32),
)
self.assertEqual(ret[0][20], 112024.0)
self.assertEqual(ret[1][20], 2 * 112024.0)
self.assertEqual(ret[0][0], 0)
self.assertEqual(ret[0][-1], 7856.0)
self.assertEqual(ret[0].sum(), 3894188.0)
self.assertEqual(ret[1].sum(), 2 * 3894188.0)
return muons.numevents()
def test_kernel_histogram_from_vector_masked(self):
print("kernel_histogram_from_vector_masked")
dataset = self.dataset
muons = dataset.structs["Muon"][0]
weights = 2 * self.NUMPY_LIB.ones(
muons.numobjects(), dtype=self.NUMPY_LIB.float32
)
mask = self.NUMPY_LIB.ones(muons.numobjects(), dtype=self.NUMPY_LIB.bool)
mask[:100] = False
ret = kernels.histogram_from_vector(
self.ha,
muons.pt,
weights,
self.NUMPY_LIB.linspace(0, 200, 100, dtype=self.NUMPY_LIB.float32),
mask=mask,
)
self.assertEqual(ret[0][20], 112014.0)
self.assertEqual(ret[1][20], 2 * 112014.0)
self.assertEqual(ret[0][0], 0)
self.assertEqual(ret[0][-1], 7856.0)
self.assertEqual(ret[0].sum(), 3893988.0)
self.assertEqual(ret[1].sum(), 2 * 3893988.0)
return muons.numevents()
def test_kernel_histogram_from_vector_several(self):
print("kernel_histogram_from_vector_several")
dataset = self.dataset
muons = dataset.structs["Muon"][0]
mask = self.NUMPY_LIB.ones(muons.numobjects(), dtype=self.NUMPY_LIB.bool)
mask[:100] = False
weights = 2 * self.NUMPY_LIB.ones(
muons.numobjects(), dtype=self.NUMPY_LIB.float32
)
variables = [
(
muons.pt,
self.NUMPY_LIB.linspace(0, 200, 100, dtype=self.NUMPY_LIB.float32),
),
(
muons.eta,
self.NUMPY_LIB.linspace(-4, 4, 100, dtype=self.NUMPY_LIB.float32),
),
(
muons.phi,
self.NUMPY_LIB.linspace(-4, 4, 100, dtype=self.NUMPY_LIB.float32),
),
(
muons.mass,
self.NUMPY_LIB.linspace(0, 200, 100, dtype=self.NUMPY_LIB.float32),
),
(
muons.charge,
self.NUMPY_LIB.array([-1, 0, 1, 2], dtype=self.NUMPY_LIB.float32),
),
]
ret = kernels.histogram_from_vector_several(self.ha, variables, weights, mask)
# number of individual histograms
self.assertEqual(len(ret), len(variables))
# weights, weights2, bins
self.assertEqual(len(ret[0]), 3)
# bin edges
for ivar in range(len(variables)):
self.assertEqual(len(ret[ivar][0]), len(variables[ivar][1]) - 1)
# bin contents
for ivar in range(len(variables)):
ret2 = kernels.histogram_from_vector(
self.ha, variables[ivar][0], weights, variables[ivar][1], mask=mask
)
for ibin in range(len(ret[ivar][0])):
self.assertEqual(ret[ivar][0][ibin], ret2[0][ibin])
self.assertEqual(ret[ivar][1][ibin], ret2[1][ibin])
return muons.numevents()
def test_coordinate_transformation(self):
print("coordinate_transformation")
# Don't test the scalar ops on GPU
if not USE_CUDA:
px, py, pz, e = kernels.spherical_to_cartesian(
self.ha, 100.0, 0.2, 0.4, 100.0
)
pt, eta, phi, mass = kernels.cartesian_to_spherical(self.ha, px, py, pz, e)
self.assertAlmostEqual(pt, 100.0, 2)
self.assertAlmostEqual(eta, 0.2, 2)
self.assertAlmostEqual(phi, 0.4, 2)
self.assertAlmostEqual(mass, 100.0, 2)
pt_orig = self.NUMPY_LIB.array([100.0, 20.0], dtype=self.NUMPY_LIB.float32)
eta_orig = self.NUMPY_LIB.array([0.2, -0.2], dtype=self.NUMPY_LIB.float32)
phi_orig = self.NUMPY_LIB.array([0.4, -0.4], dtype=self.NUMPY_LIB.float32)
mass_orig = self.NUMPY_LIB.array([100.0, 20.0], dtype=self.NUMPY_LIB.float32)
px, py, pz, e = kernels.spherical_to_cartesian(
self.ha, pt_orig, eta_orig, phi_orig, mass_orig
)
pt, eta, phi, mass = kernels.cartesian_to_spherical(self.ha, px, py, pz, e)
self.assertAlmostEqual(self.NUMPY_LIB.asnumpy(pt[0]), 100.0, 2)
self.assertAlmostEqual(self.NUMPY_LIB.asnumpy(eta[0]), 0.2, 2)
self.assertAlmostEqual(self.NUMPY_LIB.asnumpy(phi[0]), 0.4, 2)
self.assertAlmostEqual(self.NUMPY_LIB.asnumpy(mass[0]), 100, 2)
self.assertAlmostEqual(self.NUMPY_LIB.asnumpy(pt[1]), 20, 2)
self.assertAlmostEqual(self.NUMPY_LIB.asnumpy(eta[1]), -0.2, 2)
self.assertAlmostEqual(self.NUMPY_LIB.asnumpy(phi[1]), -0.4, 2)
self.assertAlmostEqual(self.NUMPY_LIB.asnumpy(mass[1]), 20, 2)
# Don't test the scalar ops on GPU
if not USE_CUDA:
pt_tot, eta_tot, phi_tot, mass_tot = self.ha.add_spherical(
pt_orig, eta_orig, phi_orig, mass_orig
)
self.assertAlmostEqual(pt_tot, 114.83390378237536)
self.assertAlmostEqual(eta_tot, 0.13980652560764573)
self.assertAlmostEqual(phi_tot, 0.2747346427265487)
self.assertAlmostEqual(mass_tot, 126.24366428840153)
def test_lumi_mask(self):
lumimask = LumiMask("tests/samples/Cert_271036-284044_13TeV_ReReco_07Aug2017_Collisions16_JSON.txt", np, backend_cpu)
runs = np.array([279931, 279931, 279931], dtype=self.NUMPY_LIB.uint32)
lumis = np.array([1, 83, 743], dtype=self.NUMPY_LIB.uint32)
mask = lumimask(runs, lumis)
#"279931": [[84, 628], [630, 743], [746, 801], [803, 1043], [1045, 3022]]
#[ 83 628 629 743 745 801 802 1043 1044 3022]
self.assertFalse(mask[0])
self.assertFalse(mask[1])
self.assertTrue(mask[2])
if __name__ == "__main__":
if "--debug" in sys.argv:
unittest.findTestCases(sys.modules[__name__]).debug()
else:
unittest.main()
|
import argparse
import os
import time
import pybullet_planning as pp
from termcolor import cprint
import integral_timber_joints
from integral_timber_joints.planning.robot_setup import load_RFL_world
from integral_timber_joints.planning.rhino_interface import get_ik_solutions
from integral_timber_joints.planning.state import set_state
from integral_timber_joints.planning.utils import beam_ids_from_argparse_seq_n
from integral_timber_joints.process import RobotClampAssemblyProcess, RoboticMovement
from integral_timber_joints.planning.parsing import parse_process, get_process_path
from compas.rpc import Proxy
if __name__ == "__main__":
parser = argparse.ArgumentParser()
    parser.add_argument('--design_dir', default='210605_ScrewdriverTestProcess',
                        help='Name of the folder containing the problem json.')
parser.add_argument('--problem', default='nine_pieces_process.json', # pavilion_process.json
help='The name of the problem to solve')
    parser.add_argument('--problem_subdir', default='.', # pavilion.json
                        help='Subdir of the process file, defaults to `.`. Popular use: `YJ_tmp`, `<time stamp>`')
#
    parser.add_argument('--seq_n', nargs='+', type=int, help='Zero-based index according to the Beam sequence in process.assembly.sequence. If one number is provided, e.g. `--seq_n 1`, we will only plan for that beam. If two numbers are provided, `--seq_n start_id end_id`, we will plan from #start_id UNTIL #end_id. By default, all the beams will be checked.')
parser.add_argument('--movement_id', default=None, type=str, help='Compute only for movement with a specific tag or movement id, e.g. `A54_M0` or `53`. ! We support both str-like movement id or global list index.')
#
    parser.add_argument('--gantry_attempt', type=int, default=30, help='Number of gantry attempts. Default 30.')
#
parser.add_argument('-p', '--proxy', action='store_true', help='Use Proxy to call checker. Default False')
parser.add_argument('-v', '--viewer', action='store_true', help='Enables the viewer during planning. Default False')
# parser.add_argument('--reinit_tool', action='store_true', help='Regenerate tool URDFs.')
parser.add_argument('--debug', action='store_true', help='debug mode.')
args = parser.parse_args()
print('Arguments:', args)
##########################
path_to_json = os.path.realpath(os.path.join(os.path.dirname(integral_timber_joints.__file__), '..', '..', 'external', 'itj_design_study', '210605_ScrewdriverTestProcess', 'nine_pieces_process.json'))
# process = load_process(path_to_json) # type: RobotClampAssemblyProcess
process = parse_process(args.design_dir, args.problem, subdir=args.problem_subdir)
# * use shared client instance
# Connect to path planning backend and initialize robot parameters
if not args.proxy:
client, robot, _ = load_RFL_world(viewer=args.viewer)
process.set_initial_state_robot_config(process.robot_initial_config)
set_state(client, robot, process, process.initial_state, initialize=True,
options={'include_env' : True, 'reinit_tool' : False})
start_time = time.time()
all_movements = process.movements
failed_state_indices = []
    # Connection to Proxy Server
if args.proxy:
rhino_interface = Proxy('integral_timber_joints.planning.rhino_interface', autoreload=False)
# * Construct options
options = {
'viewer': args.viewer,
'debug': args.debug,
'diagnosis': args.debug,
'ik_gantry_attempts': args.gantry_attempt,
# ! shared client in options so we avoid recreating client every call
}
if not args.proxy:
options.update(
{'client' : client, 'robot' : robot,}
)
print('options: ' , options)
beam_ids = beam_ids_from_argparse_seq_n(process, args.seq_n, args.movement_id)
for beam_id in beam_ids:
actions = process.assembly.get_beam_attribute(beam_id, 'actions')
for action in actions:
for i, movement in enumerate(action.movements):
# Skip non RoboticMovement
if not isinstance(movement, RoboticMovement):
continue
global_movement_id = all_movements.index(movement)
# Filter by movement id
if args.movement_id is not None and \
(movement.movement_id != args.movement_id and args.movement_id != str(global_movement_id)):
continue
print('='*10)
print("Mov#%i/State#%i (%s) A(%s) M(%s) %s" % (global_movement_id, global_movement_id + 1, movement.movement_id, action.__class__.__name__, movement.__class__.__name__, movement.tag))
# * Proxy call or normal call
if args.proxy:
result = rhino_interface.get_ik_solutions(process, global_movement_id, options)
else:
result = get_ik_solutions(process, global_movement_id, options)
                # * Interpret Result
success, conf, msg = result
if success:
cprint("IK Success: %s" % msg, 'green')
else:
cprint("- - - WARNING - - - IK Failed: %s" % msg, 'red')
failed_state_indices.append((movement.movement_id, global_movement_id + 1, msg))
if failed_state_indices:
print('='*10)
cprint("Failed Movement id | State Indices | Error msg", 'yellow')
for fmsg in failed_state_indices:
print(fmsg)
print('Total checking time: {:.3f}'.format(pp.elapsed_time(start_time)))
|
from time import sleep
print('Welcome to the vehicle rental service! Please indicate the class of car you would like to rent:')
carro = int(input("""At the moment we only have 2 classes:
[0] Economy car
[1] Luxury car
"""))
if carro == 0:
    Carropopular = 90
    print('Economy car - $90 per day')
    pkms = 0.20
    pmkms = 0.10
else:
    Carrodeluxo = 150
    print('Luxury car - $150 per day')
    pkms = 0.30
    pmkms = 0.25
dias = int(input('For how many days do you want to keep the vehicle? '))
kms = int(input('How many kilometers do you plan to drive? '))
print('Calculating...')
sleep(2)
print('\033c')
if carro == 0 and kms >= 200:
    valorest = (Carropopular * dias) + (kms * pkms)
    print('Your estimated cost is:', valorest)
elif carro == 1 and kms >= 200:
    valorest = (Carrodeluxo * dias) + (kms * pkms)
    print('Your estimated cost is:', valorest)
if carro == 0 and kms < 200:
    valorest = (Carropopular * dias) + (kms * pmkms)
    print('Your estimated cost is:', valorest)
elif carro == 1 and kms < 200:
    valorest = (Carrodeluxo * dias) + (kms * pmkms)
    print('Your estimated cost is:', valorest)
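# Worked example of the pricing above: an economy car for 3 days and 250 km
# falls in the >= 200 km bracket, so the estimate is 90*3 + 250*0.20 = 320.0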
|
"""Create collage from images in a folder."""
from argparse import ArgumentParser
from pathlib import Path
from collamake.make import CollaMake
if __name__ == "__main__":
parser = ArgumentParser(description="A simple collage maker")
parser.add_argument("-f", "--file", type=Path, required=True)
args = parser.parse_args()
cm = CollaMake(args.file)
cm.generate()
|
import asyncio
from binance_asyncio.endpoints import MarketDataEndpoints
async def main():
api_key = '<insert your api key here>'
market_data = MarketDataEndpoints(api_key=api_key)
    # the first parameter is required; the rest are optional and shadow the names of the Binance API
# https://github.com/binance/binance-spot-api-docs/blob/master/rest-api.md#klinecandlestick-data
code, result = await market_data.get_klines('btcusdt', interval="5m", start_time="2 hours ago", end_time="now")
print(code, result)
asyncio.run(main())
|
from django.conf.urls import url, include
from django.contrib.auth.decorators import login_required
from boards.views import BoardUpdateView, BoardDeleteView, BoardCreateView, \
BoardListView, DashboardView
urlpatterns = [
url(r'^b/', include([
url(r'^(?P<pk>\d+)/', include([
url(r'^edit/$', login_required(BoardUpdateView.as_view()),
name='edit_board'),
url(r'^delete/$', login_required(BoardDeleteView.as_view()),
name='delete_board'),
])),
url(r'', login_required(BoardListView.as_view()),
name='browse_boards', ),
])),
url(r'^create_board/', login_required(BoardCreateView.as_view()),
name='add_board', ),
url(r'', login_required(DashboardView.as_view()),
name='dashboard', ),
]
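# Illustrative URL layout produced by these patterns (for pk = 1):
#   /               -> dashboard      (DashboardView)
#   /b/             -> browse_boards  (BoardListView)
#   /b/1/edit/      -> edit_board     (BoardUpdateView)
#   /b/1/delete/    -> delete_board   (BoardDeleteView)
#   /create_board/  -> add_board      (BoardCreateView)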
|
import random
import os
import time
from wabbit_wappa import *
def test_namespace():
namespace = Namespace('MetricFeatures', 3.28, [('height', 1.5), ('length', 2.0), 'apple', '1948'])
namespace_string = namespace.to_string()
assert namespace_string == 'MetricFeatures:3.28 height:1.5 length:2.0 apple 1948 '
namespace = Namespace(None, 3.28, ['height', 'length'])
namespace_string = namespace.to_string()
assert namespace_string == ' height length '
def test_validation():
try:
namespace = Namespace('Metric Features', 3.28, [('height|', 1.5), ('len:gth', 2.0)],
escape=False)
except WabbitInvalidCharacter:
pass # This is the correct behavior
else:
assert False, "to_string() should error out for these inputs when escape==False"
def test_escaping():
namespace = Namespace('Metric Features', 3.28, [('height|', 1.5), ('len:gth', 2.0)])
namespace_string = namespace.to_string()
assert 'Metric Features' not in namespace_string
assert '|' not in namespace_string
assert 'len:gth' not in namespace_string
def test_command():
command = make_command_line(predictions='/dev/stdout',
quiet=True,
save_resume=True,
compressed=True,
q_colon=['a', 'b'],
b=20,
)
# Test that command has all expected elements
assert 'vw ' in command
assert '--predictions /dev/stdout' in command
assert '--quiet' in command
assert '--save_resume' in command
assert '--compressed' in command
    assert '--q: a' in command
    assert '--q: b' in command
assert '-b 20' in command
assert '--b 20' not in command
# Test that VW runs with this command
vw = VW(command)
def test_training():
# TODO: pytest probably has a framework for testing hyperparameters like this
for active_mode in [False, True]:
vw = VW(loss_function='logistic', active_mode=active_mode)
# Train with an easy case
for i in range(20):
# Positive example
vw.send_example(response=1.,
importance=2.,
tag='positive',
features=[('a', 1 + random.random()),
('b', -1 - random.random())]
)
vw.send_example(response=-1.,
importance=.5,
tag='negative',
features=[('lungfish', 1 + random.random()),
('palooka', -1 - random.random())]
)
prediction1 = vw.get_prediction([('a', 1),
('b', -2)]).prediction
# Prediction should be definitively positive
assert prediction1 > 1.
prediction2 = vw.get_prediction([('lungfish', 3)]).prediction
# Prediction should be negative
assert prediction2 < 0
prediction3 = vw.get_prediction([('a', 1),
('b', -2)]).prediction
# Making predictions shouldn't affect the trained model
assert prediction1 == prediction3
# Continue training with very different examples
for i in range(20):
# Positive example
vw.add_namespace('space1',
1.0,
['X', 'Y', 'Z'],
)
vw.send_example(response=1.)
# Negative example
vw.add_namespace('space2',
2.0,
['X', 'Y', 'Z'],
)
vw.send_example(response=-1.)
vw.add_namespace('space1',
1.0,
['X'],
)
prediction4 = vw.get_prediction().prediction
# Prediction should be positive
assert prediction4 > 0
vw.add_namespace('space2',
1.0,
['X'],
)
prediction5 = vw.get_prediction().prediction
# Prediction should be negative
assert prediction5 < 0
# Save the model to a temporary file
filename = '__temp.model'
vw.save_model(filename)
# This sleep is required only in active_mode, in the (unusual) case
# that the model file is used immediately
time.sleep(0.1)
# Load a new VW instance from that model
vw2 = VW(loss_function='logistic', i=filename)
# Make the same prediction with each model (testing cache_string to boot)
namespace1 = Namespace(features=[('a', 1), ('b', -2)], cache_string=True)
namespace2 = Namespace('space1', 1.0, ['X', 'Y'], cache_string=True)
prediction1 = vw.get_prediction(namespaces=[namespace1, namespace2]).prediction
prediction2 = vw2.get_prediction(namespaces=[namespace1, namespace2]).prediction
assert prediction1 == prediction2
assert prediction1 > 1.
# Clean up
vw.close()
vw2.close()
os.remove(filename)
|
# -*- coding: utf-8 -*-
"""
Created on Fri Dec 29 13:48:55 2017
@author: Administrator
"""
import config
import time
from globalMap import globalMap
from multiprocessing import Manager
class resultCount:
__threadCount=Manager().Value('c',0)
__totalCount=Manager().Value('i',0)
__successCount=Manager().Value('j',0)
__totalSuccessTime=Manager().Value('k',0)
__startExecTime=Manager().Value('l',0)
__costTime=Manager().Value('m',0)
__totalCostTime=Manager().Value('n',0)
__resultCountName=Manager().Value('str','')
__startDictTime=Manager().dict()
__log=""
def __init__(self,resultCountName):
self.__totalCount.value=config.threadCount*config.threadExecTimes
self.__threadCount.value=config.threadCount
self.__resultCountName.value=resultCountName
        self.__startExecTime.value=time.time()
self.__log=globalMap().get("log")
def startRecord(self,index,lock):
lock.acquire()
try:
self.__startDictTime[index]=time.time()
finally:
lock.release()
def endRecord(self,index,lock):
lock.acquire()
try:
self.__costTime.value=time.time()-self.__startDictTime.get(index)
self.__totalSuccessTime.value=self.__totalSuccessTime.value+self.__costTime.value
self.__successCount.value+=1
finally:
lock.release()
def totalCostTime(self):
self.__totalCostTime.value=time.time()-self.__startExecTime.value
self.__log.info("totalCostTime :"+str(self.__totalCostTime.value)+" s")
return self.__totalCostTime.value
def avgCostTime(self):
avgTime=self.__totalSuccessTime.value/self.__successCount.value
self.__log.info("avgCostTime :"+str(avgTime)+" s")
return avgTime
def totalCount(self):
self.__log.info("totalCount :"+str(self.__totalCount.value))
return self.__totalCount.value
def threadCount(self):
self.__log.info("threadCount :"+str(self.__threadCount.value))
return self.__threadCount.value
def successCount(self):
self.__log.info("successCount :"+str(self.__successCount.value))
return self.__successCount.value
|
#! /usr/bin/env python3
# Jan-18-2019
import os, sys
import pysam
from collections import defaultdict
def alignment_reads_snp_count(line,snp,quality_score_threshold=30):
strain1, strain2, match, mismatch = 0, 0, 0, 0
for align in line.get_aligned_pairs(matches_only=True):
if not align[1] in snp[line.reference_name]: # no snp in this position
continue
if ord(line.qual[align[0]])-33 < quality_score_threshold:
continue
if line.seq[align[0]] in snp[line.reference_name][align[1]][0]:
strain1 += 1
match += 1
elif line.seq[align[0]] in snp[line.reference_name][align[1]][1]:
strain2 += 1
match += 1
else:
mismatch += 1
return strain1, strain2, match, mismatch
def bam_split(filterSNP,inputBAM,outputBAM1,outputBAM2,quality_score_threshold=30):
def calculate_plines(pre_lines, outputFhd1, outputFhd2, snp, statistics, quality_score_threshold=30):
'''statistics = [match, mismatch, strain1, strain2, s1_reads, s2_reads, mixed, nosnp]
'''
match, mismatch, strain1, strain2 = 0, 0, 0, 0
for line in pre_lines:
if line.is_unmapped or line.mapping_quality < 30 or not line.reference_name.startswith("chr") or '_' in line.reference_name:
continue
if not line.reference_name in snp:
continue
strain1_, strain2_, match_, mismatch_ = alignment_reads_snp_count(line,snp,quality_score_threshold)
match += match_
mismatch += mismatch_
strain1 += strain1_
strain2 += strain2_
statistics[0] += match
statistics[1] += mismatch
statistics[2] += strain1
statistics[3] += strain2
if strain2 > 0: # s2 reads group
statistics[5] += 1
for line in pre_lines:
outputFhd2.write(line)
elif strain1 == 0: # no snp
statistics[7] += 1
else: # s1 reads group
statistics[4] += 1
for line in pre_lines:
outputFhd1.write(line)
#if strain1 + strain2 < 2: # no snp
# statistics[7] += 1
#elif 1.0 * strain1 / (strain1 + strain2) >= 2.0 / 3: # s1 reads group
# statistics[4] += 1
# for line in pre_lines:
# outputFhd1.write(line)
#elif 1.0 * strain2 / (strain1 + strain2) >= 2.0 / 3: # s2 reads group
# statistics[5] += 1
# for line in pre_lines:
# outputFhd2.write(line)
#else:
# statistics[6] += 1
# load snp
snp = defaultdict(dict)
with open(filterSNP) as fhd:
for line in fhd:
chrom, start, end, n1, n2, *_ = line.strip().split()
snp[chrom][int(start)] = [n1,n2]
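    # A filterSNP line is expected to be whitespace-separated, e.g. (values
    # illustrative): "chr1  3000123  3000124  A  G", extra columns ignored,
    # giving snp["chr1"][3000123] == ["A", "G"]  (strain1 allele, strain2 allele)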
total = 0
statistics = [0] * 8 # match, mismatch, strain1, strain2, s1_reads, s2_reads, mixed, nosnp
with pysam.Samfile(inputBAM,'rb') as inputFhd, \
pysam.Samfile(outputBAM1,'wb',template=inputFhd) as outputFhd1, \
pysam.Samfile(outputBAM2,'wb',template=inputFhd) as outputFhd2:
pre_lines = []
for line in inputFhd:
if pre_lines and pre_lines[0].query_name == line.query_name: # same reads group
pre_lines.append(line)
else: # new reads group
if pre_lines:
total += 1
calculate_plines(pre_lines, outputFhd1, outputFhd2, snp, statistics,quality_score_threshold)
pre_lines = [line]
if pre_lines:
total += 1
calculate_plines(pre_lines, outputFhd1, outputFhd2, snp, statistics,quality_score_threshold)
statistics.append(total)
return statistics
def alignment_reads_snp_count_inspect(line,snp,quality_score_threshold=30):
strain1, strain2, match, mismatch = 0, 0, 0, 0
for align in line.get_aligned_pairs(matches_only=True,with_seq=True):
if not align[1] in snp[line.reference_name]: # no snp in this position
if align[2].upper() != line.seq[align[0]].upper():
sys.stdout.write(f'chr: {line.reference_name} pos: {align[1]}. No snp in this position. ref: {align[2]} query: {line.seq[align[0]]}\n')
pass
continue
if ord(line.qual[align[0]])-33 < quality_score_threshold:
sys.stdout.write(f'Low sequencing position quality. ref: {snp[line.reference_name][align[1]][0]}, {snp[line.reference_name][align[1]][1]} query: {line.seq[align[0]]}\n')
continue
if line.seq[align[0]] in snp[line.reference_name][align[1]][0]:
sys.stdout.write(f'chr: {line.reference_name} pos: {align[1]}. Match. ref: {snp[line.reference_name][align[1]][0]}, {snp[line.reference_name][align[1]][1]} query: {line.seq[align[0]]}\n')
strain1 += 1
match += 1
elif line.seq[align[0]] in snp[line.reference_name][align[1]][1]:
sys.stdout.write(f'chr: {line.reference_name} pos: {align[1]}. Match. ref: {snp[line.reference_name][align[1]][0]}, {snp[line.reference_name][align[1]][1]} query: {line.seq[align[0]]}\n')
strain2 += 1
match += 1
else:
            sys.stdout.write(f'chr: {line.reference_name} pos: {align[1]}. Mismatch. ref: {snp[line.reference_name][align[1]][0]}, {snp[line.reference_name][align[1]][1]} query: {line.seq[align[0]]}\n')
mismatch += 1
return strain1, strain2, match, mismatch
def bam_inspect(filterSNP,inputBAM,quality_score_threshold=30):
def calculate_plines(pre_lines, snp, quality_score_threshold=30):
match, mismatch, strain1, strain2 = 0, 0, 0, 0
for line in pre_lines:
sys.stdout.write(f'{line.to_string()}\n')
if line.is_unmapped or line.mapping_quality < 30 or not line.reference_name.startswith("chr") or '_' in line.reference_name:
                #sys.stdout.write('Unmapped or small chromosome\n')
continue
if not line.reference_name in snp:
                #sys.stdout.write('Non-snp chromosome\n')
continue
strain1_, strain2_, match_, mismatch_ = alignment_reads_snp_count_inspect(line,snp,quality_score_threshold)
match += match_
mismatch += mismatch_
strain1 += strain1_
strain2 += strain2_
sys.stdout.write(f'{strain1}\t{strain2}\n')
if strain2 > 0: # s2 reads group
sys.stdout.write('Strain2\n')
elif strain1 == 0: # no snp
sys.stdout.write('No SNP\n')
else: # s1 reads group
sys.stdout.write('Strain1\n')
#if strain1 + strain2 < 2: # no snp
# sys.stdout.write('No SNP\n')
#elif 1.0 * strain1 / (strain1 + strain2) >= 2.0 / 3: # s1 reads group
# sys.stdout.write('Strain1\n')
#elif 1.0 * strain2 / (strain1 + strain2) >= 2.0 / 3: # s2 reads group
# sys.stdout.write('Strain2\n')
#else:
# sys.stdout.write('Mixed\n')
# load snp
snp = defaultdict(dict)
with open(filterSNP) as fhd:
for line in fhd:
chrom, start, end, n1, n2, *_ = line.strip().split()
snp[chrom][int(start)] = [n1,n2]
with pysam.Samfile(inputBAM,'rb') as inputFhd:
pre_lines = []
for line in inputFhd:
if pre_lines and pre_lines[0].query_name == line.query_name: # same reads group
pre_lines.append(line)
else: # new reads group
if pre_lines:
calculate_plines(pre_lines, snp)
yield
pre_lines = [line]
if pre_lines:
calculate_plines(pre_lines, snp)
yield
# ----- parameters -----
filterSNP = sys.argv[1]
inputBAM = sys.argv[2]
outputBAM1 = sys.argv[3]
outputBAM2 = sys.argv[4]
match, mismatch, strain1, strain2, s1_reads, s2_reads, mixed, nosnp, total = bam_split(filterSNP,inputBAM,outputBAM1,outputBAM2)
output_template = f'''\
Total: {total:,}
match: {match:,}
strain1: {strain1:,}
strain2: {strain2:,}
mismatch: {mismatch:,}
strain1 reads group: {s1_reads:,}
strain2 reads group: {s2_reads:,}
mixed: {mixed:,}
nosnp: {nosnp:,}
'''
sys.stdout.write(output_template)
|
# Time: seat: O(logn), amortized
# leave: O(logn)
# Space: O(n)
# In an exam room, there are N seats in a single row,
# numbered 0, 1, 2, ..., N-1.
#
# When a student enters the room,
# they must sit in the seat that maximizes the distance to the closest person.
# If there are multiple such seats, they sit in the seat with
# the lowest number.
# (Also, if no one is in the room, then the student sits at seat number 0.)
#
# Return a class ExamRoom(int N) that exposes two functions:
# ExamRoom.seat() returning an int representing what seat the student sat in,
# and ExamRoom.leave(int p) representing that the student in seat number p now
# leaves the room.
# It is guaranteed that any calls to ExamRoom.leave(p) have a student sitting
# in seat p.
#
# Example 1:
#
# Input: ["ExamRoom","seat","seat","seat","seat","leave","seat"],
# [[10],[],[],[],[],[4],[]]
# Output: [null,0,9,4,2,null,5]
# Explanation:
# ExamRoom(10) -> null
# seat() -> 0, no one is in the room, then the student sits at seat number 0.
# seat() -> 9, the student sits at the last seat number 9.
# seat() -> 4, the student sits at the last seat number 4.
# seat() -> 2, the student sits at the last seat number 2.
# leave(4) -> null
# seat() -> 5, the student sits at the last seat number 5.
#
# Note:
# - 1 <= N <= 10^9
# - ExamRoom.seat() and ExamRoom.leave() will be called at most 10^4 times
# across all test cases.
# - Calls to ExamRoom.leave(p) are guaranteed to have a student currently
# sitting in seat number p.
import heapq
class ExamRoom(object):
def __init__(self, N):
"""
:type N: int
"""
self.__num = N
self.__seats = {-1: [-1, self.__num], self.__num: [-1, self.__num]}
self.__max_heap = [(-self.__distance((-1, self.__num)), -1, self.__num)]
def seat(self):
"""
:rtype: int
"""
while self.__max_heap[0][1] not in self.__seats or \
self.__max_heap[0][2] not in self.__seats or \
self.__seats[self.__max_heap[0][1]][1] != self.__max_heap[0][2] or \
self.__seats[self.__max_heap[0][2]][0] != self.__max_heap[0][1]:
heapq.heappop(self.__max_heap) # lazy deletion
_, left, right = heapq.heappop(self.__max_heap)
mid = 0 if left == -1 \
else self.__num-1 if right == self.__num \
else (left+right) // 2
self.__seats[mid] = [left, right]
heapq.heappush(self.__max_heap, (-self.__distance((left, mid)), left, mid))
heapq.heappush(self.__max_heap, (-self.__distance((mid, right)), mid, right))
self.__seats[left][1] = mid
self.__seats[right][0] = mid
return mid
def leave(self, p):
"""
:type p: int
:rtype: void
"""
left, right = self.__seats[p]
self.__seats.pop(p)
self.__seats[left][1] = right
self.__seats[right][0] = left
heapq.heappush(self.__max_heap, (-self.__distance((left, right)), left, right))
    def __distance(self, segment):
        return segment[1]-segment[0]-1 if segment[0] == -1 or segment[1] == self.__num \
            else (segment[1]-segment[0]) // 2
class ExamRoom_sortedArray(object): # USE THIS
# Partition the whole array into sorted available ranges.
# Time: seat: O(s), leave: O(s), s is the number of students already seated
# Space: O(s)
def __init__(self, N):
self.N = N
self.seated = []
def seat(self):
import bisect
if not self.seated:
self.seated.append(0)
return 0
# can sit in the mid of each pair of adjacent students, and left-most and right-most seat.
ans, maxdis = 0, self.seated[0]
for i in xrange(1, len(self.seated)):
dis = (self.seated[i] - self.seated[i-1]) // 2
if dis > maxdis: # don't need to check (dis == maxdis and cur < ans), because cur is mono-increasing
maxdis, ans = dis, self.seated[i-1] + dis
if self.N - 1 - self.seated[-1] > maxdis:
ans = self.N - 1
bisect.insort(self.seated, ans)
return ans
def leave(self, p):
self.seated.remove(p)
# Your ExamRoom object will be instantiated and called as such:
# obj = ExamRoom(N)
# param_1 = obj.seat()
# obj.leave(p)
|
gocardless.client.new_bill_url(30.00, name="Example payment")
|
from TextRank import Textrank
import pickle
from keras.preprocessing.text import Tokenizer
from gensim.models import word2vec
import numpy as np
from scipy import spatial
import re
import nltk
nltk.download('stopwords')
def cleanTex(descrlist):
REGEX_URL = r"(http(s)?:\/\/.)?(www\.)?[-a-zA-Z0-9@:%._\+~#=]{2,256}\.[a-z]{2,6}\b([-a-zA-Z0-9@:%_\+.~#?&//=]*)"
    # drop short boilerplate descriptions that are just a 'home...' blurb
    okdesc = [x for x in descrlist if not ('home' in x.lower() and len(x) < 50)]
    out = []
    for a in okdesc:
        a = re.sub(REGEX_URL, '', a)        # remove urls
        a = re.sub('[?!]', '', a)           # remove question/exclamation marks
        a = re.sub(r"(\w)\1{2,}", '', a)    # drop runs of a character repeated 3+ times, e.g. 'aaaa'
        a = ' '.join(a.split())             # collapse/trim whitespace
        out.append(a)
return out
def train_word2vec(strings):
basepath = 'TextRank/'
def fit_get_tokenizer(data, max_words):
tokenizer = Tokenizer(num_words=max_words, filters='!"#%&()*+,-./:;<=>?@[\\]^_`{|}~\t\n')
tokenizer.fit_on_texts(data)
return tokenizer
def get_embeddings(inp_data, vocabulary_inv, size_features=100,
mode='skipgram',
min_word_count=2,
context=5):
num_workers = 15 # Number of threads to run in parallel
downsampling = 1e-3 # Downsample setting for frequent words
print('Training Word2Vec model...')
sentences = [[vocabulary_inv[w] for w in s] for s in inp_data]
if mode == 'skipgram':
sg = 1
print('Model: skip-gram')
elif mode == 'cbow':
sg = 0
print('Model: CBOW')
embedding_model = word2vec.Word2Vec(sentences, workers=num_workers,
sg=sg,
size=size_features,
min_count=min_word_count,
window=context,
sample=downsampling)
embedding_model.init_sims(replace=True)
embedding_weights = np.zeros((len(vocabulary_inv) + 1, size_features))
embedding_weights[0] = 0
for i, word in vocabulary_inv.items():
if word in embedding_model:
embedding_weights[i] = embedding_model[word]
else:
embedding_weights[i] = np.random.uniform(-0.25, 0.25, embedding_model.vector_size)
return embedding_weights
tokenizer = fit_get_tokenizer(strings, max_words=150000)
print("Total number of words: ", len(tokenizer.word_index))
tagged_data = tokenizer.texts_to_sequences(strings)
vocabulary_inv = {}
for word in tokenizer.word_index:
vocabulary_inv[tokenizer.word_index[word]] = word
embedding_mat = get_embeddings(tagged_data, vocabulary_inv)
pickle.dump(tokenizer, open(basepath + "tokenizer.pkl", "wb"))
pickle.dump(embedding_mat, open(basepath + "embedding_matrix.pkl", "wb"))
return embedding_mat, tokenizer
with open('TextRank/verticals-dataset.pkl', 'rb') as f:
df = pickle.load(f)
embedding, tokenizer = train_word2vec(df.about)
def cosine_similarity(a, b):
return 1 - spatial.distance.cosine(a, b)
wordsstr = list(tokenizer.word_index.keys())[30:80]
man = embedding[tokenizer.word_index['manufacture']]
prod = embedding[tokenizer.word_index['production']]
cosine_similarity(man, prod)
for i in wordsstr:
print(i,cosine_similarity(man, embedding[tokenizer.word_index[i]]))
tr4w = Textrank.TextRank4Keyword()
# `text` was never defined in the original script; as an illustrative fix,
# run TextRank over the concatenated descriptions:
text = ' '.join(df.about)
tr4w.analyze(text, candidate_pos=['NOUN', 'PROPN'], window_size=4, lower=False)
tr4w.get_keywords(10)
|
import numpy as np
from sklearn.base import BaseEstimator, ClassifierMixin, RegressorMixin
from sklearn.utils.validation import check_X_y, check_array, check_is_fitted
from sklearn.linear_model import LinearRegression as SKLR
'''
A simple linear regression estimator, fit either via the normal equations
or via batch gradient descent.
'''
class LinearRegression(BaseEstimator, RegressorMixin):
def __init__(self, regularization=None, max_iter=250000):
self._has_been_fit = False
self.n_examples = None
self.n_dims = None
self.weights = None
self.regularization = regularization
self.max_iter = max_iter
def fit(self, X, y, mode='normal', learning_rate=1e-4):
X, y = check_X_y(X, y)
self.n_examples, self.n_dims = X.shape
fit_X = np.ones(shape=(self.n_examples, self.n_dims + 1))
fit_X[:, 1:] = X
if mode == 'normal':
if self.regularization is not None:
                regularization_term = self.regularization * np.eye(self.n_dims + 1)  # must match the bias-augmented design matrix
solution = np.dot(np.dot(np.linalg.pinv(np.dot(fit_X.T, fit_X) + regularization_term),
fit_X.T),
y)
else:
solution = np.dot(np.dot(np.linalg.pinv(np.dot(fit_X.T, fit_X)),
fit_X.T),
y)
elif mode == 'gd':
solution = np.zeros(shape=self.n_dims+1, dtype=float)
error = np.ones_like(solution)
count = 0
while not np.allclose(error, 0) and count < self.max_iter:
y_pred = np.dot(fit_X, solution)
regularization_term = self.regularization * solution if self.regularization is not None else 0
error = (y_pred - y)[:, np.newaxis] * fit_X
error = np.sum(error, axis=0) + regularization_term
if np.any(np.isnan(error)):
raise OverflowError("Optimization has diverged. Try again with lower learning rate or stronger regularization.")
# Minimizing MSE.
solution -= learning_rate * error
#print(f'Error it {count}: ', error)
count += 1
self.weights = solution
self._has_been_fit = True
return self
def predict(self, X):
check_is_fitted(self, [])
X = check_array(X)
n_examples, n_dims = X.shape
if n_dims != self.n_dims:
raise ValueError(f"Invalid number of dimensions. Expected {self.n_dims}, got {n_dims}")
extended_X = np.ones(shape=(n_examples, n_dims+1), dtype=X.dtype)
extended_X[:, 1:] = X
return np.dot(extended_X, self.weights)[:, np.newaxis]
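# Minimal usage sketch (not part of the original module); the data below is
# synthetic, generated only to illustrate the API.
if __name__ == '__main__':
    rng = np.random.RandomState(0)
    X = rng.rand(100, 3)
    y = X.dot(np.array([1.5, -2.0, 0.5])) + 3.0   # known weights, bias 3.0
    model = LinearRegression().fit(X, y)          # default: normal equations
    print(model.weights)                          # ~[3.0, 1.5, -2.0, 0.5]
    print(model.predict(X[:2]).ravel())           # predictions for two rows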
|
import unittest
import responses
import urllib
from ksql.api import BaseAPI
class TestBaseApi(unittest.TestCase):
@responses.activate
def test_base_api_query(self):
responses.add(responses.POST, "http://dummy.org/query", body="test", status=200, stream=True)
base = BaseAPI("http://dummy.org")
result = base.query("so")
with self.assertRaises(urllib.error.HTTPError):
for entry in result:
entry
|
"""
Potentials in ultracold atom experiments are typically constructed using
Gaussian laser beams, here we provide some definitions that will make it easy
to assemble a generic optical dipole potential.
"""
import numpy as np
import matplotlib.pyplot as plt
import matplotlib
from vec3 import vec3, cross
import scipy.constants as C
from mpl_toolkits.mplot3d import axes3d
def beam(xb,yb,zb,wx,wy,wavelen):
"""
Normalized intensity profile of a Gaussian beam which propagates along z and
has its waist at the origin
Parameters
    ----------
    xb, yb, zb : Beam-frame coordinates. These can be single floats, or can be
        array-like for full vectorization.
    wx, wy : Beam waists along the x and y directions.
    wavelen : Wavelength of the light.
Returns
-------
intensity : The intensity of the gaussian beam. Normalized, so it is
equal to 1. at the origin.
Notes
----
Examples
--------
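    At the waist the profile is normalized:
    >>> beam(0., 0., 0., 30., 30., 1.064)
    1.0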
"""
zRx = np.pi * wx**2 / wavelen
zRy = np.pi * wy**2 / wavelen
sqrtX = np.sqrt( 1 + np.power(zb/zRx,2) )
sqrtY = np.sqrt( 1 + np.power(zb/zRy,2) )
intensity = np.exp( -2.*( np.power(xb/(wx*sqrtX ),2) \
+ np.power(yb/(wy*sqrtY),2) )) / sqrtX / sqrtY
return intensity
def uL( wavelen, **kwargs ):
"""
Calculates the factor uL which allows conversion from intensity to depth
in microKelvin.
Parameters
----------
wavelen : wavelength of the light used to create the dipole potential.
Returns
-------
uL : intensity to uK factor
Notes
----
Examples
--------
"""
Cc = C.c * 1e6 # speed of light um s^-1
Gamma = kwargs.get('Gamma', 2*np.pi *5.9e6 ) # linewidth s^-1
lambda0 = kwargs.get('lambda0', 0.671 ) # transition wavelength in microns
omega0 = 2*np.pi*Cc / lambda0
omegaL = 2*np.pi*Cc / wavelen
intensity = 1.0
depthJ = (intensity)* -3*np.pi* Cc**2*Gamma / ( 2*omega0**3) * \
( 1/(omega0 - omegaL ) + 1/(omega0 + omegaL ) ) # Joule
depthuK = depthJ / C.k *1e6 # C.k is Boltzmann's constant
return depthuK
def Erecoil( wavelen, mass):
"""
Calculates the recoil energy in microKelvin for a given photon wavelength
and atom mass
Parameters
----------
wavelen : wavelength of the photon in microns
mass : mass of the atom in atomic mass units
Returns
-------
Er : recoil energy in microKelvin
Notes
----
Examples
--------
"""
inJ = C.h**2 / ( 2* \
mass*C.physical_constants['unified atomic mass unit'][0] * \
(wavelen*1e-6)**2 )
inuK = inJ / C.k *1e6
return inuK
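# For reference, Erecoil(1.064, 6.0) evaluates to about 1.4 uK, i.e. the
# recoil energy of 6Li in 1064 nm light, Er = h^2 / (2 m lambda^2).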
class GaussBeam:
"""
This class defines the potential created for a Gaussian beam. The
initialization parameters can be given to the class as keyword arguments
(kwargs)
Parameters
----------
mW : float. Power in the beam in milliWatts
waists : tuple. ( waistx, waisty )
wavelength : float. wavelength of the light
    axis : tuple.   ( theta, phi ) polar coordinates specifying
                    direction of propagation of the beam
    origin : tuple.  ( x, y, z ) cartesian coordinates specifying the
location of the beam waist
"""
def __init__( self,
**kwargs ):
self.mW = kwargs.get('mW',1000.0 )
self.w = kwargs.get('waists', (30.,30.) )
self.l = kwargs.get('wavelength', 1.064 )
#
self.axis = kwargs.get('axis', (np.pi/2,0.) )
self.origin = kwargs.get('origin', vec3(0,0,0) )
# Make sure vectors are of type(vec3)
self.axisvec = vec3()
th = self.axis[0]
ph = self.axis[1]
self.axisvec.set_spherical( 1, th, ph)
self.origin = vec3(self.origin)
# Calculate two orthogonal directions
# which will be used to specify the beam waists
self.orth0 = vec3( np.cos(th)*np.cos(ph) , \
np.cos(th)*np.sin(ph), -1.*np.sin(th) )
self.orth1 = vec3( -1.*np.sin(ph), np.cos(ph), 0. )
def transform(self, X, Y, Z):
# coordinates into beam coordinates
zb = X*self.axisvec[0] + Y*self.axisvec[1] + Z*self.axisvec[2]
xb = X*self.orth0[0] + Y*self.orth0[1] + Z*self.orth0[2]
yb = X*self.orth1[0] + Y*self.orth1[1] + Z*self.orth1[2]
return xb,yb,zb
def __call__( self, X, Y, Z):
"""
Returns the depth in microKelvin of the potential produced by the
Gaussian beam.
Parameters
----------
X, Y, Z : can be floats or array-like. The potential is calculated
in a vectorized way.
Returns
-------
potential in microKelvin
Notes
----
Examples
--------
"""
xb,yb,zb = self.transform( X,Y,Z)
gauss = beam( xb,yb,zb, self.w[0], self.w[1], self.l)
intensity = (2/np.pi)* self.mW/1000. /self.w[0]/self.w[1] *gauss # W um^-2
return uL(self.l)*intensity
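# Usage sketch (illustrative numbers): a 1 W beam with 30 um waists at 1064 nm,
# propagating along x (the default axis):
#   g = GaussBeam(mW=1000., waists=(30., 30.), wavelength=1.064)
#   g(0., 0., 0.)   # depth at the focus in uK; negative for red-detuned light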
class LatticeBeam(GaussBeam):
"""
This class defines the lattice potential created by two retroreflected
Gaussian beams.
The initialization parameters can be given to the class as keyword
arguments (kwargs).
It is assumed that the input beam and retro beam have the same beam waists
Parameters
----------
waists : tuple. ( waistx, waisty )
wavelength : float. wavelength of the light
    axis : tuple.   ( theta, phi ) polar coordinates specifying
                    direction of propagation of the beam
    origin : tuple.  ( x, y, z ) cartesian coordinates specifying the
location of the beam waist
s0 : float. The lattice depth at the waist in units of the
recoil energy Er
scale : The periodicity of the lattice potential is increased by
this scale, for visibility when plotting it. This does not
affect other results, just the plotting.
mass : float. The mass of the atom in atomic mass units.
retro : The retro factor. This is the percentage of power that is
retroreflected. The losses on the retro-reflection amount
to losses = 1 - retro .
alpha : Used to specify the amount of the retro beam that can
interfere with the input beam to form a lattice.
If alpha=1 all of the retro beam intereres, if alpha = 0
none of the retro beam interferes.
"""
def __init__(self, **kwargs):
"""Lattice beam, with retro factor and polarization """
GaussBeam.__init__(self, **kwargs)
self.scale = kwargs.get('scale',10.)
self.mass = kwargs.get('mass', 6.0)
self.s0 = kwargs.get('s0', 7.0)
self.retro = kwargs.get('retro', 1.0)
self.alpha = kwargs.get('alpha', 1.0)
self.Er0 = Erecoil( self.l , self.mass)
self.mW = 1000 * (self.s0 * self.Er0 ) \
* np.abs( np.pi / 8. / uL(self.l) )\
* self.w[0]*self.w[1] / self.retro
def __call__( self, X, Y, Z):
"""
Returns the lattice potential in microKelvin.
Parameters
----------
X, Y, Z : can be floats or array-like. The potential is calculated
in a vectorized way.
Returns
-------
lattice potential in microKelvin
Notes
----
Examples
--------
"""
xb,yb,zb = self.transform( X,Y,Z)
gauss = beam( xb,yb,zb, self.w[0], self.w[1], self.l)
intensity = (2/np.pi)* self.mW/1000. /self.w[0]/self.w[1] *gauss # W um^-2
cosSq = np.power(np.cos(2*np.pi/self.l * zb/self.scale ),2)
lattice = cosSq *4*np.sqrt(self.retro*self.alpha)\
+ ( 1 + self.retro - 2*np.sqrt(self.retro*self.alpha) )
return uL(self.l)*intensity*lattice
def getBottom( self, X, Y, Z):
"""
Returns the envelope of the lattice potential in microKelvin.
Parameters
----------
X, Y, Z : can be floats or array-like. The potential is calculated
in a vectorized way.
Returns
-------
envelope of the lattice potential in microKelvin
Notes
----
Examples
--------
"""
xb,yb,zb = self.transform( X,Y,Z)
gauss = beam( xb,yb,zb, self.w[0], self.w[1], self.l)
intensity = (2/np.pi)* self.mW/1000. /self.w[0]/self.w[1] *gauss # W um^-2
latticeBot = 4*np.sqrt(self.retro*self.alpha) \
+ 1 + self.retro - 2*np.sqrt(self.retro*self.alpha)
return uL(self.l)*intensity * latticeBot
def getS0( self, X, Y, Z):
"""
Returns the lattice depth in microKelvin
Parameters
----------
X, Y, Z : can be floats or array-like. The potential is calculated
in a vectorized way.
Returns
-------
lattice depth in microKelvin
Notes
----
Examples
--------
"""
xb,yb,zb = self.transform( X,Y,Z)
gauss = beam( xb,yb,zb, self.w[0], self.w[1], self.l)
intensity = (2/np.pi)* self.mW/1000. /self.w[0]/self.w[1] \
* gauss # W um^-2
latticeV0 = 4*np.sqrt(self.retro*self.alpha)
return np.abs(uL(self.l)*intensity * latticeV0)
class potential:
"""
A potential is defined as a collection of beams that do not interfere
with each other.
    The total potential is the sum of the potentials created by each beam.
Parameters
----------
units : tuple, two elements.
- First element is the string which will be used for
labeling plots.
- Second element is the multiplication factor required to
obtain the desired units. Beams are by default in
microKelvin.
beams : list, this is the list of beams that makes up the
potential
"""
def __init__(self, beams, **kwargs ):
self.units = kwargs.get('units', ('$\mu\mathrm{K}$', 1.))
self.unitlabel = self.units[0]
self.unitfactor = self.units[1]
self.beams = beams
def evalpotential( self, X, Y, Z):
"""
Evaluates the total potential by summing over beams
Parameters
----------
X, Y, Z : can be floats or array-like. The potential is calculated
in a vectorized way.
Returns
-------
total potential. The units used depend on self.unitfactor.
Notes
----
Examples
--------
"""
EVAL = np.zeros_like(X)
for b in self.beams:
EVAL += b(X,Y,Z)
return EVAL* self.unitfactor
"""
Below we include functions to make cuts through the geometry. These can be
line cuts or plane cuts.
"""
def linecut_points( **kwargs ):
"""
    Defines a line cut through the potential geometry. Parameters are given
as keyword arguments (kwargs).
All distances are in microns.
Parameters
----------
npoints : number of points along the cut
    extents : a way of specifying the limits for a cut that is symmetric
              about the cut origin. The limits will be
              lims = (-extents, extents)
    lims : used only if extents is None. Limits are specified using
           a tuple ( min, max )
    direc : tuple, two elements. polar coordinates for the direction
            of the cut
    origin : tuple, three elements. cartesian coordinates for the origin
            of the cut
Returns
-------
t : array which parameterizes the distance along the cut
X, Y, Z : each of X,Y,Z is an array with the same shape as t.
They correspond to the cartesian coordinates of all the
points along the cut
"""
npoints = kwargs.get('npoints', 320)
extents = kwargs.get('extents',None)
lims = kwargs.get('lims', (-80.,80.))
direc = kwargs.get('direc', (np.pi/2, 0.))
origin = kwargs.get('origin', vec3(0.,0.,0.))
if extents is not None:
lims = (-extents, extents)
# Prepare set of points for plot
t = np.linspace( lims[0], lims[1], npoints )
unit = vec3()
th = direc[0]
ph = direc[1]
unit.set_spherical(1, th, ph)
# Convert vec3s to ndarray
unit = np.array(unit)
origin = np.array(origin)
#
XYZ = origin + np.outer(t, unit)
X = XYZ[:,0]
Y = XYZ[:,1]
Z = XYZ[:,2]
return t, X, Y, Z, lims
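# Usage sketch: a symmetric 160 um cut along the z axis through the origin.
# 'pot' stands for any potential instance built with the class above.
# t, X, Y, Z, lims = linecut_points(npoints=200, extents=80., direc=(0., 0.))
# V = pot.evalpotential(X, Y, Z)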
def surfcut_points(**kwargs):
"""
    Defines a surface cut through the potential geometry. Parameters are given
as keyword arguments (kwargs).
All distances are in microns.
Parameters
----------
npoints : number of points along the cut
    extents : a way of specifying the limits for a cut that is symmetric
              about the cut origin. The limits will be
              lims = (-extents, extents)
    lims0, lims1 : used only if extents is None. Limits along the two
           in-plane axes, each specified as a tuple ( min, max )
    normal : tuple, two elements. polar coordinates for the normal
            of the cut plane
origin : tuple, three elements. cartesian coordinates for the origin
of the cut
ax0 : optional axes where the reference surface for the surface
cut can be plotted
Returns
-------
T0, T1 : arrays which parameterizes the position on the cut surface
X, Y, Z : each of X,Y,Z is an array with the same shape as T0 and T1.
They correspond to the cartesian coordinates of all the
points on the cut surface.
"""
npoints = kwargs.get( 'npoints', 240 )
origin = kwargs.get( 'origin', vec3(0.,0.,0.))
normal = kwargs.get( 'normal', (np.pi/2., 0.) )
lims0 = kwargs.get( 'lims0', (-50., 50.) )
lims1 = kwargs.get( 'lims1', (-50., 50.) )
extents = kwargs.get( 'extents', None)
if extents is not None:
lims0 = (-extents, extents)
lims1 = (-extents, extents)
# Make the unit vectors that define the plane
unit = vec3()
th = normal[0]
ph = normal[1]
unit.set_spherical( 1, th, ph)
orth0 = vec3( -1.*np.sin(ph), np.cos(ph), 0. )
orth1 = cross(unit,orth0)
t0 = np.linspace( lims0[0], lims0[1], npoints )
t1 = np.linspace( lims1[0], lims1[1], npoints )
# Obtain points on which function will be evaluated
T0,T1 = np.meshgrid(t0,t1)
X = origin[0] + T0*orth0[0] + T1*orth1[0]
Y = origin[1] + T0*orth0[1] + T1*orth1[1]
Z = origin[2] + T0*orth0[2] + T1*orth1[2]
    # If given an axes it will plot the reference surface to help visualize
# the surface cut
# Note that the axes needs to be created with a 3d projection.
# For example:
# fig = plt.figure( figsize=(4.,4.) )
# gs = matplotlib.gridspec.GridSpec( 1,1 )
# ax0 = fig.add_subplot( gs[0,0], projection='3d' )
ax0 = kwargs.get( 'ax0', None )
if ax0 is not None:
# Plot the reference surface
ax0.plot_surface(X, Y, Z, rstride=8, cstride=8, alpha=0.3, linewidth=0.)
ax0.set_xlabel('X')
ax0.set_ylabel('Y')
ax0.set_zlabel('Z')
lmin = min([ ax0.get_xlim()[0], ax0.get_ylim()[0], ax0.get_zlim()[0] ] )
lmax = max([ ax0.get_xlim()[1], ax0.get_ylim()[1], ax0.get_zlim()[1] ] )
ax0.set_xlim( lmin, lmax )
ax0.set_ylim( lmin, lmax )
ax0.set_zlim( lmin, lmax )
ax0.set_yticklabels([])
ax0.set_xticklabels([])
ax0.set_zticklabels([])
# If given an axes and a potential it will plot the surface cut of the
# potential
ax1 = kwargs.get( 'ax1', None)
pot = kwargs.get( 'potential', None)
if (ax1 is not None) and (pot is not None):
# Evaluate function at points and plot
EVAL = pot.evalpotential(X,Y,Z)
im =ax1.pcolormesh(T0, T1, EVAL, cmap = plt.get_cmap('jet'))
# cmaps: rainbow, jet
plt.axes( ax1)
cbar = plt.colorbar(im)
        cbar.set_label(pot.unitlabel, rotation=0)
return T0, T1, X, Y, Z
def plot3surface( pot, **kwargs ):
"""
This is a packaged function to quickly plot a potential along
    three orthogonal planes that intersect at the origin.
Parameters
----------
pot : potential to be plotted
"""
fig = plt.figure( figsize = (8., 8.) )
gs = matplotlib.gridspec.GridSpec( 3,2, wspace=0.2)
# Make a list with three perpendicular directions which
# will define the three surface cuts
perp = [(np.pi/2., 0.), (np.pi/2., -np.pi/2.), (0., -1.*np.pi/2.) ]
# Iterate to plot the three surface cuts
yMin = 1e16
yMax = -1e16
Ims = []
for i in range(3):
ax0 = fig.add_subplot( gs[i,0], projection='3d')
ax1 = fig.add_subplot( gs[i,1])
T0, T1, X, Y, Z = surfcut_points( normal = perp[i], \
ax0=ax0, **kwargs )
EVAL = pot.evalpotential(X,Y,Z)
im = ax1.pcolormesh( T0, T1, EVAL, \
cmap=plt.get_cmap('jet') )
plt.axes( ax1 )
cbar = plt.colorbar(im)
cbar.set_label( pot.unitlabel, rotation=0)
ymin = EVAL.min()
ymax = EVAL.max()
Ims.append(im)
if ymin < yMin : yMin = ymin
if ymax > yMax : yMax = ymax
for im in Ims:
im.set_clim( vmin=yMin, vmax=yMax)
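# Usage sketch ('pot' is assumed to be a potential instance as defined above):
# plot3surface(pot, extents=40.)
# plt.show()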
|
import InputReader
def recursiveGame(player1cards, player2cards):
    """Play one game of Recursive Combat; returns (winner, player1cards, player2cards)."""
states = []
while len(player1cards) > 0 and len(player2cards) > 0:
        state = [player1cards.copy(), player2cards.copy()]
        # Infinite-game prevention rule: if this exact state has been seen
        # before in this game, player 1 wins the (sub-)game immediately.
        if state in states:
            return 1, player1cards, player2cards
        states.append(state)
        winner = 0
        # Recurse into a sub-game when both players hold at least as many
        # cards below their top card as that card's value; otherwise the
        # higher top card wins the round.
        if len(player1cards) > player1cards[0] and len(player2cards) > player2cards[0]:
            winner, _, _ = recursiveGame(player1cards[1:player1cards[0] + 1].copy(), player2cards[1:player2cards[0] + 1].copy())
elif (player1cards[0] > player2cards[0]):
winner = 1
else:
winner = 2
if winner == 1:
player1cards.append(player1cards[0])
player1cards.append(player2cards[0])
else:
player2cards.append(player2cards[0])
player2cards.append(player1cards[0])
player1cards = player1cards[1:]
player2cards = player2cards[1:]
if len(player1cards) > 0:
return 1, player1cards, player2cards
else:
return 2, player1cards, player2cards
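# Sanity-check sketch using the sample decks from the Advent of Code 2020
# day 22 puzzle description: the recursive game is won by player 2 (score 291).
# winner, p1, p2 = recursiveGame([9, 2, 6, 3, 1], [5, 8, 4, 7, 10])
# assert winner == 2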
def main():
origPlayer1cards = []
origPlayer2cards = []
player2 = False
for line in InputReader.readInputFileLines(22):
if (len(line) == 0):
continue
if line.startswith("Player"):
if line[-2] == '2':
player2 = True
continue
if player2:
origPlayer2cards.append(int(line))
else:
origPlayer1cards.append(int(line))
player1cards = origPlayer1cards.copy()
player2cards = origPlayer2cards.copy()
while len(player1cards) > 0 and len(player2cards) > 0:
if (player1cards[0] > player2cards[0]):
player1cards.append(player1cards[0])
player1cards.append(player2cards[0])
else:
player2cards.append(player2cards[0])
player2cards.append(player1cards[0])
player1cards = player1cards[1:]
player2cards = player2cards[1:]
winner = 0
winningScore = 0
if len(player1cards) > 0:
winner = 1
for i in range(1, len(player1cards) + 1):
winningScore += player1cards[len(player1cards) - i] * i
else:
winner = 2
for i in range(1, len(player2cards) + 1):
winningScore += player2cards[len(player2cards) - i] * i
print(f"Player {winner} won part 1 with a score of {winningScore}.")
    winner, player1cards, player2cards = recursiveGame(origPlayer1cards.copy(), origPlayer2cards.copy())
winningScore = 0
if winner == 1:
for i in range(1, len(player1cards) + 1):
winningScore += player1cards[len(player1cards) - i] * i
else:
for i in range(1, len(player2cards) + 1):
winningScore += player2cards[len(player2cards) - i] * i
print(f"Player {winner} won part 2 with a score of {winningScore}.")
if __name__ == '__main__':
main()
|
from dos.open_api import OpenAPI
from dos.flask_wrappers import wrap_validation, wrap_handler, wrap_route
from flask import Flask, redirect, jsonify, url_for, render_template
from .api.dog import get as dog_get
from .api.cat import get as cat_get
def create_app():
app = Flask(__name__)
open_api = OpenAPI("Pet Shop API", "1.0")
open_api.add_contact("Pet Shop Dev Team", "https://www.example.com", "pet_shop@example.com")
open_api.add_logo("/static/pet_shop.png", "#7D9FC3", "Pet Shop", "/")
open_api.add_tag(
"introduction",
"Welcome! This is the documentation for the Pet Shop API.",
)
open_api.add_tag(
"Authentication and Authorization",
"Authentication and Authorization Information. For this basic dos example, there is none.",
)
open_api.add_tag(
"dog",
"Endpoints for interacting with a Dog",
)
open_api.add_tag(
"cat",
"Endpoints interacting with a Cat",
)
open_api.add_disclaimer(
"This file is generated automatically. Do not edit it directly! Edit "
"the input_schema and output_schema of the endpoint you are changing."
)
handler_mapping = [
(dog_get, "/dog/get", "get"),
(cat_get, "/cat/get", "get")
]
for module, path, http_method in handler_mapping:
handler = wrap_handler(module.__name__, module.handler)
handler = wrap_validation(handler, module)
wrap_route(app, handler, path, http_method)
open_api.document(module, path, http_method)
@app.route("/")
def index(): # pylint: disable=unused-variable
return render_template("index.html")
@app.route("/source")
def redirect_to_source(): # pylint: disable=unused-variable
return redirect(url_for("open_api_endpoint"))
@app.route("/docs")
def docs(): # pylint: disable=unused-variable
return render_template("docs.html")
@app.route("/open_api.json")
def open_api_endpoint(): # pylint: disable=unused-variable
return jsonify(open_api)
return app
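# Usage sketch (hypothetical local run):
# app = create_app()
# app.run(host="127.0.0.1", port=5000, debug=True)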
|
import math
n = float(input("Enter a real number: "))
print("its integer part is {}".format(math.floor(n)))
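# Note: math.floor rounds toward negative infinity, e.g. math.floor(-2.5) == -3;
# if truncation toward zero is intended for negative inputs, math.trunc(n) differs.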
|
from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class BaseContent(models.Model):
ACTIVE_CHOICES = ((0, 'Inactive'), (2, 'Active'),)
active = models.PositiveIntegerField(choices=ACTIVE_CHOICES,
default=2)
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
class Meta:
        # Abstract base class: Django will not create a database table for it.
abstract = True
# BaseContent
def switch(self):
# Deactivate a model if it is active
# Activate a model if it is inactive
self.active = {2: 0, 0: 2}[self.active]
self.save()
class KaamKaj(BaseContent):
title = models.CharField(max_length=150)
memo = models.TextField(blank=True)
complete_date = models.DateTimeField(null=True, blank=True)
important = models.BooleanField(default=False)
user = models.ForeignKey(User, on_delete=models.CASCADE)
def __str__(self):
return self.title
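# Usage sketch (hypothetical shell session; 'some_user' is a placeholder User):
# task = KaamKaj.objects.create(title="Buy milk", user=some_user)
# task.switch()  # flips active between 2 (Active) and 0 (Inactive)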
|
from Artesian._Query.QueryParameters.QueryParameters import _QueryParameters
from Artesian._Query.Config.VersionSelectionConfig import VersionSelectionConfig
class VersionedQueryParameters(_QueryParameters):
def __init__(self, ids, extractionRangeSelectionConfig, extractionRangeType, timezone, filterId, granularity, transformId, versionSelectionConfig, versionSelectionType):
_QueryParameters.__init__(self, ids, extractionRangeSelectionConfig, extractionRangeType, timezone, filterId)
self.granularity = granularity
self.transformId = transformId
self.fill = None
        if versionSelectionConfig is None:
            self.versionSelectionConfig = VersionSelectionConfig()
        else:
            self.versionSelectionConfig = versionSelectionConfig
self.versionSelectionType = versionSelectionType
|
from geotools.utils.polygon import Polygon
import time
def main():
p = Polygon([
[2.3957061767578125, 48.89812957181126],
[2.399139404296875, 48.890906639609454],
[2.3996543884277344, 48.88413419286922],
[2.4090957641601562, 48.8801831753449],
[2.412700653076172, 48.876570543321755],
[2.414073944091797, 48.8712640169951],
[2.414073944091797, 48.86358549323598],
[2.4164772033691406, 48.849354525964365],
[2.4125289916992188, 48.83466754148594],
[2.4109840393066406, 48.833989576686],
[2.4151039123535156, 48.83342459901093],
[2.4224853515625, 48.83591045312373],
[2.4199104309082027, 48.8414466848806],
[2.4199104309082027, 48.84359322235957],
[2.422313690185547, 48.84460997116046],
[2.4247169494628906, 48.84189859515306],
[2.4372482299804688, 48.84099477053062],
[2.4381065368652344, 48.84483591253515],
[2.440166473388672, 48.844497000090826],
[2.4406814575195312, 48.845965604118284],
[2.4465179443359375, 48.84585263610676],
[2.4468612670898438, 48.844948882840264],
[2.4631690979003906, 48.842689428318415],
[2.466602325439453, 48.83997794833464],
[2.4702072143554688, 48.83647540276501],
[2.469348907470703, 48.833876581660704],
[2.4657440185546875, 48.83184262762493],
[2.4647140502929688, 48.827774471831894],
[2.466602325439453, 48.827322434132746],
[2.4631690979003906, 48.81884597223549],
[2.4587059020996094, 48.81681140805428],
[2.4494361877441406, 48.81805476264432],
[2.441883087158203, 48.81794173168324],
[2.4339866638183594, 48.81941111429733],
[2.4302101135253906, 48.823140892101684],
[2.4199104309082027, 48.82415805606007],
[2.4114990234375, 48.82483615389669],
[2.4025726318359375, 48.829695586560575],
[2.364120483398437, 48.81590713080018],
[2.3557090759277344, 48.81579409499648],
[2.351932525634765, 48.81828082380189],
[2.346954345703125, 48.81579409499648],
[2.33184814453125, 48.816924441564105],
[2.332019805908203, 48.818393853998344],
[2.291507720947265, 48.826983403182346],
[2.2789764404296875, 48.832407623139915],
[2.272796630859375, 48.82788748061953],
[2.267303466796875, 48.827774471831894],
[2.2667884826660156, 48.83161662763493],
[2.270050048828125, 48.832972612283456],
[2.267475128173828, 48.83466754148594],
[2.2630119323730464, 48.833876581660704],
[2.25494384765625, 48.83466754148594],
[2.2513389587402344, 48.838961105496054],
[2.2508239746093746, 48.84291537835776],
[2.252025604248047, 48.84517482268593],
[2.2420692443847656, 48.847773057644694],
[2.2394943237304688, 48.850145241393776],
[2.2235298156738277, 48.85342092943525],
[2.2279930114746094, 48.86584400488787],
[2.2322845458984375, 48.87024780944447],
[2.239837646484375, 48.87171565817035],
[2.245502471923828, 48.876570543321755],
[2.255115509033203, 48.87408670745326],
[2.2585487365722656, 48.88063473600221],
[2.2774314880371094, 48.87815110193676],
[2.279834747314453, 48.87894136251639],
[2.2806930541992188, 48.883005362568866],
[2.2848129272460938, 48.886617529842795],
[2.2930526733398438, 48.890455171696374],
[2.294769287109375, 48.889890831072385],
[2.310047149658203, 48.897113910028416],
[2.3186302185058594, 48.89982229558958],
[2.3201751708984375, 48.90106358992757],
[2.384033203125, 48.902417694046676],
[2.3919296264648438, 48.90106358992757],
[2.3957061767578125, 48.89812957181126]
])
t = time.time()
    print(p.hashcodes(min_precision=2, max_precision=7, cover=False))
print("elapsed time: {0:.2f}s".format(time.time() - t))
if __name__ == "__main__":
main()
|
import math
# Loop over the numbers from 11 to 99.
for i in range(11, 99):
    # Reset "x" to its starting value
x = 2
j = 0
    # Loop while "x" is not equal to int(sqrt(i)) + 1:
    # a value of (sqrt(i)+1) or larger can never be the smallest divisor of "i",
    # so there is no need to check divisors above (sqrt(i)+1).
while (x != int(math.sqrt(i)) + 1):
        # Check whether "x" is a divisor of "i"
if (i % x == 0):
j = 1
            # Finding at least one divisor is enough to know the number is not prime
break
else:
x += 1
if (j == 0):
        # Print the prime numbers
print(i)
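# A more idiomatic sketch of the same trial-division test (equivalent logic):
# def is_prime(i):
#     return all(i % x for x in range(2, int(math.sqrt(i)) + 1))
# primes = [i for i in range(11, 99) if is_prime(i)]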
|
"""
api.video
api.video is an API that encodes on the go to facilitate immediate playback, enhancing viewer streaming experiences across multiple devices and platforms. You can stream live or on-demand online videos within minutes. # noqa: E501
Contact: ecosystem@api.video
"""
import os # noqa: F401
import re # noqa: F401
import sys # noqa: F401
from types import MethodType
from types import FunctionType
from apivideo.api_client import ApiClient
from apivideo.endpoint import EndPoint as _EndPoint, ChunkIO
from apivideo.model.video_id import VideoId
from apivideo.model_utils import ( # noqa: F401
check_allowed_values,
check_validations,
date,
datetime,
file_type,
none_type,
validate_and_convert_types
)
from apivideo.exceptions import ApiTypeError, ApiValueError
from apivideo.model.bad_request import BadRequest
from apivideo.model.not_found import NotFound
from apivideo.model.player_theme import PlayerTheme
from apivideo.model.player_theme_creation_payload import PlayerThemeCreationPayload
from apivideo.model.player_theme_update_payload import PlayerThemeUpdatePayload
from apivideo.model.player_themes_list_response import PlayerThemesListResponse
class PlayerThemesApi(_EndPoint):
def delete(
self,
player_id,
**kwargs
):
"""Delete a player # noqa: E501
Delete a player if you no longer need it. You can delete any player that you have the player ID for. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete(player_id, async_req=True)
>>> result = thread.get()
Args:
player_id (str): The unique identifier for the player you want to delete.
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
async_req (bool): execute request asynchronously
Returns:
None
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['player_id'] = \
player_id
params_map = {
'all': [
'player_id',
'async_req',
'_preload_content',
'_request_timeout',
'_return_http_data_only'
],
'required': [
'player_id',
],
'nullable': [
'_request_timeout'
],
'enum': [
],
'validation': [
]
}
validations = {
}
allowed_values = {
}
openapi_types = {
'player_id':
(str,),
'async_req': (bool,),
'_preload_content': (bool,),
'_request_timeout': (none_type, int, (int,), [int]),
'_return_http_data_only': (bool,)
}
attribute_map = {
'player_id': 'playerId',
}
location_map = {
'player_id': 'path',
}
collection_format_map = {
}
for key, value in kwargs.items():
if key not in params_map['all']:
raise ApiTypeError(
"Got an unexpected parameter '%s'"
" to method `delete`" %
(key, )
)
if (key not in params_map['nullable'] and value is None):
raise ApiValueError(
"Value may not be None for non-nullable parameter `%s`"
" when calling `delete`" %
(key, )
)
for key in params_map['required']:
if key not in kwargs.keys():
raise ApiValueError(
"Missing the required parameter `%s` when calling "
"`delete`" % (key, )
)
self._validate_inputs(kwargs, params_map, allowed_values, validations, openapi_types)
params = self._gather_params(kwargs, location_map, attribute_map, openapi_types, collection_format_map)
return self.api_client.call_api(
"/players/{playerId}",
"DELETE",
params['path'],
params['query'],
params['header'],
body=params['body'],
post_params=params['form'],
files=params['file'],
response_type=None,
async_req=kwargs['async_req'],
_return_http_data_only=kwargs['_return_http_data_only'],
_preload_content=kwargs['_preload_content'],
_request_timeout=kwargs['_request_timeout'],
collection_formats=params['collection_format'])
def delete_logo(
self,
player_id,
**kwargs
):
"""Delete logo # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_logo(player_id, async_req=True)
>>> result = thread.get()
Args:
player_id (str): The unique identifier for the player.
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
async_req (bool): execute request asynchronously
Returns:
None
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['player_id'] = \
player_id
params_map = {
'all': [
'player_id',
'async_req',
'_preload_content',
'_request_timeout',
'_return_http_data_only'
],
'required': [
'player_id',
],
'nullable': [
'_request_timeout'
],
'enum': [
],
'validation': [
]
}
validations = {
}
allowed_values = {
}
openapi_types = {
'player_id':
(str,),
'async_req': (bool,),
'_preload_content': (bool,),
'_request_timeout': (none_type, int, (int,), [int]),
'_return_http_data_only': (bool,)
}
attribute_map = {
'player_id': 'playerId',
}
location_map = {
'player_id': 'path',
}
collection_format_map = {
}
for key, value in kwargs.items():
if key not in params_map['all']:
raise ApiTypeError(
"Got an unexpected parameter '%s'"
" to method `delete_logo`" %
(key, )
)
if (key not in params_map['nullable'] and value is None):
raise ApiValueError(
"Value may not be None for non-nullable parameter `%s`"
" when calling `delete_logo`" %
(key, )
)
for key in params_map['required']:
if key not in kwargs.keys():
raise ApiValueError(
"Missing the required parameter `%s` when calling "
"`delete_logo`" % (key, )
)
self._validate_inputs(kwargs, params_map, allowed_values, validations, openapi_types)
params = self._gather_params(kwargs, location_map, attribute_map, openapi_types, collection_format_map)
return self.api_client.call_api(
"/players/{playerId}/logo",
"DELETE",
params['path'],
params['query'],
params['header'],
body=params['body'],
post_params=params['form'],
files=params['file'],
response_type=None,
async_req=kwargs['async_req'],
_return_http_data_only=kwargs['_return_http_data_only'],
_preload_content=kwargs['_preload_content'],
_request_timeout=kwargs['_request_timeout'],
collection_formats=params['collection_format'])
def list(
self,
**kwargs
):
"""List all player themes # noqa: E501
        Retrieve a list of all the player themes you created, as well as details about each one. See also the tutorials that use the [player endpoint](https://api.video/blog/endpoints/player). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list(async_req=True)
>>> result = thread.get()
Keyword Args:
sort_by (str): createdAt is the time the player was created. updatedAt is the time the player was last updated. The time is presented in ISO-8601 format.. [optional]
            sort_order (str): Allowed: asc, desc. Ascending for date and time means that earlier values precede later ones. Descending means that later values precede earlier ones.. [optional]
current_page (int): Choose the number of search results to return per page. Minimum value: 1. [optional] if omitted the server will use the default value of 1
page_size (int): Results per page. Allowed values 1-100, default is 25.. [optional] if omitted the server will use the default value of 25
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
async_req (bool): execute request asynchronously
Returns:
PlayerThemesListResponse
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
params_map = {
'all': [
'sort_by',
'sort_order',
'current_page',
'page_size',
'async_req',
'_preload_content',
'_request_timeout',
'_return_http_data_only'
],
'required': [],
'nullable': [
'_request_timeout'
],
'enum': [
'sort_by',
'sort_order',
],
'validation': [
]
}
validations = {
}
allowed_values = {
('sort_by',): {
"NAME": "name",
"CREATEDAT": "createdAt",
"UPDATEDAT": "updatedAt"
},
('sort_order',): {
"ASC": "asc",
"DESC": "desc"
},
}
openapi_types = {
'sort_by':
(str,),
'sort_order':
(str,),
'current_page':
(int,),
'page_size':
(int,),
'async_req': (bool,),
'_preload_content': (bool,),
'_request_timeout': (none_type, int, (int,), [int]),
'_return_http_data_only': (bool,)
}
attribute_map = {
'sort_by': 'sortBy',
'sort_order': 'sortOrder',
'current_page': 'currentPage',
'page_size': 'pageSize',
}
location_map = {
'sort_by': 'query',
'sort_order': 'query',
'current_page': 'query',
'page_size': 'query',
}
collection_format_map = {
}
for key, value in kwargs.items():
if key not in params_map['all']:
raise ApiTypeError(
"Got an unexpected parameter '%s'"
" to method `list`" %
(key, )
)
if (key not in params_map['nullable'] and value is None):
raise ApiValueError(
"Value may not be None for non-nullable parameter `%s`"
" when calling `list`" %
(key, )
)
for key in params_map['required']:
if key not in kwargs.keys():
raise ApiValueError(
"Missing the required parameter `%s` when calling "
"`list`" % (key, )
)
self._validate_inputs(kwargs, params_map, allowed_values, validations, openapi_types)
params = self._gather_params(kwargs, location_map, attribute_map, openapi_types, collection_format_map)
return self.api_client.call_api(
"/players",
"GET",
params['path'],
params['query'],
params['header'],
body=params['body'],
post_params=params['form'],
files=params['file'],
response_type=(PlayerThemesListResponse,),
async_req=kwargs['async_req'],
_return_http_data_only=kwargs['_return_http_data_only'],
_preload_content=kwargs['_preload_content'],
_request_timeout=kwargs['_request_timeout'],
collection_formats=params['collection_format'])
def get(
self,
player_id,
**kwargs
):
"""Show a player # noqa: E501
Use a player ID to retrieve details about the player and display it for viewers. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get(player_id, async_req=True)
>>> result = thread.get()
Args:
player_id (str): The unique identifier for the player you want to retrieve.
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
async_req (bool): execute request asynchronously
Returns:
PlayerTheme
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['player_id'] = \
player_id
params_map = {
'all': [
'player_id',
'async_req',
'_preload_content',
'_request_timeout',
'_return_http_data_only'
],
'required': [
'player_id',
],
'nullable': [
'_request_timeout'
],
'enum': [
],
'validation': [
]
}
validations = {
}
allowed_values = {
}
openapi_types = {
'player_id':
(str,),
'async_req': (bool,),
'_preload_content': (bool,),
'_request_timeout': (none_type, int, (int,), [int]),
'_return_http_data_only': (bool,)
}
attribute_map = {
'player_id': 'playerId',
}
location_map = {
'player_id': 'path',
}
collection_format_map = {
}
for key, value in kwargs.items():
if key not in params_map['all']:
raise ApiTypeError(
"Got an unexpected parameter '%s'"
" to method `get`" %
(key, )
)
if (key not in params_map['nullable'] and value is None):
raise ApiValueError(
"Value may not be None for non-nullable parameter `%s`"
" when calling `get`" %
(key, )
)
for key in params_map['required']:
if key not in kwargs.keys():
raise ApiValueError(
"Missing the required parameter `%s` when calling "
"`get`" % (key, )
)
self._validate_inputs(kwargs, params_map, allowed_values, validations, openapi_types)
params = self._gather_params(kwargs, location_map, attribute_map, openapi_types, collection_format_map)
return self.api_client.call_api(
"/players/{playerId}",
"GET",
params['path'],
params['query'],
params['header'],
body=params['body'],
post_params=params['form'],
files=params['file'],
response_type=(PlayerTheme,),
async_req=kwargs['async_req'],
_return_http_data_only=kwargs['_return_http_data_only'],
_preload_content=kwargs['_preload_content'],
_request_timeout=kwargs['_request_timeout'],
collection_formats=params['collection_format'])
def update(
self,
player_id,
player_theme_update_payload,
**kwargs
):
"""Update a player # noqa: E501
Use a player ID to update specific details for a player. NOTE: It may take up to 10 min before the new player configuration is available from our CDN. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update(player_id, player_theme_update_payload, async_req=True)
>>> result = thread.get()
Args:
player_id (str): The unique identifier for the player.
player_theme_update_payload (PlayerThemeUpdatePayload):
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
async_req (bool): execute request asynchronously
Returns:
PlayerTheme
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['player_id'] = \
player_id
kwargs['player_theme_update_payload'] = \
player_theme_update_payload
params_map = {
'all': [
'player_id',
'player_theme_update_payload',
'async_req',
'_preload_content',
'_request_timeout',
'_return_http_data_only'
],
'required': [
'player_id',
'player_theme_update_payload',
],
'nullable': [
'_request_timeout'
],
'enum': [
],
'validation': [
]
}
validations = {
}
allowed_values = {
}
openapi_types = {
'player_id':
(str,),
'player_theme_update_payload':
(PlayerThemeUpdatePayload,),
'async_req': (bool,),
'_preload_content': (bool,),
'_request_timeout': (none_type, int, (int,), [int]),
'_return_http_data_only': (bool,)
}
attribute_map = {
'player_id': 'playerId',
}
location_map = {
'player_id': 'path',
'player_theme_update_payload': 'body',
}
collection_format_map = {
}
for key, value in kwargs.items():
if key not in params_map['all']:
raise ApiTypeError(
"Got an unexpected parameter '%s'"
" to method `update`" %
(key, )
)
if (key not in params_map['nullable'] and value is None):
raise ApiValueError(
"Value may not be None for non-nullable parameter `%s`"
" when calling `update`" %
(key, )
)
for key in params_map['required']:
if key not in kwargs.keys():
raise ApiValueError(
"Missing the required parameter `%s` when calling "
"`update`" % (key, )
)
self._validate_inputs(kwargs, params_map, allowed_values, validations, openapi_types)
params = self._gather_params(kwargs, location_map, attribute_map, openapi_types, collection_format_map)
return self.api_client.call_api(
"/players/{playerId}",
"PATCH",
params['path'],
params['query'],
params['header'],
body=params['body'],
post_params=params['form'],
files=params['file'],
response_type=(PlayerTheme,),
async_req=kwargs['async_req'],
_return_http_data_only=kwargs['_return_http_data_only'],
_preload_content=kwargs['_preload_content'],
_request_timeout=kwargs['_request_timeout'],
collection_formats=params['collection_format'])
def create(
self,
player_theme_creation_payload,
**kwargs
):
"""Create a player # noqa: E501
Create a player for your video, and customise it. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create(player_theme_creation_payload, async_req=True)
>>> result = thread.get()
Args:
player_theme_creation_payload (PlayerThemeCreationPayload):
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
async_req (bool): execute request asynchronously
Returns:
PlayerTheme
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['player_theme_creation_payload'] = \
player_theme_creation_payload
params_map = {
'all': [
'player_theme_creation_payload',
'async_req',
'_preload_content',
'_request_timeout',
'_return_http_data_only'
],
'required': [
'player_theme_creation_payload',
],
'nullable': [
'_request_timeout'
],
'enum': [
],
'validation': [
]
}
validations = {
}
allowed_values = {
}
openapi_types = {
'player_theme_creation_payload':
(PlayerThemeCreationPayload,),
'async_req': (bool,),
'_preload_content': (bool,),
'_request_timeout': (none_type, int, (int,), [int]),
'_return_http_data_only': (bool,)
}
attribute_map = {
}
location_map = {
'player_theme_creation_payload': 'body',
}
collection_format_map = {
}
for key, value in kwargs.items():
if key not in params_map['all']:
raise ApiTypeError(
"Got an unexpected parameter '%s'"
" to method `create`" %
(key, )
)
if (key not in params_map['nullable'] and value is None):
raise ApiValueError(
"Value may not be None for non-nullable parameter `%s`"
" when calling `create`" %
(key, )
)
for key in params_map['required']:
if key not in kwargs.keys():
raise ApiValueError(
"Missing the required parameter `%s` when calling "
"`create`" % (key, )
)
self._validate_inputs(kwargs, params_map, allowed_values, validations, openapi_types)
params = self._gather_params(kwargs, location_map, attribute_map, openapi_types, collection_format_map)
return self.api_client.call_api(
"/players",
"POST",
params['path'],
params['query'],
params['header'],
body=params['body'],
post_params=params['form'],
files=params['file'],
response_type=(PlayerTheme,),
async_req=kwargs['async_req'],
_return_http_data_only=kwargs['_return_http_data_only'],
_preload_content=kwargs['_preload_content'],
_request_timeout=kwargs['_request_timeout'],
collection_formats=params['collection_format'])
def upload_logo(
self,
player_id,
file,
**kwargs
):
"""Upload a logo # noqa: E501
        The uploaded image maximum size should be 200x100 and its file size should be at most 100KB. It will be scaled down to 30px height and converted to PNG to be displayed in the player. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.upload_logo(player_id, file, async_req=True)
>>> result = thread.get()
Args:
player_id (str): The unique identifier for the player.
file (file_type): The name of the file you want to use for your logo.
Keyword Args:
link (str): A public link that you want to advertise in your player. For example, you could add a link to your company. When a viewer clicks on your logo, they will be taken to this address.. [optional]
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
async_req (bool): execute request asynchronously
Returns:
PlayerTheme
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['player_id'] = \
player_id
kwargs['file'] = \
file
params_map = {
'all': [
'player_id',
'file',
'link',
'async_req',
'_preload_content',
'_request_timeout',
'_return_http_data_only'
],
'required': [
'player_id',
'file',
],
'nullable': [
'_request_timeout'
],
'enum': [
],
'validation': [
]
}
validations = {
}
allowed_values = {
}
openapi_types = {
'player_id':
(str,),
'file':
(file_type,),
'link':
(str,),
'async_req': (bool,),
'_preload_content': (bool,),
'_request_timeout': (none_type, int, (int,), [int]),
'_return_http_data_only': (bool,)
}
attribute_map = {
'player_id': 'playerId',
'file': 'file',
'link': 'link',
}
location_map = {
'player_id': 'path',
'file': 'form',
'link': 'form',
}
collection_format_map = {
}
for key, value in kwargs.items():
if key not in params_map['all']:
raise ApiTypeError(
"Got an unexpected parameter '%s'"
" to method `upload_logo`" %
(key, )
)
if (key not in params_map['nullable'] and value is None):
raise ApiValueError(
"Value may not be None for non-nullable parameter `%s`"
" when calling `upload_logo`" %
(key, )
)
for key in params_map['required']:
if key not in kwargs.keys():
raise ApiValueError(
"Missing the required parameter `%s` when calling "
"`upload_logo`" % (key, )
)
self._validate_inputs(kwargs, params_map, allowed_values, validations, openapi_types)
params = self._gather_params(kwargs, location_map, attribute_map, openapi_types, collection_format_map)
return self.api_client.call_api(
"/players/{playerId}/logo",
"POST",
params['path'],
params['query'],
params['header'],
body=params['body'],
post_params=params['form'],
files=params['file'],
response_type=(PlayerTheme,),
async_req=kwargs['async_req'],
_return_http_data_only=kwargs['_return_http_data_only'],
_preload_content=kwargs['_preload_content'],
_request_timeout=kwargs['_request_timeout'],
collection_formats=params['collection_format'])
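# Usage sketch (hypothetical; how the ApiClient is constructed and
# authenticated depends on the rest of this SDK):
# api = PlayerThemesApi(api_client)
# themes = api.list(sort_by="createdAt", sort_order="asc", page_size=10)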
|
# coding=utf-8
import logging
import requests
class Request(object):
""" Simplifies querying bandcamp that protects from python-requests User-Agent """
headers = {'User-Agent': 'bandcamp_player/0.1'}
@staticmethod
def get(url) -> requests.Response:
""" :returns: Response from bandcamp """
        # Guard against an accidentally doubled scheme in scraped URLs
        url = url.replace("https:https:", "https:")
logging.info("request: %s", url)
return requests.get(url, headers=Request.headers)
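# Usage sketch:
# response = Request.get("https://bandcamp.com/")
# print(response.status_code)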
|
# -*- coding: utf-8 -*-
"""
Created on Thu Apr 20 19:52:05 2017
@author: vgor
"""
import re
from collections import Counter
# Hydropathicity values according to Kyte & Doolittle, J. Mol. Biol. 157:105-132 (1982).
AA_hidropaticity = {
"A": 1.800,
"R": -4.500,
"N": -3.500,
"D": -3.500,
"C": 2.500,
"Q": -3.500,
"E": -3.500,
"G": -0.400,
"H": -3.200,
"I": 4.500,
"L": 3.800,
"K": -3.900,
"M": 1.900,
"F": 2.800,
"P": -1.600,
"S": -0.800,
"T": -0.700,
"W": -0.900,
"Y": -1.300,
"V": 4.200
}
#Regex to select letters that are not in standard FASTA format
invalidFasta = re.compile("[^{}]".format("".join(AA_hidropaticity.keys())))
def gravy(sequence):
"""
Calculate GRAVY measure
"""
sequence = re.sub(invalidFasta, "", sequence)
return sum([AA_hidropaticity[aa] for aa in sequence]) / len(sequence)
def AAContent(sequence, frequencies=True):
"""
Calculate amino acid composition
"""
AACont = Counter(sequence)
if frequencies:
for k, v in AACont.items():
AACont[k] = float(v)/len(sequence)
return AACont
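# Hand-checked examples against the table above:
# gravy("AR") == (1.800 + (-4.500)) / 2 == -1.35
# AAContent("AAR") -> Counter({'A': 2/3, 'R': 1/3}) since frequencies=True by default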
|
import zeit.cms.content.interfaces
import zeit.cms.tagging.interfaces
class IExampleContentType(
zeit.cms.content.interfaces.ICommonMetadata,
zeit.cms.content.interfaces.IXMLContent):
"""A type for testing."""
keywords = zeit.cms.tagging.interfaces.Keywords(
required=False,
default=())
IExampleContentType.setTaggedValue('zeit.cms.type', 'testcontenttype')
IExampleContentType.setTaggedValue(
'zeit.cms.addform', 'zeit.cms.testcontenttype.Add')
|
# coding: utf-8
"""
Hydrogen Atom API
The Hydrogen Atom API # noqa: E501
OpenAPI spec version: 1.7.0
Contact: info@hydrogenplatform.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class Overflow(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'account_id': 'str',
'client_id': 'str',
'create_date': 'datetime',
'funding_requests': 'list[FundingRequestMap]',
'funding_start_date': 'datetime',
'id': 'str',
'overflow_setting_id': 'str',
'total_overflow_amount': 'float',
'update_balances': 'bool',
'update_date': 'datetime'
}
attribute_map = {
'account_id': 'account_id',
'client_id': 'client_id',
'create_date': 'create_date',
'funding_requests': 'funding_requests',
'funding_start_date': 'funding_start_date',
'id': 'id',
'overflow_setting_id': 'overflow_setting_id',
'total_overflow_amount': 'total_overflow_amount',
'update_balances': 'update_balances',
'update_date': 'update_date'
}
def __init__(self, account_id=None, client_id=None, create_date=None, funding_requests=None, funding_start_date=None, id=None, overflow_setting_id=None, total_overflow_amount=None, update_balances=None, update_date=None): # noqa: E501
"""Overflow - a model defined in Swagger""" # noqa: E501
self._account_id = None
self._client_id = None
self._create_date = None
self._funding_requests = None
self._funding_start_date = None
self._id = None
self._overflow_setting_id = None
self._total_overflow_amount = None
self._update_balances = None
self._update_date = None
self.discriminator = None
self.account_id = account_id
self.client_id = client_id
if create_date is not None:
self.create_date = create_date
if funding_requests is not None:
self.funding_requests = funding_requests
if funding_start_date is not None:
self.funding_start_date = funding_start_date
if id is not None:
self.id = id
if overflow_setting_id is not None:
self.overflow_setting_id = overflow_setting_id
self.total_overflow_amount = total_overflow_amount
if update_balances is not None:
self.update_balances = update_balances
if update_date is not None:
self.update_date = update_date
@property
def account_id(self):
"""Gets the account_id of this Overflow. # noqa: E501
accountId # noqa: E501
:return: The account_id of this Overflow. # noqa: E501
:rtype: str
"""
return self._account_id
@account_id.setter
def account_id(self, account_id):
"""Sets the account_id of this Overflow.
accountId # noqa: E501
:param account_id: The account_id of this Overflow. # noqa: E501
:type: str
"""
if account_id is None:
raise ValueError("Invalid value for `account_id`, must not be `None`") # noqa: E501
self._account_id = account_id
@property
def client_id(self):
"""Gets the client_id of this Overflow. # noqa: E501
clientId # noqa: E501
:return: The client_id of this Overflow. # noqa: E501
:rtype: str
"""
return self._client_id
@client_id.setter
def client_id(self, client_id):
"""Sets the client_id of this Overflow.
clientId # noqa: E501
:param client_id: The client_id of this Overflow. # noqa: E501
:type: str
"""
if client_id is None:
raise ValueError("Invalid value for `client_id`, must not be `None`") # noqa: E501
self._client_id = client_id
@property
def create_date(self):
"""Gets the create_date of this Overflow. # noqa: E501
:return: The create_date of this Overflow. # noqa: E501
:rtype: datetime
"""
return self._create_date
@create_date.setter
def create_date(self, create_date):
"""Sets the create_date of this Overflow.
:param create_date: The create_date of this Overflow. # noqa: E501
:type: datetime
"""
self._create_date = create_date
@property
def funding_requests(self):
"""Gets the funding_requests of this Overflow. # noqa: E501
:return: The funding_requests of this Overflow. # noqa: E501
:rtype: list[FundingRequestMap]
"""
return self._funding_requests
@funding_requests.setter
def funding_requests(self, funding_requests):
"""Sets the funding_requests of this Overflow.
:param funding_requests: The funding_requests of this Overflow. # noqa: E501
:type: list[FundingRequestMap]
"""
self._funding_requests = funding_requests
@property
def funding_start_date(self):
"""Gets the funding_start_date of this Overflow. # noqa: E501
fundingStartDate # noqa: E501
:return: The funding_start_date of this Overflow. # noqa: E501
:rtype: datetime
"""
return self._funding_start_date
@funding_start_date.setter
def funding_start_date(self, funding_start_date):
"""Sets the funding_start_date of this Overflow.
fundingStartDate # noqa: E501
:param funding_start_date: The funding_start_date of this Overflow. # noqa: E501
:type: datetime
"""
self._funding_start_date = funding_start_date
@property
def id(self):
"""Gets the id of this Overflow. # noqa: E501
:return: The id of this Overflow. # noqa: E501
:rtype: str
"""
return self._id
@id.setter
def id(self, id):
"""Sets the id of this Overflow.
:param id: The id of this Overflow. # noqa: E501
:type: str
"""
self._id = id
@property
def overflow_setting_id(self):
"""Gets the overflow_setting_id of this Overflow. # noqa: E501
overflowSettingId # noqa: E501
:return: The overflow_setting_id of this Overflow. # noqa: E501
:rtype: str
"""
return self._overflow_setting_id
@overflow_setting_id.setter
def overflow_setting_id(self, overflow_setting_id):
"""Sets the overflow_setting_id of this Overflow.
overflowSettingId # noqa: E501
:param overflow_setting_id: The overflow_setting_id of this Overflow. # noqa: E501
:type: str
"""
self._overflow_setting_id = overflow_setting_id
@property
def total_overflow_amount(self):
"""Gets the total_overflow_amount of this Overflow. # noqa: E501
totalOverflowAmount # noqa: E501
:return: The total_overflow_amount of this Overflow. # noqa: E501
:rtype: float
"""
return self._total_overflow_amount
@total_overflow_amount.setter
def total_overflow_amount(self, total_overflow_amount):
"""Sets the total_overflow_amount of this Overflow.
totalOverflowAmount # noqa: E501
:param total_overflow_amount: The total_overflow_amount of this Overflow. # noqa: E501
:type: float
"""
if total_overflow_amount is None:
raise ValueError("Invalid value for `total_overflow_amount`, must not be `None`") # noqa: E501
self._total_overflow_amount = total_overflow_amount
@property
def update_balances(self):
"""Gets the update_balances of this Overflow. # noqa: E501
updateBalances # noqa: E501
:return: The update_balances of this Overflow. # noqa: E501
:rtype: bool
"""
return self._update_balances
@update_balances.setter
def update_balances(self, update_balances):
"""Sets the update_balances of this Overflow.
updateBalances # noqa: E501
:param update_balances: The update_balances of this Overflow. # noqa: E501
:type: bool
"""
self._update_balances = update_balances
@property
def update_date(self):
"""Gets the update_date of this Overflow. # noqa: E501
:return: The update_date of this Overflow. # noqa: E501
:rtype: datetime
"""
return self._update_date
@update_date.setter
def update_date(self, update_date):
"""Sets the update_date of this Overflow.
:param update_date: The update_date of this Overflow. # noqa: E501
:type: datetime
"""
self._update_date = update_date
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(Overflow, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, Overflow):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
|
"""
This module contains classes to describe objects such as Datasources,
sensor Instruments, etc.
"""
from ._base import BaseModule
from ._cranfield import CRANFIELD
from ._crds import CRDS
from ._datasource import Datasource
from ._eurocom import EUROCOM
from ._footprint import Footprint
from ._gcwerks import GCWERKS
from ._icos import ICOS
from ._noaa import NOAA
from ._thamesbarrier import THAMESBARRIER
from ._obs_surface import ObsSurface
|
"""
Run supervised segmentation experiment with superpixels and training examples
Pipeline:
1. segment SLIC superpixels
2. compute features (color and texture)
3. estimate model from single image or whole set
4. segment new images
.. note:: there are a few constants that have an impact on the experiment;
 see them below with an explanation for each of them.
Sample usage::
python run_segm_slic_model_graphcut.py \
-l data-images/langerhans_islets/list_lang-isl_imgs-annot.csv \
-i "data-images/langerhans_islets/image/*.jpg" \
-o results -n LangIsl --nb_classes 3 --nb_workers 2 --visual
Copyright (C) 2016-2018 Jiri Borovec <jiri.borovec@fel.cvut.cz>
"""
import argparse
import gc
import glob
import logging
import os
import pickle
import sys
import time
from functools import partial
import matplotlib
from imsegm.utilities import ImageDimensionError
if os.environ.get('DISPLAY', '') == '':
print('No display found. Using non-interactive Agg backend.')
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from PIL import Image
# from llvmpy._api.llvm.CmpInst import FCMP_OLE
from skimage import segmentation
from sklearn import metrics
sys.path += [os.path.abspath('.'), os.path.abspath('..')] # Add path to root
import imsegm.descriptors as seg_fts
import imsegm.labeling as seg_lbs
import imsegm.pipelines as seg_pipe
import imsegm.utilities.data_io as tl_data
import imsegm.utilities.drawing as tl_visu
import imsegm.utilities.experiments as tl_expt
# sometimes it freezes in "Cython: computing Colour means for image"
seg_fts.USE_CYTHON = False
NB_WORKERS = tl_expt.get_nb_workers(0.9)
TYPES_LOAD_IMAGE = ['2d_rgb', '2d_split']
NAME_DUMP_MODEL = 'estimated_model.npz'
NAME_CSV_ARS_CORES = 'metric_ARS.csv'
# setting experiment sub-folders
FOLDER_IMAGE = 'images'
FOLDER_ANNOT = 'annotations'
FOLDER_SEGM_GMM = 'segmentation_MixtureModel'
FOLDER_SEGM_GMM_VISU = FOLDER_SEGM_GMM + '___visual'
FOLDER_SEGM_GROUP = 'segmentation_GroupMM'
FOLDER_SEGM_GROUP_VISU = FOLDER_SEGM_GROUP + '___visual'
LIST_FOLDERS_BASE = (FOLDER_IMAGE, FOLDER_SEGM_GMM, FOLDER_SEGM_GROUP)
LIST_FOLDERS_DEBUG = (FOLDER_SEGM_GMM_VISU, FOLDER_SEGM_GROUP_VISU)
# a unique experiment means appending a timestamp to the end of the folder name
EACH_UNIQUE_EXPERIMENT = False
# showing some intermediate debug images from segmentation
SHOW_DEBUG_IMAGES = True
# relabel annotations so that labels form a sequence with no gaps between them
ANNOT_RELABEL_SEQUENCE = False
# whether to skip loading the config from a previous run
FORCE_RELOAD = True
# even if you have dumped data from a previous run, everything will be recomputed
FORCE_RECOMP_DATA = True
FEATURES_SET_COLOR = {'color': ('mean', 'std', 'energy')}
FEATURES_SET_TEXTURE = {'tLM': ('mean', 'std', 'energy')}
FEATURES_SET_ALL = {
'color': ('mean', 'std', 'median'),
'tLM': ('mean', 'std', 'energy', 'meanGrad'),
}
FEATURES_SET_MIN = {
'color': ('mean', 'std', 'energy'),
'tLM_short': ('mean', ),
}
FEATURES_SET_MIX = {
'color': ('mean', 'std', 'energy', 'median'),
'tLM': ('mean', 'std'),
}
# Default parameter configuration
SEGM_PARAMS = {
'name': 'imgDisk',
'nb_classes': 3,
'img_type': '2d_rgb',
'slic_size': 35,
'slic_regul': 0.2,
# 'spacing': (12, 1, 1),
'features': FEATURES_SET_COLOR,
'estim_model': 'GMM',
'pca_coef': None,
'gc_regul': 2.0,
'gc_edge_type': 'model',
'gc_use_trans': False,
}
PATH_IMAGES = os.path.join(tl_data.update_path('data-images'), 'drosophila_disc')
# PATH_IMAGES = tl_data.update_path(os.path.join('data-images', 'langerhans_islets'))
PATH_RESULTS = tl_data.update_path('results', absolute=True)
NAME_EXPERIMENT = 'experiment_segm-unSupervised'
SEGM_PARAMS.update({
# 'path_train_list': os.path.join(PATH_IMAGES, 'list_imaginal-disks.csv'),
'path_train_list': '',
'path_predict_imgs': os.path.join(PATH_IMAGES, 'image', '*.jpg'),
# 'path_predict_imgs': '',
'path_out': PATH_RESULTS,
})
def arg_parse_params(params):
""" argument parser from cmd
SEE: https://docs.python.org/3/library/argparse.html
:return dict:
"""
parser = argparse.ArgumentParser()
parser.add_argument(
'-l',
'--path_train_list',
type=str,
required=False,
        help='path to the list of images',
default=params['path_train_list']
)
parser.add_argument(
'-i',
'--path_predict_imgs',
type=str,
required=False,
        help='path to folder & name pattern with new images',
default=params['path_predict_imgs']
)
parser.add_argument(
'-o', '--path_out', type=str, required=False, help='path to the output directory', default=params['path_out']
)
parser.add_argument('-n', '--name', type=str, required=False, help='name of the experiment', default=params['name'])
parser.add_argument(
'-cfg', '--path_config', type=str, required=False, help='path to the segmentation config', default=''
)
parser.add_argument(
'--img_type',
type=str,
required=False,
default=params['img_type'],
choices=TYPES_LOAD_IMAGE,
help='type of image to be loaded'
)
parser.add_argument(
'--nb_classes',
type=int,
required=False,
help='number of classes for segmentation',
default=params.get('nb_classes', 2)
)
parser.add_argument(
'--nb_workers', type=int, required=False, help='number of processes in parallel', default=NB_WORKERS
)
parser.add_argument(
'--visual', required=False, action='store_true', help='export debug visualisations', default=False
)
parser.add_argument(
'--unique',
required=False,
action='store_true',
        help='each experiment gets a unique stamp',
default=EACH_UNIQUE_EXPERIMENT
)
args = vars(parser.parse_args())
logging.info('ARG PARAMETERS: \n %r', args)
for k in (k for k in args if 'path' in k):
if args[k] in ('', 'none'):
continue
args[k] = tl_data.update_path(args[k])
p = os.path.dirname(args[k]) if k == 'path_predict_imgs' else args[k]
if not os.path.exists(p):
raise FileNotFoundError('missing: (%s) "%s"' % (k, p))
# args['visual'] = bool(args['visual'])
    # if the config path is set, load it; otherwise use the default
if os.path.isfile(args.get('path_config', '')):
config = tl_expt.load_config_yaml(args['path_config'])
params.update(config)
params.update(args)
return params
def load_image(path_img, img_type=TYPES_LOAD_IMAGE[0]):
""" load image and annotation according chosen type
:param str path_img:
:param str img_type:
:return ndarray:
"""
path_img = tl_data.update_path(path_img)
if not os.path.isfile(path_img):
raise FileNotFoundError('missing: "%s"' % path_img)
if img_type == '2d_split':
img, _ = tl_data.load_img_double_band_split(path_img)
if img.ndim != 2:
raise ImageDimensionError('image dims: %r' % img.shape)
# img = np.rollaxis(np.tile(img, (3, 1, 1)), 0, 3)
# if img.max() > 1:
# img = (img / 255.)
elif img_type == '2d_rgb':
img, _ = tl_data.load_image_2d(path_img)
# if img.max() > 1:
# img = (img / 255.)
elif img_type == '2d_segm':
img, _ = tl_data.load_image_2d(path_img)
if img.ndim == 3:
img = img[:, :, 0]
if ANNOT_RELABEL_SEQUENCE:
img, _, _ = segmentation.relabel_sequential(img)
else:
logging.error('not supported loading img_type: %s', img_type)
img = None
return img
def load_model(path_model):
""" load exported segmentation model
:param str path_model:
:return (obj, obj, obj, {}, list(str)):
"""
logging.info('loading dumped model "%s"', path_model)
with open(path_model, 'rb') as f:
dict_data = pickle.load(f)
# npz_file = np.load(path_model)
model = dict_data['model']
params = dict_data['params']
feature_names = dict_data['feature_names']
return model, params, feature_names
def save_model(path_model, model, params=None, feature_names=None):
""" save model on specific destination
:param str path_model:
:param obj scaler:
:param obj pca:
:param obj model:
:param dict params:
:param list(str) feature_names:
"""
logging.info('save (dump) model to "%s"', path_model)
# np.savez_compressed(path_model, scaler=scaler, pca=pca,
# model=model, params=params, feature_names=feature_names)
dict_data = dict(model=model, params=params, feature_names=feature_names)
with open(path_model, 'wb') as f:
pickle.dump(dict_data, f)
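# Round-trip sketch (illustrative path): save_model/load_model are symmetric pickles, e.g.
#   save_model('/tmp/model.pkl', model, params=SEGM_PARAMS, feature_names=['mean'])
#   model, params, feature_names = load_model('/tmp/model.pkl')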
def parse_imgs_idx_path(imgs_idx_path):
""" general parser for splitting all possible input combination
:param imgs_idx_path: set of image index and path
:return (int, str): split index and name
"""
if isinstance(imgs_idx_path, tuple):
idx, path_img = imgs_idx_path
elif isinstance(imgs_idx_path, str):
idx, path_img = None, imgs_idx_path
else:
logging.error('not valid imgs_idx_path -> "%r"', imgs_idx_path)
idx, path_img = None, ''
return idx, path_img
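# Example (both accepted input forms; paths are illustrative):
#   parse_imgs_idx_path((3, 'image/disc.jpg'))  # -> (3, 'image/disc.jpg')
#   parse_imgs_idx_path('image/disc.jpg')       # -> (None, 'image/disc.jpg')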
def get_idx_name(idx, path_img):
""" create string identifier for particular image
:param int idx: image index
:param str path_img: image path
:return str: identifier
"""
im_name = os.path.splitext(os.path.basename(path_img))[0]
if idx is not None:
return '%04d_%s' % (idx, im_name)
return im_name
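# Example (illustrative path):
#   get_idx_name(7, '/data/img_A.png')     # -> '0007_img_A'
#   get_idx_name(None, '/data/img_A.png')  # -> 'img_A'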
def export_visual(idx_name, img, segm, debug_visual=None, path_out=None, path_visu=None):
""" export visualisations
:param str idx_name:
:param ndarray img: input image
:param ndarray segm: resulting segmentation
:param debug_visual: dictionary with debug images
:param str path_out: path to dir with segmentation
:param str path_visu: path to dir with debug images
"""
logging.info('export results and visualization...')
if set(np.unique(segm)) <= {0, 1}:
segm *= 255
path_img = os.path.join(path_out, str(idx_name) + '.png')
logging.debug('exporting segmentation: %s', path_img)
im_seg = Image.fromarray(segm.astype(np.uint8))
im_seg.convert('L').save(path_img)
# io.imsave(path_img, segm)
if path_visu is not None and os.path.isdir(path_visu):
path_fig = os.path.join(path_visu, str(idx_name) + '.png')
logging.debug('exporting segmentation results: %s', path_fig)
fig = tl_visu.figure_image_segm_results(img, segm)
fig.savefig(path_fig)
plt.close(fig)
if path_visu is not None and os.path.isdir(path_visu) and debug_visual is not None:
path_fig = os.path.join(path_visu, str(idx_name) + '_debug.png')
logging.debug('exporting (debug) visualization: %s', path_fig)
fig = tl_visu.figure_segm_graphcut_debug(debug_visual)
fig.savefig(path_fig, bbox_inches='tight', pad_inches=0.1)
plt.close(fig)
def segment_image_independent(img_idx_path, params, path_out, path_visu=None, show_debug_imgs=SHOW_DEBUG_IMAGES):
""" segment image indecently (estimate model just for this model)
:param (int, str) img_idx_path:
:param dict params: segmentation parameters
:param str path_out: path to dir with segmentation
:param str path_visu: path to dir with debug images
:return (str, ndarray):
"""
idx, path_img = parse_imgs_idx_path(img_idx_path)
logging.debug('segmenting image: "%s"', path_img)
idx_name = get_idx_name(idx, path_img)
img = load_image(path_img, params['img_type'])
path_img = os.path.join(params['path_exp'], FOLDER_IMAGE, idx_name + '.png')
tl_data.io_imsave(path_img, img.astype(np.uint8))
debug_visual = {} if show_debug_imgs else None
try:
segm, segm_soft = seg_pipe.pipe_color2d_slic_features_model_graphcut(
img,
nb_classes=params['nb_classes'],
sp_size=params['slic_size'],
sp_regul=params['slic_regul'],
dict_features=params['features'],
estim_model=params['estim_model'],
pca_coef=params['pca_coef'],
gc_regul=params['gc_regul'],
gc_edge_type=params['gc_edge_type'],
debug_visual=debug_visual
)
path_npz = os.path.join(path_out, idx_name + '.npz')
np.savez_compressed(path_npz, segm_soft)
except Exception:
logging.exception('pipe_color2d_slic_features_model_graphcut(...)')
segm = np.zeros(img.shape[:2])
boundary_size = int(params['slic_size'] * 3)
segm = seg_lbs.assume_bg_on_boundary(segm, bg_label=0, boundary_size=boundary_size)
export_visual(idx_name, img, segm, debug_visual, path_out, path_visu)
# gc.collect(), time.sleep(1)
return idx_name, segm
def segment_image_model(imgs_idx_path, params, model, path_out=None, path_visu=None, show_debug_imgs=SHOW_DEBUG_IMAGES):
""" segment image with already estimated model
:param (int, str) imgs_idx_path:
:param dict params: segmentation parameters
:param obj scaler:
:param obj pca:
:param obj model:
:param str path_out: path to dir with segmentation
:param str path_visu: path to dir with debug images
:param bool show_debug_imgs: whether show debug images
:return (str, ndarray):
"""
idx, path_img = parse_imgs_idx_path(imgs_idx_path)
logging.debug('segmenting image: "%s"', path_img)
idx_name = get_idx_name(idx, path_img)
img = load_image(path_img, params['img_type'])
path_img = os.path.join(params['path_exp'], FOLDER_IMAGE, idx_name + '.png')
tl_data.io_imsave(path_img, img.astype(np.uint8))
debug_visual = {} if show_debug_imgs else None
try:
segm, segm_soft = seg_pipe.segment_color2d_slic_features_model_graphcut(
img,
model,
sp_size=params['slic_size'],
sp_regul=params['slic_regul'],
dict_features=params['features'],
gc_regul=params['gc_regul'],
gc_edge_type=params['gc_edge_type'],
debug_visual=debug_visual
)
path_npz = os.path.join(path_out, idx_name + '.npz')
np.savez_compressed(path_npz, segm_soft)
except Exception:
logging.exception('segment_color2d_slic_features_model_graphcut(...)')
segm = np.zeros(img.shape[:2])
boundary_size = int(np.sqrt(np.prod(segm.shape)) * 0.01)
segm = seg_lbs.assume_bg_on_boundary(segm, bg_label=0, boundary_size=boundary_size)
export_visual(idx_name, img, segm, debug_visual, path_out, path_visu)
# gc.collect(), time.sleep(1)
return idx_name, segm
def compare_segms_metric_ars(dict_segm_a, dict_segm_b, suffix=''):
""" compute ARS for each pair of segmentation
:param {str: ndarray} dict_segm_a:
:param {str: ndarray} dict_segm_b:
:param str suffix:
:return DF:
"""
    rows = []
    for n in dict_segm_a:
        if n not in dict_segm_b:
            logging.warning('particular key "%s" is missing in dictionary', n)
            continue
        y_a = dict_segm_a[n].ravel()
        y_b = dict_segm_b[n].ravel()
        rows.append({'image': n, 'ARS' + suffix: metrics.adjusted_rand_score(y_a, y_b)})
    # DataFrame.append is removed in recent pandas; build the frame from collected rows
    df_ars = pd.DataFrame(rows)
    df_ars.set_index(['image'], inplace=True)
return df_ars
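# Sanity-check sketch: identical (non-trivial) segmentations score ARS == 1.0, e.g.
#   seg = {'img1': np.array([[0, 0], [1, 1]])}
#   compare_segms_metric_ars(seg, seg, suffix='_test')  # column ARS_test == 1.0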
def experiment_single_gmm(params, paths_img, path_out, path_visu, show_debug_imgs=SHOW_DEBUG_IMAGES):
imgs_idx_path = list(zip([None] * len(paths_img), paths_img))
    logging.info('Perform image segmentation on each image independently')
_wrapper_segment = partial(
segment_image_independent,
params=params,
path_out=path_out,
path_visu=path_visu,
show_debug_imgs=show_debug_imgs,
)
iterate = tl_expt.WrapExecuteSequence(
_wrapper_segment,
imgs_idx_path,
nb_workers=params['nb_workers'],
desc='experiment single GMM',
)
# dict_segms_gmm = {}
# for name, segm in iterate:
# dict_segms_gmm[name] = segm
dict_segms_gmm = dict(iterate)
gc.collect()
time.sleep(1)
return dict_segms_gmm
def experiment_group_gmm(params, paths_img, path_out, path_visu, show_debug_imgs=SHOW_DEBUG_IMAGES):
logging.info('load all images')
list_images = [load_image(path_img, params['img_type']) for path_img in paths_img]
imgs_idx_path = list(zip([None] * len(paths_img), paths_img))
logging.info('Estimate image segmentation from whole sequence of images')
params['path_model'] = os.path.join(params['path_exp'], NAME_DUMP_MODEL)
if os.path.isfile(params['path_model']) and not FORCE_RECOMP_DATA:
model, _, _ = load_model(params['path_model'])
else:
model, _ = seg_pipe.estim_model_classes_group(
list_images,
nb_classes=params['nb_classes'],
dict_features=params['features'],
sp_size=params['slic_size'],
sp_regul=params['slic_regul'],
pca_coef=params['pca_coef'],
model_type=params['estim_model']
)
save_model(params['path_model'], model)
logging.info('Perform image segmentation from group model')
_wrapper_segment = partial(
segment_image_model,
params=params,
model=model,
path_out=path_out,
path_visu=path_visu,
show_debug_imgs=show_debug_imgs,
)
iterate = tl_expt.WrapExecuteSequence(
_wrapper_segment,
imgs_idx_path,
nb_workers=params['nb_workers'],
desc='experiment group GMM',
)
# dict_segms_group = {}
# for name, segm in iterate:
# dict_segms_group[name] = segm
dict_segms_group = dict(iterate)
gc.collect()
time.sleep(1)
return dict_segms_group
def load_path_images(params):
if os.path.isfile(params.get('path_train_list', '')):
logging.info('loading images from CSV: %s', params['path_train_list'])
df_paths = pd.read_csv(params['path_train_list'], index_col=0)
paths_img = df_paths['path_image'].tolist()
elif 'path_predict_imgs' in params:
logging.info('loading images from path: %s', params['path_predict_imgs'])
paths_img = glob.glob(params['path_predict_imgs'])
if not paths_img:
logging.warning('no images found on given path...')
else:
logging.warning('no images to load!')
paths_img = []
return paths_img
def write_skip_file(path_dir):
if not os.path.isdir(path_dir):
raise FileNotFoundError('missing: %s' % path_dir)
with open(os.path.join(path_dir, 'RESULTS'), 'w') as fp:
fp.write('This particular experiment was skipped by user option.')
def main(params):
""" the main body containgn two approches:
1) segment each image indecently
2) estimate model over whole image sequence and estimate
:param dict params:
:return dict:
"""
logging.getLogger().setLevel(logging.DEBUG)
show_visual = params.get('visual', False)
reload_dir_config = os.path.isfile(params['path_config']) or FORCE_RELOAD
stamp_unique = params.get('unique', EACH_UNIQUE_EXPERIMENT)
params = tl_expt.create_experiment_folder(
params, dir_name=NAME_EXPERIMENT, stamp_unique=stamp_unique, skip_load=reload_dir_config
)
tl_expt.set_experiment_logger(params['path_exp'])
logging.info(tl_expt.string_dict(params, desc='PARAMETERS'))
tl_expt.create_subfolders(params['path_exp'], LIST_FOLDERS_BASE)
if show_visual:
tl_expt.create_subfolders(params['path_exp'], LIST_FOLDERS_DEBUG)
paths_img = load_path_images(params)
if not paths_img:
raise FileNotFoundError('missing images')
def _path_expt(n):
return os.path.join(params['path_exp'], n)
    # Segment with a single model estimated per image
path_visu = _path_expt(FOLDER_SEGM_GMM_VISU) if show_visual else None
dict_segms_gmm = experiment_single_gmm(
params, paths_img, _path_expt(FOLDER_SEGM_GMM), path_visu, show_debug_imgs=show_visual
)
gc.collect()
time.sleep(1)
    # Segment with a model estimated over the set of images
if params.get('run_groupGMM', False):
path_visu = _path_expt(FOLDER_SEGM_GROUP_VISU) if show_visual else None
dict_segms_group = experiment_group_gmm(
params, paths_img, _path_expt(FOLDER_SEGM_GROUP), path_visu, show_debug_imgs=show_visual
)
else:
write_skip_file(_path_expt(FOLDER_SEGM_GROUP))
# write_skip_file(_path_expt(FOLDER_SEGM_GROUP_VISU))
dict_segms_group = None
if dict_segms_group is not None:
df_ars = compare_segms_metric_ars(dict_segms_gmm, dict_segms_group, suffix='_gmm-group')
df_ars.to_csv(_path_expt(NAME_CSV_ARS_CORES))
logging.info(df_ars.describe())
return params
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO)
logging.info('running...')
cli_params = arg_parse_params(SEGM_PARAMS)
main(cli_params)
logging.info('DONE')
|
import os
import sys
path = os.path.dirname(os.path.realpath(__file__))
pythonpath = path.rpartition('/')[0]
sys.path.append(pythonpath)
import config_common
import pan.config_mirror as config_mirror
SYSLOG_TO_FLOWS_LATENCY = 3
NUM_STATIC_FLOWS = 1
IP1 = '1.1.1.1'
IP2 = '2.2.2.2'
TCP = 6
UDP = 17
ICMP = 1
IP_PORT_SRC = 100
IP_PORT_DST = 200
timeout = '10m'
tests = [
{'test_name': 'pan_mirror_mode_setup_mirroring_flow',
'input_syslog_msg': '',
'num_entries': NUM_STATIC_FLOWS,
'get_flows': 'MIRROR.*',
'output_directflow_entries': [
{'flow_name_regex': 'MIRROR_TO_FW_TAP_*?',
'persistent': True,
'priority': config_common.PRIORITY_STATIC_PORT_BINDING_FLOW,
'hardTimeout': 0,
'idleTimeout': 0,
'match': {'inInterfaces': config_mirror.SWITCH_INTERFACES_TO_BE_MIRRORED},
'action': {'outputNormal': True,
'egrMirrorInterfaces': config_mirror.SWITCH_INTERFACES_TO_FW_TAP}}]
},
{'test_name': 'pan_mirror_mode_deny_msg',
'input_syslog_msg': '<14>Aug 10 11:47:28 bizdev-pan-5050 : 1,2019/08/10 11:47:27,0009C101677,TRAFFIC,drop,1,2015/08/10 11:47:27,%s,%s,0.0.0.0,0.0.0.0,Dev_inline_drop,,,ping,vsys1,untrust3,trust3,ethernet1/10,,Dev_DirectFlow_Assist,2015/08/10 11:47:27,0,6,0,0,0,0,0x100000,icmp,deny,588,588,0,6,2015/08/10 11:47:12,0,any,0,8140342,0x0,10.0.0.0-10.255.255.255,172.16.0.0-172.31.255.255,0,6,0,policy-deny,0,0,0,0,,bizdev-pan-5050,from-policy' % (IP1, IP2),
'num_entries': 2,
'get_flows': 'DROP.*',
'output_directflow_entries': [
{'flow_name_regex': 'DROP_ping_ICMP_*?',
'persistent': False,
'priority': config_common.PRIORITY_DROP_FLOW,
'hardTimeout': config_common.DROP_FLOW_LIFETIME * 60,
'idleTimeout': config_common.DROP_FLOW_IDLE_TIMEOUT * 60,
'match': {'ipProto': ICMP,
'inInterfaces': [],
'ipSrc': {'mask': '255.255.255.255', 'ip': IP1},
'ipDst': {'mask': '255.255.255.255', 'ip': IP2}
},
'action': {'outputDrop': True,}}]
}
]
|
import cv2
import numpy as np
from sklearn.cluster import MiniBatchKMeans
import SimpleCV
class DepthTrackerManager(object):
def __init__(self):
self._disparity_map = None
self._left_image = None
self._right_image = None
@property
def disparity_map(self):
rgb_disparity_frame = cv2.cvtColor(
self._disparity_map,
cv2.COLOR_GRAY2RGB)
return rgb_disparity_frame
def compute_stereo_distance(self, metric="euclidean"):
"""Compute the distance between the left and right image frames.
Possible values for the metric keyword are: `euclidean` and `manhattan`.
"""
        if self._left_image is None or self._right_image is None:
            raise ValueError("Must compute the disparity map first!")
        difference_image = self._left_image - self._right_image
# Calculate distance between the two images
if metric == "euclidean":
distance = np.linalg.norm(difference_image)
elif metric == "manhattan":
            distance = np.sum(np.abs(difference_image))
else:
raise ValueError("Supplied distance metric is not supported!")
return distance
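    # Usage sketch (assumes compute_disparity() was already called on a 'tracker' instance):
    #   tracker.compute_stereo_distance()             # Frobenius norm of the difference
    #   tracker.compute_stereo_distance("manhattan")  # sum of absolute pixel differences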
def compute_histogram(self, bins=10):
"""Returns the histogram of the disparity map. This gives us a general
idea of the current amount of near objects on screen."""
if self._disparity_map is None:
raise ValueError("Must compute the disparity map first!")
# Compute the histogram
histogram = np.histogram2d(
x=self._disparity_map[:, 1], y=self._disparity_map[:, 0],
bins=bins)
return histogram
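    # Usage sketch: np.histogram2d returns the counts plus both sets of bin edges, e.g.
    #   counts, x_edges, y_edges = tracker.compute_histogram(bins=10)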
def objects_in_proximity(self, min_member_size, n_clusters=10,
return_images=False):
"""Returns an array of object locations
on the disparity map relative to the specified distance
Set the return_images to True if you want images of the actual objects
"""
if self._disparity_map is None:
raise ValueError("Must compute the disparity map first!")
# Find the contours on the disparity map to find nearby objects
image = SimpleCV.Image(self._disparity_map)
        # Dilate and binarize so that nearby high-disparity regions merge into blobs
        image = image.dilate(1).binarize()
image.show()
blobs = image.findBlobs()
blobs = [c for c in blobs if c.area() > min_member_size]
# Does the user want the cropped images?
if return_images:
contour_polygons = map(lambda x: x.contour(), blobs)
cropped_images = []
for polygon in contour_polygons:
# This is the image which we will crop the polygon from
base_image = self._left_image
# Create the mask
mask = np.zeros(base_image.shape, dtype=np.uint8)
roi_corners = np.array([polygon], dtype=np.int32)
cv2.fillPoly(mask, roi_corners, (255, 255, 255))
# Apply the mask
masked_image = cv2.bitwise_and(base_image, mask)
cropped_images.append(masked_image)
return (cropped_images, blobs)
return blobs
def compute_disparity(self, left_image, right_image,
ndisparities=16, SADWindowSize=25):
"""Compute the disparity image, given the left and right image."""
stereo = cv2.StereoBM(
cv2.STEREO_BM_BASIC_PRESET,
ndisparities=ndisparities,
SADWindowSize=SADWindowSize)
self._left_image = left_image
# Convert the given images to grayscale
gray_left = cv2.cvtColor(left_image,
cv2.COLOR_BGR2GRAY)
self._right_image = right_image
gray_right = cv2.cvtColor(right_image,
cv2.COLOR_BGR2GRAY)
# Compute stereo disparity image
disparity_map = (
stereo.compute(gray_left, gray_right).astype(np.float32) - 32
) / 25.0
self._disparity_map = disparity_map
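# Usage sketch (hypothetical frames; assumes two BGR images of the same shape):
#   tracker = DepthTrackerManager()
#   tracker.compute_disparity(left_frame, right_frame)
#   blobs = tracker.objects_in_proximity(min_member_size=500)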
|
import re
import copy
import math
import datetime as dt
import json
def decimalDate(date,fmt="%Y-%m-%d",variable=False,dateSplitter='-'):
""" Converts calendar dates in specified format to decimal date. """
if variable==True: ## if date is variable - extract what is available
dateL=len(date.split(dateSplitter))
if dateL==2:
fmt=dateSplitter.join(fmt.split(dateSplitter)[:-1])
elif dateL==1:
fmt=dateSplitter.join(fmt.split(dateSplitter)[:-2])
adatetime=dt.datetime.strptime(date,fmt) ## convert to datetime object
year = adatetime.year ## get year
boy = dt.datetime(year, 1, 1) ## get beginning of the year
eoy = dt.datetime(year + 1, 1, 1) ## get beginning of next year
return year + ((adatetime - boy).total_seconds() / ((eoy - boy).total_seconds())) ## return fractional year
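# Example: 2000-07-01 is day 182 of a 366-day leap year,
# so decimalDate('2000-07-01') == 2000 + 182/366 ~= 2000.4973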
def convertDate(x,start,end):
""" Converts calendar dates between given formats """
return dt.datetime.strftime(dt.datetime.strptime(x,start),end)
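# Example:
#   convertDate('01/07/2000', '%d/%m/%Y', '%Y-%m-%d')  # -> '2000-07-01'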
class clade: ## clade class
def __init__(self,givenName):
self.branchType='leaf' ## clade class poses as a leaf
self.subtree=None ## subtree will contain all the branches that were collapsed
self.leaves=None
self.length=0.0
self.height=None
self.absoluteTime=None
self.parent=None
self.traits={}
self.index=None
self.name=givenName ## the pretend tip name for the clade
self.numName=givenName
self.x=None
self.y=None
self.lastHeight=None ## refers to the height of the highest tip in the collapsed clade
self.lastAbsoluteTime=None ## refers to the absolute time of the highest tip in the collapsed clade
self.width=1
class node: ## node class
def __init__(self):
self.branchType='node'
self.length=0.0 ## branch length, recovered from string
self.height=None ## height, set by traversing the tree, which adds up branch lengths along the way
self.absoluteTime=None ## branch end point in absolute time, once calibrations are done
self.parent=None ## reference to parent node of the node
self.children=[] ## a list of descendent branches of this node
self.traits={} ## dictionary that will contain annotations from the tree string, e.g. {'posterior':1.0}
self.index=None ## index of the character designating this object in the tree string, it's a unique identifier for every object in the tree
self.childHeight=None ## the youngest descendant tip of this node
self.x=None ## X and Y coordinates of this node, once drawTree() is called
self.y=None
## contains references to all tips of this node
self.leaves=set() ## is a set of tips that are descended from it
class leaf: ## leaf class
def __init__(self):
self.branchType='leaf'
self.name=None ## name of tip after translation, since BEAST trees will generally have numbers for taxa but will provide a map at the beginning of the file
self.numName=None ## the original name of the taxon, would be an integer if coming from BEAST, otherwise can be actual name
self.index=None ## index of the character that defines this object, will be a unique ID for each object in the tree
self.length=None ## branch length
self.absoluteTime=None ## position of tip in absolute time
self.height=None ## height of tip
self.parent=None ## parent
self.traits={} ## trait dictionary
self.x=None ## position of tip on x axis if the tip were to be plotted
self.y=None ## position of tip on y axis if the tip were to be plotted
class tree: ## tree class
def __init__(self):
self.cur_node=node() ## current node is a new instance of a node class
self.cur_node.index='Root' ## first object in the tree is the root to which the rest gets attached
        self.cur_node.length=0.0 ## starting node branch length is 0
self.cur_node.height=0.0 ## starting node height is 0
        self.root=None ## root of the tree, will be set when the first branch is added
self.Objects=[] ## tree objects have a flat list of all branches in them
self.tipMap=None
self.treeHeight=0 ## tree height is the distance between the root and the most recent tip
self.ySpan=0.0
def add_node(self,i):
""" Attaches a new node to current node. """
new_node=node() ## new node instance
new_node.index=i ## new node's index is the position along the tree string
if self.root is None:
self.root=new_node
new_node.parent=self.cur_node ## new node's parent is current node
self.cur_node.children.append(new_node) ## new node is a child of current node
self.cur_node=new_node ## current node is now new node
self.Objects.append(self.cur_node) ## add new node to list of objects in the tree
def add_leaf(self,i,name):
""" Attach a new leaf (tip) to current node. """
new_leaf=leaf() ## new instance of leaf object
new_leaf.index=i ## index is position along tree string
if self.root is None:
self.root=new_leaf
new_leaf.parent=self.cur_node ## leaf's parent is current node
self.cur_node.children.append(new_leaf) ## assign leaf to parent's children
new_leaf.numName=name ## numName is the name tip has inside tree string, BEAST trees usually have numbers for tip names
self.cur_node=new_leaf ## current node is now new leaf
self.Objects.append(self.cur_node) ## add leaf to all objects in the tree
def subtree(self,k=None,traverse_condition=None):
""" Generate a subtree (as a baltic tree object) from a traversal.
k is the starting branch for traversal (default: root).
traverse_condition is a function that determines whether a child branch should be visited (default: always true).
Returns a new baltic tree instance.
Note - custom traversal functions can result in multitype trees.
If this is undesired call singleType() on the resulting subtree afterwards. """
subtree=copy.deepcopy(self.traverse_tree(k,include_condition=lambda k:True,traverse_condition=traverse_condition))
if subtree is None or len([k for k in subtree if k.branchType=='leaf'])==0:
return None
else:
local_tree=tree() ## create a new tree object where the subtree will be
local_tree.Objects=subtree ## assign branches to new tree object
local_tree.root=subtree[0] ## connect tree object's root with subtree
subtree_set=set(subtree) ## turn branches into set for quicker look up later
if traverse_condition is not None: ## didn't use default traverse condition, might need to deal with hanging nodes and prune children
for nd in local_tree.getInternal(): ## iterate over nodes
nd.children=list(filter(lambda k:k in subtree_set,nd.children)) ## only keep children seen in traversal
local_tree.fixHangingNodes()
return local_tree
def singleType(self):
""" Removes any branches with a single child (multitype nodes). """
multiTypeNodes=[k for k in self.Objects if k.branchType=='node' and len(k.children)==1]
while len(multiTypeNodes)>0:
multiTypeNodes=[k for k in self.Objects if k.branchType=='node' and len(k.children)==1]
for k in sorted(multiTypeNodes,key=lambda x:-x.height):
child=k.children[0] ## fetch child
grandparent=k.parent ## fetch grandparent
child.parent=grandparent ## child's parent is now grandparent
grandparent.children.append(child) ## add child to grandparent's children
grandparent.children.remove(k) ## remove old parent from grandparent's children
grandparent.children=list(set(grandparent.children))
child.length+=k.length ## adjust child length
multiTypeNodes.remove(k) ## remove old parent from multitype nodes
self.Objects.remove(k) ## remove old parent from all objects
self.sortBranches()
def setAbsoluteTime(self,date):
""" place all objects in absolute time by providing the date of the most recent tip """
for i in self.Objects: ## iterate over all objects
i.absoluteTime=date-self.treeHeight+i.height ## heights are in units of time from the root
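    # Usage sketch (illustrative date): calibrate using the most recent tip, e.g.
    #   ll.setAbsoluteTime(decimalDate('2019-12-01'))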
def treeStats(self):
""" provide information about the tree """
self.traverse_tree() ## traverse the tree
obs=self.Objects ## convenient list of all objects in the tree
print('\nTree height: %.6f\nTree length: %.6f'%(self.treeHeight,sum([x.length for x in obs]))) ## report the height and length of tree
nodes=self.getInternal() ## get all nodes
strictlyBifurcating=False ## assume tree is not strictly bifurcating
multiType=False
singleton=False
N_children=[len(x.children) for x in nodes]
if len(N_children)==0:
singleton=True
else:
minChildren,maxChildren=min(N_children),max(N_children) ## get the largest number of descendant branches of any node
if maxChildren==2 and minChildren==2: ## if every node has at most two children branches
strictlyBifurcating=True ## it's strictly bifurcating
if minChildren==1:
multiType=True
hasTraits=False ## assume tree has no annotations
maxAnnotations=max([len(x.traits) for x in obs]) ## check the largest number of annotations any branch has
if maxAnnotations>0: ## if it's more than 0
hasTraits=True ## there are annotations
if strictlyBifurcating:
print('strictly bifurcating tree') ## report
if multiType:
print('multitype tree') ## report
if singleton:
print('singleton tree')
if hasTraits:
print('annotations present') ## report
print('\nNumbers of objects in tree: %d (%d nodes and %d leaves)\n'%(len(obs),len(nodes),len(obs)-len(nodes))) ## report numbers of different objects in the tree
def traverse_tree(self,cur_node=None,include_condition=lambda k:k.branchType=='leaf',traverse_condition=lambda k:True,collect=None,verbose=False):
if cur_node==None: ## if no starting point defined - start from root
for k in self.Objects: ## reset various parameters
if k.branchType=='node':
k.leaves=set()
k.childHeight=None
k.height=None
if verbose==True:
print('Initiated traversal from root')
cur_node=self.root#.children[-1]
if collect==None: ## initiate collect list if not initiated
collect=[]
if cur_node.parent and cur_node.height==None: ## cur_node has a parent - set height if it doesn't already
cur_node.height=cur_node.length+cur_node.parent.height
elif cur_node.height==None: ## cur_node does not have a parent (root), if height not set before it's zero
cur_node.height=0.0
if verbose==True:
print('at %s (%s)'%(cur_node.index,cur_node.branchType))
if include_condition(cur_node): ## test if interested in cur_node
collect.append(cur_node) ## add to collect list for reporting later
if cur_node.branchType=='leaf': ## cur_node is a tip
cur_node.parent.leaves.add(cur_node.numName) ## add to parent's list of tips
elif cur_node.branchType=='node': ## cur_node is node
for child in filter(traverse_condition,cur_node.children): ## only traverse through children we're interested
if verbose==True:
print('visiting child %s'%(child.index))
self.traverse_tree(cur_node=child,include_condition=include_condition,traverse_condition=traverse_condition,verbose=verbose,collect=collect) ## recurse through children
if verbose==True:
print('child %s done'%(child.index))
assert len(cur_node.children)>0, 'Tried traversing through hanging node without children. Index: %s'%(cur_node.index)
cur_node.childHeight=max([child.childHeight if child.branchType=='node' else child.height for child in cur_node.children])
if cur_node.parent:
cur_node.parent.leaves=cur_node.parent.leaves.union(cur_node.leaves) ## pass tips seen during traversal to parent
self.treeHeight=cur_node.childHeight ## it's the highest child of the starting node
return collect
def renameTips(self,d=None):
""" Give each tip its correct label using a dictionary. """
if d==None and self.tipMap!=None:
d=self.tipMap
for k in self.getExternal(): ## iterate through leaf objects in tree
k.name=d[k.numName] ## change its name
def sortBranches(self,descending=True):
""" Sort descendants of each node. """
if descending==True:
modifier=-1 ## define the modifier for sorting function later
elif descending==False:
modifier=1
for k in self.getInternal(): ## iterate over nodes
## split node's offspring into nodes and leaves, sort each list individually
nodes=sorted([x for x in k.children if x.branchType=='node'],key=lambda q:(-len(q.leaves)*modifier,q.length*modifier))
leaves=sorted([x for x in k.children if x.branchType=='leaf'],key=lambda q:q.length*modifier)
if modifier==1: ## if sorting one way - nodes come first, leaves later
k.children=nodes+leaves
elif modifier==-1: ## otherwise sort the other way
k.children=leaves+nodes
self.drawTree() ## update x and y positions of each branch, since y positions will have changed because of sorting
def drawTree(self,order=None,verbose=False):
""" Find x and y coordinates of each branch. """
if order==None:
order=self.traverse_tree() ## order is a list of tips recovered from a tree traversal to make sure they're plotted in the correct order along the vertical tree dimension
if verbose==True:
print('Drawing tree in pre-order')
else:
if verbose==True:
print('Drawing tree with provided order')
name_order=[x.numName for x in order]
skips=[1 if isinstance(x,leaf) else x.width+1 for x in order]
for k in self.Objects: ## reset coordinates for all objects
k.x=None
k.y=None
storePlotted=0
drawn={} ## drawn keeps track of what's been drawn
while len(drawn)!=len(self.Objects): # keep drawing the tree until everything is drawn
if verbose==True:
print('Drawing iteration %d'%(len(drawn)))
for k in filter(lambda w:w.index not in drawn,self.Objects): ## iterate through objects that have not been drawn
if k.branchType=='leaf': ## if leaf - get position of leaf, draw branch connecting tip to parent node
if verbose==True:
print('Setting leaf %s y coordinate to'%(k.index))
x=k.height ## x position is height
y_idx=name_order.index(k.numName) ## y position of leaf is given by the order in which tips were visited during the traversal
y=sum(skips[y_idx:]) ## sum across skips to find y position
if verbose==True:
print('%s'%(y))
if isinstance(k,clade) and skips[y_idx]>1: ## if dealing with collapsed clade - adjust y position to be in the middle of the skip
y-=skips[y_idx]/2.0
if verbose==True:
print('adjusting clade y position to %s'%(y))
k.x=x ## set x and y coordinates
k.y=y
                    drawn[k.index]=None ## remember that this object has been drawn
if hasattr(k.parent,'yRange')==False: ## if parent doesn't have a maximum extent of its children's y coordinates
setattr(k.parent,'yRange',[k.y,k.y]) ## assign it
if k.branchType=='node': ## if parent is non-root node and y positions of all its children are known
if len([q.y for q in k.children if q.y!=None])==len(k.children):
if verbose==True:
print('Setting node %s coordinates'%(k.index))
x=k.height ## x position is height
children_y_coords=[q.y for q in k.children if q.y!=None] ## get all existing y coordinates of the node
y=sum(children_y_coords)/float(len(children_y_coords)) ## internal branch is in the middle of the vertical bar
k.x=x
k.y=y
                        drawn[k.index]=None ## remember that this object has been drawn
minYrange=min([min(child.yRange) if child.branchType=='node' else child.y for child in k.children]) ## get lowest y coordinate across children
maxYrange=max([max(child.yRange) if child.branchType=='node' else child.y for child in k.children]) ## get highest y coordinate across children
setattr(k,'yRange',[minYrange,maxYrange]) ## assign the maximum extent of children's y coordinates
assert len(drawn)>storePlotted,'Got stuck trying to find y positions of objects'
storePlotted=len(drawn)
self.ySpan=sum(skips)
def drawUnrooted(self,n=None,total=None):
"""
Calculate x and y coordinates in an unrooted arrangement.
Code translated from https://github.com/nextstrain/auspice/commit/fc50bbf5e1d09908be2209450c6c3264f298e98c, written by Richard Neher.
"""
if n==None:
total=sum([1 if isinstance(x,leaf) else x.width+1 for x in self.getExternal()])
n=self.root#.children[0]
for k in self.Objects:
k.traits['tau']=0.0
k.x=0.0
k.y=0.0
if n.branchType=='leaf':
w=2*math.pi*1.0/float(total)
else:
w=2*math.pi*len(n.leaves)/float(total)
if n.parent.x==None:
n.parent.x=0.0
n.parent.y=0.0
n.x = n.parent.x + n.length * math.cos(n.traits['tau'] + w*0.5)
n.y = n.parent.y + n.length * math.sin(n.traits['tau'] + w*0.5)
eta=n.traits['tau']
if n.branchType=='node':
for ch in n.children:
if ch.branchType=='leaf':
w=2*math.pi*1.0/float(total)
else:
w=2*math.pi*len(ch.leaves)/float(total)
ch.traits['tau'] = eta
eta += w
self.drawUnrooted(ch,total)
def commonAncestor(self,descendants,numName=False,strict=False):
types=[desc.__class__ for desc in descendants]
assert len(set(types))==1,'More than one type of data detected in descendants list'
if numName==False:
assert sum([1 if k in [w.name for w in self.getExternal()] else 0 for k in descendants])==len(descendants),'Not all specified descendants are in tree: %s'%(descendants)
else:
assert sum([1 if k in [w.numName for w in self.getExternal()] else 0 for k in descendants])==len(descendants),'Not all specified descendants are in tree: %s'%(descendants)
dtype=list(set(types))[0]
allAncestors=sorted([k for k in self.Objects if (k.branchType=='node' or isinstance(k,clade)) and len(k.leaves)>=len(descendants)],key=lambda x:x.height)
if numName==False:
ancestor=[k for k in allAncestors if sum([[self.tipMap[w] for w in k.leaves].count(l) for l in descendants])==len(descendants)][-1]
else:
ancestor=[k for k in allAncestors if sum([[w for w in k.leaves].count(l) for l in descendants])==len(descendants)][-1]
if strict==False:
return ancestor
elif strict==True and len(ancestor.leaves)==len(descendants):
return ancestor
elif strict==True and len(ancestor.leaves)>len(descendants):
return None
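    # Usage sketch (hypothetical tip labels): MRCA of two tips by their encoded numNames
    #   mrca = ll.commonAncestor(['12', '57'], numName=True)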
def collapseSubtree(self,cl,givenName,verbose=False,widthFunction=lambda k:len(k.leaves)):
""" Collapse an entire subtree into a clade object. """
assert cl.branchType=='node','Cannot collapse non-node class'
collapsedClade=clade(givenName)
collapsedClade.index=cl.index
collapsedClade.leaves=cl.leaves
collapsedClade.length=cl.length
collapsedClade.height=cl.height
collapsedClade.parent=cl.parent
collapsedClade.absoluteTime=cl.absoluteTime
collapsedClade.traits=cl.traits
collapsedClade.width=widthFunction(cl)
if verbose==True:
print('Replacing node %s (parent %s) with a clade class'%(cl.index,cl.parent.index))
parent=cl.parent
remove_from_tree=self.traverse_tree(cl,include_condition=lambda k: True)
collapsedClade.subtree=remove_from_tree
assert len(remove_from_tree)<len(self.Objects),'Attempted collapse of entire tree'
collapsedClade.lastHeight=max([x.height for x in remove_from_tree])
if [x.absoluteTime for x in remove_from_tree].count(None)!=len(remove_from_tree):
collapsedClade.lastAbsoluteTime=max([x.absoluteTime for x in remove_from_tree])
for k in remove_from_tree:
self.Objects.remove(k)
parent.children.remove(cl)
parent.children.append(collapsedClade)
self.Objects.append(collapsedClade)
collapsedClade.parent=parent
if self.tipMap!=None:
self.tipMap[givenName]=givenName
self.traverse_tree()
self.sortBranches()
def uncollapseSubtree(self):
""" Uncollapse all collapsed subtrees. """
while len([k for k in self.Objects if isinstance(k,clade)])>0:
clades=[k for k in self.Objects if isinstance(k,clade)]
for cl in clades:
parent=cl.parent
subtree=cl.subtree
parent.children.remove(cl)
parent.children.append(subtree[0])
self.Objects+=subtree
self.Objects.remove(cl)
if self.tipMap!=None:
self.tipMap.pop(cl.name,None)
self.traverse_tree()
def collapseBranches(self,collapseIf=lambda x:x.traits['posterior']<=0.5,designated_nodes=[],verbose=False):
""" Collapse all branches that satisfy a function collapseIf (default is an anonymous function that returns true if posterior probability is <=0.5).
Alternatively, a list of nodes can be supplied to the script.
Returns a deep copied version of the tree.
"""
newTree=copy.deepcopy(self) ## work on a copy of the tree
if len(designated_nodes)==0: ## no nodes were designated for deletion - relying on anonymous function to collapse nodes
nodes_to_delete=list(filter(lambda n: n.branchType=='node' and collapseIf(n)==True and n!=newTree.root, newTree.Objects)) ## fetch a list of all nodes who are not the root and who satisfy the condition
else:
assert [w.branchType for w in designated_nodes].count('node')==len(designated_nodes),'Non-node class detected in list of nodes designated for deletion'
assert len([w for w in designated_nodes if w!=newTree.root])==0,'Root node was designated for deletion'
nodes_to_delete=list(filter(lambda w: w.index in [q.index for q in designated_nodes], newTree.Objects)) ## need to look up nodes designated for deletion by their indices, since the tree has been copied and nodes will have new memory addresses
if verbose==True:
print('%s nodes set for collapsing: %s'%(len(nodes_to_delete),[w.index for w in nodes_to_delete]))
# assert len(nodes_to_delete)<len(newTree.getInternal())-1,'Chosen cutoff would remove all branches'
while len(nodes_to_delete)>0: ## as long as there are branches to be collapsed - keep reducing the tree
if verbose==True:
print('Continuing collapse cycle, %s nodes left'%(len(nodes_to_delete)))
for k in sorted(nodes_to_delete,key=lambda x:-x.height): ## start with branches near the tips
zero_node=k.children ## fetch the node's children
k.parent.children+=zero_node ## add them to the zero node's parent
old_parent=k ## node to be deleted is the old parent
new_parent=k.parent ## once node is deleted, the parent to all their children will be the parent of the deleted node
if new_parent==None:
new_parent=self.root
if verbose==True:
print('Removing node %s, attaching children %s to node %s'%(old_parent.index,[w.index for w in k.children],new_parent.index))
for w in newTree.Objects: ## assign the parent of deleted node as the parent to any children of deleted node
if w.parent==old_parent:
w.parent=new_parent
w.length+=old_parent.length
if verbose==True:
print('Fixing branch length for node %s'%(w.index))
k.parent.children.remove(k) ## remove traces of deleted node - it doesn't exist as a child, doesn't exist in the tree and doesn't exist in the nodes list
newTree.Objects.remove(k)
nodes_to_delete.remove(k) ## in fact, the node never existed
if len(designated_nodes)==0:
                    nodes_to_delete=list(filter(lambda n: n.branchType=='node' and collapseIf(n)==True and n!=newTree.root, newTree.Objects))
else:
assert [w.branchType for w in designated_nodes].count('node')==len(designated_nodes),'Non-node class detected in list of nodes designated for deletion'
assert len([w for w in designated_nodes if w!=newTree.root])==0,'Root node was designated for deletion'
nodes_to_delete=[w for w in newTree.Objects if w.index in [q.index for q in designated_nodes]]
if verbose==True:
print('Removing references to node %s'%(k.index))
newTree.sortBranches() ## sort the tree to traverse, draw and sort tree to adjust y coordinates
return newTree ## return collapsed tree
def toString(self,cur_node=None,traits=None,numName=False,verbose=False,nexus=False,string_fragment=None,traverse_condition=None,json=False):
""" Output the topology of the tree with branch lengths and comments to stringself.
cur_node: starting point (default: None, starts at root)
traits: list of keys that will be used to output entries in traits dict of each branch (default: all traits)
numName: boolean, whether encoded (True) or decoded (default: False) tip names will be output
verbose: boolean, debug
nexus: boolean, whether to output newick (default: False) or nexus (True) formatted tree
        string_fragment: list of characters that comprise the tree string
        traverse_condition: function deciding whether a child branch is visited (default: always)
        json: boolean, must remain False when nexus output is requested
        """
if cur_node==None:
cur_node=self.root#.children[-1]
if traits==None: ## if None
traits=set(sum([list(k.traits.keys()) for k in self.Objects],[])) ## fetch all trait keys
if string_fragment==None:
string_fragment=[]
if nexus==True:
assert json==False,'Nexus format not a valid option for JSON output'
if verbose==True:
print('Exporting to Nexus format')
string_fragment.append('#NEXUS\nBegin trees;\ntree TREE1 = [&R] ')
if traverse_condition==None:
traverse_condition=lambda k: True
comment=[] ## will hold comment
if len(traits)>0: ## non-empty list of traits to output
for tr in traits: ## iterate through keys
if tr in cur_node.traits: ## if key is available
if verbose==True:
print('trait %s available for %s (%s) type: %s'%(tr,cur_node.index,cur_node.branchType,type(cur_node.traits[tr])))
if isinstance(cur_node.traits[tr],str): ## string value
comment.append('%s="%s"'%(tr,cur_node.traits[tr]))
if verbose==True:
print('adding string comment %s'%(comment[-1]))
elif isinstance(cur_node.traits[tr],float) or isinstance(cur_node.traits[tr],int): ## float or integer
comment.append('%s=%s'%(tr,cur_node.traits[tr]))
if verbose==True:
print('adding numeric comment %s'%(comment[-1]))
elif isinstance(cur_node.traits[tr],list): ## lists
rangeComment=[]
for val in cur_node.traits[tr]:
if isinstance(val,str): ## string
rangeComment.append('"%s"'%(val))
elif isinstance(val,float) or isinstance(val,int): ## float or integer
rangeComment.append('%s'%(val))
comment.append('%s={%s}'%(tr,','.join(rangeComment)))
if verbose==True:
print('adding range comment %s'%(comment[-1]))
elif verbose==True:
print('trait %s unavailable for %s (%s)'%(tr,cur_node.index,cur_node.branchType))
if cur_node.branchType=='node':
if verbose==True:
print('node: %s'%(cur_node.index))
string_fragment.append('(')
traverseChildren=list(filter(traverse_condition,cur_node.children))
assert len(traverseChildren)>0,'Node %s does not have traversable children'%(cur_node.index)
for c,child in enumerate(traverseChildren): ## iterate through children of node if they satisfy traverse condition
if verbose==True:
print('moving to child %s of node %s'%(child.index,cur_node.index))
self.toString(cur_node=child,traits=traits,numName=numName,verbose=verbose,nexus=nexus,string_fragment=string_fragment,traverse_condition=traverse_condition)
if (c+1)<len(traverseChildren): ## not done with children, add comma for next iteration
string_fragment.append(',')
string_fragment.append(')') ## last child, node terminates
elif cur_node.branchType=='leaf':
if numName==False: ## if real names wanted
assert cur_node.name!=None,'Tip does not have converted name' ## assert they have been converted
treeName=cur_node.name ## designate real name
elif numName==True: ## if number names wanted
treeName=cur_node.numName ## designated numName
if verbose==True:
print('leaf: %s (%s)'%(cur_node.index,treeName))
string_fragment.append("'%s'"%(treeName))
if len(comment)>0:
if verbose==True:
print('adding comment to %s'%(cur_node.index))
comment=','.join(comment)
comment='[&'+comment+']'
string_fragment.append('%s'%(comment)) ## end of node, add annotations
if verbose==True:
print('adding branch length to %s'%(cur_node.index))
string_fragment.append(':%8f'%(cur_node.length)) ## end of node, add branch length
if cur_node==self.root:#.children[-1]:
string_fragment.append(';')
if nexus==True:
string_fragment.append('\nEnd;')
if verbose==True:
print('finished')
return ''.join(string_fragment)
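    # Usage sketch: export the tree as plain newick or as nexus
    #   newick_string = ll.toString(numName=True)
    #   nexus_string = ll.toString(nexus=True)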
def allTMRCAs(self,numName=True):
if numName==False:
assert len(self.tipMap)>0,'Tree does not have a translation dict for tip names'
tip_names=[self.tipMap[k.numName] for k in self.Objects if isinstance(k,leaf)]
else:
tip_names=[k.numName for k in self.Objects if isinstance(k,leaf)]
tmrcaMatrix={x:{y:None if x!=y else 0.0 for y in tip_names} for x in tip_names} ## pairwise matrix of tips
for k in self.getInternal():
if numName==True:
all_children=list(k.leaves) ## fetch all descendant tips of node
else:
all_children=[self.tipMap[lv] for lv in k.leaves]
for x in range(0,len(all_children)-1): ## for all pairwise comparisons of tips
for y in range(x+1,len(all_children)):
tipA=all_children[x]
tipB=all_children[y]
if tmrcaMatrix[tipA][tipB]==None or tmrcaMatrix[tipA][tipB]<=k.absoluteTime: ## if node's time is more recent than previous entry - set new TMRCA value for pair of tips
tmrcaMatrix[tipA][tipB]=k.absoluteTime
tmrcaMatrix[tipB][tipA]=k.absoluteTime
return tmrcaMatrix
def reduceTree(self,keep,verbose=False):
"""
        Reduce the tree to only the lineages leading to a given set of tips (keep).
Returns a new baltic tree object.
"""
assert len(keep)>0,"No tips given to reduce the tree to."
assert len([k for k in keep if k.branchType!='leaf'])==0, "Embedding contains %d non-leaf branches."%(len([k for k in keep if k.branchType!='leaf']))
if verbose==True:
print("Preparing branch hash for keeping %d branches"%(len(keep)))
branch_hash={k.index:k for k in keep}
embedding=[]
if verbose==True:
print("Deep copying tree")
reduced_tree=copy.deepcopy(self) ## new tree object
for k in reduced_tree.Objects: ## deep copy branches from current tree
if k.index in branch_hash: ## if branch is designated as one to keep
cur_b=k
if verbose==True:
print("Traversing to root from %s"%(cur_b.index))
while cur_b!=reduced_tree.root: ## descend to root
if verbose==True:
print("at %s root: %s"%(cur_b.index,cur_b==reduced_tree.root))
embedding.append(cur_b) ## keep track of the path to root
cur_b=cur_b.parent
embedding.append(reduced_tree.root) ## add root to embedding
if verbose==True:
print("Finished extracting embedding")
embedding=set(embedding) ## prune down to only unique branches
reduced_tree.Objects=sorted(list(embedding),key=lambda x:x.height) ## assign branches that are kept to new tree's Objects
if verbose==True:
print("Pruning untraversed lineages")
for k in reduced_tree.getInternal(): ## iterate through reduced tree
k.children = [c for c in k.children if c in embedding] ## only keep children that are present in lineage traceback
reduced_tree.root.children=[c for c in reduced_tree.root.children if c in embedding] ## do the same for root
reduced_tree.fixHangingNodes()
if verbose==True:
print("Last traversal and branch sorting")
reduced_tree.traverse_tree() ## traverse
reduced_tree.sortBranches() ## sort
return reduced_tree ## return new tree
def countLineages(self,t,condition=lambda x:True):
        return len([k for k in self.Objects if k.parent.absoluteTime!=None and k.parent.absoluteTime<t<=k.absoluteTime and condition(k)])
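    # Usage sketch: lineages-through-time on a time-calibrated tree, e.g.
    #   ltt = [(t, ll.countLineages(t)) for t in (2017.0, 2018.0, 2019.0)]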
def getExternal(self):
return list(filter(lambda k:k.branchType=='leaf',self.Objects))
def getInternal(self):
return list(filter(lambda k:k.branchType=='node',self.Objects))
def getBranches(self,attrs=lambda x:True):
select=list(filter(attrs,self.Objects))
if len(select)==0:
raise Exception('No branches satisfying function were found amongst branches')
elif len(select)==1:
return select[-1]
else:
return select
def fixHangingNodes(self):
"""
Remove internal nodes without any children.
"""
hangingCondition=lambda k:k.branchType=='node' and len(k.children)==0
hangingNodes=list(filter(hangingCondition,self.Objects)) ## check for nodes without any children (hanging nodes)
while len(hangingNodes)>0:
for h in sorted(hangingNodes,key=lambda x:-x.height):
                h.parent.children.remove(h) ## detach hanging node from its parent
                hangingNodes.remove(h) ## remove hanging node from the work list
                self.Objects.remove(h) ## remove hanging node from all objects
hangingNodes=list(filter(hangingCondition,self.Objects)) ## regenerate list
def addText(self,ax,target=lambda k:k.branchType=='leaf',position=lambda k:(k.x*1.01,k.y),text=lambda k:k.numName,zorder_function=lambda k: 101,**kwargs):
for k in filter(target,self.Objects):
x,y=position(k)
z=zorder_function(k)
ax.text(x,y,text(k),zorder=z,**kwargs)
return ax
def plotPoints(self,ax,x_attr=lambda k:k.height,y_attr=lambda k:k.y,target=lambda k:k.branchType=='leaf',size_function=lambda k:40,colour_function=lambda k:'k',zorder_function=lambda k: 100,**kwargs):
for k in filter(target,self.Objects):
y=y_attr(k) ## get y coordinates
x=x_attr(k) ## x coordinate
c=colour_function(k)
size=size_function(k)
z=zorder_function(k)
ax.scatter(x,y,s=size,facecolor=c,edgecolor='none',zorder=z,**kwargs) ## put a circle at each tip
return ax
def plotTree(self,ax,type='rectangular',target=lambda k: True,x_attr=lambda k:k.height,y_attr=lambda k:k.y,branchWidth=lambda k:2,colour_function=lambda f:'k',zorder_function=lambda k: 98,**kwargs):
assert type in ['rectangular','unrooted'],'Unrecognised drawing type "%s"'%(type)
for k in filter(target,self.Objects): ## iterate over branches in the tree
y=y_attr(k) ## get y coordinates
x=x_attr(k) ## x coordinate
xp=x_attr(k.parent) ## get parent's x
if xp==None:
xp=x
c=colour_function(k)
b=branchWidth(k)
z=zorder_function(k)
if type=='rectangular':
if k.branchType=='node': ## if node...
yl=y_attr(k.children[0]) ## get y coordinates of first and last child
yr=y_attr(k.children[-1])
ax.plot([x,x],[yl,yr],color=c,lw=b,zorder=z,**kwargs) ## plot vertical bar connecting node to both its offspring
ax.plot([x,xp],[y,y],color=c,lw=b,zorder=z,**kwargs) ## plot horizontal branch to parent
elif type=='unrooted':
yp=y_attr(k.parent)
ax.plot([x,xp],[y,yp],color=c,lw=b,zorder=z,**kwargs)
return ax
def make_tree(data,ll=None,verbose=False):
"""
data is a tree string, ll (LL) is an instance of a tree object
"""
if isinstance(data,str)==False: ## tree string is not an instance of string (could be unicode) - convert
data=str(data)
if ll==None: ## calling without providing a tree object - create one
ll=tree()
i=0 ## is an adjustable index along the tree string, it is incremented to advance through the string
stored_i=None ## store the i at the end of the loop, to make sure we haven't gotten stuck somewhere in an infinite loop
while i < len(data): ## while there's characters left in the tree string - loop away
if stored_i == i and verbose==True:
print('%d >%s<'%(i,data[i]))
assert (stored_i != i),'\nTree string unparseable\nStopped at >>%s<<\nstring region looks like this: %s'%(data[i],data[i:i+5000]) ## make sure that you've actually parsed something last time, if not - there's something unexpected in the tree string
stored_i=i ## store i for later
if data[i] == '(': ## look for new nodes
if verbose==True:
print('%d adding node'%(i))
ll.add_node(i) ## add node to current node in tree ll
i+=1 ## advance in tree string by one character
cerberus=re.match('(\(|,)([0-9]+)(\[|\:)',data[i-1:i+100]) ## look for tips in BEAST format (integers).
if cerberus is not None:
if verbose==True:
print('%d adding leaf (BEAST) %s'%(i,cerberus.group(2)))
ll.add_leaf(i,cerberus.group(2)) ## add tip
i+=len(cerberus.group(2)) ## advance in tree string by however many characters the tip is encoded
cerberus=re.match('(\(|,)(\'|\")*([A-Za-z\_\-\|\.0-9\?\/ ]+)(\'|\"|)(\[)*',data[i-1:i+200]) ## look for tips with unencoded names - if the tips have some unusual format you'll have to modify this
if cerberus is not None:
if verbose==True:
print('%d adding leaf (non-BEAST) %s'%(i,cerberus.group(3)))
ll.add_leaf(i,cerberus.group(3).strip('"').strip("'")) ## add tip
i+=len(cerberus.group(3))+cerberus.group().count("'")+cerberus.group().count('"') ## advance in tree string by however many characters the tip is encoded
cerberus=re.match('\)([0-9]+)\[',data[i-1:i+100]) ## look for multitype tree singletons.
if cerberus is not None:
if verbose==True:
print('%d adding multitype node %s'%(i,cerberus.group(1)))
i+=len(cerberus.group(1))
cerberus=re.match('(\:)*\[(&[A-Za-z\_\-{}\,0-9\.\%=\"\'\+!# :\/\(\)\&]+)\]',data[i:])## look for MCC comments
if cerberus is not None:
if verbose==True:
print('%d comment: %s'%(i,cerberus.group(2)))
comment=cerberus.group(2)
numerics=re.findall('[,&][A-Za-z\_\.0-9]+=[0-9\-Ee\.]+',comment) ## find all entries that have values as floats
strings=re.findall('[,&][A-Za-z\_\.0-9]+=["|\']*[A-Za-z\_0-9\.\+ :\/\(\)\&]+["|\']*',comment) ## strings
treelist=re.findall('[,&][A-Za-z\_\.0-9]+={[A-Za-z\_,{}0-9\. :\/\(\)\&]+}',comment) ## complete history logged robust counting (MCMC trees)
sets=re.findall('[,&][A-Za-z\_\.0-9\%]+={[A-Za-z\.\-0-9eE,\"\_ :\/\(\)\&]+}',comment) ## sets and ranges
figtree=re.findall('\![A-Za-z]+=[A-Za-z0-9# :\/\(\)\&]+',comment)
for vals in strings:
tr,val=vals.split('=')
tr=tr[1:]
if '+' in val:
val=val.split('+')[0] ## DO NOT ALLOW EQUIPROBABLE DOUBLE ANNOTATIONS (which are in format "A+B") - just get the first one
ll.cur_node.traits[tr]=val.strip('"')
for vals in numerics: ## assign all parsed annotations to traits of current branch
tr,val=vals.split('=') ## split each value by =, left side is name, right side is value
tr=tr[1:]
ll.cur_node.traits[tr]=float(val)
for val in treelist:
tr,val=val.split('=')
tr=tr[1:]
microcerberus=re.findall('{([0-9]+,[0-9\.\-e]+,[A-Z]+,[A-Z]+)}',val)
ll.cur_node.traits[tr]=[]
for val in microcerberus:
codon,timing,start,end=val.split(',')
ll.cur_node.traits[tr].append((int(codon),float(timing),start,end))
for vals in sets:
tr,val=vals.split('=')
tr=tr[1:]
if 'set' in tr:
ll.cur_node.traits[tr]=[]
for v in val[1:-1].split(','):
if 'set.prob' in tr:
ll.cur_node.traits[tr].append(float(v))
else:
ll.cur_node.traits[tr].append(v.strip('"'))
else:
try:
ll.cur_node.traits[tr]=list(map(float,val[1:-1].split(',')))
except:
print('some other trait: %s'%(vals))
if len(figtree)>0:
print('FigTree comment found, ignoring')
i+=len(cerberus.group()) ## advance in tree string by however many characters it took to encode labels
cerberus=re.match('([A-Za-z\_\-0-9\.]+)(\:|\;)',data[i:])## look for old school node labels
if cerberus is not None:
if verbose==True:
print('old school comment found: %s'%(cerberus.group(1)))
ll.cur_node.traits['label']=cerberus.group(1)
i+=len(cerberus.group(1))
microcerberus=re.match('(\:)*([0-9\.\-Ee]+)',data[i:i+100]) ## look for branch lengths without comments
if microcerberus is not None:
if verbose==True:
print('adding branch length (%d) %.6f'%(i,float(microcerberus.group(2))))
ll.cur_node.length=float(microcerberus.group(2)) ## set branch length of current node
i+=len(microcerberus.group()) ## advance in tree string by however many characters it took to encode branch length
if data[i] == ',' or data[i] == ')': ## look for bifurcations or clade ends
i+=1 ## advance in tree string
ll.cur_node=ll.cur_node.parent
if data[i] == ';': ## look for string end
return ll
break ## end loop
def make_treeJSON(JSONnode,json_translation,ll=None,verbose=False):
if 'children' in JSONnode: ## only nodes have children
new_node=node()
else:
new_node=leaf()
new_node.numName=JSONnode[json_translation['name']] ## set leaf numName
new_node.name=JSONnode[json_translation['name']] ## set leaf name to be the same
if ll is None:
ll=tree()
ll.root=new_node
if 'attr' in JSONnode:
attr = JSONnode.pop('attr')
JSONnode.update(attr)
new_node.parent=ll.cur_node ## set parent-child relationships
ll.cur_node.children.append(new_node)
new_node.index=JSONnode[json_translation['name']] ## indexing is based on name
new_node.traits={n:JSONnode[n] for n in list(JSONnode.keys()) if n!='children'} ## set traits to non-children attributes
ll.Objects.append(new_node)
ll.cur_node=new_node
if 'children' in JSONnode:
for child in JSONnode['children']:
make_treeJSON(child,json_translation,ll)
ll.cur_node=ll.cur_node.parent
return ll
def loadNewick(tree_path,tip_regex='\|([0-9]+\-[0-9]+\-[0-9]+)',date_fmt='%Y-%m-%d',variableDate=True,absoluteTime=True,verbose=False):
ll=None
if isinstance(tree_path,str):
handle=open(tree_path,'r')
else:
handle=tree_path
for line in handle:
l=line.strip('\n')
if '(' in l:
treeString_start=l.index('(')
ll=make_tree(l[treeString_start:],verbose=verbose) ## send tree string to make_tree function
if verbose==True:
print('Identified tree string')
assert ll,'Regular expression failed to find tree string'
ll.traverse_tree(verbose=verbose) ## traverse tree
ll.sortBranches() ## traverses tree, sorts branches, draws tree
if absoluteTime==True:
tipDates=[]
for k in ll.getExternal():
n=k.numName
k.name=k.numName
cerberus=re.search(tip_regex,n)
if cerberus is not None:
tipDates.append(decimalDate(cerberus.group(1),fmt=date_fmt,variable=variableDate))
highestTip=max(tipDates)
ll.setAbsoluteTime(highestTip)
return ll
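# Usage sketch (illustrative file name): load a newick tree without date calibration
#   ll = loadNewick('example.tre', absoluteTime=False)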
def loadNexus(tree_path,tip_regex='\|([0-9]+\-[0-9]+\-[0-9]+)',date_fmt='%Y-%m-%d',treestring_regex='tree [A-Za-z\_]+([0-9]+)',variableDate=True,absoluteTime=True,verbose=False):
tipFlag=False
tips={}
tipNum=0
ll=None
if isinstance(tree_path,str):
handle=open(tree_path,'r')
else:
handle=tree_path
for line in handle:
l=line.strip('\n')
cerberus=re.search('dimensions ntax=([0-9]+);',l.lower())
if cerberus is not None:
tipNum=int(cerberus.group(1))
if verbose==True:
print('File should contain %d taxa'%(tipNum))
cerberus=re.search(treestring_regex,l)
if cerberus is not None:
treeString_start=l.index('(')
ll=make_tree(l[treeString_start:]) ## send tree string to make_tree function
if verbose==True:
print('Identified tree string')
if tipFlag==True:
cerberus=re.search('([0-9]+) ([A-Za-z\-\_\/\.\'0-9 \|?]+)',l)
if cerberus is not None:
tips[cerberus.group(1)]=cerberus.group(2).strip('"').strip("'")
if verbose==True:
print('Identified tip translation %s: %s'%(cerberus.group(1),tips[cerberus.group(1)]))
elif ';' not in l:
print('tip not captured by regex:',l.replace('\t',''))
if 'translate' in l.lower():
tipFlag=True
if ';' in l:
tipFlag=False
assert ll,'Regular expression failed to find tree string'
    ll.traverse_tree(verbose=verbose) ## traverse tree
ll.sortBranches() ## traverses tree, sorts branches, draws tree
if len(tips)>0:
ll.renameTips(tips) ## renames tips from numbers to actual names
ll.tipMap=tips
if absoluteTime==True:
tipDates=[]
for k in ll.getExternal():
if len(tips)>0:
n=k.name
else:
n=k.numName
cerberus=re.search(tip_regex,n)
if cerberus is not None:
tipDates.append(decimalDate(cerberus.group(1),fmt=date_fmt,variable=variableDate))
highestTip=max(tipDates)
ll.setAbsoluteTime(highestTip)
return ll
def loadJSON(tree_path,json_translation={'name':'strain','absoluteTime':'num_date'},json_meta=None,verbose=False,sort=True,stats=True):
"""
Load a nextstrain JSON by providing either the path to JSON or a file handle.
json_translation is a dictionary that translates JSON attributes to baltic branch attributes (e.g. 'absoluteTime' is called 'num_date' in nextstrain JSONs).
Note that to avoid conflicts in setting node heights you can either define the absolute time of each node or branch lengths (e.g. if you want a substitution tree).
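    Example (hypothetical path to a nextstrain v1-style JSON, sketch only):
        ll=loadJSON('flu_tree.json',json_translation={'name':'strain','absoluteTime':'num_date'})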
"""
    assert 'name' in json_translation and ('absoluteTime' in json_translation or 'length' in json_translation),'JSON translation dictionary missing entries: %s'%(', '.join([entry for entry in ['name','absoluteTime','length'] if entry not in json_translation]))
if verbose==True:
print('Reading JSON')
if isinstance(tree_path,str):
with open(tree_path) as json_data:
d = json.load(json_data)
ll=make_treeJSON(d,json_translation,verbose=verbose)
else:
ll=make_treeJSON(json.load(tree_path),json_translation,verbose=verbose)
assert ('absoluteTime' in json_translation and 'length' not in json_translation) or ('absoluteTime' not in json_translation and 'length' in json_translation),'Cannot use both absolute time and branch length, include only one in json_translation dictionary.'
for attr in json_translation: ## iterate through attributes in json_translation
for k in ll.Objects: ## for every branch
setattr(k,attr,k.traits[json_translation[attr]]) ## set attribute value for branch
if 'absoluteTime' in json_translation: ## if using absoluteTime need to set branch lengths for traversals
for k in ll.Objects:
if json_translation['absoluteTime'] in k.parent.traits:
k.length=k.traits[json_translation['absoluteTime']]-k.parent.traits[json_translation['absoluteTime']]
else:
k.length=0.0
if verbose==True:
print('Traversing and drawing tree')
ll.traverse_tree(verbose=verbose)
ll.drawTree()
if stats==True:
ll.treeStats() ## initial traversal, checks for stats
if sort==True:
ll.sortBranches() ## traverses tree, sorts branches, draws tree
if json_meta:
        if isinstance(json_meta['file'],str): ## json_meta['file'] may be a path or an open file handle
metadata=json.load(open(json_meta['file'],'r'))
else:
metadata=json.load(json_meta['file'])
cmap=dict(metadata['color_options'][json_meta['traitName']]['color_map'])
setattr(ll,'cmap',cmap)
return ll
if __name__ == '__main__':
import sys
    ll=make_tree(sys.argv[1])
ll.traverse_tree()
sys.stdout.write('%s\n'%(ll.treeHeight))
|
"""Main class for the Crownstone cloud cloud."""
import logging
import asyncio
import aiohttp
from typing import Optional
from crownstone_cloud.helpers.conversion import password_to_hash
from crownstone_cloud.cloud_models.crownstones import Crownstone
from crownstone_cloud.cloud_models.spheres import Spheres
from crownstone_cloud.helpers.requests import RequestHandler
_LOGGER = logging.getLogger(__name__)
class CrownstoneCloud:
"""Create a Crownstone cloud instance."""
    def __init__(self, email: str, password: str, clientsession: Optional[aiohttp.ClientSession] = None) -> None:
# Create request handler instance
self.request_handler = RequestHandler(self, clientsession)
# Instance data
self.login_data = {'email': email, 'password': password_to_hash(password)}
self.access_token: Optional[str] = None
self.cloud_data: Optional[Spheres] = None
async def async_initialize(self) -> None:
"""
Login to Crownstone API & synchronize all cloud data.
This method is a coroutine.
"""
# Login
login_response = await self.request_handler.request_login(self.login_data)
# Save access token & create cloud data object
self.access_token = login_response['id']
self.cloud_data = Spheres(self, login_response['userId'])
_LOGGER.debug("Login to Crownstone Cloud successful")
# Synchronize data
await self.async_synchronize()
async def async_synchronize(self) -> None:
"""
Sync all data from cloud.
This method is a coroutine.
"""
_LOGGER.debug("Initiating all cloud data")
# get the sphere data for this user_id
await self.cloud_data.async_update_sphere_data()
# get the data from the sphere attributes
for sphere in self.cloud_data:
await asyncio.gather(
sphere.async_update_sphere_presence(),
sphere.crownstones.async_update_crownstone_data(),
sphere.locations.async_update_location_data(),
sphere.locations.async_update_location_presence(),
sphere.users.async_update_user_data()
)
_LOGGER.debug("Cloud data successfully initialized")
    def get_crownstone(self, crownstone_name: str) -> Optional[Crownstone]:
"""
Get a crownstone by name without specifying a sphere.
:param crownstone_name: Name of the Crownstone.
:return: Crownstone object.
"""
try:
for sphere in self.cloud_data:
for crownstone in sphere.crownstones:
if crownstone.name == crownstone_name:
return crownstone
except KeyError:
_LOGGER.exception("This login_id does not exist. Use 'async_login' to login.")
except ValueError:
_LOGGER.exception("No sphere data available for this login. Use 'async_synchronize' to load user data.")
    def get_crownstone_by_id(self, crownstone_id: str) -> Optional[Crownstone]:
"""
Get a crownstone by id without specifying a sphere.
:param crownstone_id: The cloud id of the Crownstone.
:return: Crownstone object.
"""
try:
for sphere in self.cloud_data:
return sphere.crownstones[crownstone_id]
except KeyError:
_LOGGER.exception("This login_id does not exist. Use 'async_login' to login.")
except ValueError:
_LOGGER.exception("No sphere data available for this login. Use 'async_synchronize' to load user data.")
async def async_close_session(self) -> None:
"""
Close the aiohttp clientsession after all requests are done.
The session should always be closed when the program ends.
When there's an external clientsession in use, DON'T use this method.
This method is a coroutine.
"""
await self.request_handler.client_session.close()
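# Minimal usage sketch (hypothetical credentials and crownstone name; shown as
# comments so importing this module stays side-effect free):
#
#     async def main():
#         cloud = CrownstoneCloud('email@example.com', 'password')
#         await cloud.async_initialize()  # login & sync all cloud data
#         lamp = cloud.get_crownstone('Lamp')
#         await cloud.async_close_session()  # skip when using an external session
#
#     asyncio.run(main())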
|
# MIT License
#
# Copyright (c) 2021 willfuks
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from datetime import datetime, timedelta
from importlib import reload
import jwt as pyjwt
import pytest
from django.urls import reverse
import djwto.settings as settings
@pytest.mark.django_db
class TestURLs:
sign_key = 'test'
def test_login(self, client):
r = client.post(reverse('login'), {'username': 'alice', 'password': 'pass'})
assert r.status_code == 200
data = r.json()
assert 'refresh' in data
assert 'access' in data
def test_logout(self, client):
reload(settings)
settings.DJWTO_MODE = 'ONE-COOKIE'
settings.DJWTO_IAT_CLAIM = False
settings.DJWTO_JTI_CLAIM = True
settings.DJWTO_SIGNING_KEY = self.sign_key
expected_payload = {'jti': '2', 'user': {'username': 'alice', 'id': 1}}
expected_jwt = pyjwt.encode(expected_payload, self.sign_key)
client.cookies['jwt_refresh'] = expected_jwt
client.cookies['jwt_access'] = 'access jwt'
r = client.post(reverse('logout'), {'jwt_type': 'refresh'})
assert r.status_code == 200
assert r.content == b'{"msg": "Token successfully blacklisted."}'
def test_validate_access(self, client):
reload(settings)
settings.DJWTO_MODE = 'ONE-COOKIE'
settings.DJWTO_IAT_CLAIM = False
settings.DJWTO_JTI_CLAIM = False
settings.DJWTO_CSRF = False
settings.DJWTO_SIGNING_KEY = self.sign_key
exp = datetime.now() + timedelta(days=1)
expected_payload = {'exp': exp, 'user': {'username': 'alice', 'id': 1}}
expected_jwt = pyjwt.encode(expected_payload, self.sign_key)
client.cookies['jwt_refresh'] = 'refresh jwt'
client.cookies['jwt_access'] = expected_jwt
r = client.post(reverse('validate_access'))
assert r.status_code == 200
assert r.content == b'{"msg": "Token is valid"}'
def test_validate_refresh(self, client):
reload(settings)
settings.DJWTO_MODE = 'ONE-COOKIE'
settings.DJWTO_IAT_CLAIM = False
settings.DJWTO_JTI_CLAIM = False
settings.DJWTO_CSRF = False
settings.DJWTO_SIGNING_KEY = self.sign_key
exp = datetime.now() + timedelta(days=1)
expected_payload = {'exp': exp, 'user': {'username': 'alice', 'id': 1}}
expected_jwt = pyjwt.encode(expected_payload, self.sign_key)
client.cookies['jwt_refresh'] = expected_jwt
client.cookies['jwt_access'] = 'access jwt'
r = client.post(reverse('validate_access'), {'jwt_type': 'refresh'})
assert r.status_code == 200
assert r.content == b'{"msg": "Token is valid"}'
def test_refresh_access(self, client):
reload(settings)
settings.DJWTO_MODE = 'ONE-COOKIE'
settings.DJWTO_IAT_CLAIM = False
settings.DJWTO_JTI_CLAIM = False
settings.DJWTO_CSRF = False
settings.DJWTO_SIGNING_KEY = self.sign_key
exp = datetime.now() + timedelta(days=1)
expected_payload = {'exp': exp, 'user': {'username': 'alice', 'id': 1}}
expected_jwt = pyjwt.encode(expected_payload, self.sign_key)
client.cookies['jwt_refresh'] = expected_jwt
client.cookies['jwt_access'] = expected_jwt
r = client.post(reverse('refresh_access'), {'jwt_type': 'refresh'})
assert r.status_code == 200
assert r.content == b'{"msg": "Access token successfully refreshed."}'
def test_update_refresh(self, client):
reload(settings)
settings.DJWTO_MODE = 'ONE-COOKIE'
settings.DJWTO_IAT_CLAIM = False
settings.DJWTO_JTI_CLAIM = False
settings.DJWTO_CSRF = False
settings.DJWTO_SIGNING_KEY = self.sign_key
exp = datetime.now() + timedelta(days=1)
expected_payload = {'exp': exp, 'user': {'username': 'alice', 'id': 1}}
expected_jwt = pyjwt.encode(expected_payload, self.sign_key)
client.cookies['jwt_refresh'] = expected_jwt
client.cookies['jwt_access'] = expected_jwt
r = client.post(reverse('update_refresh'), {'jwt_type': 'refresh'})
assert r.status_code == 200
assert r.content == b'{"msg": "Refresh token successfully updated."}'
def test_update_refresh_set_False(self, client):
reload(settings)
settings.DJWTO_MODE = 'ONE-COOKIE'
settings.DJWTO_IAT_CLAIM = False
settings.DJWTO_JTI_CLAIM = False
settings.DJWTO_CSRF = False
settings.DJWTO_SIGNING_KEY = self.sign_key
exp = datetime.now() + timedelta(days=1)
expected_payload = {'exp': exp, 'user': {'username': 'alice', 'id': 1}}
expected_jwt = pyjwt.encode(expected_payload, self.sign_key)
settings.DJWTO_ALLOW_REFRESH_UPDATE = False
import djwto.urls as urls
reload(urls)
client.cookies['jwt_refresh'] = expected_jwt
client.cookies['jwt_access'] = expected_jwt
r = client.post(reverse('update_refresh'), {'jwt_type': 'refresh'})
assert r.content == b'{"error": "Can\'t update refresh token."}'
assert r.status_code == 500
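# The per-test settings block above repeats in every test; a pytest fixture
# could centralize it. A minimal sketch under the same assumptions (hypothetical,
# not part of the original suite):
@pytest.fixture
def one_cookie_settings():
    reload(settings)
    settings.DJWTO_MODE = 'ONE-COOKIE'
    settings.DJWTO_IAT_CLAIM = False
    settings.DJWTO_JTI_CLAIM = False
    settings.DJWTO_CSRF = False
    settings.DJWTO_SIGNING_KEY = 'test'
    return settings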
|
#
# PySNMP MIB module INTELCORPORATIONBASEBOARDMAPPER-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/INTELCORPORATIONBASEBOARDMAPPER-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 19:43:54 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
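# Note: like all pysmi-generated modules, this file does not import `mibBuilder`
# itself; the pysnmp MIB loader injects `mibBuilder` into the module namespace
# when the module is loaded via mibBuilder.loadModules(), so it is not meant to
# be imported directly.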
OctetString, Integer, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "OctetString", "Integer", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsIntersection, SingleValueConstraint, ConstraintsUnion, ValueSizeConstraint, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "SingleValueConstraint", "ConstraintsUnion", "ValueSizeConstraint", "ValueRangeConstraint")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
Counter32, Counter64, NotificationType, ObjectIdentity, Bits, TimeTicks, Unsigned32, IpAddress, ModuleIdentity, MibIdentifier, enterprises, Gauge32, MibScalar, MibTable, MibTableRow, MibTableColumn, Integer32, iso = mibBuilder.importSymbols("SNMPv2-SMI", "Counter32", "Counter64", "NotificationType", "ObjectIdentity", "Bits", "TimeTicks", "Unsigned32", "IpAddress", "ModuleIdentity", "MibIdentifier", "enterprises", "Gauge32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Integer32", "iso")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
class DmiCounter(Counter32):
pass
class DmiInteger(Integer32):
pass
class DmiInteger64(Integer32):
subtypeSpec = Integer32.subtypeSpec + ValueRangeConstraint(-18446744073709551615, 18446744073709551615)
class DmiOctetstring(OctetString):
pass
class DmiDisplaystring(DisplayString):
pass
class DmiDate(OctetString):
subtypeSpec = OctetString.subtypeSpec + ValueSizeConstraint(28, 28)
fixedLength = 28
class DmiComponentIndex(Integer32):
pass
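# The Dmi* classes above map DMI attribute types onto SNMP base types;
# DmiDate is a fixed 28-octet string, matching the DMI date format.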
intel = MibIdentifier((1, 3, 6, 1, 4, 1, 343))
products = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 2))
server_management = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 2, 10)).setLabel("server-management")
dmtfGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 2, 10, 7))
dmtf = MibIdentifier((1, 3, 6, 1, 4, 1, 412))
dmtfStdMifs = MibIdentifier((1, 3, 6, 1, 4, 1, 412, 2))
mapperdmtfGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 412, 2, 4))
tGeneralInformation = MibTable((1, 3, 6, 1, 4, 1, 412, 2, 4, 1), )
if mibBuilder.loadTexts: tGeneralInformation.setStatus('mandatory')
eGeneralInformation = MibTableRow((1, 3, 6, 1, 4, 1, 412, 2, 4, 1, 1), ).setIndexNames((0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "DmiComponentIndex"))
if mibBuilder.loadTexts: eGeneralInformation.setStatus('mandatory')
a2SystemName = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 1, 1, 1), DmiDisplaystring()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a2SystemName.setStatus('mandatory')
a2SystemLocation = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 1, 1, 2), DmiDisplaystring()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a2SystemLocation.setStatus('mandatory')
a2SystemPrimaryUserName = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 1, 1, 3), DmiDisplaystring()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a2SystemPrimaryUserName.setStatus('mandatory')
a2SystemPrimaryUserPhone = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 1, 1, 4), DmiDisplaystring()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a2SystemPrimaryUserPhone.setStatus('mandatory')
a2SystemBootupTime = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 1, 1, 5), DmiDate()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a2SystemBootupTime.setStatus('mandatory')
a2SystemDateTime = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 1, 1, 6), DmiDate()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a2SystemDateTime.setStatus('mandatory')
tSystemBios = MibTable((1, 3, 6, 1, 4, 1, 412, 2, 4, 3), )
if mibBuilder.loadTexts: tSystemBios.setStatus('mandatory')
eSystemBios = MibTableRow((1, 3, 6, 1, 4, 1, 412, 2, 4, 3, 1), ).setIndexNames((0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "DmiComponentIndex"), (0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "a4BiosIndex"))
if mibBuilder.loadTexts: eSystemBios.setStatus('mandatory')
a4BiosIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 3, 1, 1), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a4BiosIndex.setStatus('mandatory')
a4BiosManufacturer = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 3, 1, 2), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a4BiosManufacturer.setStatus('mandatory')
a4BiosVersion = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 3, 1, 3), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a4BiosVersion.setStatus('mandatory')
a4BiosRomSize = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 3, 1, 4), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a4BiosRomSize.setStatus('mandatory')
a4BiosStartingAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 3, 1, 5), DmiInteger64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a4BiosStartingAddress.setStatus('mandatory')
a4BiosEndingAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 3, 1, 6), DmiInteger64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a4BiosEndingAddress.setStatus('mandatory')
a4BiosLoaderVersion = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 3, 1, 7), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a4BiosLoaderVersion.setStatus('mandatory')
a4BiosReleaseDate = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 3, 1, 8), DmiDate()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a4BiosReleaseDate.setStatus('mandatory')
a4PrimaryBios = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 3, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("vFalse", 0), ("vTrue", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a4PrimaryBios.setStatus('mandatory')
tSystemBiosCharacteristics = MibTable((1, 3, 6, 1, 4, 1, 412, 2, 4, 4), )
if mibBuilder.loadTexts: tSystemBiosCharacteristics.setStatus('mandatory')
eSystemBiosCharacteristics = MibTableRow((1, 3, 6, 1, 4, 1, 412, 2, 4, 4, 1), ).setIndexNames((0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "DmiComponentIndex"), (0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "a5BiosCharacteristicIndex"), (0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "a5BiosNumber"))
if mibBuilder.loadTexts: eSystemBiosCharacteristics.setStatus('mandatory')
a5BiosCharacteristicIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 4, 1, 1), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a5BiosCharacteristicIndex.setStatus('mandatory')
a5BiosNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 4, 1, 2), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a5BiosNumber.setStatus('mandatory')
a5BiosCharacteristic = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 4, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 160))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vUnsupported", 3), ("vIsaSupport", 4), ("vMcaSupport", 5), ("vEisaSupport", 6), ("vPciSupport", 7), ("vPcmciaSupport", 8), ("vPnpSupport", 9), ("vApmSupport", 10), ("vUpgradeableBios", 11), ("vBiosShadowingAllowed", 12), ("vVlVesaSupport", 13), ("vEscdSupport", 14), ("vPc-98", 160)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a5BiosCharacteristic.setStatus('mandatory')
a5BiosCharacteristicDescription = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 4, 1, 4), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a5BiosCharacteristicDescription.setStatus('mandatory')
tProcessor = MibTable((1, 3, 6, 1, 4, 1, 412, 2, 4, 5), )
if mibBuilder.loadTexts: tProcessor.setStatus('mandatory')
eProcessor = MibTableRow((1, 3, 6, 1, 4, 1, 412, 2, 4, 5, 1), ).setIndexNames((0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "DmiComponentIndex"), (0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "a6ProcessorIndex"))
if mibBuilder.loadTexts: eProcessor.setStatus('mandatory')
a6ProcessorIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 5, 1, 1), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a6ProcessorIndex.setStatus('mandatory')
a6ProcessorType = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 5, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vCentralProcessor", 3), ("vMathProcessor", 4), ("vDspProcessor", 5), ("vVideoProcessor", 6)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a6ProcessorType.setStatus('mandatory')
a6ProcessorFamily = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 5, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 25, 26, 32, 33, 34, 35, 36, 48, 64, 80, 96, 97, 98, 99, 100, 101, 112, 128, 144, 160, 176))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("v8086", 3), ("v80286", 4), ("v80386", 5), ("v80486", 6), ("v8087", 7), ("v80287", 8), ("v80387", 9), ("v80487", 10), ("vPentiumProcessor", 11), ("vPentiumProProcessor", 12), ("vPentiumIIProcessor", 13), ("vPentiumProcessorWithMMXTechnology", 14), ("vCeleronProcessor", 15), ("vPentiumIIXeonProcessor", 16), ("vPentiumIIIProcessor", 17), ("vM1Family", 18), ("vM2Family", 19), ("vK5Family", 25), ("vK6Family", 26), ("vPowerPcFamily", 32), ("vPowerPc601", 33), ("vPowerPc603", 34), ("vPowerPc603p", 35), ("vPowerPc604", 36), ("vAlphaFamily", 48), ("vMipsFamily", 64), ("vSparcFamily", 80), ("v68040", 96), ("v68xxxFamily", 97), ("v68000", 98), ("v68010", 99), ("v68020", 100), ("v68030", 101), ("vHobbitFamily", 112), ("vWeitek", 128), ("vPa-riscFamily", 144), ("vV30Family", 160), ("vPentiumIIIXeonProcessor", 176)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a6ProcessorFamily.setStatus('mandatory')
a6ProcessorVersionInformation = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 5, 1, 4), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a6ProcessorVersionInformation.setStatus('mandatory')
a6MaximumSpeed = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 5, 1, 5), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a6MaximumSpeed.setStatus('mandatory')
a6CurrentSpeed = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 5, 1, 6), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a6CurrentSpeed.setStatus('mandatory')
a6ProcessorUpgrade = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 5, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vDaughterBoard", 3), ("vZifSocket", 4), ("vReplacementpiggyBack", 5), ("vNone", 6), ("vLifSocket", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a6ProcessorUpgrade.setStatus('mandatory')
a6FruGroupIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 5, 1, 8), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a6FruGroupIndex.setStatus('mandatory')
a6OperationalGroupIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 5, 1, 9), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a6OperationalGroupIndex.setStatus('mandatory')
a6Level1CacheIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 5, 1, 10), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a6Level1CacheIndex.setStatus('mandatory')
a6Level2CacheIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 5, 1, 11), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a6Level2CacheIndex.setStatus('mandatory')
a6Level3CacheIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 5, 1, 12), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a6Level3CacheIndex.setStatus('mandatory')
a6Status = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 5, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vEnabled", 3), ("vDisabledByUser", 4), ("vDisabledByFirmware", 5), ("vIdle", 6)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a6Status.setStatus('mandatory')
tMotherboard = MibTable((1, 3, 6, 1, 4, 1, 412, 2, 4, 6), )
if mibBuilder.loadTexts: tMotherboard.setStatus('mandatory')
eMotherboard = MibTableRow((1, 3, 6, 1, 4, 1, 412, 2, 4, 6, 1), ).setIndexNames((0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "DmiComponentIndex"))
if mibBuilder.loadTexts: eMotherboard.setStatus('mandatory')
a7NumberOfExpansionSlots = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 6, 1, 1), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a7NumberOfExpansionSlots.setStatus('mandatory')
a7FruGroupIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 6, 1, 2), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a7FruGroupIndex.setStatus('mandatory')
a7OperationalGroupIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 6, 1, 3), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a7OperationalGroupIndex.setStatus('mandatory')
tSystemCache = MibTable((1, 3, 6, 1, 4, 1, 412, 2, 4, 9), )
if mibBuilder.loadTexts: tSystemCache.setStatus('mandatory')
eSystemCache = MibTableRow((1, 3, 6, 1, 4, 1, 412, 2, 4, 9, 1), ).setIndexNames((0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "DmiComponentIndex"), (0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "a10SystemCacheIndex"))
if mibBuilder.loadTexts: eSystemCache.setStatus('mandatory')
a10SystemCacheIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 9, 1, 1), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a10SystemCacheIndex.setStatus('mandatory')
a10SystemCacheLevel = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 9, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vPrimary", 3), ("vSecondary", 4), ("vTertiary", 5)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a10SystemCacheLevel.setStatus('mandatory')
a10SystemCacheSpeed = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 9, 1, 3), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a10SystemCacheSpeed.setStatus('mandatory')
a10SystemCacheSize = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 9, 1, 4), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a10SystemCacheSize.setStatus('mandatory')
a10SystemCacheWritePolicy = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 9, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vWriteBack", 3), ("vWriteThrough", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a10SystemCacheWritePolicy.setStatus('mandatory')
a10SystemCacheErrorCorrection = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 9, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vNone", 3), ("vParity", 4), ("vSingleBitEcc", 5), ("vMultibitEcc", 6)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a10SystemCacheErrorCorrection.setStatus('mandatory')
a10FruGroupIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 9, 1, 7), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a10FruGroupIndex.setStatus('mandatory')
a10OperationalGroupIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 9, 1, 8), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a10OperationalGroupIndex.setStatus('mandatory')
a10SystemCacheType = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 9, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vInstruction", 3), ("vData", 4), ("vUnified", 5)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a10SystemCacheType.setStatus('mandatory')
tPowerSupply = MibTable((1, 3, 6, 1, 4, 1, 412, 2, 4, 16), )
if mibBuilder.loadTexts: tPowerSupply.setStatus('mandatory')
ePowerSupply = MibTableRow((1, 3, 6, 1, 4, 1, 412, 2, 4, 16, 1), ).setIndexNames((0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "DmiComponentIndex"), (0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "a17PowerSupplyIndex"))
if mibBuilder.loadTexts: ePowerSupply.setStatus('mandatory')
a17PowerSupplyIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 16, 1, 1), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a17PowerSupplyIndex.setStatus('mandatory')
a17FruGroupIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 16, 1, 2), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a17FruGroupIndex.setStatus('mandatory')
a17OperationalGroupIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 16, 1, 3), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a17OperationalGroupIndex.setStatus('mandatory')
a17PowerUnitIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 16, 1, 4), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a17PowerUnitIndex.setStatus('mandatory')
a17PowerSupplyType = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 16, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vLinear", 3), ("vSwitching", 4), ("vBattery", 5), ("vUps", 6), ("vConverter", 7), ("vRegulator", 8)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a17PowerSupplyType.setStatus('mandatory')
a17InputVoltageCapabilityDescription = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 16, 1, 6), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a17InputVoltageCapabilityDescription.setStatus('mandatory')
a17Range1InputVoltageLow = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 16, 1, 7), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a17Range1InputVoltageLow.setStatus('mandatory')
a17Range1InputVoltageHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 16, 1, 8), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a17Range1InputVoltageHigh.setStatus('mandatory')
a17Range1VoltageProbeIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 16, 1, 9), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a17Range1VoltageProbeIndex.setStatus('mandatory')
a17Range1ElectricalCurrentProbeIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 16, 1, 10), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a17Range1ElectricalCurrentProbeIndex.setStatus('mandatory')
a17Range2InputVoltageLow = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 16, 1, 11), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a17Range2InputVoltageLow.setStatus('mandatory')
a17Range2InputVoltageHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 16, 1, 12), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a17Range2InputVoltageHigh.setStatus('mandatory')
a17Range2VoltageProbeIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 16, 1, 13), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a17Range2VoltageProbeIndex.setStatus('mandatory')
a17Range2CurrentProbeIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 16, 1, 14), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a17Range2CurrentProbeIndex.setStatus('mandatory')
a17ActiveInputVoltageRange = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 16, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vRange1", 3), ("vRange2", 4), ("vBoth", 5)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a17ActiveInputVoltageRange.setStatus('mandatory')
a17InputVoltageRangeSwitching = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 16, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vManual", 3), ("vAutoswitch", 4), ("vWideRange", 5), ("vNotApplicable", 6)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a17InputVoltageRangeSwitching.setStatus('mandatory')
a17Range1InputFrequencyLow = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 16, 1, 17), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a17Range1InputFrequencyLow.setStatus('mandatory')
a17Range1InputFrequencyHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 16, 1, 18), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a17Range1InputFrequencyHigh.setStatus('mandatory')
a17Range2InputFrequencyLow = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 16, 1, 19), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a17Range2InputFrequencyLow.setStatus('mandatory')
a17Range2InputFrequencyHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 16, 1, 20), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a17Range2InputFrequencyHigh.setStatus('mandatory')
a17TotalOutputPower = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 16, 1, 21), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a17TotalOutputPower.setStatus('mandatory')
tSystemSlots = MibTable((1, 3, 6, 1, 4, 1, 412, 2, 4, 18), )
if mibBuilder.loadTexts: tSystemSlots.setStatus('mandatory')
eSystemSlots = MibTableRow((1, 3, 6, 1, 4, 1, 412, 2, 4, 18, 1), ).setIndexNames((0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "DmiComponentIndex"), (0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "a19SlotIndex"))
if mibBuilder.loadTexts: eSystemSlots.setStatus('mandatory')
a19SlotIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 18, 1, 1), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a19SlotIndex.setStatus('mandatory')
a19SlotType = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 18, 1, 2), DmiInteger64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a19SlotType.setStatus('mandatory')
a19SlotWidth = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 18, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("v8BitCard", 3), ("v16BitCard", 4), ("v32BitCard", 5), ("v64BitCard", 6), ("v128BitCard", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a19SlotWidth.setStatus('mandatory')
a19CurrentUsage = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 18, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vAvailable", 3), ("vInUse", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a19CurrentUsage.setStatus('mandatory')
a19SlotDescription = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 18, 1, 5), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a19SlotDescription.setStatus('mandatory')
a19SlotCategory = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 18, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vBusConnector", 3), ("vPcmciaSlot", 4), ("vMotherboard", 5)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a19SlotCategory.setStatus('mandatory')
a19VirtualSlot = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 18, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("vFalse", 0), ("vTrue", 1), ("vUnknown", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a19VirtualSlot.setStatus('mandatory')
a19ResourceUserId = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 18, 1, 8), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a19ResourceUserId.setStatus('mandatory')
a19VccMixedVoltageSupport = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 18, 1, 9), DmiInteger64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a19VccMixedVoltageSupport.setStatus('mandatory')
a19VppMixedVoltageSupport = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 18, 1, 10), DmiInteger64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a19VppMixedVoltageSupport.setStatus('mandatory')
a19SlotThermalRating = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 18, 1, 11), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a19SlotThermalRating.setStatus('mandatory')
a19OperationalGroupIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 18, 1, 12), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a19OperationalGroupIndex.setStatus('mandatory')
a19SlotPowerState = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 18, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("vUnknown", 1), ("vOff", 2), ("vOn", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a19SlotPowerState.setStatus('mandatory')
a19SlotFaultState = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 18, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("vUnknown", 1), ("vOk", 2), ("vFailed", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a19SlotFaultState.setStatus('mandatory')
a19SlotSwitchStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 18, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("vUnknown", 1), ("vClosed", 2), ("vOpened", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a19SlotSwitchStatus.setStatus('mandatory')
tCoolingUnitGlobalTable = MibTable((1, 3, 6, 1, 4, 1, 412, 2, 4, 67), )
if mibBuilder.loadTexts: tCoolingUnitGlobalTable.setStatus('mandatory')
eCoolingUnitGlobalTable = MibTableRow((1, 3, 6, 1, 4, 1, 412, 2, 4, 67, 1), ).setIndexNames((0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "DmiComponentIndex"), (0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "a28CoolingUnitIndex"))
if mibBuilder.loadTexts: eCoolingUnitGlobalTable.setStatus('mandatory')
a28CoolingUnitIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 67, 1, 1), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a28CoolingUnitIndex.setStatus('mandatory')
a28CoolingUnitStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 67, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vNotApplicableUnitNotRedundant", 3), ("vOffline", 4), ("vFullyRedundant", 5), ("vDegradedRedundancy", 6), ("vRedundancyLost", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a28CoolingUnitStatus.setStatus('mandatory')
tFieldReplaceableUnit = MibTable((1, 3, 6, 1, 4, 1, 412, 2, 4, 29), )
if mibBuilder.loadTexts: tFieldReplaceableUnit.setStatus('mandatory')
eFieldReplaceableUnit = MibTableRow((1, 3, 6, 1, 4, 1, 412, 2, 4, 29, 1), ).setIndexNames((0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "DmiComponentIndex"), (0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "a30FruIndex"))
if mibBuilder.loadTexts: eFieldReplaceableUnit.setStatus('mandatory')
a30FruIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 29, 1, 1), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a30FruIndex.setStatus('mandatory')
a30DeviceGroupIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 29, 1, 2), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a30DeviceGroupIndex.setStatus('mandatory')
a30Description = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 29, 1, 3), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a30Description.setStatus('mandatory')
a30Manufacturer = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 29, 1, 4), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a30Manufacturer.setStatus('mandatory')
a30Model = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 29, 1, 5), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a30Model.setStatus('mandatory')
a30PartNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 29, 1, 6), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a30PartNumber.setStatus('mandatory')
a30FruSerialNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 29, 1, 7), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a30FruSerialNumber.setStatus('mandatory')
a30RevisionLevel = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 29, 1, 8), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a30RevisionLevel.setStatus('mandatory')
a30WarrantyStartDate = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 29, 1, 9), DmiDate()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a30WarrantyStartDate.setStatus('mandatory')
a30WarrantyDuration = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 29, 1, 10), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a30WarrantyDuration.setStatus('mandatory')
a30SupportPhoneNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 29, 1, 11), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a30SupportPhoneNumber.setStatus('mandatory')
a30FruInternetUniformResourceLocator = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 29, 1, 12), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a30FruInternetUniformResourceLocator.setStatus('mandatory')
tOperationalState = MibTable((1, 3, 6, 1, 4, 1, 412, 2, 4, 30), )
if mibBuilder.loadTexts: tOperationalState.setStatus('mandatory')
eOperationalState = MibTableRow((1, 3, 6, 1, 4, 1, 412, 2, 4, 30, 1), ).setIndexNames((0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "DmiComponentIndex"), (0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "a31OperationalStateInstanceIndex"))
if mibBuilder.loadTexts: eOperationalState.setStatus('mandatory')
a31OperationalStateInstanceIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 30, 1, 1), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a31OperationalStateInstanceIndex.setStatus('mandatory')
a31DeviceGroupIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 30, 1, 2), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a31DeviceGroupIndex.setStatus('mandatory')
a31OperationalStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 30, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vEnabled", 3), ("vDisabled", 4), ("vNotApplicable", 5)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a31OperationalStatus.setStatus('mandatory')
a31UsageState = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 30, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vIdle", 3), ("vActive", 4), ("vBusy", 5), ("vNotApplicable", 6)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a31UsageState.setStatus('mandatory')
a31AvailabilityStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 30, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vRunning", 3), ("vWarning", 4), ("vInTest", 5), ("vNotApplicable", 6), ("vPowerOff", 7), ("vOffLine", 8), ("vOffDuty", 9), ("vDegraded", 10), ("vNotInstalled", 11), ("vInstallError", 12), ("vPowerSave", 13)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a31AvailabilityStatus.setStatus('mandatory')
a31AdministrativeState = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 30, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vLocked", 3), ("vUnlocked", 4), ("vNotApplicable", 5), ("vShuttingDown", 6)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a31AdministrativeState.setStatus('mandatory')
a31FatalErrorCount = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 30, 1, 7), DmiCounter()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a31FatalErrorCount.setStatus('mandatory')
a31MajorErrorCount = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 30, 1, 8), DmiCounter()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a31MajorErrorCount.setStatus('mandatory')
a31WarningErrorCount = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 30, 1, 9), DmiCounter()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a31WarningErrorCount.setStatus('mandatory')
a31CurrentErrorStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 30, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vOk", 3), ("vNon-critical", 4), ("vCritical", 5), ("vNon-recoverable", 6)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a31CurrentErrorStatus.setStatus('mandatory')
a31DevicePredictedFailureStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 30, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vNotSupportedByThisDevice", 3), ("vNoFailurePredictedByTheDevice", 4), ("vDeviceFailurePredictedByTheDevice", 5), ("vMediaFailurePredictedByTheDevice", 6)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a31DevicePredictedFailureStatus.setStatus('mandatory')
tPhysicalMemoryArray = MibTable((1, 3, 6, 1, 4, 1, 412, 2, 4, 33), )
if mibBuilder.loadTexts: tPhysicalMemoryArray.setStatus('mandatory')
ePhysicalMemoryArray = MibTableRow((1, 3, 6, 1, 4, 1, 412, 2, 4, 33, 1), ).setIndexNames((0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "DmiComponentIndex"), (0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "a34MemoryArrayTableIndex"))
if mibBuilder.loadTexts: ePhysicalMemoryArray.setStatus('mandatory')
a34MemoryArrayTableIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 33, 1, 1), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a34MemoryArrayTableIndex.setStatus('mandatory')
a34MemoryArrayLocation = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 33, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 16, 160, 161, 162, 163, 164))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vSystemBoardOrMotherboard", 3), ("vIsaAdd-onCard", 4), ("vEisaAdd-onCard", 5), ("vPciAdd-onCard", 6), ("vMcaAdd-onCard", 7), ("vPcmciaAdd-onCard", 8), ("vProprietaryAdd-onCard", 9), ("vNubus", 16), ("vPc-98c20Add-onCard", 160), ("vPc-98c24Add-onCard", 161), ("vPc-98eAdd-onCard", 162), ("vPc-98localBusAdd-onCard", 163), ("vPc-98cardSlotAdd-onCard", 164)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a34MemoryArrayLocation.setStatus('mandatory')
a34MemoryArrayUse = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 33, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vSystemMemory", 3), ("vVideoMemory", 4), ("vFlashMemory", 5), ("vNonVolatileRam", 6), ("vCacheMemory", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a34MemoryArrayUse.setStatus('mandatory')
a34MaximumMemoryCapacity = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 33, 1, 4), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a34MaximumMemoryCapacity.setStatus('mandatory')
a34NumberOfMemoryDeviceSockets = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 33, 1, 5), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a34NumberOfMemoryDeviceSockets.setStatus('mandatory')
a34NumberOfMemoryDeviceSocketsUsed = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 33, 1, 6), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a34NumberOfMemoryDeviceSocketsUsed.setStatus('mandatory')
a34MemoryErrorCorrection = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 33, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vNone", 3), ("vParity", 4), ("vSingleBitEcc", 5), ("vMultibitEcc", 6), ("vCrc", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a34MemoryErrorCorrection.setStatus('mandatory')
a34ArrayErrorType = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 33, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vOk", 3), ("vBadRead", 4), ("vParityError", 5), ("vSingle-bitError", 6), ("vDouble-bitError", 7), ("vMulti-bitError", 8), ("vNibbleError", 9), ("vChecksumError", 10), ("vCrcError", 11), ("vCorrectedSingle-bitError", 12), ("vCorrectedError", 13), ("vUncorrectableError", 14)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a34ArrayErrorType.setStatus('mandatory')
a34LastErrorUpdate = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 33, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vNoUpdateSinceLastInstrumentationStart", 3), ("vUpdatedFromInformationObtainedPriorToIn", 4), ("vUpdatedDuringInstrumentationRun-time", 5)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a34LastErrorUpdate.setStatus('mandatory')
a34ErrorOperation = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 33, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vRead", 3), ("vWrite", 4), ("vPartialWrite", 5)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a34ErrorOperation.setStatus('mandatory')
a34ErrorDataSize = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 33, 1, 11), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a34ErrorDataSize.setStatus('mandatory')
a34ErrorData = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 33, 1, 12), DmiOctetstring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a34ErrorData.setStatus('mandatory')
a34VendorSyndrome = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 33, 1, 13), DmiOctetstring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a34VendorSyndrome.setStatus('mandatory')
a34ErrorAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 33, 1, 14), DmiInteger64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a34ErrorAddress.setStatus('mandatory')
a34ErrorResolution = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 33, 1, 15), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a34ErrorResolution.setStatus('mandatory')
a34FruGroupIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 33, 1, 16), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a34FruGroupIndex.setStatus('mandatory')
a34OperationalGroupIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 33, 1, 17), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a34OperationalGroupIndex.setStatus('mandatory')
tMemoryArrayMappedAddresses = MibTable((1, 3, 6, 1, 4, 1, 412, 2, 4, 34), )
if mibBuilder.loadTexts: tMemoryArrayMappedAddresses.setStatus('mandatory')
eMemoryArrayMappedAddresses = MibTableRow((1, 3, 6, 1, 4, 1, 412, 2, 4, 34, 1), ).setIndexNames((0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "DmiComponentIndex"), (0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "a35MemoryArrayMappedAddressesTableIndex"))
if mibBuilder.loadTexts: eMemoryArrayMappedAddresses.setStatus('mandatory')
a35MemoryArrayMappedAddressesTableIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 34, 1, 1), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a35MemoryArrayMappedAddressesTableIndex.setStatus('mandatory')
a35MemoryArrayIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 34, 1, 2), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a35MemoryArrayIndex.setStatus('mandatory')
a35MappedRangeStartingAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 34, 1, 3), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a35MappedRangeStartingAddress.setStatus('mandatory')
a35MappedRangeEndingAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 34, 1, 4), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a35MappedRangeEndingAddress.setStatus('mandatory')
a35PartitionId = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 34, 1, 5), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a35PartitionId.setStatus('mandatory')
a35PartitionWidth = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 34, 1, 6), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a35PartitionWidth.setStatus('mandatory')
a35OperationalGroupIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 34, 1, 7), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a35OperationalGroupIndex.setStatus('mandatory')
tMemoryDevice = MibTable((1, 3, 6, 1, 4, 1, 412, 2, 4, 35), )
if mibBuilder.loadTexts: tMemoryDevice.setStatus('mandatory')
eMemoryDevice = MibTableRow((1, 3, 6, 1, 4, 1, 412, 2, 4, 35, 1), ).setIndexNames((0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "DmiComponentIndex"), (0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "a36MemoryDeviceTableIndex"))
if mibBuilder.loadTexts: eMemoryDevice.setStatus('mandatory')
a36MemoryDeviceTableIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 35, 1, 1), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a36MemoryDeviceTableIndex.setStatus('mandatory')
a36MemoryArrayIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 35, 1, 2), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a36MemoryArrayIndex.setStatus('mandatory')
a36DeviceLocator = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 35, 1, 3), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a36DeviceLocator.setStatus('mandatory')
a36BankLocator = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 35, 1, 4), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a36BankLocator.setStatus('mandatory')
a36Size = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 35, 1, 5), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a36Size.setStatus('mandatory')
a36FormFactor = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 35, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vSimm", 3), ("vSip", 4), ("vChip", 5), ("vDip", 6), ("vZip", 7), ("vProprietaryCard", 8), ("vDimm", 9), ("vTsop", 10), ("vRowOfChips", 11), ("vRimm", 12), ("vSodimm", 13)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a36FormFactor.setStatus('mandatory')
a36TotalWidth = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 35, 1, 7), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a36TotalWidth.setStatus('mandatory')
a36DataWidth = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 35, 1, 8), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a36DataWidth.setStatus('mandatory')
a36MemoryType = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 35, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vDram", 3), ("vEdram", 4), ("vVram", 5), ("vSram", 6), ("vRam", 7), ("vRom", 8), ("vFlash", 9), ("vEeprom", 10), ("vFeprom", 11), ("vEprom", 12), ("vCdram", 13), ("v3dram", 14), ("vSdram", 15), ("vSgram", 16)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a36MemoryType.setStatus('mandatory')
a36TypeDetail = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 35, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vFastPaged", 3), ("vStaticColumn", 4), ("vPseudo-static", 5), ("vRambus", 6), ("vSynchronous", 7), ("vCmos", 8), ("vEdo", 9), ("vWindowDram", 10), ("vCacheDram", 11), ("vNon-volatile", 12)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a36TypeDetail.setStatus('mandatory')
a36DeviceSet = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 35, 1, 11), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a36DeviceSet.setStatus('mandatory')
a36DeviceErrorType = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 35, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vOk", 3), ("vBadRead", 4), ("vParityError", 5), ("vSingle-bitError", 6), ("vDouble-bitError", 7), ("vMulti-bitError", 8), ("vNibbleError", 9), ("vChecksumError", 10), ("vCrcError", 11), ("vCorrectedSingle-bitError", 12), ("vCorrectedError", 13), ("vUncorrectableError", 14)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a36DeviceErrorType.setStatus('mandatory')
a36ErrorGranularity = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 35, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vDeviceLevel", 3), ("vMemoryPartitionLevel", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a36ErrorGranularity.setStatus('mandatory')
a36LastErrorUpdate = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 35, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vNoUpdateSinceLastInstrumentationStart", 3), ("vUpdatedFromInformationObtainedPriorToIn", 4), ("vUpdatedDuringInstrumentationRun-time", 5)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a36LastErrorUpdate.setStatus('mandatory')
a36ErrorOperation = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 35, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vRead", 3), ("vWrite", 4), ("vPartialWrite", 5)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a36ErrorOperation.setStatus('mandatory')
a36ErrorDataSize = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 35, 1, 16), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a36ErrorDataSize.setStatus('mandatory')
a36ErrorData = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 35, 1, 17), DmiOctetstring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a36ErrorData.setStatus('mandatory')
a36VendorSyndrome = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 35, 1, 18), DmiOctetstring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a36VendorSyndrome.setStatus('mandatory')
a36DeviceErrorAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 35, 1, 19), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a36DeviceErrorAddress.setStatus('mandatory')
a36ArrayErrorAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 35, 1, 20), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a36ArrayErrorAddress.setStatus('mandatory')
a36ErrorResolution = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 35, 1, 21), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a36ErrorResolution.setStatus('mandatory')
a36FruGroupIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 35, 1, 22), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a36FruGroupIndex.setStatus('mandatory')
a36OperationalGroupIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 35, 1, 23), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a36OperationalGroupIndex.setStatus('mandatory')
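# Memory Device Mapped Addresses table (OID ...412.2.4.36): the mapped address
# range, partition, row position, interleave position, and data depth recorded
# for each memory device set.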
tMemoryDeviceMappedAddresses = MibTable((1, 3, 6, 1, 4, 1, 412, 2, 4, 36), )
if mibBuilder.loadTexts: tMemoryDeviceMappedAddresses.setStatus('mandatory')
eMemoryDeviceMappedAddresses = MibTableRow((1, 3, 6, 1, 4, 1, 412, 2, 4, 36, 1), ).setIndexNames((0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "DmiComponentIndex"), (0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "a37MemoryDeviceMappedAddressesTableIndex"))
if mibBuilder.loadTexts: eMemoryDeviceMappedAddresses.setStatus('mandatory')
a37MemoryDeviceMappedAddressesTableIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 36, 1, 1), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a37MemoryDeviceMappedAddressesTableIndex.setStatus('mandatory')
a37MemoryDeviceSetId = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 36, 1, 2), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a37MemoryDeviceSetId.setStatus('mandatory')
a37Partition = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 36, 1, 3), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a37Partition.setStatus('mandatory')
a37MappedRangeStartingAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 36, 1, 4), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a37MappedRangeStartingAddress.setStatus('mandatory')
a37MappedRangeEndingAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 36, 1, 5), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a37MappedRangeEndingAddress.setStatus('mandatory')
a37PartitionRowPosition = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 36, 1, 6), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a37PartitionRowPosition.setStatus('mandatory')
a37InterleavePosition = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 36, 1, 7), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a37InterleavePosition.setStatus('mandatory')
a37DataDepth = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 36, 1, 8), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a37DataDepth.setStatus('mandatory')
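# System Hardware Security table (OID ...412.2.4.49): power-on, keyboard, and
# administrator password status, plus a read-write front panel reset status.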
tSystemHardwareSecurity = MibTable((1, 3, 6, 1, 4, 1, 412, 2, 4, 49), )
if mibBuilder.loadTexts: tSystemHardwareSecurity.setStatus('mandatory')
eSystemHardwareSecurity = MibTableRow((1, 3, 6, 1, 4, 1, 412, 2, 4, 49, 1), ).setIndexNames((0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "DmiComponentIndex"))
if mibBuilder.loadTexts: eSystemHardwareSecurity.setStatus('mandatory')
a50Power_onPasswordStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 49, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vDisabled", 3), ("vEnabled", 4), ("vNotImplemented", 5)))).setLabel("a50Power-onPasswordStatus").setMaxAccess("readonly")
if mibBuilder.loadTexts: a50Power_onPasswordStatus.setStatus('mandatory')
a50KeyboardPasswordStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 49, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vDisabled", 3), ("vEnabled", 4), ("vNotImplemented", 5)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a50KeyboardPasswordStatus.setStatus('mandatory')
a50AdministratorPasswordStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 49, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vDisabled", 3), ("vEnabled", 4), ("vNotImplemented", 5)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a50AdministratorPasswordStatus.setStatus('mandatory')
a50FrontPanelResetStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 49, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vDisabled", 3), ("vEnabled", 4), ("vNotImplemented", 5)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a50FrontPanelResetStatus.setStatus('mandatory')
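# System Power Controls table (OID ...412.2.4.51): a read-write power control
# request (off, power cycle, standby, suspend, hibernate) and timed power-on
# availability and scheduling.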
tSystemPowerControls = MibTable((1, 3, 6, 1, 4, 1, 412, 2, 4, 51), )
if mibBuilder.loadTexts: tSystemPowerControls.setStatus('mandatory')
eSystemPowerControls = MibTableRow((1, 3, 6, 1, 4, 1, 412, 2, 4, 51, 1), ).setIndexNames((0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "DmiComponentIndex"))
if mibBuilder.loadTexts: eSystemPowerControls.setStatus('mandatory')
a52PowerControlRequest = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 51, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vPowerOff", 3), ("vPowerOffThenOnAgain", 4), ("vEnterStandbyMode", 5), ("vEnterSuspendMode", 6), ("vEnterHibernationMode", 7)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a52PowerControlRequest.setStatus('mandatory')
a52TimedPower_onAvailable = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 51, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("vFalse", 0), ("vTrue", 1), ("vUnknown", 2)))).setLabel("a52TimedPower-onAvailable").setMaxAccess("readonly")
if mibBuilder.loadTexts: a52TimedPower_onAvailable.setStatus('mandatory')
a52TimeToNextScheduledPower_on = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 51, 1, 3), DmiInteger()).setLabel("a52TimeToNextScheduledPower-on").setMaxAccess("readwrite")
if mibBuilder.loadTexts: a52TimeToNextScheduledPower_on.setStatus('mandatory')
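# Voltage Probe table (OID ...412.2.4.53): per-probe location, status, current
# voltage level, nominal/normal bounds, and read-write non-critical, critical,
# and non-recoverable thresholds.
# A minimal read sketch, as an illustration only (hypothetical agent
# 'demo-host' and row index (1, 1), not part of this module):
#   from pysnmp.hlapi import *
#   errInd, errStat, errIdx, varBinds = next(getCmd(
#       SnmpEngine(), CommunityData('public', mpModel=0),
#       UdpTransportTarget(('demo-host', 161)), ContextData(),
#       ObjectType(ObjectIdentity('INTELCORPORATIONBASEBOARDMAPPER-MIB',
#                                 'a54VoltageProbeVoltageLevel', 1, 1))))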
tVoltageProbe = MibTable((1, 3, 6, 1, 4, 1, 412, 2, 4, 53), )
if mibBuilder.loadTexts: tVoltageProbe.setStatus('mandatory')
eVoltageProbe = MibTableRow((1, 3, 6, 1, 4, 1, 412, 2, 4, 53, 1), ).setIndexNames((0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "DmiComponentIndex"), (0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "a54VoltageProbeIndex"))
if mibBuilder.loadTexts: eVoltageProbe.setStatus('mandatory')
a54VoltageProbeIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 53, 1, 1), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a54VoltageProbeIndex.setStatus('mandatory')
a54VoltageProbeLocation = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 53, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vProcessor", 3), ("vDisk", 4), ("vPeripheralBay", 5), ("vSystemManagementModule", 6), ("vMotherboard", 7), ("vMemoryModule", 8), ("vProcessorModule", 9), ("vPowerUnit", 10), ("vAdd-inCard", 11)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a54VoltageProbeLocation.setStatus('mandatory')
a54VoltageProbeDescription = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 53, 1, 3), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a54VoltageProbeDescription.setStatus('mandatory')
a54VoltageStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 53, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vOk", 3), ("vNon-critical", 4), ("vCritical", 5), ("vNon-recoverable", 6)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a54VoltageStatus.setStatus('mandatory')
a54VoltageProbeVoltageLevel = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 53, 1, 5), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a54VoltageProbeVoltageLevel.setStatus('mandatory')
a54MonitoredVoltageNominalLevel = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 53, 1, 6), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a54MonitoredVoltageNominalLevel.setStatus('mandatory')
a54MonitoredVoltageNormalMaximum = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 53, 1, 7), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a54MonitoredVoltageNormalMaximum.setStatus('mandatory')
a54MonitoredVoltageNormalMinimum = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 53, 1, 8), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a54MonitoredVoltageNormalMinimum.setStatus('mandatory')
a54VoltageProbeMaximum = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 53, 1, 9), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a54VoltageProbeMaximum.setStatus('mandatory')
a54VoltageProbeMinimum = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 53, 1, 10), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a54VoltageProbeMinimum.setStatus('mandatory')
a54VoltageLevelLowerThreshold_Non_critic = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 53, 1, 11), DmiInteger()).setLabel("a54VoltageLevelLowerThreshold-Non-critic").setMaxAccess("readwrite")
if mibBuilder.loadTexts: a54VoltageLevelLowerThreshold_Non_critic.setStatus('mandatory')
a54VoltageLevelUpperThreshold_Non_critic = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 53, 1, 12), DmiInteger()).setLabel("a54VoltageLevelUpperThreshold-Non-critic").setMaxAccess("readwrite")
if mibBuilder.loadTexts: a54VoltageLevelUpperThreshold_Non_critic.setStatus('mandatory')
a54VoltageLevelLowerThreshold_Critical = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 53, 1, 13), DmiInteger()).setLabel("a54VoltageLevelLowerThreshold-Critical").setMaxAccess("readwrite")
if mibBuilder.loadTexts: a54VoltageLevelLowerThreshold_Critical.setStatus('mandatory')
a54VoltageLevelUpperThreshold_Critical = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 53, 1, 14), DmiInteger()).setLabel("a54VoltageLevelUpperThreshold-Critical").setMaxAccess("readwrite")
if mibBuilder.loadTexts: a54VoltageLevelUpperThreshold_Critical.setStatus('mandatory')
a54VoltageLevelLowerThreshold_Non_recove = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 53, 1, 15), DmiInteger()).setLabel("a54VoltageLevelLowerThreshold-Non-recove").setMaxAccess("readwrite")
if mibBuilder.loadTexts: a54VoltageLevelLowerThreshold_Non_recove.setStatus('mandatory')
a54VoltageLevelUpperThreshold_Non_recove = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 53, 1, 16), DmiInteger()).setLabel("a54VoltageLevelUpperThreshold-Non-recove").setMaxAccess("readwrite")
if mibBuilder.loadTexts: a54VoltageLevelUpperThreshold_Non_recove.setStatus('mandatory')
a54VoltageProbeResolution = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 53, 1, 17), DmiInteger()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a54VoltageProbeResolution.setStatus('mandatory')
a54VoltageProbeTolerance = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 53, 1, 18), DmiInteger()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a54VoltageProbeTolerance.setStatus('mandatory')
a54VoltageProbeAccuracy = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 53, 1, 19), DmiInteger()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a54VoltageProbeAccuracy.setStatus('mandatory')
a54FruGroupIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 53, 1, 20), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a54FruGroupIndex.setStatus('mandatory')
a54OperationalGroupIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 53, 1, 21), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a54OperationalGroupIndex.setStatus('mandatory')
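# Temperature Probe table (OID ...412.2.4.54): per-probe location, status,
# current reading, nominal/normal bounds, and read-write non-critical,
# critical, and non-recoverable thresholds.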
tTemperatureProbe = MibTable((1, 3, 6, 1, 4, 1, 412, 2, 4, 54), )
if mibBuilder.loadTexts: tTemperatureProbe.setStatus('mandatory')
eTemperatureProbe = MibTableRow((1, 3, 6, 1, 4, 1, 412, 2, 4, 54, 1), ).setIndexNames((0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "DmiComponentIndex"), (0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "a55TemperatureProbeTableIndex"))
if mibBuilder.loadTexts: eTemperatureProbe.setStatus('mandatory')
a55TemperatureProbeTableIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 54, 1, 1), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a55TemperatureProbeTableIndex.setStatus('mandatory')
a55TemperatureProbeLocation = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 54, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vProcessor", 3), ("vDisk", 4), ("vPeripheralBay", 5), ("vSmbMaster", 6), ("vMotherboard", 7), ("vMemoryModule", 8), ("vProcessorModule", 9), ("vPowerUnit", 10), ("vAdd-inCard", 11), ("vFrontPanelBoard", 12), ("vBackPanelBoard", 13), ("vPowerSystemBoard", 14), ("vDriveBackPlane", 15)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a55TemperatureProbeLocation.setStatus('mandatory')
a55TemperatureProbeDescription = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 54, 1, 3), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a55TemperatureProbeDescription.setStatus('mandatory')
a55TemperatureStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 54, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vOk", 3), ("vNon-critical", 4), ("vCritical", 5), ("vNon-recoverable", 6)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a55TemperatureStatus.setStatus('mandatory')
a55TemperatureProbeTemperatureReading = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 54, 1, 5), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a55TemperatureProbeTemperatureReading.setStatus('mandatory')
a55MonitoredTemperatureNominalReading = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 54, 1, 6), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a55MonitoredTemperatureNominalReading.setStatus('mandatory')
a55MonitoredTemperatureNormalMaximum = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 54, 1, 7), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a55MonitoredTemperatureNormalMaximum.setStatus('mandatory')
a55MonitoredTemperatureNormalMinimum = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 54, 1, 8), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a55MonitoredTemperatureNormalMinimum.setStatus('mandatory')
a55TemperatureProbeMaximum = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 54, 1, 9), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a55TemperatureProbeMaximum.setStatus('mandatory')
a55TemperatureProbeMinimum = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 54, 1, 10), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a55TemperatureProbeMinimum.setStatus('mandatory')
a55TemperatureLowerThreshold_Non_critica = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 54, 1, 11), DmiInteger()).setLabel("a55TemperatureLowerThreshold-Non-critica").setMaxAccess("readwrite")
if mibBuilder.loadTexts: a55TemperatureLowerThreshold_Non_critica.setStatus('mandatory')
a55TemperatureUpperThreshold_Non_critica = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 54, 1, 12), DmiInteger()).setLabel("a55TemperatureUpperThreshold-Non-critica").setMaxAccess("readwrite")
if mibBuilder.loadTexts: a55TemperatureUpperThreshold_Non_critica.setStatus('mandatory')
a55TemperatureLowerThreshold_Critical = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 54, 1, 13), DmiInteger()).setLabel("a55TemperatureLowerThreshold-Critical").setMaxAccess("readwrite")
if mibBuilder.loadTexts: a55TemperatureLowerThreshold_Critical.setStatus('mandatory')
a55TemperatureUpperThreshold_Critical = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 54, 1, 14), DmiInteger()).setLabel("a55TemperatureUpperThreshold-Critical").setMaxAccess("readwrite")
if mibBuilder.loadTexts: a55TemperatureUpperThreshold_Critical.setStatus('mandatory')
a55TemperatureLowerThreshold_Non_recover = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 54, 1, 15), DmiInteger()).setLabel("a55TemperatureLowerThreshold-Non-recover").setMaxAccess("readwrite")
if mibBuilder.loadTexts: a55TemperatureLowerThreshold_Non_recover.setStatus('mandatory')
a55TemperatureUpperThreshold_Non_recover = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 54, 1, 16), DmiInteger()).setLabel("a55TemperatureUpperThreshold-Non-recover").setMaxAccess("readwrite")
if mibBuilder.loadTexts: a55TemperatureUpperThreshold_Non_recover.setStatus('mandatory')
a55TemperatureProbeResolution = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 54, 1, 17), DmiInteger()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a55TemperatureProbeResolution.setStatus('mandatory')
a55TemperatureProbeTolerance = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 54, 1, 18), DmiInteger()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a55TemperatureProbeTolerance.setStatus('mandatory')
a55TemperatureProbeAccuracy = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 54, 1, 19), DmiInteger()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a55TemperatureProbeAccuracy.setStatus('mandatory')
a55FruGroupIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 54, 1, 20), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a55FruGroupIndex.setStatus('mandatory')
a55OperationalGroupIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 54, 1, 21), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a55OperationalGroupIndex.setStatus('mandatory')
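# Physical Container Global table (OID ...412.2.4.63): chassis type, asset tag
# (read-write), lock presence, boot-up/power/thermal state, container name and
# location (read-write), and container security status.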
tPhysicalContainerGlobalTable = MibTable((1, 3, 6, 1, 4, 1, 412, 2, 4, 63), )
if mibBuilder.loadTexts: tPhysicalContainerGlobalTable.setStatus('mandatory')
ePhysicalContainerGlobalTable = MibTableRow((1, 3, 6, 1, 4, 1, 412, 2, 4, 63, 1), ).setIndexNames((0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "DmiComponentIndex"), (0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "a64ContainerIndex"))
if mibBuilder.loadTexts: ePhysicalContainerGlobalTable.setStatus('mandatory')
a64ContainerOrChassisType = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 63, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vDesktop", 3), ("vLowProfileDesktop", 4), ("vPizzaBox", 5), ("vMiniTower", 6), ("vTower", 7), ("vPortable", 8), ("vLaptop", 9), ("vNotebook", 10), ("vHandHeld", 11), ("vDockingStation", 12), ("vAllInOne", 13), ("vSubNotebook", 14), ("vSpace-saving", 15), ("vLunchBox", 16), ("vMainSystemChassis", 17), ("vExpansionChassis", 18), ("vSubchassis", 19), ("vBusExpansionChassis", 20), ("vPeripheralChassis", 21), ("vRaidChassis", 22), ("vRackMountChassis", 23)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a64ContainerOrChassisType.setStatus('mandatory')
a64AssetTag = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 63, 1, 2), DmiDisplaystring()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a64AssetTag.setStatus('mandatory')
a64ChassisLockPresent = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 63, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("vFalse", 0), ("vTrue", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a64ChassisLockPresent.setStatus('mandatory')
a64BootupState = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 63, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vOk", 3), ("vWarning", 4), ("vCritical", 5), ("vNon-recoverable", 6)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a64BootupState.setStatus('mandatory')
a64PowerState = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 63, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vOk", 3), ("vWarning", 4), ("vCritical", 5), ("vNon-recoverable", 6)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a64PowerState.setStatus('mandatory')
a64ThermalState = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 63, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vOk", 3), ("vWarning", 4), ("vCritical", 5), ("vNon-recoverable", 6)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a64ThermalState.setStatus('mandatory')
a64FruGroupIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 63, 1, 7), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a64FruGroupIndex.setStatus('mandatory')
a64OperationalGroupIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 63, 1, 8), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a64OperationalGroupIndex.setStatus('mandatory')
a64ContainerIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 63, 1, 9), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a64ContainerIndex.setStatus('mandatory')
a64ContainerName = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 63, 1, 10), DmiDisplaystring()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a64ContainerName.setStatus('mandatory')
a64ContainerLocation = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 63, 1, 11), DmiDisplaystring()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a64ContainerLocation.setStatus('mandatory')
a64ContainerSecurityStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 63, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vNoSecurityBreachDetected", 3), ("vContainerSecurityBreachAttempted", 4), ("vContainerSecurityBreached", 5)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a64ContainerSecurityStatus.setStatus('mandatory')
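# Operating System table (OID ...412.2.4.2): installed operating systems with
# name, version, primary-OS flag, boot device storage type, boot device and
# partition indices, and a description.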
tOperatingSystem = MibTable((1, 3, 6, 1, 4, 1, 412, 2, 4, 2), )
if mibBuilder.loadTexts: tOperatingSystem.setStatus('mandatory')
eOperatingSystem = MibTableRow((1, 3, 6, 1, 4, 1, 412, 2, 4, 2, 1), ).setIndexNames((0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "DmiComponentIndex"), (0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "a66OperatingSystemIndex"))
if mibBuilder.loadTexts: eOperatingSystem.setStatus('mandatory')
a66OperatingSystemIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 2, 1, 1), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a66OperatingSystemIndex.setStatus('mandatory')
a66OperatingSystemName = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 2, 1, 2), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a66OperatingSystemName.setStatus('mandatory')
a66OperatingSystemVersion = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 2, 1, 3), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a66OperatingSystemVersion.setStatus('mandatory')
a66PrimaryOperatingSystem = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 2, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("vFalse", 0), ("vTrue", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a66PrimaryOperatingSystem.setStatus('mandatory')
a66OperatingSystemBootDeviceStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 2, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vHardDisk", 3), ("vFloppyDisk", 4), ("vOpticalRom", 5), ("vOpticalWorm", 6), ("vOpticalRw", 7), ("vCompactDisk", 8), ("vFlashDisk", 9), ("vBernoulli", 10), ("vOpticalFloppyDisk", 11)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a66OperatingSystemBootDeviceStorageType.setStatus('mandatory')
a66OperatingSystemBootDeviceIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 2, 1, 6), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a66OperatingSystemBootDeviceIndex.setStatus('mandatory')
a66OperatingSystemBootPartitionIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 2, 1, 7), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a66OperatingSystemBootPartitionIndex.setStatus('mandatory')
a66OperatingSystemDescription = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 2, 1, 8), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a66OperatingSystemDescription.setStatus('mandatory')
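# Power Unit Global table (OID ...412.2.4.66): per-unit redundancy status.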
tPowerUnitGlobalTable = MibTable((1, 3, 6, 1, 4, 1, 412, 2, 4, 66), )
if mibBuilder.loadTexts: tPowerUnitGlobalTable.setStatus('mandatory')
ePowerUnitGlobalTable = MibTableRow((1, 3, 6, 1, 4, 1, 412, 2, 4, 66, 1), ).setIndexNames((0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "DmiComponentIndex"), (0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "a67PowerUnitIndex"))
if mibBuilder.loadTexts: ePowerUnitGlobalTable.setStatus('mandatory')
a67PowerUnitIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 66, 1, 1), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a67PowerUnitIndex.setStatus('mandatory')
a67PowerUnitRedundancyStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 66, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vNotApplicableUnitNotRedundant", 3), ("vOffline", 4), ("vFullyRedundant", 5), ("vDegradedRedundancy", 6), ("vRedundancyLost", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a67PowerUnitRedundancyStatus.setStatus('mandatory')
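# Parallel Ports table (OID ...412.2.4.10): base I/O address, IRQ, logical
# name, connector type and pinout, DMA support, capabilities, and security
# settings for each parallel port.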
tParallelPorts = MibTable((1, 3, 6, 1, 4, 1, 412, 2, 4, 10), )
if mibBuilder.loadTexts: tParallelPorts.setStatus('mandatory')
eParallelPorts = MibTableRow((1, 3, 6, 1, 4, 1, 412, 2, 4, 10, 1), ).setIndexNames((0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "DmiComponentIndex"), (0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "a74ParallelPortIndex"))
if mibBuilder.loadTexts: eParallelPorts.setStatus('mandatory')
a74ParallelPortIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 10, 1, 1), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a74ParallelPortIndex.setStatus('mandatory')
a74ParallelBaseIoAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 10, 1, 2), DmiInteger64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a74ParallelBaseIoAddress.setStatus('mandatory')
a74IrqUsed = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 10, 1, 3), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a74IrqUsed.setStatus('mandatory')
a74LogicalName = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 10, 1, 4), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a74LogicalName.setStatus('mandatory')
a74ConnectorType = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 10, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 160, 161, 162))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vDb-25Female", 3), ("vDb-25Male", 4), ("vCentronics", 5), ("vMini-centronics", 6), ("vProprietary", 7), ("vCentronics-14", 160), ("vDb-36Female", 161), ("vMini-centronics-20", 162)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a74ConnectorType.setStatus('mandatory')
a74ConnectorPinout = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 10, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 160, 161, 162, 163, 164))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vXtat", 3), ("vPs2", 4), ("vIeee1284", 5), ("vProprietary", 6), ("vPc-98", 160), ("vPc-98-hireso", 161), ("vPc-h98", 162), ("vPc-98note", 163), ("vPc-98full", 164)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a74ConnectorPinout.setStatus('mandatory')
a74DmaSupport = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 10, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("vFalse", 0), ("vTrue", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a74DmaSupport.setStatus('mandatory')
a74ParallelPortCapabilities = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 10, 1, 8), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a74ParallelPortCapabilities.setStatus('mandatory')
a74OperationalGroupIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 10, 1, 9), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a74OperationalGroupIndex.setStatus('mandatory')
a74ParallelPortSecuritySettings = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 10, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vNone", 3), ("vExternalInterfaceLockedOut", 4), ("vExternalInterfaceEnabled", 5), ("vBoot-bypass", 6)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a74ParallelPortSecuritySettings.setStatus('mandatory')
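# Serial Ports table (OID ...412.2.4.11): base I/O address, IRQ, logical name,
# connector type, maximum speed, UART capabilities, and security settings for
# each serial port.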
tSerialPorts = MibTable((1, 3, 6, 1, 4, 1, 412, 2, 4, 11), )
if mibBuilder.loadTexts: tSerialPorts.setStatus('mandatory')
eSerialPorts = MibTableRow((1, 3, 6, 1, 4, 1, 412, 2, 4, 11, 1), ).setIndexNames((0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "DmiComponentIndex"), (0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "a75SerialPortIndex"))
if mibBuilder.loadTexts: eSerialPorts.setStatus('mandatory')
a75SerialPortIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 11, 1, 1), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a75SerialPortIndex.setStatus('mandatory')
a75SerialBaseIoAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 11, 1, 2), DmiInteger64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a75SerialBaseIoAddress.setStatus('mandatory')
a75IrqUsed = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 11, 1, 3), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a75IrqUsed.setStatus('mandatory')
a75LogicalName = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 11, 1, 4), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a75LogicalName.setStatus('mandatory')
a75ConnectorType = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 11, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 160, 161))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vDb-9PinMale", 3), ("vDb-9PinFemale", 4), ("vDb-25PinMale", 5), ("vDb-25PinFemale", 6), ("vRj-11", 7), ("vRj-45", 8), ("vProprietary", 9), ("vCircularDin-8Male", 10), ("vCircularDin-8Female", 11), ("vMini-centronicsType-14", 160), ("vMini-centronicsType-26", 161)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a75ConnectorType.setStatus('mandatory')
a75MaximumSpeed = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 11, 1, 6), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a75MaximumSpeed.setStatus('mandatory')
a75SerialPortCapabilities = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 11, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 160, 161))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vXtatCompatible", 3), ("v16450Compatible", 4), ("v16550Compatible", 5), ("v16550aCompatible", 6), ("v8251Compatible", 160), ("v8251fifoCompatible", 161)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a75SerialPortCapabilities.setStatus('mandatory')
a75OperationalGroupIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 11, 1, 8), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a75OperationalGroupIndex.setStatus('mandatory')
a75SerialPortSecuritySettings = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 11, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vNone", 3), ("vExternalInterfaceLockedOut", 4), ("vExternalInterfaceEnabled", 5), ("vBoot-bypass", 6)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a75SerialPortSecuritySettings.setStatus('mandatory')
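# Cooling Device table (OID ...412.2.4.17): device type (fan, blower, heat
# pipe, and so on) and the temperature probe associated with each cooling unit.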
tCoolingDevice = MibTable((1, 3, 6, 1, 4, 1, 412, 2, 4, 17), )
if mibBuilder.loadTexts: tCoolingDevice.setStatus('mandatory')
eCoolingDevice = MibTableRow((1, 3, 6, 1, 4, 1, 412, 2, 4, 17, 1), ).setIndexNames((0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "DmiComponentIndex"), (0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "a81CoolingDeviceTableIndex"))
if mibBuilder.loadTexts: eCoolingDevice.setStatus('mandatory')
a81CoolingDeviceTableIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 17, 1, 1), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a81CoolingDeviceTableIndex.setStatus('mandatory')
a81FruGroupIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 17, 1, 2), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a81FruGroupIndex.setStatus('mandatory')
a81OperationalGroupIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 17, 1, 3), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a81OperationalGroupIndex.setStatus('mandatory')
a81CoolingUnitIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 17, 1, 4), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a81CoolingUnitIndex.setStatus('mandatory')
a81CoolingDeviceType = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 17, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 32, 33))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vFan", 3), ("vCentrifugalBlower", 4), ("vChipFan", 5), ("vCabinetFan", 6), ("vPowerSupplyFan", 7), ("vHeatPipe", 8), ("vIntegratedRefrigeration", 9), ("vActiveCooling", 32), ("vPassiveCooling", 33)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a81CoolingDeviceType.setStatus('mandatory')
a81TemperatureProbeIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 17, 1, 6), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a81TemperatureProbeIndex.setStatus('mandatory')
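# Video table (OID ...412.2.4.19): adapter type, refresh rate range, video
# memory type and size, scan mode, physical location, and the current
# resolution, color depth, and refresh rate.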
tVideo = MibTable((1, 3, 6, 1, 4, 1, 412, 2, 4, 19), )
if mibBuilder.loadTexts: tVideo.setStatus('mandatory')
eVideo = MibTableRow((1, 3, 6, 1, 4, 1, 412, 2, 4, 19, 1), ).setIndexNames((0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "DmiComponentIndex"), (0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "a83VideoIndex"))
if mibBuilder.loadTexts: eVideo.setStatus('mandatory')
a83VideoIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 19, 1, 1), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a83VideoIndex.setStatus('mandatory')
a83VideoType = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 19, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 160))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vCga", 3), ("vEga", 4), ("vVga", 5), ("vSvga", 6), ("vMda", 7), ("vHgc", 8), ("vMcga", 9), ("v8514a", 10), ("vXga", 11), ("vLinearFrameBuffer", 12), ("vPc-98", 160)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a83VideoType.setStatus('mandatory')
a83CurrentVideoMode = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 19, 1, 3), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a83CurrentVideoMode.setStatus('mandatory')
a83MinimumRefreshRate = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 19, 1, 4), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a83MinimumRefreshRate.setStatus('mandatory')
a83MaximumRefreshRate = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 19, 1, 5), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a83MaximumRefreshRate.setStatus('mandatory')
a83VideoMemoryType = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 19, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vVram", 3), ("vDram", 4), ("vSram", 5), ("vWram", 6), ("vEdoRam", 7), ("vBurstSynchronousDram", 8), ("vPipelinedBurstSram", 9)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a83VideoMemoryType.setStatus('mandatory')
a83VideoRamMemorySize = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 19, 1, 7), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a83VideoRamMemorySize.setStatus('mandatory')
a83ScanMode = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 19, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vInterlaced", 3), ("vNonInterlaced", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a83ScanMode.setStatus('mandatory')
a83VideoPhysicalLocation = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 19, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vIntegrated", 3), ("vAdd-onCard", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a83VideoPhysicalLocation.setStatus('mandatory')
a83CurrentVerticalResolution = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 19, 1, 10), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a83CurrentVerticalResolution.setStatus('mandatory')
a83CurrentHorizontalResolution = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 19, 1, 11), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a83CurrentHorizontalResolution.setStatus('mandatory')
a83CurrentNumberOfBitsPerPixel = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 19, 1, 12), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a83CurrentNumberOfBitsPerPixel.setStatus('mandatory')
a83CurrentNumberOfRows = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 19, 1, 13), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a83CurrentNumberOfRows.setStatus('mandatory')
a83CurrentNumberOfColumns = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 19, 1, 14), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a83CurrentNumberOfColumns.setStatus('mandatory')
a83CurrentRefreshRate = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 19, 1, 15), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a83CurrentRefreshRate.setStatus('mandatory')
a83FruGroupIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 19, 1, 16), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a83FruGroupIndex.setStatus('mandatory')
a83OperationalGroupIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 19, 1, 17), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a83OperationalGroupIndex.setStatus('mandatory')
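# Video BIOS table (OID ...412.2.4.20): manufacturer, version, release date,
# and shadowing state.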
tVideoBios = MibTable((1, 3, 6, 1, 4, 1, 412, 2, 4, 20), )
if mibBuilder.loadTexts: tVideoBios.setStatus('mandatory')
eVideoBios = MibTableRow((1, 3, 6, 1, 4, 1, 412, 2, 4, 20, 1), ).setIndexNames((0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "DmiComponentIndex"), (0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "a84VideoBiosIndex"))
if mibBuilder.loadTexts: eVideoBios.setStatus('mandatory')
a84VideoBiosIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 20, 1, 1), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a84VideoBiosIndex.setStatus('mandatory')
a84VideoBiosManufacturer = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 20, 1, 2), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a84VideoBiosManufacturer.setStatus('mandatory')
a84VideoBiosVersion = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 20, 1, 3), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a84VideoBiosVersion.setStatus('mandatory')
a84VideoBiosReleaseDate = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 20, 1, 4), DmiDate()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a84VideoBiosReleaseDate.setStatus('mandatory')
a84VideoBiosShadowingState = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 20, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("vFalse", 0), ("vTrue", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a84VideoBiosShadowingState.setStatus('mandatory')
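# Mouse table (OID ...412.2.4.27): interface, IRQ, button count, port name,
# driver name and version, and security settings.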
tMouse = MibTable((1, 3, 6, 1, 4, 1, 412, 2, 4, 27), )
if mibBuilder.loadTexts: tMouse.setStatus('mandatory')
eMouse = MibTableRow((1, 3, 6, 1, 4, 1, 412, 2, 4, 27, 1), ).setIndexNames((0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "DmiComponentIndex"))
if mibBuilder.loadTexts: eMouse.setStatus('mandatory')
a91MouseInterface = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 27, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 160, 161))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vSerial", 3), ("vPs2", 4), ("vInfrared", 5), ("vHp-hil", 6), ("vBusMouse", 7), ("vAdb", 8), ("vBusMouseDb-9", 160), ("vBusMouseMicro-din", 161)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a91MouseInterface.setStatus('mandatory')
a91MouseIrq = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 27, 1, 2), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a91MouseIrq.setStatus('mandatory')
a91MouseButtons = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 27, 1, 3), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a91MouseButtons.setStatus('mandatory')
a91MousePortName = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 27, 1, 4), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a91MousePortName.setStatus('mandatory')
a91MouseDriverName = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 27, 1, 5), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a91MouseDriverName.setStatus('mandatory')
a91MouseDriverVersion = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 27, 1, 6), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a91MouseDriverVersion.setStatus('mandatory')
a91FruGroupIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 27, 1, 7), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a91FruGroupIndex.setStatus('mandatory')
a91OperationalGroupIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 27, 1, 8), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a91OperationalGroupIndex.setStatus('mandatory')
a91SecuritySettings = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 27, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vNone", 3), ("vExternalInterfaceLockedOut", 4), ("vExternalInterfaceEnabled", 5)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a91SecuritySettings.setStatus('mandatory')
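# Keyboard table (OID ...412.2.4.28): layout, type, connector type, and
# security settings.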
tKeyboard = MibTable((1, 3, 6, 1, 4, 1, 412, 2, 4, 28), )
if mibBuilder.loadTexts: tKeyboard.setStatus('mandatory')
eKeyboard = MibTableRow((1, 3, 6, 1, 4, 1, 412, 2, 4, 28, 1), ).setIndexNames((0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "DmiComponentIndex"))
if mibBuilder.loadTexts: eKeyboard.setStatus('mandatory')
a92KeyboardLayout = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 28, 1, 1), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a92KeyboardLayout.setStatus('mandatory')
a92KeyboardType = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 28, 1, 2), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a92KeyboardType.setStatus('mandatory')
a92KeyboardConnectorType = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 28, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 160))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vMini-din", 3), ("vMicro-din", 4), ("vPs2", 5), ("vInfrared", 6), ("vHp-hil", 7), ("vDb-9", 8), ("vAccessBus", 9), ("vPc-98", 160)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a92KeyboardConnectorType.setStatus('mandatory')
a92FruGroupIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 28, 1, 4), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a92FruGroupIndex.setStatus('mandatory')
a92OperationalGroupIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 28, 1, 5), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a92OperationalGroupIndex.setStatus('mandatory')
a92SecuritySettings = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 28, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vNone", 3), ("vExternalInterfaceLockedOut", 4), ("vExternalInterfaceEnabled", 5)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a92SecuritySettings.setStatus('mandatory')
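# Event generation tables follow. Each one describes the event types a
# component can raise, their severity, whether the event is state-based, the
# associated group, and the event message.
# Event Generation for Processor (OID ...412.2.4.100): internal error, thermal
# trip, FRB-3 failure, and processor-disabled events.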
tEventGenerationForProcessor = MibTable((1, 3, 6, 1, 4, 1, 412, 2, 4, 100), )
if mibBuilder.loadTexts: tEventGenerationForProcessor.setStatus('mandatory')
eEventGenerationForProcessor = MibTableRow((1, 3, 6, 1, 4, 1, 412, 2, 4, 100, 1), ).setIndexNames((0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "DmiComponentIndex"), (0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "a100AssociatedGroup"))
if mibBuilder.loadTexts: eEventGenerationForProcessor.setStatus('mandatory')
a100EventType = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 100, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(256, 257, 258, 259))).clone(namedValues=NamedValues(("vProcessorInternalError", 256), ("vProcessorThermalTrip", 257), ("vProcessorFrb-3Failure", 258), ("vProcessorDisabled", 259)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a100EventType.setStatus('mandatory')
a100EventSeverity = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 100, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 4, 8, 16, 32))).clone(namedValues=NamedValues(("vMonitor", 1), ("vInformation", 2), ("vOk", 4), ("vNon-critical", 8), ("vCritical", 16), ("vNon-recoverable", 32)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a100EventSeverity.setStatus('mandatory')
a100IsEventState_based = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 100, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("vFalse", 0), ("vTrue", 1)))).setLabel("a100IsEventState-based").setMaxAccess("readonly")
if mibBuilder.loadTexts: a100IsEventState_based.setStatus('mandatory')
a100EventStateKey = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 100, 1, 4), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a100EventStateKey.setStatus('mandatory')
a100AssociatedGroup = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 100, 1, 5), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a100AssociatedGroup.setStatus('mandatory')
a100EventSystem = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 100, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("vOther", 0), ("vUnknown", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a100EventSystem.setStatus('mandatory')
a100EventSubsystem = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 100, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("vOther", 0), ("vUnknown", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a100EventSubsystem.setStatus('mandatory')
a100IsInstanceDataPresent = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 100, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("vFalse", 0), ("vTrue", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a100IsInstanceDataPresent.setStatus('mandatory')
a100EventMessage = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 100, 1, 10), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a100EventMessage.setStatus('mandatory')
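# Event Generation for Power Supply (OID ...412.2.4.104): failed, ok,
# likely-to-fail, inserted, and removed events.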
tEventGenerationForPowerSupply = MibTable((1, 3, 6, 1, 4, 1, 412, 2, 4, 104), )
if mibBuilder.loadTexts: tEventGenerationForPowerSupply.setStatus('mandatory')
eEventGenerationForPowerSupply = MibTableRow((1, 3, 6, 1, 4, 1, 412, 2, 4, 104, 1), ).setIndexNames((0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "DmiComponentIndex"), (0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104AssociatedGroup"))
if mibBuilder.loadTexts: eEventGenerationForPowerSupply.setStatus('mandatory')
a104EventType = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 104, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(256, 257, 258, 259, 260))).clone(namedValues=NamedValues(("vPowerSupplyFailed", 256), ("vPowerSupplyOk", 257), ("vPowerSupplyLikelyToFail", 258), ("vPowerSupplyInserted", 259), ("vPowerSupplyRemoved", 260)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a104EventType.setStatus('mandatory')
a104EventSeverity = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 104, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 4, 8, 16, 32))).clone(namedValues=NamedValues(("vMonitor", 1), ("vInformation", 2), ("vOk", 4), ("vNon-critical", 8), ("vCritical", 16), ("vNon-recoverable", 32)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a104EventSeverity.setStatus('mandatory')
a104IsEventState_based = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 104, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("vFalse", 0), ("vTrue", 1)))).setLabel("a104IsEventState-based").setMaxAccess("readonly")
if mibBuilder.loadTexts: a104IsEventState_based.setStatus('mandatory')
a104EventStateKey = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 104, 1, 4), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a104EventStateKey.setStatus('mandatory')
a104AssociatedGroup = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 104, 1, 5), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a104AssociatedGroup.setStatus('mandatory')
a104EventSystem = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 104, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("vOther", 0), ("vUnknown", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a104EventSystem.setStatus('mandatory')
a104EventSubsystem = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 104, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("vOther", 0), ("vUnknown", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a104EventSubsystem.setStatus('mandatory')
a104IsInstanceDataPresent = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 104, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("vFalse", 0), ("vTrue", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a104IsInstanceDataPresent.setStatus('mandatory')
a104EventMessage = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 104, 1, 10), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a104EventMessage.setStatus('mandatory')
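# Event Generation for Physical Memory (OID ...412.2.4.108): single-bit memory
# errors and multi-bit memory errors from the previous boot.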
tEventGenerationForPhysicalMemory = MibTable((1, 3, 6, 1, 4, 1, 412, 2, 4, 108), )
if mibBuilder.loadTexts: tEventGenerationForPhysicalMemory.setStatus('mandatory')
eEventGenerationForPhysicalMemory = MibTableRow((1, 3, 6, 1, 4, 1, 412, 2, 4, 108, 1), ).setIndexNames((0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "DmiComponentIndex"), (0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "a108AssociatedGroup"))
if mibBuilder.loadTexts: eEventGenerationForPhysicalMemory.setStatus('mandatory')
a108EventType = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 108, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(256, 257))).clone(namedValues=NamedValues(("vSingle-bitMemoryError", 256), ("vMulti-bitMemoryErrorFromPreviousBoot", 257)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a108EventType.setStatus('mandatory')
a108EventSeverity = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 108, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 4, 8, 16, 32))).clone(namedValues=NamedValues(("vMonitor", 1), ("vInformation", 2), ("vOk", 4), ("vNon-critical", 8), ("vCritical", 16), ("vNon-recoverable", 32)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a108EventSeverity.setStatus('mandatory')
a108IsEventState_based = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 108, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("vFalse", 0), ("vTrue", 1)))).setLabel("a108IsEventState-based").setMaxAccess("readonly")
if mibBuilder.loadTexts: a108IsEventState_based.setStatus('mandatory')
a108EventStateKey = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 108, 1, 4), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a108EventStateKey.setStatus('mandatory')
a108AssociatedGroup = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 108, 1, 5), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a108AssociatedGroup.setStatus('mandatory')
a108EventSystem = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 108, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("vOther", 0), ("vUnknown", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a108EventSystem.setStatus('mandatory')
a108EventSubsystem = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 108, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("vOther", 0), ("vUnknown", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a108EventSubsystem.setStatus('mandatory')
a108IsInstanceDataPresent = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 108, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("vFalse", 0), ("vTrue", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a108IsInstanceDataPresent.setStatus('mandatory')
a108EventMessage = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 108, 1, 10), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a108EventMessage.setStatus('mandatory')
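# Event Generation for Voltage Probe (OID ...412.2.4.113): status change
# events for upper/lower critical and non-critical threshold crossings.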
tEventGenerationForVoltageProbe = MibTable((1, 3, 6, 1, 4, 1, 412, 2, 4, 113), )
if mibBuilder.loadTexts: tEventGenerationForVoltageProbe.setStatus('mandatory')
eEventGenerationForVoltageProbe = MibTableRow((1, 3, 6, 1, 4, 1, 412, 2, 4, 113, 1), ).setIndexNames((0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "DmiComponentIndex"), (0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "a113AssociatedGroup"))
if mibBuilder.loadTexts: eEventGenerationForVoltageProbe.setStatus('mandatory')
a113EventType = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 113, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(256, 257, 258, 259, 260, 261, 262))).clone(namedValues=NamedValues(("vStatusChangedToOk", 256), ("vStatusChangedToUpperCritical", 257), ("vStatusChangedToLowerCritical", 258), ("vStatusChangedFromOkToUpperNon-critical", 259), ("vStatusChangedFromOkToLowerNon-critical", 260), ("vStatusChangedFromUpperCriticalToUpperNo", 261), ("vStatusChangedFromLowerCriticalToLowerNo", 262)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a113EventType.setStatus('mandatory')
a113EventSeverity = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 113, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 4, 8, 16, 32))).clone(namedValues=NamedValues(("vMonitor", 1), ("vInformation", 2), ("vOk", 4), ("vNon-critical", 8), ("vCritical", 16), ("vNon-recoverable", 32)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a113EventSeverity.setStatus('mandatory')
a113IsEventState_based = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 113, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("vFalse", 0), ("vTrue", 1)))).setLabel("a113IsEventState-based").setMaxAccess("readonly")
if mibBuilder.loadTexts: a113IsEventState_based.setStatus('mandatory')
a113EventStateKey = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 113, 1, 4), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a113EventStateKey.setStatus('mandatory')
a113AssociatedGroup = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 113, 1, 5), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a113AssociatedGroup.setStatus('mandatory')
a113EventSystem = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 113, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("vOther", 0), ("vUnknown", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a113EventSystem.setStatus('mandatory')
a113EventSubsystem = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 113, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("vOther", 0), ("vUnknown", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a113EventSubsystem.setStatus('mandatory')
a113IsInstanceDataPresent = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 113, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("vFalse", 0), ("vTrue", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a113IsInstanceDataPresent.setStatus('mandatory')
a113EventMessage = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 113, 1, 10), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a113EventMessage.setStatus('mandatory')
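# --- Event generation for temperature probes (group 114) ---
# Identical column layout to the voltage-probe table above, rooted at
# 1.3.6.1.4.1.412.2.4.114.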
tEventGenerationForTemperatureProbe = MibTable((1, 3, 6, 1, 4, 1, 412, 2, 4, 114), )
if mibBuilder.loadTexts: tEventGenerationForTemperatureProbe.setStatus('mandatory')
eEventGenerationForTemperatureProbe = MibTableRow((1, 3, 6, 1, 4, 1, 412, 2, 4, 114, 1), ).setIndexNames((0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "DmiComponentIndex"), (0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "a114AssociatedGroup"))
if mibBuilder.loadTexts: eEventGenerationForTemperatureProbe.setStatus('mandatory')
a114EventType = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 114, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(256, 257, 258, 259, 260, 261, 262))).clone(namedValues=NamedValues(("vStatusChangedToOk", 256), ("vStatusChangedToUpperCritical", 257), ("vStatusChangedToLowerCritical", 258), ("vStatusChangedFromOkToUpperNon-critical", 259), ("vStatusChangedFromOkToLowerNon-critical", 260), ("vStatusChangedFromUpperCriticalToUpperNo", 261), ("vStatusChangedFromLowerCriticalToLowerNo", 262)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a114EventType.setStatus('mandatory')
a114EventSeverity = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 114, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 4, 8, 16, 32))).clone(namedValues=NamedValues(("vMonitor", 1), ("vInformation", 2), ("vOk", 4), ("vNon-critical", 8), ("vCritical", 16), ("vNon-recoverable", 32)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a114EventSeverity.setStatus('mandatory')
a114IsEventState_based = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 114, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("vFalse", 0), ("vTrue", 1)))).setLabel("a114IsEventState-based").setMaxAccess("readonly")
if mibBuilder.loadTexts: a114IsEventState_based.setStatus('mandatory')
a114EventStateKey = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 114, 1, 4), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a114EventStateKey.setStatus('mandatory')
a114AssociatedGroup = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 114, 1, 5), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a114AssociatedGroup.setStatus('mandatory')
a114EventSystem = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 114, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("vOther", 0), ("vUnknown", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a114EventSystem.setStatus('mandatory')
a114EventSubsystem = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 114, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("vOther", 0), ("vUnknown", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a114EventSubsystem.setStatus('mandatory')
a114IsInstanceDataPresent = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 114, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("vFalse", 0), ("vTrue", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a114IsInstanceDataPresent.setStatus('mandatory')
a114EventMessage = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 114, 1, 10), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a114EventMessage.setStatus('mandatory')
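# --- Event generation for physical containers (group 116) ---
# Reports container security events (breach / status OK); indexed by
# DmiComponentIndex and a116AssociatedGroup.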
tEventGenerationForPhysicalContainer = MibTable((1, 3, 6, 1, 4, 1, 412, 2, 4, 116), )
if mibBuilder.loadTexts: tEventGenerationForPhysicalContainer.setStatus('mandatory')
eEventGenerationForPhysicalContainer = MibTableRow((1, 3, 6, 1, 4, 1, 412, 2, 4, 116, 1), ).setIndexNames((0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "DmiComponentIndex"), (0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "a116AssociatedGroup"))
if mibBuilder.loadTexts: eEventGenerationForPhysicalContainer.setStatus('mandatory')
a116EventType = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 116, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(6, 256))).clone(namedValues=NamedValues(("vContainerSecurityBreach", 6), ("vContainerSecurityStatusOk", 256)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a116EventType.setStatus('mandatory')
a116EventSeverity = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 116, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 4, 8, 16, 32))).clone(namedValues=NamedValues(("vMonitor", 1), ("vInformation", 2), ("vOk", 4), ("vNon-critical", 8), ("vCritical", 16), ("vNon-recoverable", 32)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a116EventSeverity.setStatus('mandatory')
a116IsEventState_based = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 116, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("vFalse", 0), ("vTrue", 1)))).setLabel("a116IsEventState-based").setMaxAccess("readonly")
if mibBuilder.loadTexts: a116IsEventState_based.setStatus('mandatory')
a116EventStateKey = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 116, 1, 4), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a116EventStateKey.setStatus('mandatory')
a116AssociatedGroup = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 116, 1, 5), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a116AssociatedGroup.setStatus('mandatory')
a116EventSystem = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 116, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("vOther", 0), ("vUnknown", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a116EventSystem.setStatus('mandatory')
a116EventSubsystem = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 116, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("vOther", 0), ("vUnknown", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a116EventSubsystem.setStatus('mandatory')
a116IsInstanceDataPresent = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 116, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("vFalse", 0), ("vTrue", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a116IsInstanceDataPresent.setStatus('mandatory')
a116EventMessage = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 116, 1, 10), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a116EventMessage.setStatus('mandatory')
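# --- Pointing device table (group 122) ---
# Inventory of attached pointing devices: type, interface, IRQ, button count,
# port/driver details and security settings; indexed by DmiComponentIndex only.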
tPointingDevice = MibTable((1, 3, 6, 1, 4, 1, 412, 2, 4, 122), )
if mibBuilder.loadTexts: tPointingDevice.setStatus('mandatory')
ePointingDevice = MibTableRow((1, 3, 6, 1, 4, 1, 412, 2, 4, 122, 1), ).setIndexNames((0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "DmiComponentIndex"))
if mibBuilder.loadTexts: ePointingDevice.setStatus('mandatory')
a122PointingDeviceType = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 122, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vMouse", 3), ("vTrackBall", 4), ("vTrackPoint", 5), ("vGlidePoint", 6), ("vTouchPad", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a122PointingDeviceType.setStatus('mandatory')
a122PointingDeviceInterface = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 122, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 160, 161, 162))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vSerial", 3), ("vPs2", 4), ("vInfrared", 5), ("vHp-hil", 6), ("vBusMouse", 7), ("vAdb", 8), ("vBusMouseDb-9", 160), ("vBusMouseMicro-din", 161), ("vUsb", 162)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a122PointingDeviceInterface.setStatus('mandatory')
a122PointingDeviceIrq = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 122, 1, 3), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a122PointingDeviceIrq.setStatus('mandatory')
a122PointingDeviceButtons = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 122, 1, 4), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a122PointingDeviceButtons.setStatus('mandatory')
a122PointingDevicePortName = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 122, 1, 5), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a122PointingDevicePortName.setStatus('mandatory')
a122PointingDeviceDriverName = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 122, 1, 6), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a122PointingDeviceDriverName.setStatus('mandatory')
a122PointingDeviceDriverVersion = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 122, 1, 7), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a122PointingDeviceDriverVersion.setStatus('mandatory')
a122FruGroupIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 122, 1, 8), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a122FruGroupIndex.setStatus('mandatory')
a122OperationalGroupIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 122, 1, 9), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a122OperationalGroupIndex.setStatus('mandatory')
a122SecuritySettings = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 122, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vNone", 3), ("vExternalInterfaceLockedOut", 4), ("vExternalInterfaceEnabled", 5)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a122SecuritySettings.setStatus('mandatory')
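# --- Bus global table (group 128, OID ...412.2.4.64) ---
# Enumerates system buses by a128BusId with a coded bus type.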
tBusGlobalTable = MibTable((1, 3, 6, 1, 4, 1, 412, 2, 4, 64), )
if mibBuilder.loadTexts: tBusGlobalTable.setStatus('mandatory')
eBusGlobalTable = MibTableRow((1, 3, 6, 1, 4, 1, 412, 2, 4, 64, 1), ).setIndexNames((0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "DmiComponentIndex"), (0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "a128BusId"))
if mibBuilder.loadTexts: eBusGlobalTable.setStatus('mandatory')
a128BusId = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 64, 1, 1), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a128BusId.setStatus('mandatory')
a128BusType = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 64, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vEisa", 3), ("vIsa", 4), ("vPci", 5), ("vScsi", 6), ("vIde", 7), ("vDiagnostic", 8), ("vI2c", 9), ("vPower", 10)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a128BusType.setStatus('mandatory')
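# --- Physical expansion sites table (group 129, OID ...412.2.4.65) ---
# Describes slots, bays and other expansion sites, including whether each
# site is virtual and whether it is currently occupied.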
tPhysicalExpansionSitesTable = MibTable((1, 3, 6, 1, 4, 1, 412, 2, 4, 65), )
if mibBuilder.loadTexts: tPhysicalExpansionSitesTable.setStatus('mandatory')
ePhysicalExpansionSitesTable = MibTableRow((1, 3, 6, 1, 4, 1, 412, 2, 4, 65, 1), ).setIndexNames((0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "DmiComponentIndex"), (0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "a129ExpansionSiteIndex"))
if mibBuilder.loadTexts: ePhysicalExpansionSitesTable.setStatus('mandatory')
a129ExpansionSiteIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 65, 1, 1), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a129ExpansionSiteIndex.setStatus('mandatory')
a129ExpansionSiteType = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 65, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vBusSlot", 3), ("vDriveBay", 4), ("vPowerUnitBay", 5), ("vSubchassisSlot", 6), ("vPcmciaSlot", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a129ExpansionSiteType.setStatus('mandatory')
a129VirtualExpansionSite = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 65, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("vFalse", 0), ("vTrue", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a129VirtualExpansionSite.setStatus('mandatory')
a129ExpansionSiteName = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 65, 1, 4), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a129ExpansionSiteName.setStatus('mandatory')
a129ExpansionSiteDescription = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 65, 1, 5), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a129ExpansionSiteDescription.setStatus('mandatory')
a129ExpansionSiteCurrentlyOccupied = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 65, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("vFalse", 0), ("vTrue", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a129ExpansionSiteCurrentlyOccupied.setStatus('mandatory')
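# --- Event generation for cooling devices (group 140) ---
# Note: unlike the other event-generation tables, this group defines no
# instance-data or event-message columns (subidentifiers 9 and 10 are absent).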
tEventGenerationForCoolingDevice = MibTable((1, 3, 6, 1, 4, 1, 412, 2, 4, 140), )
if mibBuilder.loadTexts: tEventGenerationForCoolingDevice.setStatus('mandatory')
eEventGenerationForCoolingDevice = MibTableRow((1, 3, 6, 1, 4, 1, 412, 2, 4, 140, 1), ).setIndexNames((0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "DmiComponentIndex"), (0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "a140AssociatedGroup"))
if mibBuilder.loadTexts: eEventGenerationForCoolingDevice.setStatus('mandatory')
a140EventType = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 140, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("vCoolingDeviceStatusChange", 1), ("vTemperatureFault", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a140EventType.setStatus('mandatory')
a140EventSeverity = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 140, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 4, 8, 16, 32))).clone(namedValues=NamedValues(("vMonitor", 1), ("vInformation", 2), ("vOk", 4), ("vNon-critical", 8), ("vCritical", 16), ("vNon-recoverable", 32)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a140EventSeverity.setStatus('mandatory')
a140IsEventState_based = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 140, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("vFalse", 0), ("vTrue", 1)))).setLabel("a140IsEventState-based").setMaxAccess("readonly")
if mibBuilder.loadTexts: a140IsEventState_based.setStatus('mandatory')
a140EventStateKey = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 140, 1, 4), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a140EventStateKey.setStatus('mandatory')
a140AssociatedGroup = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 140, 1, 5), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a140AssociatedGroup.setStatus('mandatory')
a140EventSystem = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 140, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("vOther", 0), ("vUnknown", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a140EventSystem.setStatus('mandatory')
a140EventSubsystem = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 140, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("vOther", 0), ("vUnknown", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a140EventSubsystem.setStatus('mandatory')
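# --- Event generation for power units (group 201) ---
# Covers redundancy lost/regained/degraded and VA shutdown-limit events.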
tEventGenerationForPowerUnit = MibTable((1, 3, 6, 1, 4, 1, 412, 2, 4, 201), )
if mibBuilder.loadTexts: tEventGenerationForPowerUnit.setStatus('mandatory')
eEventGenerationForPowerUnit = MibTableRow((1, 3, 6, 1, 4, 1, 412, 2, 4, 201, 1), ).setIndexNames((0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "DmiComponentIndex"), (0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "a201AssociatedGroup"))
if mibBuilder.loadTexts: eEventGenerationForPowerUnit.setStatus('mandatory')
a201EventType = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 201, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("vPowerUnitRedundancyLost", 1), ("vPowerUnitRedundancyRegained", 2), ("vPowerUnitRedundancyDegraded", 3), ("vPowerUnitVaShutdownConditionCleared", 4), ("vPowerUnitVaShutdownLimitExceeded", 5)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a201EventType.setStatus('mandatory')
a201EventSeverity = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 201, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 4, 8, 16, 32))).clone(namedValues=NamedValues(("vMonitor", 1), ("vInformation", 2), ("vOk", 4), ("vNon-critical", 8), ("vCritical", 16), ("vNon-recoverable", 32)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a201EventSeverity.setStatus('mandatory')
a201IsEventState_based = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 201, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("vFalse", 0), ("vTrue", 1)))).setLabel("a201IsEventState-based").setMaxAccess("readonly")
if mibBuilder.loadTexts: a201IsEventState_based.setStatus('mandatory')
a201EventStateKey = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 201, 1, 4), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a201EventStateKey.setStatus('mandatory')
a201AssociatedGroup = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 201, 1, 5), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a201AssociatedGroup.setStatus('mandatory')
a201EventSystem = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 201, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("vOther", 0), ("vUnknown", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a201EventSystem.setStatus('mandatory')
a201EventSubsystem = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 201, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("vOther", 0), ("vUnknown", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a201EventSubsystem.setStatus('mandatory')
a201IsInstanceDataPresent = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 201, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("vFalse", 0), ("vTrue", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a201IsInstanceDataPresent.setStatus('mandatory')
a201EventMessage = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 201, 1, 10), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a201EventMessage.setStatus('mandatory')
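# --- Event generation for cooling sensors (group 202) ---
# This group is rooted under enterprise 1.3.6.1.4.1.343 (Intel) rather than
# the 1.3.6.1.4.1.412.2.4 subtree used by the surrounding event tables.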
tEventGenerationForCoolingSensors = MibTable((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 202), )
if mibBuilder.loadTexts: tEventGenerationForCoolingSensors.setStatus('mandatory')
eEventGenerationForCoolingSensors = MibTableRow((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 202, 1), ).setIndexNames((0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "DmiComponentIndex"), (0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "a202AssociatedGroup"))
if mibBuilder.loadTexts: eEventGenerationForCoolingSensors.setStatus('mandatory')
a202EventType = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 202, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(256, 257))).clone(namedValues=NamedValues(("vCoolingDeviceFailure", 256), ("vCoolingDeviceOk", 257)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a202EventType.setStatus('mandatory')
a202EventSeverity = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 202, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 4, 8, 16, 32))).clone(namedValues=NamedValues(("vMonitor", 1), ("vInformation", 2), ("vOk", 4), ("vNon-critical", 8), ("vCritical", 16), ("vNon-recoverable", 32)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a202EventSeverity.setStatus('mandatory')
a202IsEventState_based = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 202, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("vFalse", 0), ("vTrue", 1)))).setLabel("a202IsEventState-based").setMaxAccess("readonly")
if mibBuilder.loadTexts: a202IsEventState_based.setStatus('mandatory')
a202EventStateKey = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 202, 1, 4), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a202EventStateKey.setStatus('mandatory')
a202AssociatedGroup = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 202, 1, 5), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a202AssociatedGroup.setStatus('mandatory')
a202EventSystem = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 202, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("vOther", 0), ("vUnknown", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a202EventSystem.setStatus('mandatory')
a202EventSubsystem = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 202, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("vOther", 0), ("vUnknown", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a202EventSubsystem.setStatus('mandatory')
a202IsInstanceDataPresent = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 202, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("vFalse", 0), ("vTrue", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a202IsInstanceDataPresent.setStatus('mandatory')
a202EventMessage = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 202, 1, 10), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a202EventMessage.setStatus('mandatory')
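# --- Event generation for system slots (group 205) ---
# Slot status changes plus slot power-on/power-off events.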
tEventGenerationForSystemSlots = MibTable((1, 3, 6, 1, 4, 1, 412, 2, 4, 205), )
if mibBuilder.loadTexts: tEventGenerationForSystemSlots.setStatus('mandatory')
eEventGenerationForSystemSlots = MibTableRow((1, 3, 6, 1, 4, 1, 412, 2, 4, 205, 1), ).setIndexNames((0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "DmiComponentIndex"), (0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "a205AssociatedGroup"))
if mibBuilder.loadTexts: eEventGenerationForSystemSlots.setStatus('mandatory')
a205EventType = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 205, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("vSlotStatusChangeToOk", 1), ("vSlotStatusChangeToNon-critical", 2), ("vSlotStatusChangeToCritical", 3), ("vSlotPoweredOn", 4), ("vSlotPoweredOff", 5)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a205EventType.setStatus('mandatory')
a205EventSeverity = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 205, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 4, 8, 16, 32))).clone(namedValues=NamedValues(("vMonitor", 1), ("vInformation", 2), ("vOk", 4), ("vNon-critical", 8), ("vCritical", 16), ("vNon-recoverable", 32)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a205EventSeverity.setStatus('mandatory')
a205IsEventState_based = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 205, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("vFalse", 0), ("vTrue", 1)))).setLabel("a205IsEventState-based").setMaxAccess("readonly")
if mibBuilder.loadTexts: a205IsEventState_based.setStatus('mandatory')
a205EventStateKey = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 205, 1, 4), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a205EventStateKey.setStatus('mandatory')
a205AssociatedGroup = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 205, 1, 5), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a205AssociatedGroup.setStatus('mandatory')
a205EventSystem = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 205, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("vOther", 0), ("vUnknown", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a205EventSystem.setStatus('mandatory')
a205EventSubsystem = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 205, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("vOther", 0), ("vUnknown", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a205EventSubsystem.setStatus('mandatory')
a205IsInstanceDataPresent = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 205, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("vFalse", 0), ("vTrue", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a205IsInstanceDataPresent.setStatus('mandatory')
a205EventMessage = MibTableColumn((1, 3, 6, 1, 4, 1, 412, 2, 4, 205, 1, 10), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a205EventMessage.setStatus('mandatory')
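# --- System control table (group 1004) ---
# Read-write management controls: system reset (immediate and timed), power
# status, event logging, watchdog configuration, and OS shutdown / power-off /
# NMI actions driven through octet-string trigger attributes; a1004Challenge
# and a1004VerifyPrivilege form a challenge/response privilege check.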
tSystemControl = MibTable((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 2), )
if mibBuilder.loadTexts: tSystemControl.setStatus('mandatory')
eSystemControl = MibTableRow((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 2, 1), ).setIndexNames((0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "DmiComponentIndex"), (0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "a1004Selfid"))
if mibBuilder.loadTexts: eSystemControl.setStatus('mandatory')
a1004Selfid = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 2, 1, 1), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a1004Selfid.setStatus('mandatory')
a1004ResetSystem = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 2, 1, 2), DmiOctetstring()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a1004ResetSystem.setStatus('mandatory')
a1004TimedResetIncrement = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 2, 1, 3), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a1004TimedResetIncrement.setStatus('mandatory')
a1004TimedResetResolution = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 2, 1, 4), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a1004TimedResetResolution.setStatus('mandatory')
a1004TimeUntilSystemReset = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 2, 1, 5), DmiInteger()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a1004TimeUntilSystemReset.setStatus('mandatory')
a1004SystemPowerCapabilities = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 2, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("vUnknown", 0), ("vUnsupported", 1), ("vOnAndOff", 2), ("vOffOnly", 3), ("vOnOnly", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a1004SystemPowerCapabilities.setStatus('mandatory')
a1004SystemPowerStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 2, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("vOff", 0), ("vOn", 1), ("vUnsupported", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a1004SystemPowerStatus.setStatus('mandatory')
a1004EventLoggingCapability = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 2, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("vUnknown", 0), ("vUnsupported", 1), ("vActive", 2), ("vInactive", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a1004EventLoggingCapability.setStatus('mandatory')
a1004WatchdogTimerIncrement = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 2, 1, 9), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a1004WatchdogTimerIncrement.setStatus('mandatory')
a1004WatchdogTimerResolution = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 2, 1, 10), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a1004WatchdogTimerResolution.setStatus('mandatory')
a1004WatchdogUpdateInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 2, 1, 11), DmiInteger()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a1004WatchdogUpdateInterval.setStatus('mandatory')
a1004UseSystemWatchdogFeature = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 2, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("vOff", 0), ("vOn", 1), ("vUnsupported", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a1004UseSystemWatchdogFeature.setStatus('mandatory')
a1004ResetSystemAfterDelay = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 2, 1, 13), DmiOctetstring()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a1004ResetSystemAfterDelay.setStatus('mandatory')
a1004SavePersistentData = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 2, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("vOff", 0), ("vOn", 1), ("vUnsupported", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a1004SavePersistentData.setStatus('mandatory')
a1004RestoreFactoryDefaults = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 2, 1, 16), DmiOctetstring()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a1004RestoreFactoryDefaults.setStatus('mandatory')
a1004ShutdownOs = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 2, 1, 17), DmiOctetstring()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a1004ShutdownOs.setStatus('mandatory')
a1004ShutdownOsAndPowerOff = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 2, 1, 18), DmiOctetstring()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a1004ShutdownOsAndPowerOff.setStatus('mandatory')
a1004ShutdownOsAndHardwareReset = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 2, 1, 19), DmiOctetstring()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a1004ShutdownOsAndHardwareReset.setStatus('mandatory')
a1004IssueAHardwareNmi = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 2, 1, 20), DmiOctetstring()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a1004IssueAHardwareNmi.setStatus('mandatory')
a1004ImmediatePowerDown = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 2, 1, 21), DmiOctetstring()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a1004ImmediatePowerDown.setStatus('mandatory')
a1004Challenge = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 2, 1, 22), DmiOctetstring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a1004Challenge.setStatus('mandatory')
a1004VerifyPrivilege = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 2, 1, 23), DmiOctetstring()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a1004VerifyPrivilege.setStatus('mandatory')
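# --- Cooling sensors table (group 1005) ---
# Fan/cooling sensor readings with read-write accuracy, tolerance and
# non-critical / critical / non-recoverable thresholds.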
tCoolingSensors = MibTable((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 3), )
if mibBuilder.loadTexts: tCoolingSensors.setStatus('mandatory')
eCoolingSensors = MibTableRow((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 3, 1), ).setIndexNames((0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "DmiComponentIndex"), (0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "a1005Selfid"))
if mibBuilder.loadTexts: eCoolingSensors.setStatus('mandatory')
a1005Selfid = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 3, 1, 1), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a1005Selfid.setStatus('mandatory')
a1005FruGroupIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 3, 1, 2), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a1005FruGroupIndex.setStatus('mandatory')
a1005OperationalGroupIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 3, 1, 3), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a1005OperationalGroupIndex.setStatus('mandatory')
a1005CoolingDeviceType = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 3, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 32, 33))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vFan", 3), ("vCentrifugalBlower", 4), ("vChipFan", 5), ("vCabinetFan", 6), ("vPowerSupplyFan", 7), ("vHeatPipe", 8), ("vIntegratedRefrigeration", 9), ("vActiveCooling", 32), ("vPassiveCooling", 33)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a1005CoolingDeviceType.setStatus('mandatory')
a1005CfmRating = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 3, 1, 7), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a1005CfmRating.setStatus('mandatory')
a1005FanUnits = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 3, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("vRpm", 0), ("vCfm", 1), ("vOkfatal", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a1005FanUnits.setStatus('mandatory')
a1005MaximumReading = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 3, 1, 9), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a1005MaximumReading.setStatus('mandatory')
a1005MinimumReading = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 3, 1, 10), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a1005MinimumReading.setStatus('mandatory')
a1005CurrentReading = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 3, 1, 11), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a1005CurrentReading.setStatus('mandatory')
a1005SensorAccuracy = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 3, 1, 13), DmiInteger()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a1005SensorAccuracy.setStatus('mandatory')
a1005SensorTolerancePlus = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 3, 1, 14), DmiInteger()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a1005SensorTolerancePlus.setStatus('mandatory')
a1005SensorToleranceMinus = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 3, 1, 15), DmiInteger()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a1005SensorToleranceMinus.setStatus('mandatory')
a1005Non_criticalThreshold = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 3, 1, 16), DmiInteger()).setLabel("a1005Non-criticalThreshold").setMaxAccess("readwrite")
if mibBuilder.loadTexts: a1005Non_criticalThreshold.setStatus('mandatory')
a1005CriticalThreshold = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 3, 1, 17), DmiInteger()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a1005CriticalThreshold.setStatus('mandatory')
a1005Non_recoverableThreshold = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 3, 1, 18), DmiInteger()).setLabel("a1005Non-recoverableThreshold").setMaxAccess("readwrite")
if mibBuilder.loadTexts: a1005Non_recoverableThreshold.setStatus('mandatory')
a1005CoolingSensorDescription = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 3, 1, 19), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a1005CoolingSensorDescription.setStatus('mandatory')
a1005NominalReading = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 3, 1, 21), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a1005NominalReading.setStatus('mandatory')
a1005LowestNormalReading = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 3, 1, 22), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a1005LowestNormalReading.setStatus('mandatory')
a1005HighestNormalReading = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 3, 1, 23), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a1005HighestNormalReading.setStatus('mandatory')
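# --- System event log table (group 1006) ---
# Timestamped log records exposed as raw type/length/data tuples.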
tSystemEventLog = MibTable((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 4), )
if mibBuilder.loadTexts: tSystemEventLog.setStatus('mandatory')
eSystemEventLog = MibTableRow((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 4, 1), ).setIndexNames((0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "DmiComponentIndex"), (0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "a1006Selfid"))
if mibBuilder.loadTexts: eSystemEventLog.setStatus('mandatory')
a1006Selfid = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 4, 1, 1), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a1006Selfid.setStatus('mandatory')
a1006Timestamp = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 4, 1, 2), DmiDate()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a1006Timestamp.setStatus('mandatory')
a1006RecordType = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 4, 1, 3), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a1006RecordType.setStatus('mandatory')
a1006RecordLength = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 4, 1, 4), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a1006RecordLength.setStatus('mandatory')
a1006RecordData = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 4, 1, 5), DmiOctetstring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a1006RecordData.setStatus('mandatory')
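# --- PCI hotplug device table (group 1008) ---
# Per-slot inventory of hot-plugged PCI devices (manufacturer, type, revision).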
tPciHotplugDevice = MibTable((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 18), )
if mibBuilder.loadTexts: tPciHotplugDevice.setStatus('mandatory')
ePciHotplugDevice = MibTableRow((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 18, 1), ).setIndexNames((0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "DmiComponentIndex"), (0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "a1008PciHotplugDeviceIndex"))
if mibBuilder.loadTexts: ePciHotplugDevice.setStatus('mandatory')
a1008PciHotplugDeviceIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 18, 1, 1), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a1008PciHotplugDeviceIndex.setStatus('mandatory')
a1008PciHotplugSlotNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 18, 1, 2), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a1008PciHotplugSlotNumber.setStatus('mandatory')
a1008DeviceManufacturer = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 18, 1, 3), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a1008DeviceManufacturer.setStatus('mandatory')
a1008DeviceType = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 18, 1, 4), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a1008DeviceType.setStatus('mandatory')
a1008DeviceRevision = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 18, 1, 5), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a1008DeviceRevision.setStatus('mandatory')
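# --- Paging configuration table (group 1010) ---
# Global paging (PEP) settings, including a writable default PEP string and
# test-page / issue-paging controls.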
tPagingConfig = MibTable((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 5), )
if mibBuilder.loadTexts: tPagingConfig.setStatus('mandatory')
ePagingConfig = MibTableRow((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 5, 1), ).setIndexNames((0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "DmiComponentIndex"))
if mibBuilder.loadTexts: ePagingConfig.setStatus('mandatory')
a1010PagingSupported = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 5, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("vFalse", 0), ("vTrue", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a1010PagingSupported.setStatus('mandatory')
a1010DefaultPepString = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 5, 1, 2), DmiDisplaystring()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a1010DefaultPepString.setStatus('mandatory')
a1010GlobalPaging = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 5, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("vFalse", 0), ("vTrue", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a1010GlobalPaging.setStatus('mandatory')
a1010PepStringSize = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 5, 1, 4), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a1010PepStringSize.setStatus('mandatory')
a1010TestPage = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 5, 1, 5), DmiInteger()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a1010TestPage.setStatus('mandatory')
a1010IssuePaging = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 5, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("vDoNotPage", 0), ("vPage", 1), ("vUnsupported", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a1010IssuePaging.setStatus('mandatory')
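# --- Local paging configuration table (group 1011) ---
# Per-system PEP strings with repeat count/interval and a test string.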
tLocalPagingConfig = MibTable((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 6), )
if mibBuilder.loadTexts: tLocalPagingConfig.setStatus('mandatory')
eLocalPagingConfig = MibTableRow((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 6, 1), ).setIndexNames((0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "DmiComponentIndex"))
if mibBuilder.loadTexts: eLocalPagingConfig.setStatus('mandatory')
a1011PepString1 = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 6, 1, 1), DmiDisplaystring()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a1011PepString1.setStatus('mandatory')
a1011PepString2 = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 6, 1, 2), DmiDisplaystring()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a1011PepString2.setStatus('mandatory')
a1011RepeatCount = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 6, 1, 3), DmiInteger()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a1011RepeatCount.setStatus('mandatory')
a1011RepeatInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 6, 1, 4), DmiInteger()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a1011RepeatInterval.setStatus('mandatory')
a1011TestString = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 6, 1, 5), DmiDisplaystring()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a1011TestString.setStatus('mandatory')
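# --- Email alert configuration table (group 1012) ---
# SMTP server plus from/to/subject/message fields and a test-email trigger.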
tEmailConfig = MibTable((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 7), )
if mibBuilder.loadTexts: tEmailConfig.setStatus('mandatory')
eEmailConfig = MibTableRow((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 7, 1), ).setIndexNames((0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "DmiComponentIndex"))
if mibBuilder.loadTexts: eEmailConfig.setStatus('mandatory')
a1012SmtpServer = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 7, 1, 1), DmiDisplaystring()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a1012SmtpServer.setStatus('mandatory')
a1012EmailFromAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 7, 1, 2), DmiDisplaystring()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a1012EmailFromAddress.setStatus('mandatory')
a1012EmailToAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 7, 1, 3), DmiDisplaystring()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a1012EmailToAddress.setStatus('mandatory')
a1012EmailSubject = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 7, 1, 4), DmiDisplaystring()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a1012EmailSubject.setStatus('mandatory')
a1012EmailMessage = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 7, 1, 5), DmiDisplaystring()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a1012EmailMessage.setStatus('mandatory')
a1012TestEmail = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 7, 1, 6), DmiInteger()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a1012TestEmail.setStatus('mandatory')
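# --- DPC (Direct Platform Control) discovery table (group 9000) ---
# Dial number string and service-partition presence/scan/boot flags.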
tDpcDiscovery = MibTable((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 17), )
if mibBuilder.loadTexts: tDpcDiscovery.setStatus('mandatory')
eDpcDiscovery = MibTableRow((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 17, 1), ).setIndexNames((0, "INTELCORPORATIONBASEBOARDMAPPER-MIB", "DmiComponentIndex"))
if mibBuilder.loadTexts: eDpcDiscovery.setStatus('mandatory')
a9000DpcDialNumberString = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 17, 1, 1), DmiDisplaystring()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a9000DpcDialNumberString.setStatus('mandatory')
a9000DpcServicePartitionPresenceFlag = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 17, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vPresent", 3), ("vNotPresent", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a9000DpcServicePartitionPresenceFlag.setStatus('mandatory')
a9000ScanServicePartitionFlag = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 17, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vScan", 3), ("vDoNotScan", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a9000ScanServicePartitionFlag.setStatus('mandatory')
a9000BootServicePartitionFlag = MibTableColumn((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 17, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("vOther", 1), ("vUnknown", 2), ("vBoot", 3), ("vDoNotBoot", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a9000BootServicePartitionFlag.setStatus('mandatory')
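# --- Trap subtree anchors ---
# Each *Traps identifier names the conventional 0-arc under its table's OID,
# beneath which the NotificationType objects below are registered.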
tProcessorTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 412, 2, 4, 5, 0))
tPowerSupplyTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 412, 2, 4, 16, 0))
tPhysicalMemoryArrayTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 412, 2, 4, 33, 0))
tVoltageProbeTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 412, 2, 4, 53, 0))
tTemperatureProbeTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 412, 2, 4, 54, 0))
tPhysicalContainerGlobalTableTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 412, 2, 4, 63, 0))
tPowerUnitGlobalTableTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 412, 2, 4, 66, 0))
tCoolingSensorsTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 3, 0))
tSystemSlotsTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 412, 2, 4, 18, 0))
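# --- Notification definitions ---
# Each NotificationType binds the full set of event columns for its group plus
# the owning table's index column. A minimal usage sketch (hypothetical, for
# illustration only; assumes this module has been compiled onto a pysnmp MIB
# search path):
#
#     from pysnmp.smi import builder, view
#     mb = builder.MibBuilder()
#     mb.loadModules('INTELCORPORATIONBASEBOARDMAPPER-MIB')
#     mv = view.MibViewController(mb)
#     oid, label, suffix = mv.getNodeName(
#         (1, 3, 6, 1, 4, 1, 412, 2, 4, 5, 0, 256))  # notification1ForProcessor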
notification1ForProcessor = NotificationType((1, 3, 6, 1, 4, 1, 412, 2, 4, 5, 0, 256)).setObjects(("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a100EventType"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a100EventSeverity"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a100IsEventState_based"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a100EventStateKey"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a100AssociatedGroup"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a100EventSystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a100EventSubsystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a100IsInstanceDataPresent"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a100EventMessage"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a6ProcessorIndex"))
if mibBuilder.loadTexts: notification1ForProcessor.setStatus('current')
notification2ForProcessor = NotificationType((1, 3, 6, 1, 4, 1, 412, 2, 4, 5, 0, 257)).setObjects(("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a100EventType"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a100EventSeverity"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a100IsEventState_based"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a100EventStateKey"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a100AssociatedGroup"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a100EventSystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a100EventSubsystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a100IsInstanceDataPresent"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a100EventMessage"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a6ProcessorIndex"))
if mibBuilder.loadTexts: notification2ForProcessor.setStatus('current')
notification3ForProcessor = NotificationType((1, 3, 6, 1, 4, 1, 412, 2, 4, 5, 0, 258)).setObjects(("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a100EventType"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a100EventSeverity"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a100IsEventState_based"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a100EventStateKey"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a100AssociatedGroup"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a100EventSystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a100EventSubsystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a100IsInstanceDataPresent"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a100EventMessage"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a6ProcessorIndex"))
if mibBuilder.loadTexts: notification3ForProcessor.setStatus('current')
notification4ForProcessor = NotificationType((1, 3, 6, 1, 4, 1, 412, 2, 4, 5, 0, 259)).setObjects(("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a100EventType"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a100EventSeverity"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a100IsEventState_based"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a100EventStateKey"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a100AssociatedGroup"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a100EventSystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a100EventSubsystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a100IsInstanceDataPresent"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a100EventMessage"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a6ProcessorIndex"))
if mibBuilder.loadTexts: notification4ForProcessor.setStatus('current')
notification1ForPowerSupply = NotificationType((1, 3, 6, 1, 4, 1, 412, 2, 4, 16, 0, 256)).setObjects(("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104EventType"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104EventSeverity"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104IsEventState_based"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104EventStateKey"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104AssociatedGroup"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104EventSystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104EventSubsystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104IsInstanceDataPresent"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a17PowerSupplyIndex"))
if mibBuilder.loadTexts: notification1ForPowerSupply.setStatus('current')
notification2ForPowerSupply = NotificationType((1, 3, 6, 1, 4, 1, 412, 2, 4, 16, 0, 257)).setObjects(("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104EventType"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104EventSeverity"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104IsEventState_based"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104EventStateKey"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104AssociatedGroup"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104EventSystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104EventSubsystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104IsInstanceDataPresent"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a17PowerSupplyIndex"))
if mibBuilder.loadTexts: notification2ForPowerSupply.setStatus('current')
notification3ForPowerSupply = NotificationType((1, 3, 6, 1, 4, 1, 412, 2, 4, 16, 0, 258)).setObjects(("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104EventType"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104EventSeverity"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104IsEventState_based"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104EventStateKey"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104AssociatedGroup"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104EventSystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104EventSubsystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104IsInstanceDataPresent"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a17PowerSupplyIndex"))
if mibBuilder.loadTexts: notification3ForPowerSupply.setStatus('current')
notification4ForPowerSupply = NotificationType((1, 3, 6, 1, 4, 1, 412, 2, 4, 16, 0, 259)).setObjects(("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104EventType"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104EventSeverity"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104IsEventState_based"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104EventStateKey"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104AssociatedGroup"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104EventSystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104EventSubsystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104IsInstanceDataPresent"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a17PowerSupplyIndex"))
if mibBuilder.loadTexts: notification4ForPowerSupply.setStatus('current')
notification5ForPowerSupply = NotificationType((1, 3, 6, 1, 4, 1, 412, 2, 4, 16, 0, 260)).setObjects(("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104EventType"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104EventSeverity"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104IsEventState_based"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104EventStateKey"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104AssociatedGroup"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104EventSystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104EventSubsystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104IsInstanceDataPresent"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a17PowerSupplyIndex"))
if mibBuilder.loadTexts: notification5ForPowerSupply.setStatus('current')
notification6ForPowerSupply = NotificationType((1, 3, 6, 1, 4, 1, 412, 2, 4, 16, 0, 261)).setObjects(("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104EventType"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104EventSeverity"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104IsEventState_based"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104EventStateKey"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104AssociatedGroup"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104EventSystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104EventSubsystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104IsInstanceDataPresent"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a17PowerSupplyIndex"))
if mibBuilder.loadTexts: notification6ForPowerSupply.setStatus('current')
notification7ForPowerSupply = NotificationType((1, 3, 6, 1, 4, 1, 412, 2, 4, 16, 0, 262)).setObjects(("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104EventType"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104EventSeverity"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104IsEventState_based"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104EventStateKey"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104AssociatedGroup"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104EventSystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104EventSubsystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104IsInstanceDataPresent"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a17PowerSupplyIndex"))
if mibBuilder.loadTexts: notification7ForPowerSupply.setStatus('current')
notification8ForPowerSupply = NotificationType((1, 3, 6, 1, 4, 1, 412, 2, 4, 16, 0, 263)).setObjects(("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104EventType"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104EventSeverity"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104IsEventState_based"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104EventStateKey"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104AssociatedGroup"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104EventSystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104EventSubsystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a104IsInstanceDataPresent"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a17PowerSupplyIndex"))
if mibBuilder.loadTexts: notification8ForPowerSupply.setStatus('current')
notification1ForPhysicalMemory = NotificationType((1, 3, 6, 1, 4, 1, 412, 2, 4, 33, 0, 256)).setObjects(("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a108EventType"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a108EventSeverity"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a108IsEventState_based"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a108EventStateKey"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a108AssociatedGroup"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a108EventSystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a108EventSubsystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a108IsInstanceDataPresent"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a34MemoryArrayTableIndex"))
if mibBuilder.loadTexts: notification1ForPhysicalMemory.setStatus('current')
notification2ForPhysicalMemory = NotificationType((1, 3, 6, 1, 4, 1, 412, 2, 4, 33, 0, 257)).setObjects(("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a108EventType"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a108EventSeverity"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a108IsEventState_based"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a108EventStateKey"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a108AssociatedGroup"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a108EventSystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a108EventSubsystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a108IsInstanceDataPresent"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a34MemoryArrayTableIndex"))
if mibBuilder.loadTexts: notification2ForPhysicalMemory.setStatus('current')
notification3ForPhysicalMemory = NotificationType((1, 3, 6, 1, 4, 1, 412, 2, 4, 33, 0, 258)).setObjects(("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a108EventType"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a108EventSeverity"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a108IsEventState_based"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a108EventStateKey"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a108AssociatedGroup"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a108EventSystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a108EventSubsystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a108IsInstanceDataPresent"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a34MemoryArrayTableIndex"))
if mibBuilder.loadTexts: notification3ForPhysicalMemory.setStatus('current')
notification4ForPhysicalMemory = NotificationType((1, 3, 6, 1, 4, 1, 412, 2, 4, 33, 0, 259)).setObjects(("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a108EventType"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a108EventSeverity"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a108IsEventState_based"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a108EventStateKey"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a108AssociatedGroup"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a108EventSystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a108EventSubsystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a108IsInstanceDataPresent"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a34MemoryArrayTableIndex"))
if mibBuilder.loadTexts: notification4ForPhysicalMemory.setStatus('current')
notification5ForPhysicalMemory = NotificationType((1, 3, 6, 1, 4, 1, 412, 2, 4, 33, 0, 260)).setObjects(("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a108EventType"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a108EventSeverity"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a108IsEventState_based"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a108EventStateKey"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a108AssociatedGroup"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a108EventSystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a108EventSubsystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a108IsInstanceDataPresent"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a34MemoryArrayTableIndex"))
if mibBuilder.loadTexts: notification5ForPhysicalMemory.setStatus('current')
notification1ForVoltageProbe = NotificationType((1, 3, 6, 1, 4, 1, 412, 2, 4, 53, 0, 256)).setObjects(("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a113EventType"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a113EventSeverity"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a113IsEventState_based"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a113EventStateKey"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a113AssociatedGroup"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a113EventSystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a113EventSubsystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a113IsInstanceDataPresent"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a54VoltageProbeIndex"))
if mibBuilder.loadTexts: notification1ForVoltageProbe.setStatus('current')
notification2ForVoltageProbe = NotificationType((1, 3, 6, 1, 4, 1, 412, 2, 4, 53, 0, 257)).setObjects(("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a113EventType"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a113EventSeverity"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a113IsEventState_based"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a113EventStateKey"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a113AssociatedGroup"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a113EventSystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a113EventSubsystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a113IsInstanceDataPresent"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a54VoltageProbeIndex"))
if mibBuilder.loadTexts: notification2ForVoltageProbe.setStatus('current')
notification3ForVoltageProbe = NotificationType((1, 3, 6, 1, 4, 1, 412, 2, 4, 53, 0, 258)).setObjects(("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a113EventType"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a113EventSeverity"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a113IsEventState_based"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a113EventStateKey"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a113AssociatedGroup"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a113EventSystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a113EventSubsystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a113IsInstanceDataPresent"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a54VoltageProbeIndex"))
if mibBuilder.loadTexts: notification3ForVoltageProbe.setStatus('current')
notification4ForVoltageProbe = NotificationType((1, 3, 6, 1, 4, 1, 412, 2, 4, 53, 0, 259)).setObjects(("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a113EventType"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a113EventSeverity"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a113IsEventState_based"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a113EventStateKey"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a113AssociatedGroup"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a113EventSystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a113EventSubsystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a113IsInstanceDataPresent"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a54VoltageProbeIndex"))
if mibBuilder.loadTexts: notification4ForVoltageProbe.setStatus('current')
notification5ForVoltageProbe = NotificationType((1, 3, 6, 1, 4, 1, 412, 2, 4, 53, 0, 260)).setObjects(("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a113EventType"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a113EventSeverity"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a113IsEventState_based"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a113EventStateKey"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a113AssociatedGroup"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a113EventSystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a113EventSubsystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a113IsInstanceDataPresent"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a54VoltageProbeIndex"))
if mibBuilder.loadTexts: notification5ForVoltageProbe.setStatus('current')
notification6ForVoltageProbe = NotificationType((1, 3, 6, 1, 4, 1, 412, 2, 4, 53, 0, 261)).setObjects(("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a113EventType"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a113EventSeverity"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a113IsEventState_based"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a113EventStateKey"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a113AssociatedGroup"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a113EventSystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a113EventSubsystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a113IsInstanceDataPresent"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a54VoltageProbeIndex"))
if mibBuilder.loadTexts: notification6ForVoltageProbe.setStatus('current')
notification7ForVoltageProbe = NotificationType((1, 3, 6, 1, 4, 1, 412, 2, 4, 53, 0, 262)).setObjects(("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a113EventType"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a113EventSeverity"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a113IsEventState_based"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a113EventStateKey"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a113AssociatedGroup"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a113EventSystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a113EventSubsystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a113IsInstanceDataPresent"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a54VoltageProbeIndex"))
if mibBuilder.loadTexts: notification7ForVoltageProbe.setStatus('current')
notification1ForTemperatureProbe = NotificationType((1, 3, 6, 1, 4, 1, 412, 2, 4, 54, 0, 256)).setObjects(("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a114EventType"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a114EventSeverity"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a114IsEventState_based"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a114EventStateKey"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a114AssociatedGroup"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a114EventSystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a114EventSubsystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a114IsInstanceDataPresent"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a55TemperatureProbeTableIndex"))
if mibBuilder.loadTexts: notification1ForTemperatureProbe.setStatus('current')
notification2ForTemperatureProbe = NotificationType((1, 3, 6, 1, 4, 1, 412, 2, 4, 54, 0, 257)).setObjects(("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a114EventType"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a114EventSeverity"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a114IsEventState_based"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a114EventStateKey"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a114AssociatedGroup"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a114EventSystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a114EventSubsystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a114IsInstanceDataPresent"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a55TemperatureProbeTableIndex"))
if mibBuilder.loadTexts: notification2ForTemperatureProbe.setStatus('current')
notification3ForTemperatureProbe = NotificationType((1, 3, 6, 1, 4, 1, 412, 2, 4, 54, 0, 258)).setObjects(("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a114EventType"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a114EventSeverity"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a114IsEventState_based"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a114EventStateKey"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a114AssociatedGroup"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a114EventSystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a114EventSubsystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a114IsInstanceDataPresent"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a55TemperatureProbeTableIndex"))
if mibBuilder.loadTexts: notification3ForTemperatureProbe.setStatus('current')
notification4ForTemperatureProbe = NotificationType((1, 3, 6, 1, 4, 1, 412, 2, 4, 54, 0, 259)).setObjects(("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a114EventType"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a114EventSeverity"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a114IsEventState_based"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a114EventStateKey"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a114AssociatedGroup"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a114EventSystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a114EventSubsystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a114IsInstanceDataPresent"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a55TemperatureProbeTableIndex"))
if mibBuilder.loadTexts: notification4ForTemperatureProbe.setStatus('current')
notification5ForTemperatureProbe = NotificationType((1, 3, 6, 1, 4, 1, 412, 2, 4, 54, 0, 260)).setObjects(("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a114EventType"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a114EventSeverity"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a114IsEventState_based"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a114EventStateKey"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a114AssociatedGroup"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a114EventSystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a114EventSubsystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a114IsInstanceDataPresent"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a55TemperatureProbeTableIndex"))
if mibBuilder.loadTexts: notification5ForTemperatureProbe.setStatus('current')
notification6ForTemperatureProbe = NotificationType((1, 3, 6, 1, 4, 1, 412, 2, 4, 54, 0, 261)).setObjects(("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a114EventType"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a114EventSeverity"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a114IsEventState_based"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a114EventStateKey"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a114AssociatedGroup"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a114EventSystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a114EventSubsystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a114IsInstanceDataPresent"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a55TemperatureProbeTableIndex"))
if mibBuilder.loadTexts: notification6ForTemperatureProbe.setStatus('current')
notification7ForTemperatureProbe = NotificationType((1, 3, 6, 1, 4, 1, 412, 2, 4, 54, 0, 262)).setObjects(("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a114EventType"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a114EventSeverity"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a114IsEventState_based"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a114EventStateKey"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a114AssociatedGroup"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a114EventSystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a114EventSubsystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a114IsInstanceDataPresent"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a55TemperatureProbeTableIndex"))
if mibBuilder.loadTexts: notification7ForTemperatureProbe.setStatus('current')
notification1ForPhysicalContainer = NotificationType((1, 3, 6, 1, 4, 1, 412, 2, 4, 63, 0, 6)).setObjects(("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a116EventType"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a116EventSeverity"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a116IsEventState_based"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a116EventStateKey"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a116AssociatedGroup"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a116EventSystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a116EventSubsystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a116IsInstanceDataPresent"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a64ContainerIndex"))
if mibBuilder.loadTexts: notification1ForPhysicalContainer.setStatus('current')
notification2ForPhysicalContainer = NotificationType((1, 3, 6, 1, 4, 1, 412, 2, 4, 63, 0, 256)).setObjects(("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a116EventType"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a116EventSeverity"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a116IsEventState_based"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a116EventStateKey"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a116AssociatedGroup"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a116EventSystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a116EventSubsystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a116IsInstanceDataPresent"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a64ContainerIndex"))
if mibBuilder.loadTexts: notification2ForPhysicalContainer.setStatus('current')
notification1ForPowerUnit = NotificationType((1, 3, 6, 1, 4, 1, 412, 2, 4, 66, 0, 1)).setObjects(("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a201EventType"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a201EventSeverity"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a201IsEventState_based"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a201EventStateKey"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a201AssociatedGroup"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a201EventSystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a201EventSubsystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a201IsInstanceDataPresent"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a67PowerUnitIndex"))
if mibBuilder.loadTexts: notification1ForPowerUnit.setStatus('current')
notification2ForPowerUnit = NotificationType((1, 3, 6, 1, 4, 1, 412, 2, 4, 66, 0, 2)).setObjects(("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a201EventType"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a201EventSeverity"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a201IsEventState_based"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a201EventStateKey"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a201AssociatedGroup"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a201EventSystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a201EventSubsystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a201IsInstanceDataPresent"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a67PowerUnitIndex"))
if mibBuilder.loadTexts: notification2ForPowerUnit.setStatus('current')
notification3ForPowerUnit = NotificationType((1, 3, 6, 1, 4, 1, 412, 2, 4, 66, 0, 3)).setObjects(("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a201EventType"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a201EventSeverity"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a201IsEventState_based"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a201EventStateKey"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a201AssociatedGroup"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a201EventSystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a201EventSubsystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a201IsInstanceDataPresent"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a67PowerUnitIndex"))
if mibBuilder.loadTexts: notification3ForPowerUnit.setStatus('current')
notification4ForPowerUnit = NotificationType((1, 3, 6, 1, 4, 1, 412, 2, 4, 66, 0, 4)).setObjects(("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a201EventType"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a201EventSeverity"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a201IsEventState_based"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a201EventStateKey"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a201AssociatedGroup"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a201EventSystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a201EventSubsystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a201IsInstanceDataPresent"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a67PowerUnitIndex"))
if mibBuilder.loadTexts: notification4ForPowerUnit.setStatus('current')
notification5ForPowerUnit = NotificationType((1, 3, 6, 1, 4, 1, 412, 2, 4, 66, 0, 5)).setObjects(("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a201EventType"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a201EventSeverity"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a201IsEventState_based"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a201EventStateKey"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a201AssociatedGroup"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a201EventSystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a201EventSubsystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a201IsInstanceDataPresent"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a67PowerUnitIndex"))
if mibBuilder.loadTexts: notification5ForPowerUnit.setStatus('current')
notification1ForCoolingSensors = NotificationType((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 3, 0, 256)).setObjects(("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a202EventType"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a202EventSeverity"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a202IsEventState_based"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a202EventStateKey"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a202AssociatedGroup"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a202EventSystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a202EventSubsystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a202IsInstanceDataPresent"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a1005Selfid"))
if mibBuilder.loadTexts: notification1ForCoolingSensors.setStatus('current')
notification2ForCoolingSensors = NotificationType((1, 3, 6, 1, 4, 1, 343, 2, 10, 7, 3, 0, 257)).setObjects(("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a202EventType"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a202EventSeverity"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a202IsEventState_based"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a202EventStateKey"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a202AssociatedGroup"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a202EventSystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a202EventSubsystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a202IsInstanceDataPresent"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a1005Selfid"))
if mibBuilder.loadTexts: notification2ForCoolingSensors.setStatus('current')
notification1ForSystemSlots = NotificationType((1, 3, 6, 1, 4, 1, 412, 2, 4, 18, 0, 1)).setObjects(("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a205EventType"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a205EventSeverity"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a205IsEventState_based"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a205EventStateKey"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a205AssociatedGroup"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a205EventSystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a205EventSubsystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a205IsInstanceDataPresent"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a205EventMessage"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a19SlotIndex"))
if mibBuilder.loadTexts: notification1ForSystemSlots.setStatus('current')
notification2ForSystemSlots = NotificationType((1, 3, 6, 1, 4, 1, 412, 2, 4, 18, 0, 2)).setObjects(("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a205EventType"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a205EventSeverity"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a205IsEventState_based"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a205EventStateKey"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a205AssociatedGroup"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a205EventSystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a205EventSubsystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a205IsInstanceDataPresent"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a205EventMessage"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a19SlotIndex"))
if mibBuilder.loadTexts: notification2ForSystemSlots.setStatus('current')
notification3ForSystemSlots = NotificationType((1, 3, 6, 1, 4, 1, 412, 2, 4, 18, 0, 3)).setObjects(("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a205EventType"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a205EventSeverity"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a205IsEventState_based"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a205EventStateKey"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a205AssociatedGroup"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a205EventSystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a205EventSubsystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a205IsInstanceDataPresent"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a205EventMessage"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a19SlotIndex"))
if mibBuilder.loadTexts: notification3ForSystemSlots.setStatus('current')
notification4ForSystemSlots = NotificationType((1, 3, 6, 1, 4, 1, 412, 2, 4, 18, 0, 4)).setObjects(("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a205EventType"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a205EventSeverity"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a205IsEventState_based"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a205EventStateKey"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a205AssociatedGroup"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a205EventSystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a205EventSubsystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a205IsInstanceDataPresent"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a205EventMessage"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a19SlotIndex"))
if mibBuilder.loadTexts: notification4ForSystemSlots.setStatus('current')
notification5ForSystemSlots = NotificationType((1, 3, 6, 1, 4, 1, 412, 2, 4, 18, 0, 5)).setObjects(("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a205EventType"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a205EventSeverity"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a205IsEventState_based"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a205EventStateKey"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a205AssociatedGroup"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a205EventSystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a205EventSubsystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a205IsInstanceDataPresent"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a205EventMessage"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a19SlotIndex"))
if mibBuilder.loadTexts: notification5ForSystemSlots.setStatus('current')
notification6ForSystemSlots = NotificationType((1, 3, 6, 1, 4, 1, 412, 2, 4, 18, 0, 6)).setObjects(("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a205EventType"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a205EventSeverity"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a205IsEventState_based"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a205EventStateKey"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a205AssociatedGroup"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a205EventSystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a205EventSubsystem"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a205IsInstanceDataPresent"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a205EventMessage"), ("INTELCORPORATIONBASEBOARDMAPPER-MIB", "a19SlotIndex"))
if mibBuilder.loadTexts: notification6ForSystemSlots.setStatus('current')
mibBuilder.exportSymbols("INTELCORPORATIONBASEBOARDMAPPER-MIB", eOperatingSystem=eOperatingSystem, a10SystemCacheIndex=a10SystemCacheIndex, eEventGenerationForCoolingSensors=eEventGenerationForCoolingSensors, a30SupportPhoneNumber=a30SupportPhoneNumber, a64ContainerOrChassisType=a64ContainerOrChassisType, a30Model=a30Model, a202EventSubsystem=a202EventSubsystem, a83VideoType=a83VideoType, eParallelPorts=eParallelPorts, a55TemperatureProbeTemperatureReading=a55TemperatureProbeTemperatureReading, a36ErrorGranularity=a36ErrorGranularity, a108EventSeverity=a108EventSeverity, a122OperationalGroupIndex=a122OperationalGroupIndex, a81CoolingDeviceType=a81CoolingDeviceType, tParallelPorts=tParallelPorts, a54VoltageProbeResolution=a54VoltageProbeResolution, a30PartNumber=a30PartNumber, a116AssociatedGroup=a116AssociatedGroup, a19SlotFaultState=a19SlotFaultState, ePowerUnitGlobalTable=ePowerUnitGlobalTable, a36DeviceErrorAddress=a36DeviceErrorAddress, a1004UseSystemWatchdogFeature=a1004UseSystemWatchdogFeature, a6ProcessorUpgrade=a6ProcessorUpgrade, a113IsEventState_based=a113IsEventState_based, a113IsInstanceDataPresent=a113IsInstanceDataPresent, tSystemBiosCharacteristics=tSystemBiosCharacteristics, a74OperationalGroupIndex=a74OperationalGroupIndex, a1008PciHotplugDeviceIndex=a1008PciHotplugDeviceIndex, a4BiosReleaseDate=a4BiosReleaseDate, a74DmaSupport=a74DmaSupport, eVideoBios=eVideoBios, a104EventStateKey=a104EventStateKey, a91MousePortName=a91MousePortName, a81CoolingDeviceTableIndex=a81CoolingDeviceTableIndex, a55TemperatureProbeMaximum=a55TemperatureProbeMaximum, a92KeyboardConnectorType=a92KeyboardConnectorType, a28CoolingUnitIndex=a28CoolingUnitIndex, a75ConnectorType=a75ConnectorType, tEventGenerationForTemperatureProbe=tEventGenerationForTemperatureProbe, a30FruInternetUniformResourceLocator=a30FruInternetUniformResourceLocator, notification3ForTemperatureProbe=notification3ForTemperatureProbe, notification2ForCoolingSensors=notification2ForCoolingSensors, a201EventSystem=a201EventSystem, ePhysicalExpansionSitesTable=ePhysicalExpansionSitesTable, a10SystemCacheSpeed=a10SystemCacheSpeed, notification1ForPowerSupply=notification1ForPowerSupply, notification4ForPowerSupply=notification4ForPowerSupply, a30Manufacturer=a30Manufacturer, a37MemoryDeviceMappedAddressesTableIndex=a37MemoryDeviceMappedAddressesTableIndex, a75LogicalName=a75LogicalName, a10SystemCacheLevel=a10SystemCacheLevel, a7OperationalGroupIndex=a7OperationalGroupIndex, a31DeviceGroupIndex=a31DeviceGroupIndex, a1005CoolingSensorDescription=a1005CoolingSensorDescription, a55TemperatureLowerThreshold_Non_critica=a55TemperatureLowerThreshold_Non_critica, notification6ForPowerSupply=notification6ForPowerSupply, a35MemoryArrayIndex=a35MemoryArrayIndex, a91MouseButtons=a91MouseButtons, tEventGenerationForVoltageProbe=tEventGenerationForVoltageProbe, eEventGenerationForPhysicalContainer=eEventGenerationForPhysicalContainer, a1008DeviceType=a1008DeviceType, a17Range2InputVoltageHigh=a17Range2InputVoltageHigh, a92OperationalGroupIndex=a92OperationalGroupIndex, notification2ForProcessor=notification2ForProcessor, a116IsEventState_based=a116IsEventState_based, a10OperationalGroupIndex=a10OperationalGroupIndex, a36ErrorData=a36ErrorData, a36FruGroupIndex=a36FruGroupIndex, eCoolingDevice=eCoolingDevice, a83CurrentVerticalResolution=a83CurrentVerticalResolution, notification5ForPhysicalMemory=notification5ForPhysicalMemory, a104IsEventState_based=a104IsEventState_based, eVoltageProbe=eVoltageProbe, a128BusType=a128BusType, 
DmiOctetstring=DmiOctetstring, tMouse=tMouse, tSystemControl=tSystemControl, a1005SensorToleranceMinus=a1005SensorToleranceMinus, a17Range2VoltageProbeIndex=a17Range2VoltageProbeIndex, tEventGenerationForCoolingDevice=tEventGenerationForCoolingDevice, eSystemHardwareSecurity=eSystemHardwareSecurity, a54VoltageStatus=a54VoltageStatus, a6Level3CacheIndex=a6Level3CacheIndex, server_management=server_management, a83CurrentRefreshRate=a83CurrentRefreshRate, a34ErrorOperation=a34ErrorOperation, a83CurrentNumberOfRows=a83CurrentNumberOfRows, a55TemperatureProbeDescription=a55TemperatureProbeDescription, a1005FruGroupIndex=a1005FruGroupIndex, eMouse=eMouse, a36DeviceLocator=a36DeviceLocator, a201EventSubsystem=a201EventSubsystem, a108EventStateKey=a108EventStateKey, a1004ShutdownOsAndHardwareReset=a1004ShutdownOsAndHardwareReset, a34FruGroupIndex=a34FruGroupIndex, a114EventSystem=a114EventSystem, dmtf=dmtf, a54VoltageLevelLowerThreshold_Non_recove=a54VoltageLevelLowerThreshold_Non_recove, a83FruGroupIndex=a83FruGroupIndex, a1012TestEmail=a1012TestEmail, a36ArrayErrorAddress=a36ArrayErrorAddress, a122PointingDeviceIrq=a122PointingDeviceIrq, a74ConnectorPinout=a74ConnectorPinout, a122PointingDevicePortName=a122PointingDevicePortName, a113EventSubsystem=a113EventSubsystem, eSystemCache=eSystemCache, a1010PagingSupported=a1010PagingSupported, a35MemoryArrayMappedAddressesTableIndex=a35MemoryArrayMappedAddressesTableIndex, a17Range1ElectricalCurrentProbeIndex=a17Range1ElectricalCurrentProbeIndex, a37PartitionRowPosition=a37PartitionRowPosition, a100EventSystem=a100EventSystem, notification5ForPowerSupply=notification5ForPowerSupply, a129ExpansionSiteType=a129ExpansionSiteType, a84VideoBiosManufacturer=a84VideoBiosManufacturer, a114EventMessage=a114EventMessage, a2SystemLocation=a2SystemLocation, a104EventType=a104EventType, a31AdministrativeState=a31AdministrativeState, a74ParallelPortCapabilities=a74ParallelPortCapabilities, a36DataWidth=a36DataWidth, a1006RecordData=a1006RecordData, a5BiosCharacteristicDescription=a5BiosCharacteristicDescription, a64ThermalState=a64ThermalState, a113AssociatedGroup=a113AssociatedGroup, a1004Challenge=a1004Challenge, a4PrimaryBios=a4PrimaryBios, a10SystemCacheSize=a10SystemCacheSize, a205EventSubsystem=a205EventSubsystem, a4BiosManufacturer=a4BiosManufacturer, a83MinimumRefreshRate=a83MinimumRefreshRate, a36TotalWidth=a36TotalWidth, notification4ForProcessor=notification4ForProcessor, tMemoryDevice=tMemoryDevice, a129ExpansionSiteName=a129ExpansionSiteName, a35PartitionWidth=a35PartitionWidth, eSystemControl=eSystemControl, a122PointingDeviceInterface=a122PointingDeviceInterface, a205EventStateKey=a205EventStateKey, a36MemoryType=a36MemoryType, a35PartitionId=a35PartitionId, a34MemoryArrayUse=a34MemoryArrayUse, a1012EmailSubject=a1012EmailSubject, a1005MinimumReading=a1005MinimumReading, a5BiosNumber=a5BiosNumber, dmtfGroups=dmtfGroups, a37InterleavePosition=a37InterleavePosition, a34MemoryErrorCorrection=a34MemoryErrorCorrection, tGeneralInformation=tGeneralInformation, a201AssociatedGroup=a201AssociatedGroup, a100EventSubsystem=a100EventSubsystem, notification1ForPhysicalContainer=notification1ForPhysicalContainer, a202EventType=a202EventType, notification4ForSystemSlots=notification4ForSystemSlots, a205EventSystem=a205EventSystem, a2SystemPrimaryUserPhone=a2SystemPrimaryUserPhone, a66OperatingSystemBootPartitionIndex=a66OperatingSystemBootPartitionIndex, a10SystemCacheErrorCorrection=a10SystemCacheErrorCorrection, 
notification2ForPhysicalMemory=notification2ForPhysicalMemory, a6FruGroupIndex=a6FruGroupIndex, a1010TestPage=a1010TestPage, a55MonitoredTemperatureNominalReading=a55MonitoredTemperatureNominalReading, a31WarningErrorCount=a31WarningErrorCount, tSystemBios=tSystemBios, a36Size=a36Size, a19OperationalGroupIndex=a19OperationalGroupIndex, a17Range2InputVoltageLow=a17Range2InputVoltageLow, a84VideoBiosReleaseDate=a84VideoBiosReleaseDate, a91MouseIrq=a91MouseIrq, a116EventSystem=a116EventSystem, a17Range1InputFrequencyHigh=a17Range1InputFrequencyHigh, a31OperationalStateInstanceIndex=a31OperationalStateInstanceIndex, a108EventSubsystem=a108EventSubsystem, a1005Non_recoverableThreshold=a1005Non_recoverableThreshold, a1004TimeUntilSystemReset=a1004TimeUntilSystemReset, a1011RepeatInterval=a1011RepeatInterval, a30Description=a30Description, a100IsEventState_based=a100IsEventState_based, a10SystemCacheType=a10SystemCacheType, eEventGenerationForSystemSlots=eEventGenerationForSystemSlots, tEventGenerationForProcessor=tEventGenerationForProcessor, a30RevisionLevel=a30RevisionLevel, ePhysicalMemoryArray=ePhysicalMemoryArray, tTemperatureProbe=tTemperatureProbe, a202EventSeverity=a202EventSeverity, a34MemoryArrayTableIndex=a34MemoryArrayTableIndex, tEventGenerationForPhysicalContainer=tEventGenerationForPhysicalContainer, a140EventType=a140EventType, notification5ForVoltageProbe=notification5ForVoltageProbe, a6Level2CacheIndex=a6Level2CacheIndex, a54VoltageLevelUpperThreshold_Critical=a54VoltageLevelUpperThreshold_Critical, a140IsEventState_based=a140IsEventState_based, a52PowerControlRequest=a52PowerControlRequest, tPhysicalContainerGlobalTableTraps=tPhysicalContainerGlobalTableTraps, notification1ForCoolingSensors=notification1ForCoolingSensors, a129ExpansionSiteIndex=a129ExpansionSiteIndex, tVideo=tVideo, DmiInteger64=DmiInteger64, DmiDate=DmiDate, tVoltageProbeTraps=tVoltageProbeTraps, tSystemCache=tSystemCache, a116IsInstanceDataPresent=a116IsInstanceDataPresent, tCoolingDevice=tCoolingDevice, DmiComponentIndex=DmiComponentIndex, a17Range1InputVoltageLow=a17Range1InputVoltageLow, a17Range2InputFrequencyHigh=a17Range2InputFrequencyHigh, a2SystemDateTime=a2SystemDateTime, tVoltageProbe=tVoltageProbe, a52TimeToNextScheduledPower_on=a52TimeToNextScheduledPower_on, a31UsageState=a31UsageState, a54MonitoredVoltageNormalMaximum=a54MonitoredVoltageNormalMaximum, a5BiosCharacteristicIndex=a5BiosCharacteristicIndex, a55OperationalGroupIndex=a55OperationalGroupIndex, a84VideoBiosIndex=a84VideoBiosIndex, a108EventMessage=a108EventMessage, a9000ScanServicePartitionFlag=a9000ScanServicePartitionFlag, a83CurrentHorizontalResolution=a83CurrentHorizontalResolution, a81CoolingUnitIndex=a81CoolingUnitIndex, a54VoltageProbeMaximum=a54VoltageProbeMaximum, a37DataDepth=a37DataDepth, a116EventSubsystem=a116EventSubsystem, a116EventSeverity=a116EventSeverity, a129ExpansionSiteDescription=a129ExpansionSiteDescription, a9000DpcServicePartitionPresenceFlag=a9000DpcServicePartitionPresenceFlag, notification4ForPhysicalMemory=notification4ForPhysicalMemory, a104EventMessage=a104EventMessage, notification3ForPowerSupply=notification3ForPowerSupply, a1005LowestNormalReading=a1005LowestNormalReading, a55TemperatureProbeTolerance=a55TemperatureProbeTolerance, eCoolingUnitGlobalTable=eCoolingUnitGlobalTable, a83CurrentNumberOfColumns=a83CurrentNumberOfColumns, a54VoltageProbeTolerance=a54VoltageProbeTolerance, a36DeviceErrorType=a36DeviceErrorType, eMotherboard=eMotherboard, 
a55MonitoredTemperatureNormalMaximum=a55MonitoredTemperatureNormalMaximum, a4BiosIndex=a4BiosIndex, eEventGenerationForPowerSupply=eEventGenerationForPowerSupply, a1004VerifyPrivilege=a1004VerifyPrivilege, a201IsEventState_based=a201IsEventState_based, eFieldReplaceableUnit=eFieldReplaceableUnit, a1012EmailMessage=a1012EmailMessage, a55TemperatureStatus=a55TemperatureStatus, a1011PepString2=a1011PepString2, a1011TestString=a1011TestString, a83VideoPhysicalLocation=a83VideoPhysicalLocation, a114EventSeverity=a114EventSeverity, a1004SavePersistentData=a1004SavePersistentData, notification1ForTemperatureProbe=notification1ForTemperatureProbe, a17ActiveInputVoltageRange=a17ActiveInputVoltageRange, tSystemHardwareSecurity=tSystemHardwareSecurity, a91MouseDriverName=a91MouseDriverName, a91MouseDriverVersion=a91MouseDriverVersion, a5BiosCharacteristic=a5BiosCharacteristic, a19SlotCategory=a19SlotCategory, tPhysicalContainerGlobalTable=tPhysicalContainerGlobalTable)
mibBuilder.exportSymbols("INTELCORPORATIONBASEBOARDMAPPER-MIB", a75SerialPortCapabilities=a75SerialPortCapabilities, eEventGenerationForPhysicalMemory=eEventGenerationForPhysicalMemory, a201EventStateKey=a201EventStateKey, a31FatalErrorCount=a31FatalErrorCount, tEventGenerationForPowerSupply=tEventGenerationForPowerSupply, a202IsEventState_based=a202IsEventState_based, tLocalPagingConfig=tLocalPagingConfig, a54VoltageLevelUpperThreshold_Non_recove=a54VoltageLevelUpperThreshold_Non_recove, a201EventType=a201EventType, tEmailConfig=tEmailConfig, a64ContainerLocation=a64ContainerLocation, tCoolingSensorsTraps=tCoolingSensorsTraps, notification4ForTemperatureProbe=notification4ForTemperatureProbe, a1004SystemPowerStatus=a1004SystemPowerStatus, a67PowerUnitRedundancyStatus=a67PowerUnitRedundancyStatus, a202EventMessage=a202EventMessage, a54VoltageLevelLowerThreshold_Critical=a54VoltageLevelLowerThreshold_Critical, a1010GlobalPaging=a1010GlobalPaging, notification3ForSystemSlots=notification3ForSystemSlots, a122PointingDeviceType=a122PointingDeviceType, a91SecuritySettings=a91SecuritySettings, a66OperatingSystemIndex=a66OperatingSystemIndex, a6ProcessorType=a6ProcessorType, tProcessorTraps=tProcessorTraps, a66OperatingSystemBootDeviceIndex=a66OperatingSystemBootDeviceIndex, a128BusId=a128BusId, a64ContainerSecurityStatus=a64ContainerSecurityStatus, a116EventType=a116EventType, a30WarrantyDuration=a30WarrantyDuration, a1004EventLoggingCapability=a1004EventLoggingCapability, a17PowerUnitIndex=a17PowerUnitIndex, a19SlotThermalRating=a19SlotThermalRating, a113EventStateKey=a113EventStateKey, a55TemperatureProbeMinimum=a55TemperatureProbeMinimum, a55TemperatureUpperThreshold_Critical=a55TemperatureUpperThreshold_Critical, eKeyboard=eKeyboard, notification3ForPhysicalMemory=notification3ForPhysicalMemory, a37Partition=a37Partition, a1004WatchdogTimerIncrement=a1004WatchdogTimerIncrement, a36OperationalGroupIndex=a36OperationalGroupIndex, a19SlotDescription=a19SlotDescription, a36DeviceSet=a36DeviceSet, a50Power_onPasswordStatus=a50Power_onPasswordStatus, tPhysicalMemoryArrayTraps=tPhysicalMemoryArrayTraps, a10SystemCacheWritePolicy=a10SystemCacheWritePolicy, a17InputVoltageRangeSwitching=a17InputVoltageRangeSwitching, a55FruGroupIndex=a55FruGroupIndex, a140EventSeverity=a140EventSeverity, a1005Non_criticalThreshold=a1005Non_criticalThreshold, a31CurrentErrorStatus=a31CurrentErrorStatus, a31MajorErrorCount=a31MajorErrorCount, a54FruGroupIndex=a54FruGroupIndex, eProcessor=eProcessor, a19VppMixedVoltageSupport=a19VppMixedVoltageSupport, a92FruGroupIndex=a92FruGroupIndex, eEventGenerationForProcessor=eEventGenerationForProcessor, a108EventSystem=a108EventSystem, a64OperationalGroupIndex=a64OperationalGroupIndex, a36BankLocator=a36BankLocator, a37MappedRangeStartingAddress=a37MappedRangeStartingAddress, a84VideoBiosVersion=a84VideoBiosVersion, notification5ForTemperatureProbe=notification5ForTemperatureProbe, a100EventSeverity=a100EventSeverity, tVideoBios=tVideoBios, a34LastErrorUpdate=a34LastErrorUpdate, a19CurrentUsage=a19CurrentUsage, a1005MaximumReading=a1005MaximumReading, a17PowerSupplyType=a17PowerSupplyType, a30FruSerialNumber=a30FruSerialNumber, a74ParallelPortIndex=a74ParallelPortIndex, eDpcDiscovery=eDpcDiscovery, a205EventSeverity=a205EventSeverity, a7NumberOfExpansionSlots=a7NumberOfExpansionSlots, a140AssociatedGroup=a140AssociatedGroup, a31DevicePredictedFailureStatus=a31DevicePredictedFailureStatus, a1005SensorAccuracy=a1005SensorAccuracy, 
notification1ForProcessor=notification1ForProcessor, eLocalPagingConfig=eLocalPagingConfig, notification1ForPowerUnit=notification1ForPowerUnit, a201EventSeverity=a201EventSeverity, a1012EmailToAddress=a1012EmailToAddress, a6ProcessorVersionInformation=a6ProcessorVersionInformation, a1012SmtpServer=a1012SmtpServer, eMemoryDevice=eMemoryDevice, a1005Selfid=a1005Selfid, a17Range1VoltageProbeIndex=a17Range1VoltageProbeIndex, a75IrqUsed=a75IrqUsed, a108AssociatedGroup=a108AssociatedGroup, a74ParallelBaseIoAddress=a74ParallelBaseIoAddress, a34ErrorAddress=a34ErrorAddress, tPowerSupplyTraps=tPowerSupplyTraps, a54OperationalGroupIndex=a54OperationalGroupIndex, a37MemoryDeviceSetId=a37MemoryDeviceSetId, notification2ForVoltageProbe=notification2ForVoltageProbe, tSerialPorts=tSerialPorts, a34NumberOfMemoryDeviceSocketsUsed=a34NumberOfMemoryDeviceSocketsUsed, a55TemperatureProbeResolution=a55TemperatureProbeResolution, a114AssociatedGroup=a114AssociatedGroup, a4BiosEndingAddress=a4BiosEndingAddress, a34ErrorData=a34ErrorData, a55TemperatureUpperThreshold_Non_critica=a55TemperatureUpperThreshold_Non_critica, notification4ForPowerUnit=notification4ForPowerUnit, a1005NominalReading=a1005NominalReading, a2SystemBootupTime=a2SystemBootupTime, a104EventSeverity=a104EventSeverity, a2SystemName=a2SystemName, a81TemperatureProbeIndex=a81TemperatureProbeIndex, a114EventType=a114EventType, a6MaximumSpeed=a6MaximumSpeed, a34ErrorResolution=a34ErrorResolution, a205AssociatedGroup=a205AssociatedGroup, tFieldReplaceableUnit=tFieldReplaceableUnit, a17TotalOutputPower=a17TotalOutputPower, a122FruGroupIndex=a122FruGroupIndex, a66OperatingSystemVersion=a66OperatingSystemVersion, a1005CoolingDeviceType=a1005CoolingDeviceType, DmiDisplaystring=DmiDisplaystring, a36MemoryDeviceTableIndex=a36MemoryDeviceTableIndex, a7FruGroupIndex=a7FruGroupIndex, notification1ForSystemSlots=notification1ForSystemSlots, a54VoltageProbeMinimum=a54VoltageProbeMinimum, a104AssociatedGroup=a104AssociatedGroup, a202AssociatedGroup=a202AssociatedGroup, a75MaximumSpeed=a75MaximumSpeed, ePciHotplugDevice=ePciHotplugDevice, notification7ForTemperatureProbe=notification7ForTemperatureProbe, a64PowerState=a64PowerState, a1012EmailFromAddress=a1012EmailFromAddress, a54VoltageProbeDescription=a54VoltageProbeDescription, a74ConnectorType=a74ConnectorType, tDpcDiscovery=tDpcDiscovery, a100EventMessage=a100EventMessage, a1004ImmediatePowerDown=a1004ImmediatePowerDown, a54VoltageProbeVoltageLevel=a54VoltageProbeVoltageLevel, a122SecuritySettings=a122SecuritySettings, a75OperationalGroupIndex=a75OperationalGroupIndex, notification3ForPowerUnit=notification3ForPowerUnit, a114EventSubsystem=a114EventSubsystem, notification4ForVoltageProbe=notification4ForVoltageProbe, a17PowerSupplyIndex=a17PowerSupplyIndex, a34ErrorDataSize=a34ErrorDataSize, a83ScanMode=a83ScanMode, a31OperationalStatus=a31OperationalStatus, eBusGlobalTable=eBusGlobalTable, a202IsInstanceDataPresent=a202IsInstanceDataPresent, a113EventType=a113EventType, a34MaximumMemoryCapacity=a34MaximumMemoryCapacity, a4BiosRomSize=a4BiosRomSize, a92KeyboardLayout=a92KeyboardLayout, a6ProcessorFamily=a6ProcessorFamily, notification2ForPowerUnit=notification2ForPowerUnit, a75SerialBaseIoAddress=a75SerialBaseIoAddress, a1004TimedResetIncrement=a1004TimedResetIncrement, a17OperationalGroupIndex=a17OperationalGroupIndex, a64ContainerIndex=a64ContainerIndex, a1004SystemPowerCapabilities=a1004SystemPowerCapabilities, a36ErrorOperation=a36ErrorOperation, mapperdmtfGroups=mapperdmtfGroups, 
a4BiosLoaderVersion=a4BiosLoaderVersion, notification7ForPowerSupply=notification7ForPowerSupply, a54VoltageProbeLocation=a54VoltageProbeLocation, a30DeviceGroupIndex=a30DeviceGroupIndex, a36LastErrorUpdate=a36LastErrorUpdate, a205EventType=a205EventType, a1011RepeatCount=a1011RepeatCount, tOperationalState=tOperationalState, a83MaximumRefreshRate=a83MaximumRefreshRate, a113EventMessage=a113EventMessage, a55TemperatureProbeAccuracy=a55TemperatureProbeAccuracy, notification5ForPowerUnit=notification5ForPowerUnit, a54VoltageProbeAccuracy=a54VoltageProbeAccuracy, eMemoryArrayMappedAddresses=eMemoryArrayMappedAddresses, a122PointingDeviceDriverVersion=a122PointingDeviceDriverVersion, a19ResourceUserId=a19ResourceUserId, a202EventSystem=a202EventSystem, a104IsInstanceDataPresent=a104IsInstanceDataPresent, a100EventStateKey=a100EventStateKey, eVideo=eVideo, a6CurrentSpeed=a6CurrentSpeed, notification1ForVoltageProbe=notification1ForVoltageProbe, notification6ForSystemSlots=notification6ForSystemSlots, a55TemperatureLowerThreshold_Non_recover=a55TemperatureLowerThreshold_Non_recover, eGeneralInformation=eGeneralInformation, a2SystemPrimaryUserName=a2SystemPrimaryUserName, eOperationalState=eOperationalState, a108EventType=a108EventType, tMemoryDeviceMappedAddresses=tMemoryDeviceMappedAddresses, a17Range2InputFrequencyLow=a17Range2InputFrequencyLow, a55TemperatureProbeLocation=a55TemperatureProbeLocation, eEventGenerationForVoltageProbe=eEventGenerationForVoltageProbe, a1004ShutdownOsAndPowerOff=a1004ShutdownOsAndPowerOff, notification8ForPowerSupply=notification8ForPowerSupply, a1004RestoreFactoryDefaults=a1004RestoreFactoryDefaults, a17Range1InputVoltageHigh=a17Range1InputVoltageHigh, a140EventStateKey=a140EventStateKey, a1006RecordLength=a1006RecordLength, tBusGlobalTable=tBusGlobalTable, a75SerialPortSecuritySettings=a75SerialPortSecuritySettings, a84VideoBiosShadowingState=a84VideoBiosShadowingState, ePointingDevice=ePointingDevice, tSystemPowerControls=tSystemPowerControls, tPhysicalExpansionSitesTable=tPhysicalExpansionSitesTable, a1005HighestNormalReading=a1005HighestNormalReading, tEventGenerationForSystemSlots=tEventGenerationForSystemSlots, a1006Selfid=a1006Selfid, a92SecuritySettings=a92SecuritySettings, a116EventStateKey=a116EventStateKey, a54VoltageLevelUpperThreshold_Non_critic=a54VoltageLevelUpperThreshold_Non_critic, a108IsInstanceDataPresent=a108IsInstanceDataPresent, a36ErrorDataSize=a36ErrorDataSize, a64ChassisLockPresent=a64ChassisLockPresent, a140EventSystem=a140EventSystem, notification1ForPhysicalMemory=notification1ForPhysicalMemory, a108IsEventState_based=a108IsEventState_based, eSystemSlots=eSystemSlots, notification6ForTemperatureProbe=notification6ForTemperatureProbe, a64FruGroupIndex=a64FruGroupIndex, a55TemperatureLowerThreshold_Critical=a55TemperatureLowerThreshold_Critical, a1005SensorTolerancePlus=a1005SensorTolerancePlus, a100AssociatedGroup=a100AssociatedGroup, a100IsInstanceDataPresent=a100IsInstanceDataPresent, a35OperationalGroupIndex=a35OperationalGroupIndex, a1005FanUnits=a1005FanUnits, eSystemEventLog=eSystemEventLog, tCoolingUnitGlobalTable=tCoolingUnitGlobalTable, a1011PepString1=a1011PepString1, a92KeyboardType=a92KeyboardType, tTemperatureProbeTraps=tTemperatureProbeTraps, eEmailConfig=eEmailConfig, a55MonitoredTemperatureNormalMinimum=a55MonitoredTemperatureNormalMinimum, eEventGenerationForTemperatureProbe=eEventGenerationForTemperatureProbe, a1004Selfid=a1004Selfid, a1004ResetSystem=a1004ResetSystem, a36VendorSyndrome=a36VendorSyndrome, 
a91FruGroupIndex=a91FruGroupIndex, products=products, a4BiosStartingAddress=a4BiosStartingAddress, a36ErrorResolution=a36ErrorResolution, a19SlotIndex=a19SlotIndex, tOperatingSystem=tOperatingSystem, tEventGenerationForPhysicalMemory=tEventGenerationForPhysicalMemory, a122PointingDeviceDriverName=a122PointingDeviceDriverName, a201IsInstanceDataPresent=a201IsInstanceDataPresent, a35MappedRangeEndingAddress=a35MappedRangeEndingAddress, eSerialPorts=eSerialPorts, a129ExpansionSiteCurrentlyOccupied=a129ExpansionSiteCurrentlyOccupied, a9000BootServicePartitionFlag=a9000BootServicePartitionFlag, DmiCounter=DmiCounter, tSystemSlots=tSystemSlots, a19SlotPowerState=a19SlotPowerState, a1005CriticalThreshold=a1005CriticalThreshold, tEventGenerationForCoolingSensors=tEventGenerationForCoolingSensors, eTemperatureProbe=eTemperatureProbe, notification3ForVoltageProbe=notification3ForVoltageProbe)
mibBuilder.exportSymbols("INTELCORPORATIONBASEBOARDMAPPER-MIB", a1008DeviceManufacturer=a1008DeviceManufacturer, ePagingConfig=ePagingConfig, eEventGenerationForCoolingDevice=eEventGenerationForCoolingDevice, a50KeyboardPasswordStatus=a50KeyboardPasswordStatus, tPowerSupply=tPowerSupply, a19VirtualSlot=a19VirtualSlot, a34MemoryArrayLocation=a34MemoryArrayLocation, eMemoryDeviceMappedAddresses=eMemoryDeviceMappedAddresses, a1010IssuePaging=a1010IssuePaging, a1005CurrentReading=a1005CurrentReading, a83CurrentNumberOfBitsPerPixel=a83CurrentNumberOfBitsPerPixel, a34NumberOfMemoryDeviceSockets=a34NumberOfMemoryDeviceSockets, eEventGenerationForPowerUnit=eEventGenerationForPowerUnit, tSystemSlotsTraps=tSystemSlotsTraps, dmtfStdMifs=dmtfStdMifs, a114IsEventState_based=a114IsEventState_based, a114EventStateKey=a114EventStateKey, a17InputVoltageCapabilityDescription=a17InputVoltageCapabilityDescription, a140EventSubsystem=a140EventSubsystem, a37MappedRangeEndingAddress=a37MappedRangeEndingAddress, a1008DeviceRevision=a1008DeviceRevision, a54MonitoredVoltageNominalLevel=a54MonitoredVoltageNominalLevel, tMotherboard=tMotherboard, a66OperatingSystemName=a66OperatingSystemName, a54MonitoredVoltageNormalMinimum=a54MonitoredVoltageNormalMinimum, tPhysicalMemoryArray=tPhysicalMemoryArray, eSystemBiosCharacteristics=eSystemBiosCharacteristics, a30FruIndex=a30FruIndex, a6OperationalGroupIndex=a6OperationalGroupIndex, a34ArrayErrorType=a34ArrayErrorType, a55TemperatureProbeTableIndex=a55TemperatureProbeTableIndex, notification2ForTemperatureProbe=notification2ForTemperatureProbe, a19VccMixedVoltageSupport=a19VccMixedVoltageSupport, a75SerialPortIndex=a75SerialPortIndex, a81OperationalGroupIndex=a81OperationalGroupIndex, a55TemperatureUpperThreshold_Non_recover=a55TemperatureUpperThreshold_Non_recover, a34VendorSyndrome=a34VendorSyndrome, a52TimedPower_onAvailable=a52TimedPower_onAvailable, a4BiosVersion=a4BiosVersion, a31AvailabilityStatus=a31AvailabilityStatus, intel=intel, notification6ForVoltageProbe=notification6ForVoltageProbe, a100EventType=a100EventType, tEventGenerationForPowerUnit=tEventGenerationForPowerUnit, a35MappedRangeStartingAddress=a35MappedRangeStartingAddress, a74IrqUsed=a74IrqUsed, a201EventMessage=a201EventMessage, a202EventStateKey=a202EventStateKey, a66OperatingSystemDescription=a66OperatingSystemDescription, eSystemBios=eSystemBios, a64AssetTag=a64AssetTag, a36FormFactor=a36FormFactor, a19SlotType=a19SlotType, a28CoolingUnitStatus=a28CoolingUnitStatus, tKeyboard=tKeyboard, a205IsInstanceDataPresent=a205IsInstanceDataPresent, a83VideoRamMemorySize=a83VideoRamMemorySize, a66OperatingSystemBootDeviceStorageType=a66OperatingSystemBootDeviceStorageType, a54VoltageProbeIndex=a54VoltageProbeIndex, tPciHotplugDevice=tPciHotplugDevice, tPowerUnitGlobalTable=tPowerUnitGlobalTable, DmiInteger=DmiInteger, a1005OperationalGroupIndex=a1005OperationalGroupIndex, tPagingConfig=tPagingConfig, a34OperationalGroupIndex=a34OperationalGroupIndex, tMemoryArrayMappedAddresses=tMemoryArrayMappedAddresses, eCoolingSensors=eCoolingSensors, a1004ResetSystemAfterDelay=a1004ResetSystemAfterDelay, a1004WatchdogTimerResolution=a1004WatchdogTimerResolution, notification3ForProcessor=notification3ForProcessor, notification5ForSystemSlots=notification5ForSystemSlots, a67PowerUnitIndex=a67PowerUnitIndex, ePowerSupply=ePowerSupply, a1006Timestamp=a1006Timestamp, a104EventSubsystem=a104EventSubsystem, a91MouseInterface=a91MouseInterface, a6Status=a6Status, notification2ForSystemSlots=notification2ForSystemSlots, 
notification2ForPhysicalContainer=notification2ForPhysicalContainer, ePhysicalContainerGlobalTable=ePhysicalContainerGlobalTable, a114IsInstanceDataPresent=a114IsInstanceDataPresent, a66PrimaryOperatingSystem=a66PrimaryOperatingSystem, a1008PciHotplugSlotNumber=a1008PciHotplugSlotNumber, notification2ForPowerSupply=notification2ForPowerSupply, a1010PepStringSize=a1010PepStringSize, a30WarrantyStartDate=a30WarrantyStartDate, a1004ShutdownOs=a1004ShutdownOs, a64ContainerName=a64ContainerName, a122PointingDeviceButtons=a122PointingDeviceButtons, tSystemEventLog=tSystemEventLog, a36TypeDetail=a36TypeDetail, a129VirtualExpansionSite=a129VirtualExpansionSite, a205EventMessage=a205EventMessage, a54VoltageLevelLowerThreshold_Non_critic=a54VoltageLevelLowerThreshold_Non_critic, a205IsEventState_based=a205IsEventState_based, a113EventSeverity=a113EventSeverity, notification7ForVoltageProbe=notification7ForVoltageProbe, a1010DefaultPepString=a1010DefaultPepString, a19SlotWidth=a19SlotWidth, a83VideoMemoryType=a83VideoMemoryType, tPointingDevice=tPointingDevice, a1005CfmRating=a1005CfmRating, a104EventSystem=a104EventSystem, tPowerUnitGlobalTableTraps=tPowerUnitGlobalTableTraps, a50FrontPanelResetStatus=a50FrontPanelResetStatus, a116EventMessage=a116EventMessage, a83CurrentVideoMode=a83CurrentVideoMode, a74ParallelPortSecuritySettings=a74ParallelPortSecuritySettings, a1006RecordType=a1006RecordType, a10FruGroupIndex=a10FruGroupIndex, a9000DpcDialNumberString=a9000DpcDialNumberString, a74LogicalName=a74LogicalName, a81FruGroupIndex=a81FruGroupIndex, a19SlotSwitchStatus=a19SlotSwitchStatus, a64BootupState=a64BootupState, a1004WatchdogUpdateInterval=a1004WatchdogUpdateInterval, a17Range1InputFrequencyLow=a17Range1InputFrequencyLow, a17FruGroupIndex=a17FruGroupIndex, a6ProcessorIndex=a6ProcessorIndex, a17Range2CurrentProbeIndex=a17Range2CurrentProbeIndex, a36MemoryArrayIndex=a36MemoryArrayIndex, a113EventSystem=a113EventSystem, tProcessor=tProcessor, a83OperationalGroupIndex=a83OperationalGroupIndex, a6Level1CacheIndex=a6Level1CacheIndex, eSystemPowerControls=eSystemPowerControls, a1004IssueAHardwareNmi=a1004IssueAHardwareNmi, a91OperationalGroupIndex=a91OperationalGroupIndex, a1004TimedResetResolution=a1004TimedResetResolution, a50AdministratorPasswordStatus=a50AdministratorPasswordStatus, tCoolingSensors=tCoolingSensors, a83VideoIndex=a83VideoIndex)
|
student = ("dora", 10, 110.5) # Student tuple with name, age, height
print("student", student)
print("name", student[0], "age", student[1], "height", student[2]) # access elements in the tuple
print("Number of elements in the tuple", len(student))
# Iterate through the items in the tuple
for item in student:
print(item)
# Concatenate tuples
student = student + (30, 25) # add weight and bmi
print("student", student)
# Repetition with * operator
print("students", student * 2)
# Nested tuple
rectangle = ((10, 0), (10, 5), (15, 5), (15, 0))
print("Rectangle represented on coordinate plane", rectangle)
# Use the index operator to access inner elements of the tuple
print("Coordinate", rectangle[0])
print("Access element inside the coordinate", rectangle[0][0])
# Single element tuple
student = ("dora",)
print("Single element tuple", student)
# Empty tuple
student = ()
print("Empty tuple", student)
# Parentheses are optional
student = "dora", 10, 110.5 # Student tuple with name, age, height
print("student", student)
# Return tuples
def min_max(items):
return min(items), max(items)
print("Mix max of the elements in list", min_max([1, 2, 3, 4, 5]))
# Tuple unpacking
# Tuple unpacking is a destructuring operation: it unpacks a data structure into named references.
low, high = min_max([1, 2, 3, 4, 5])
print("low", low, "high", high)
print("Simple swapping")
a, b = "hello", "world"
print(a, b)
# swapping
a, b = b, a
print(a, b)
# Constructing tuple from a list
student = tuple(["dora", 10, 110.5])
print("student tuple from list", student)
# Constructing tuple from a string
student = tuple("dora")
print("student tuple from string", student)
# Membership testing
print("5 in (1,2,3,4,5)", 5 in (1, 2, 3, 4, 5))
print("5 not in (1,2,3,4,5)", 5 not in (1, 2, 3, 4, 5))
|
import starfile
import eulerangles
import numpy as np
import pandas as pd
def star2pose(star_file):
    """Read particle positions, orientations and source micrograph names
    from a RELION-style STAR file."""
    star = starfile.read(star_file)
    positions = star['particles'][
        [f'rlnCoordinate{ax}' for ax in 'XYZ']].to_numpy()
    # Origin shifts are stored in Angstroms; convert them to pixels and
    # subtract to recover the refined particle positions.
    shifts_angstroms = star['particles'][
        [f'rlnOrigin{ax}Angst' for ax in 'XYZ']].to_numpy()
    pixel_sizes = star['particles']['rlnPixelSize'].to_numpy()
    shifts = shifts_angstroms / pixel_sizes[:, np.newaxis]
    positions -= shifts
    # Euler angles (rot, tilt, psi) -> rotation matrices; transposing a
    # rotation matrix inverts it.
    eulers = star['particles'][
        [f'rlnAngle{e}' for e in ('Rot', 'Tilt', 'Psi')]].to_numpy()
    orientations = eulerangles.euler2matrix(
        eulers,
        axes='zyz',
        intrinsic=True,
        right_handed_rotation=True
    ).swapaxes(-1, -2)
    sources = star['particles']['rlnMicrographName'].to_numpy()
    return positions, orientations, sources
def pose2star(poses, micrograph_names, star_file):
    """Write poses (an object exposing .positions and .orientations) back
    to a RELION-style STAR file."""
    # Transpose back to the convention expected by matrix2euler,
    # mirroring the transpose in star2pose.
    eulers = eulerangles.matrix2euler(
        poses.orientations.swapaxes(-1, -2),
        axes='zyz',
        intrinsic=True,
        right_handed_rotation=True,
    )
    star_data = {
        'rlnCoordinateX': poses.positions[:, 0],
        'rlnCoordinateY': poses.positions[:, 1],
        'rlnCoordinateZ': poses.positions[:, 2],
        'rlnAngleRot': eulers[:, 0],
        'rlnAngleTilt': eulers[:, 1],
        'rlnAnglePsi': eulers[:, 2],
        'rlnMicrographName': np.asarray(micrograph_names),
    }
    # Flatten every column to 1D before building the DataFrame.
    for k, v in star_data.items():
        star_data[k] = v.reshape(-1)
    star_df = pd.DataFrame.from_dict(star_data)
    starfile.write(star_df, star_file, overwrite=True)
def read_transformations(subparticle_transformations):
    """Read subparticle shifts and rotation matrices from a STAR file."""
    transformations = starfile.read(subparticle_transformations)
    shifts = transformations[[f'subboxerShift{ax}' for ax in 'XYZ']]
    eulers = transformations[
        [f'subboxerAngle{ax}' for ax in ('Rot', 'Tilt', 'Psi')]]
    rotations = eulerangles.euler2matrix(
        eulers,
        axes='zyz',
        intrinsic=True,
        right_handed_rotation=True
    ).swapaxes(-1, -2)
    return shifts, rotations
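# Minimal round-trip sketch; the file names below are placeholders, and
# SimpleNamespace stands in for whatever pose object a caller would
# normally provide (anything with .positions and .orientations works).
if __name__ == '__main__':
    from types import SimpleNamespace
    positions, orientations, sources = star2pose('particles.star')
    poses = SimpleNamespace(positions=positions, orientations=orientations)
    pose2star(poses, sources, 'particles_roundtrip.star')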
|
"""
Codemonk link: https://www.hackerearth.com/practice/data-structures/trees/binary-search-tree/practice-problems/algorithm/distinct-count/
Given an array A of N integers, classify it as Good, Bad or Average. It is called Good if it contains exactly X
distinct integers, Bad if it contains less than X distinct integers and Average if it contains more than X distinct
integers.
Input - Output:
First line consists of a single integer T denoting the number of test cases.
First line of each test case consists of two space separated integers denoting N and X.
Second line of each test case consists of N space separated integers denoting the array elements.
Print the required answer for each test case on a new line.
Sample input:
4
4 1
1 4 2 5
4 2
4 2 1 5
4 3
5 2 4 1
4 4
1 2 4 5
Sample Output:
Average
Average
Average
Good
"""
"""
We create a set (the implementation of a set is NOT easy but it depends on binary search trees) from the array and then
we can directly answer the question. The set in Python is unordered, it has only distinct values inside and the
insertion is O(1). The time complexity to create the set is O(N).
We consider the input cases significant.
Final complexity: O(T*N)
"""
t = int(input())
for _ in range(t):
n, x = map(int, input().split())
array = list(map(int, input().split()))
distinct = set(array)
if len(distinct) == x:
print("Good")
elif len(distinct) > x:
print("Average")
else:
print("Bad")
|
import os
import argparse
from sys import platform
import cv2
from yolov3_depth.models import *
from yolov3_depth.utils.datasets import *
from yolov3_depth.utils.utils import *
from yolov3_depth.utils.parse_config import *
class Detector(object):
def __init__(self, opt):
self.opt = opt
self.img_size = self.opt.img_size # (320, 192) or (416, 256) or (608, 352) for (height, width)
self.weights = self.opt.weights
# Initialize
self.device = torch_utils.select_device(device=self.opt.device)
# Initialize model
self.model = Darknet(self.opt.cfg, self.img_size)
# Load weights
self.model.load_state_dict(torch.load(self.weights, map_location=self.device)['model'])
print("Load", self.weights)
# Eval mode
self.model.to(self.device).eval()
torch.backends.cudnn.benchmark = True # set True to speed up constant image size inference
# Get classes and colors
self.classes = load_classes(parse_data_cfg(self.opt.data)['names'])
print(self.classes)
self.classes[3] = 'start'
self.colors = [[random.randint(0, 255) for _ in range(3)] for _ in range(len(self.classes))]
def img_convert(self, img0):
# Padded resize
img = letterbox(img0, new_shape=self.img_size)[0]
# Normalize RGB
img = img[:, :, ::-1].transpose(2, 0, 1) # BGR to RGB
img = np.ascontiguousarray(img, dtype=np.float32) # uint8 to fp16/fp32
img /= 255.0 # 0 - 255 to 0.0 - 1.0
# Get detections
img = torch.from_numpy(img).to(self.device)
if img.ndimension() == 3:
img = img.unsqueeze(0)
return img
    # Raw image (numpy array) in, target info (result list, result image) out
def detect(self, img0, showResultImg=True):
# Run inference
t0 = time.time()
img = self.img_convert(img0)
with torch.no_grad():
pred = self.model(img)[0]
# Apply NMS
pred = non_max_suppression(pred, self.opt.conf_thres, self.opt.nms_thres)
        result = []  # flattened list: [cls, conf, depth, xyxy] appended per detection
result_img = img0
# Process detections
for i, det in enumerate(pred): # detections per image
im0 = img0
result_img = img0
            # If any targets were detected #bys
if det is not None and len(det):
# Rescale boxes from img_size to im0 size
det[:, :4] = scale_coords(img.shape[2:], det[:, :4], im0.shape).round()
# Write results
for *xyxy, conf, _, cls in det:
pred_depth = float(det[0][6].cpu())
pred_depth = (pred_depth - 0.5) * 1200 + 877.45
result.append(int(cls))
result.append(conf)
result.append(pred_depth)
result.append(xyxy)
pred_depth /= 1000
                    label = '%s %.2f %.3fm' % (self.classes[int(cls)], conf, pred_depth)  # class, confidence, depth (m)
result_img = plot_one_box(xyxy, im0, label=label, color=self.colors[int(cls)])
if showResultImg:
cv2.imshow("Result", result_img)
# Save results (image with detections)
#print('Done. (%.3fs)' % (time.time() - t0))
return result, result_img
def parse_result(self, result):
target_class = result[0]
target_conf = float(result[1].cpu())
target_depth = result[2]
target_box = result[3]
target_xmid = int((target_box[0] + target_box[2]) / 2)
target_ymid = int((target_box[1] + target_box[3]) / 2)
return target_class, target_conf, target_depth, target_xmid, target_ymid
def args_init(model_type="tinyyolo"):
parser = argparse.ArgumentParser()
if model_type == "tinyyolo":
parser.add_argument('--cfg', type=str, default='yolov3_depth/cfg/yolov3-tinygesture.cfg', help='cfg file path')
elif model_type == "yolo":
parser.add_argument('--cfg', type=str, default='yolov3_depth/cfg/yolov3-gesture.cfg', help='cfg file path')
parser.add_argument('--data', type=str, default='yolov3_depth/data/gesture.data', help='gesture.data file path')
if model_type=="tinyyolo":
parser.add_argument('--weights', type=str, default='yolov3_depth/weights/td957421.pt', help='path to weights file')
elif model_type=="yolo":
parser.add_argument('--weights', type=str, default='yolov3_depth/weights/d819999.pt',
help='path to weights file')
parser.add_argument('--output', type=str, default='output', help='output folder')
parser.add_argument('--img-size', type=int, default=416, help='inference size (pixels)')
parser.add_argument('--conf-thres', type=float, default=0.1, help='object confidence threshold')
parser.add_argument('--nms-thres', type=float, default=0.3, help='iou threshold for non-maximum suppression')
parser.add_argument('--fourcc', type=str, default='mp4v', help='output video codec (verify ffmpeg support)')
parser.add_argument('--device', default='', help='device id (i.e. 0 or 0,1) or cpu')
return parser
if __name__ == '__main__':
TEST_RGB_DIR = "E:/bishe2/YOLOv3-complete-pruning-master (2)/data/images/test"
opt = args_init().parse_args()
detector = Detector(opt)
test_img_list = os.listdir(TEST_RGB_DIR)
for i in range(len(test_img_list)):
test_img_list[i] = os.path.join(TEST_RGB_DIR,test_img_list[i])
pred_depths = []
for test_img_name in test_img_list:
img = cv2.imread(test_img_name)
        result, _ = detector.detect(img)
        if len(result) != 0:
            pred_depths.append(int(result[2]))
        else:
            pred_depths.append(0)
    pred_depths = np.array(pred_depths, dtype=int)
np.savetxt("output/test_pred_depths.txt", pred_depths, fmt='%d')
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import print_function, unicode_literals
__license__ = """
This file is part of GNU FreeFont.
GNU FreeFont is free software: you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free Software
Foundation, either version 3 of the License, or (at your option) any later
version.
GNU FreeFont is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License along with
GNU FreeFont. If not, see <http://www.gnu.org/licenses/>.
"""
__author__ = "Emmanuel Vallois"
__email__ = "vallois@polytech.unice.fr"
__copyright__ = "Copyright 2011 Emmanuel Vallois"
__date__ = "$Date$"
__version__ = "$Revision$"
__doc__ = """
Writes in the file named by the first argument an HTML page comprising a table
for testing arabic characters, their behavior and consistency with presentation
forms.
Runs under normal Python, version 2.7 or above.
Typical usage:
arabic_test.py "Arabic test page.html"
"""
import sys
from codecs import open
from string import Template
from io import StringIO
from unicodedata import normalize, name, unidata_version, decomposition
# unichr exists only under Python 2; alias it to chr so the script also runs under Python 3.
try:
    unichr
except NameError:
    unichr = chr
_module_missing_msg = """Please run
generate_arabic_shaping.py
to generate
arabic_shaping.py"""
try:
from arabic_shaping import joining_type
except ImportError:
print( _module_missing_msg, file=sys.stderr)
sys.exit( 1 )
if len(sys.argv) > 1:
outfile = sys.argv[1]
else:
outfile = 'Arabic test page.html'
sys.stdout = open(outfile, 'w', 'utf-8')
def uniname(char):
return name(char, new_names.get(char, "<reserved-{:04X}>".format(ord(char))))
def non_positional_name(char):
return uniname(char).replace(' INITIAL','').replace(' FINAL','').replace(' MEDIAL','').replace(' ISOLATED','').replace(' FORM','')
arabic_ranges = list(range(0x600, 0x61B + 1))
arabic_ranges.extend(range(0x61E, 0x6FF + 1))
arabic_ranges.extend(range(0x750, 0x77F + 1))
arabic_ranges.extend(range(0x8A0, 0x8B1 + 1))
arabic_ranges.extend(range(0x8E4, 0x8FF + 1))
arabic_ranges.extend(range(0xFB50, 0xFBC1 + 1))
arabic_ranges.extend(range(0xFBD3, 0xFD3F + 1))
arabic_ranges.extend(range(0xFD50, 0xFD8F + 1))
arabic_ranges.extend(range(0xFD92, 0xFDC7 + 1))
arabic_ranges.extend(range(0xFDF0, 0xFDFD + 1))
arabic_ranges.extend(range(0xFE70, 0xFE74 + 1))
arabic_ranges.extend(range(0xFE76, 0xFEFC + 1))
unicode61_new_ranges = [0x604, 0x8A0]
unicode61_new_ranges.extend(range(0x8A2, 0x8AC + 1))
unicode61_new_ranges.extend(range(0x8E4, 0x8FE + 1))
unicode62_new_ranges = [0x605, 0x8A1]
unicode62_new_ranges.extend(range(0x8AD, 0x8B1 + 1))
unicode62_new_ranges.append(0x8FF)
new_names = {}
new_names['\u0604'] = 'ARABIC SIGN SAMVAT'
new_names['\u0605'] = 'ARABIC NUMBER MARK ABOVE'
new_names['\u08A0'] = 'ARABIC LETTER BEH WITH SMALL V BELOW'
new_names['\u08A1'] = 'ARABIC LETTER BEH WITH HAMZA ABOVE'
new_names['\u08A2'] = 'ARABIC LETTER JEEM WITH TWO DOTS ABOVE'
new_names['\u08A3'] = 'ARABIC LETTER TAH WITH TWO DOTS ABOVE'
new_names['\u08A4'] = 'ARABIC LETTER FEH WITH DOT BELOW AND THREE DOTS ABOVE'
new_names['\u08A5'] = 'ARABIC LETTER QAF WITH DOT BELOW'
new_names['\u08A6'] = 'ARABIC LETTER LAM WITH DOUBLE BAR'
new_names['\u08A7'] = 'ARABIC LETTER MEEM WITH THREE DOTS ABOVE'
new_names['\u08A8'] = 'ARABIC LETTER YEH WITH TWO DOTS BELOW AND HAMZA ABOVE'
new_names['\u08A9'] = 'ARABIC LETTER YEH WITH TWO DOTS BELOW AND DOT ABOVE'
new_names['\u08AA'] = 'ARABIC LETTER REH WITH LOOP'
new_names['\u08AB'] = 'ARABIC LETTER WAW WITH DOT WITHIN'
new_names['\u08AC'] = 'ARABIC LETTER ROHINGYA YEH'
new_names['\u08E4'] = 'ARABIC CURLY FATHA'
new_names['\u08E5'] = 'ARABIC CURLY DAMMA'
new_names['\u08E6'] = 'ARABIC CURLY KASRA'
new_names['\u08E7'] = 'ARABIC CURLY FATHATAN'
new_names['\u08E8'] = 'ARABIC CURLY DAMMATAN'
new_names['\u08E9'] = 'ARABIC CURLY KASRATAN'
new_names['\u08EA'] = 'ARABIC TONE ONE DOT ABOVE'
new_names['\u08EB'] = 'ARABIC TONE TWO DOTS ABOVE'
new_names['\u08EC'] = 'ARABIC TONE LOOP ABOVE'
new_names['\u08ED'] = 'ARABIC TONE ONE DOT BELOW'
new_names['\u08EE'] = 'ARABIC TONE TWO DOTS BELOW'
new_names['\u08EF'] = 'ARABIC TONE LOOP BELOW'
new_names['\u08F0'] = 'ARABIC OPEN FATHATAN'
new_names['\u08F1'] = 'ARABIC OPEN DAMMATAN'
new_names['\u08F2'] = 'ARABIC OPEN KASRATAN'
new_names['\u08F3'] = 'ARABIC SMALL HIGH WAW'
new_names['\u08F4'] = 'ARABIC FATHA WITH RING'
new_names['\u08F5'] = 'ARABIC FATHA WITH DOT ABOVE'
new_names['\u08F6'] = 'ARABIC KASRA WITH DOT BELOW'
new_names['\u08F7'] = 'ARABIC LEFT ARROWHEAD ABOVE'
new_names['\u08F8'] = 'ARABIC RIGHT ARROWHEAD ABOVE'
new_names['\u08F9'] = 'ARABIC LEFT ARROWHEAD BELOW'
new_names['\u08FA'] = 'ARABIC RIGHT ARROWHEAD BELOW'
new_names['\u08FB'] = 'ARABIC DOUBLE RIGHT ARROWHEAD ABOVE'
new_names['\u08FC'] = 'ARABIC DOUBLE RIGHT ARROWHEAD ABOVE WITH DOT'
new_names['\u08FD'] = 'ARABIC RIGHT ARROWHEAD ABOVE WITH DOT'
new_names['\u08FE'] = 'ARABIC DAMMA WITH DOT'
new_names['\u08AD'] = 'ARABIC LETTER LOW ALEF'
new_names['\u08AE'] = 'ARABIC LETTER DAL WITH THREE DOTS BELOW'
new_names['\u08AF'] = 'ARABIC LETTER SAD WITH THREE DOTS BELOW'
new_names['\u08B0'] = 'ARABIC LETTER GAF WITH INVERTED STROKE'
new_names['\u08B1'] = 'ARABIC LETTER STRAIGHT WAW'
new_names['\u08FF'] = 'ARABIC MARK SIDEWAYS NOON GHUNNA'
# Unicode 6.0 additions not present in Python 2.7
new_names['\u0620'] = 'ARABIC LETTER KASHMIRI YEH'
new_names['\u065F'] = 'ARABIC WAVY HAMZA BELOW'
new_names['\uFBB2'] = 'ARABIC SYMBOL DOT ABOVE'
new_names['\uFBB3'] = 'ARABIC SYMBOL DOT BELOW'
new_names['\uFBB4'] = 'ARABIC SYMBOL TWO DOTS ABOVE'
new_names['\uFBB5'] = 'ARABIC SYMBOL TWO DOTS BELOW'
new_names['\uFBB6'] = 'ARABIC SYMBOL THREE DOTS ABOVE'
new_names['\uFBB7'] = 'ARABIC SYMBOL THREE DOTS BELOW'
new_names['\uFBB8'] = 'ARABIC SYMBOL THREE DOTS POINTING DOWNWARDS ABOVE'
new_names['\uFBB9'] = 'ARABIC SYMBOL THREE DOTS POINTING DOWNWARDS BELOW'
new_names['\uFBBA'] = 'ARABIC SYMBOL FOUR DOTS ABOVE'
new_names['\uFBBB'] = 'ARABIC SYMBOL FOUR DOTS BELOW'
new_names['\uFBBC'] = 'ARABIC SYMBOL DOUBLE VERTICAL BAR BELOW'
new_names['\uFBBD'] = 'ARABIC SYMBOL TWO DOTS VERTICALLY ABOVE'
new_names['\uFBBE'] = 'ARABIC SYMBOL TWO DOTS VERTICALLY BELOW'
new_names['\uFBBF'] = 'ARABIC SYMBOL RING'
new_names['\uFBC0'] = 'ARABIC SYMBOL SMALL TAH ABOVE'
new_names['\uFBC1'] = 'ARABIC SYMBOL SMALL TAH BELOW'
'''Class Equiv stores the correspondence between a code point and its NFKC-normalized equivalent:
for ordinary characters this is the character itself, for decomposable characters it is the compatibility
decomposition.'''
class Equiv:
code_point = 0
compat = 0
def __init__(self, code_point, compat):
self.code_point = code_point
self.compat = compat
def sort_key(self):
return '{:02X}'.format(len(self.compat.lstrip(' '))) + self.compat.lstrip(' ')
def __repr__(self):
return 'Equiv(0x{:04X}, compat={})'.format(self.code_point, self.compat)
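# Illustration of the correspondence Equiv records: the presentation form
# U+FE8D (ARABIC LETTER ALEF ISOLATED FORM) NFKC-normalizes to the plain
# letter U+0627, so Equiv(0xFE8D, normalize('NFKC', unichr(0xFE8D))) pairs
# the compatibility character with its canonical spelling.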
equivs = []
for cp in arabic_ranges:
normalized = normalize('NFKC', unichr(cp))
equivs.append(Equiv(cp, normalized))
# Sort our characters by length of the decomposition and by decomposition itself
equivs.sort(key=Equiv.sort_key)
#for e in equivs:
# print(e, file=sys.stderr)
contextual_form_formats = { 'isolat':'{}', 'final>':'‍{}', 'medial':'‍{}‍', 'initia':'{}‍' }
contextual_forms = 'isolat', 'final>', 'medial', 'initia'
current_line = {}
equiv = None
char = None
def store_contextual_form():
# print('store_contextual_form', equiv, file=sys.stderr)
compat_disp = equiv.compat
if equiv.compat[0] == ' ': compat_disp = '\u00A0' + compat_disp[1:]
#nonlocal current_line
form_cells = StringIO()
form = decomposition(char)[1:7]
print('<td class="ch">{}{}</td>'.format(contextual_form_formats.get(form, '{}').format(compat_disp),
'<small><br/>{}</small>'.format(ord_mul(compat_disp)) if len(compat_disp) >=2 else ''), file=form_cells)
print('<td class="ch">{}<small><br />{:04X}</small></td>'.format(char, equiv.code_point), file=form_cells)
#if current_line.get(form, 'not found') != 'not found': print('collision', current_line[form].rstrip(), equiv, file=stderr)
current_line[form] = form_cells.getvalue()
form_cells.close()
table_head = '''
<table frame="box" rules="rows">
{}
<colgroup><col/><col/></colgroup>
<colgroup id="characterCols"><col span="2"/><col span="2"/><col span="2"/><col span="2"/></colgroup>
<tr>
<th rowspan="2">General<br />Unicode</th>
<th rowspan="2">Name</th>
<th colspan="8">Contextual Forms</th>
</tr>
<tr><th>Isolated</th><th>Isolated (compat)</th><th>Final</th><th>Final (compat)</th>
<th>Medial</th><th>Medial (compat)</th><th>Initial</th><th>Initial (compat)</th></tr>'''
def print_table():
global current_line, char
def end_line():
for form in contextual_forms:
print(current_line.get(form, '<td colspan="2"></td>').rstrip())
print('</tr>')
current_line.clear()
def print_equiv(equiv):
# print('print_equiv', equiv, file=sys.stderr)
cp = equiv.code_point
char = unichr(cp)
print('<tr{}><td>{}</td>'.format(' class="nextVersion"' if cp in unicode61_new_ranges else ' class="furtherFuture"' if cp in unicode62_new_ranges else '',
'compat' if len(equiv.compat.replace(' ', '')) > 1 else '{:04X}'.format(ord(equiv.compat.lstrip()[0]))))
print('<td>{}</td>'.format(non_positional_name(char)))
if equiv.compat.replace(' ', '') == char: # character is not a decomposable character, or is a standalone combining mark (decomposable to space + combining mark)
i = 0
for form in contextual_forms:
print('<td class="ch">{}</td><td></td>'.format(contextual_form_formats[form].format(char)))
i += 1
if { 'T':'isolat', 'U':'isolat', 'C':'isolat', 'R':'final>', 'D':'' }[joining_type(cp)] == form:
break
if i < 4:
print('<td colspan="{}"></td>'.format((4 - i) * 2))
print('</tr>')
else:
end_line()
print(table_head.format(caption))
last_equiv = None
global equiv
for equiv in equivs:
char = unichr(equiv.code_point)
if last_equiv:
#special case FC03 because there is one set of plain YEH WITH HAMZA ABOVE WITH ALEF MAKSURA and one of 'uighur kirghiz' compatibility ligatures
if equiv.compat.lstrip() == last_equiv.compat.lstrip() and equiv.code_point != 0xFC03:
store_contextual_form()
else:
print_equiv(last_equiv)
if equiv.compat != char:
store_contextual_form()
last_equiv = equiv
print_equiv(last_equiv)
print('</table>')
def ord_mul(s):
code_points = ''
for c in s:
code_points += '{:X} '.format(ord(c))
return code_points[:-1]
html_heading = Template('''<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en">
<head>
<meta http-equiv="Content-Type" content="text/html;charset=utf-8"/>
<title>$title</title>
<style type="text/css">
.captionSquare { float: left; width: 2em; height: 1em; margin-right: 0.5em }
caption { width: 60em; text-align: left }
table { text-align: center; font-family: FreeSerif }
td { padding: 10px }
small { font-size: small }
#characterCols { border-left: medium double black; border-right: medium double black }
.nextVersion { background-color: #CCFF99 }
.furtherFuture { background-color: #FFFFCC }
.name { width: 10em }
.ch { vertical-align: baseline; line-height: 75%; font-size: 250%; width: 1em; direction: rtl }
.empty { background:#EEEEEE }
</style>
</head>
<body>
<h1>$title</h1>
<p>Choose the font to test: <select onchange="changefont(this)"><option>FreeSerif</option><option>FreeSerif, bold</option><option>FreeMono</option></select></p>
<script type="text/javascript">//<![CDATA[
function changefont(select) {
var font = select.options.item(select.selectedIndex).value.split(', ');
var bold = font.length > 1 ? font[1] == 'bold' : false;
font = font[0];
var elementsToStyle = document.getElementsByClassName("ch");
for (i = 0; i < elementsToStyle.length; i++) {
elementsToStyle[i].style.fontFamily = font;
elementsToStyle[i].style.fontWeight = bold ? 'bold' : 'normal';
}
}//]]></script>''')
caption='''<caption><span class="captionSquare nextVersion"> </span> New characters in Unicode 6.1, which will be published in February 2012.
These can be relied upon and will not change or be removed. See <a href="http://www.unicode.org/Public/6.1.0/charts/blocks//U08A0.pdf">the
Unicode chart for the new block <b>Arabic Extended-A</b></a>, and for more about these characters, see <a href="http://std.dkuug.dk/JTC1/SC2/WG2/docs/n3734.pdf">N3734</a>
for U+0604, <a href="http://std.dkuug.dk/JTC1/SC2/WG2/docs/n3882.pdf">the complete
proposal</a> for most characters, <a href="http://std.dkuug.dk/JTC1/SC2/WG2/docs/n3791.pdf">N3791</a> for U+08F0-U+08F3.<br/>
<span class="captionSquare furtherFuture"> </span> Future new characters in Unicode 6.2. These can will probably be standardized this way,
but could in principle still change or be removed. See <a href="http://std.dkuug.dk/JTC1/SC2/WG2/docs/n3990.pdf">N3990, in 4.2 Orthography</a> for U+0605,
<a href="http://std.dkuug.dk/JTC1/SC2/WG2/docs/n4072.pdf">N4072 proposal</a> about U+08AD-U+08B1, and
<a href="http://std.dkuug.dk/JTC1/SC2/WG2/docs/n3989.pdf">N3989 proposal</a> about U+08FF.</caption>'''
def print_arabic_test_page():
print(html_heading.substitute(title='Test for Unicode Arabic range'))
print_table()
print('</body>')
print('</html>')
print_arabic_test_page()
|
#!flask/bin/python
###############################################################################
# Training Service
# Handles requests for training sessions
#
# Copyright (c) 2017-2019 Joshua Burt
###############################################################################
###############################################################################
# Dependencies
###############################################################################
from flask import Flask, jsonify, request, make_response, abort
from flask_cors import CORS, cross_origin
import logging
import uuid
import pika
import json
import os
import errno
from dicebox.config.dicebox_config import DiceboxConfig
# Config
config_file='./dicebox.config'
CONFIG = DiceboxConfig(config_file)
###############################################################################
# Allows for easy directory structure creation
# https://stackoverflow.com/questions/273192/how-can-i-create-a-directory-if-it-does-not-exist
###############################################################################
def make_sure_path_exists(path):
try:
if os.path.exists(path) is False:
os.makedirs(path)
except OSError as exception:
if exception.errno != errno.EEXIST:
raise
###############################################################################
# Setup logging.
###############################################################################
make_sure_path_exists(CONFIG.LOGS_DIR)
logging.basicConfig(
format='%(asctime)s - %(levelname)s - %(message)s',
datefmt='%m/%d/%Y %I:%M:%S %p',
level=logging.DEBUG,
filemode='w',
filename="%s/trainingservice.%s.log" % (CONFIG.LOGS_DIR, os.uname()[1])
)
###############################################################################
# Create the flask, and cors config
###############################################################################
app = Flask(__name__)
cors = CORS(app, resources={r"/api/*": {"origins": "http://localhost:*"}})
###############################################################################
# Handles submission of the actual training request to the message system
###############################################################################
def train_request():
    training_request_id = uuid.uuid4()
    connection = None
    try:
## Submit our message
url = CONFIG.TRAINING_SERVICE_RABBITMQ_URL
logging.debug(url)
parameters = pika.URLParameters(url)
connection = pika.BlockingConnection(parameters=parameters)
channel = connection.channel()
channel.queue_declare(queue=CONFIG.TRAINING_SERVICE_RABBITMQ_TRAIN_REQUEST_TASK_QUEUE, durable=True)
training_request = {}
training_request['training_request_id'] = str(training_request_id)
channel.basic_publish(exchange=CONFIG.TRAINING_SERVICE_RABBITMQ_EXCHANGE,
routing_key=CONFIG.TRAINING_SERVICE_RABBITMQ_TRAINING_REQUEST_ROUTING_KEY,
body=json.dumps(training_request),
properties=pika.BasicProperties(
delivery_mode=2, # make message persistent
))
logging.debug(" [x] Sent %r" % json.dumps(training_request))
    except Exception:
        # something went wrong..
        training_request_id = None
        logging.error('we had a failure sending the request to the message system')
    finally:
        if connection is not None:
            connection.close()
return training_request_id
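###############################################################################
# Hypothetical consumer sketch for the queue declared above (not part of this
# service): a worker would mirror the publisher's pika settings and ack each
# message once training completes. The callback body is an assumption.
#
#   def on_message(ch, method, properties, body):
#       training_request = json.loads(body)
#       # ... run the training session for training_request['training_request_id'] ...
#       ch.basic_ack(delivery_tag=method.delivery_tag)
#
#   channel.basic_consume(
#       queue=CONFIG.TRAINING_SERVICE_RABBITMQ_TRAIN_REQUEST_TASK_QUEUE,
#       on_message_callback=on_message)
#   channel.start_consuming()
###############################################################################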
###############################################################################
# Accepts requests for async training sessions
###############################################################################
@app.route('/api/train/request', methods=['GET'])
def make_api_train_request_public():
if request.headers['API-ACCESS-KEY'] != CONFIG.API_ACCESS_KEY:
logging.debug('bad access key')
abort(403)
if request.headers['API-VERSION'] != CONFIG.API_VERSION:
logging.debug('bad access version')
abort(400)
training_request_id = train_request()
return make_response(jsonify({'training_request_id': training_request_id}), 202)
###############################################################################
# Returns API version
###############################################################################
@app.route('/api/version', methods=['GET'])
def make_api_version_public():
return make_response(jsonify({'version': str(CONFIG.API_VERSION)}), 200)
###############################################################################
# Super generic health end-point
###############################################################################
@app.route('/health/plain', methods=['GET'])
@cross_origin()
def make_health_plain_public():
return make_response('true', 200)
###############################################################################
# 404 Handler
###############################################################################
@app.errorhandler(404)
def not_found(error):
return make_response(jsonify({'error': 'Not found'}), 404)
###############################################################################
# main entry point, thread safe
###############################################################################
if __name__ == '__main__':
logging.debug('starting flask app')
app.run(debug=CONFIG.FLASK_DEBUG, host=CONFIG.LISTENING_HOST, threaded=True)
|
import random
import unittest
import igraph
from circulo.metrics import VertexCoverMetric
class TestMetrics(unittest.TestCase):
def setUp(self):
self.G=igraph.load("karate.gml")
membership=[
[0,1,2,3,7,11,12,13,17,19,21],
[4,5,6,10,16],
[8,9,14,15,18,20,22,23,24,25,26,27,28,29,30,31,32,33]]
cover=igraph.VertexCover(self.G, membership)
metrics=VertexCoverMetric.run_analysis(cover, weights=None)
metrics.report()
self.comm_metrics=metrics.comm_metrics
def test_density(self):
self.assertEqual(round(.4181818, 2), round(self.comm_metrics[0].density, 2))
self.assertEqual(round(.6, 2), round(self.comm_metrics[1].density,2))
self.assertEqual(round(.22875817, 2), round(self.comm_metrics[2].density,2))
def test_avgdegree(self):
self.assertEqual(round(4.181818182, 2), round(self.comm_metrics[0].degree_avg,2))
self.assertEqual(round(2.4, 2), round(self.comm_metrics[1].degree_avg,2))
self.assertEqual(round(3.8888889,2), round(self.comm_metrics[2].degree_avg,2))
def test_FOMD(self):
self.assertEqual(round(0.545454545,2), round(self.comm_metrics[0].fomd, 2))
self.assertEqual(round(0, 2), round(self.comm_metrics[1].fomd, 2))
self.assertEqual(round(0.277777778,2), round(self.comm_metrics[2].fomd,2))
def test_expansion(self):
self.assertEqual(round(1.272727, 2), round(self.comm_metrics[0].degree_boundary_avg, 2))
self.assertEqual(round(0.8, 2), round(self.comm_metrics[1].degree_boundary_avg, 2))
self.assertEqual(round(0.555556, 2), round(self.comm_metrics[2].degree_boundary_avg,2))
def test_cutratio(self):
self.assertEqual(round(.05534, 2), round(self.comm_metrics[0].cut_ratio, 2))
self.assertEqual(round(.02759, 2), round(self.comm_metrics[1].cut_ratio, 2))
self.assertEqual(round(.03472, 2), round(self.comm_metrics[2].cut_ratio, 2))
def test_conductance(self):
self.assertEqual(round(0.2333333,2), round(self.comm_metrics[0].conductance,2))
self.assertEqual(round(0.25,2), round(self.comm_metrics[1].conductance,2))
self.assertEqual(round(0.125,2), round(self.comm_metrics[2].conductance,2))
def test_normalizedcut(self):
self.assertEqual(round(0.346236559,2), round(self.comm_metrics[0].normalized_cut,2))
self.assertEqual(round(0.277027027,2), round(self.comm_metrics[1].normalized_cut,2))
self.assertEqual(round(0.229166667, 2), round(self.comm_metrics[2].normalized_cut,2))
def test_TPR(self):
self.assertEqual(round(0.9091, 2), round(self.comm_metrics[0].tpr[1], 2))
self.assertEqual(round(0.6, 2), round(self.comm_metrics[1].tpr[1], 2))
self.assertEqual(round(0.9444, 2), round(self.comm_metrics[2].tpr[1], 2))
def test_MaxODF(self):
self.assertEqual(round(0.5,2), round(self.comm_metrics[0].odf_dict["max"], 2))
self.assertEqual(round(0.3333333,2), round(self.comm_metrics[1].odf_dict["max"], 2))
self.assertEqual(round(0.5, 2), round(self.comm_metrics[2].odf_dict["max"], 2))
def test_avgODF(self):
self.assertEqual(round(0.138131313,2), round(self.comm_metrics[0].odf_dict["average"], 2))
self.assertEqual(round(0.233333333,2), round(self.comm_metrics[1].odf_dict["average"], 2))
self.assertEqual(round(0.117592593, 2), round(self.comm_metrics[2].odf_dict["average"], 2))
def test_FlakeODF(self):
self.assertEqual(round(0, 2), round(self.comm_metrics[0].odf_dict["flake"], 2))
self.assertEqual(round(0, 2), round(self.comm_metrics[1].odf_dict["flake"], 2))
self.assertEqual(round(0, 2), round(self.comm_metrics[2].odf_dict["flake"], 2))
def test_separability(self):
self.assertEqual(round(1.6428571,2), round(self.comm_metrics[0].separability, 2))
self.assertEqual(round(1.5, 2), round(self.comm_metrics[1].separability, 2))
self.assertEqual(round(3.5, 2), round(self.comm_metrics[2].separability, 2))
def test_clusteringcoefficient(self):
self.assertEqual(round(0.72503608, 2), round(self.comm_metrics[0].clustering_coefficient, 2))
self.assertEqual(round(0.66666667, 2), round(self.comm_metrics[1].clustering_coefficient, 2))
self.assertEqual(round(0.72045177, 2), round(self.comm_metrics[2].clustering_coefficient, 2))
if __name__ == '__main__' :
unittest.main()
|
#!/usr/bin/env python
import numpy as np
import tensorflow as tf
import tensorflow.keras as keras
import matplotlib.pyplot as plt
import my_layers
def plot_sample(imgs_g):
for i in range(25):
plt.subplot(5, 5, i+1)
plt.imshow(imgs_g[i])
plt.tick_params(bottom=False, left=False, labelbottom=False, labelleft=False)
plt.show()
(x_train, y_train), (x_test, y_test) = keras.datasets.cifar10.load_data()
x_train = x_train.astype(np.float32) / 255.0
# Reflect
input_node = keras.layers.Input(shape=x_train.shape[1:])
pad = my_layers.Padding2D(5, "reflect")(input_node)
model = keras.models.Model(inputs=input_node, outputs=pad)
model.summary()
x_reflect = model.predict(x_train)
plot_sample(x_reflect)
# Symmetric
input_node = keras.layers.Input(shape=x_train.shape[1:])
pad = my_layers.Padding2D(5, "symmetric")(input_node)
model = keras.models.Model(inputs=input_node, outputs=pad)
model.summary()
x_symmetric = model.predict(x_train)
plot_sample(x_symmetric)
# Replicate (edge) padding: emulated by stacking five symmetric pads of width 1,
# since a width-1 symmetric pad duplicates the edge pixel, which is exactly what
# replicate padding does.
input_node = keras.layers.Input(shape=x_train.shape[1:])
pad = my_layers.Padding2D(1, "symmetric")(input_node)
for i in range(4):
pad = my_layers.Padding2D(1, "symmetric")(pad)
model = keras.models.Model(inputs=input_node, outputs=pad)
model.summary()
x_replicate = model.predict(x_train)
plot_sample(x_replicate)
|
import datetime
import json
import os
import pandas as pd
from params import *
import requests
url_api = 'https://www.data.gouv.fr/api/1/datasets/5e5d89c28b4c410f100c3242/resources/'
resource_XLSX = '352b3cea-21d7-4cfc-87d3-dddab11b4021'
resource_CSV = 'bab27c3e-5620-47b2-8ed8-797c8192d905'
efs_collection = 'https://api.efs.sante.fr/carto-api/v2/SamplingCollection/'
efs_location = 'https://api.efs.sante.fr/carto-api/v2/SamplingLocation/'
efs_user = '&UserLatitude=48.85&UserLongitude=2.35&Limit=100000'
def add_prefix(dictionary, prefix):
    return {prefix + key: value for key, value in dictionary.items()}
def build_database():
collections = []
response = requests.get(efs_location + 'SearchNearPoint?CenterLatitude=48.85&CenterLongitude=2.35&DiameterLatitude=180&DiameterLongitude=360' + efs_user)
if response.status_code != 200:
return None
for location in response.json():
response = requests.get(efs_collection + 'SearchByFileNumber?FileNumber=' + str(location['id']) + efs_user)
print(str(location['id']) + ' > ' + str(response.status_code))
if response.status_code == 200:
result = response.json()
location = result.copy()
del location['collections']
del location['distance']
location = add_prefix(location, 'location_')
for collection in result['collections']:
collection['date'] = collection['date'][0:10]
collection = add_prefix(collection, 'collection_')
row = location.copy()
row.update(collection)
collections.append(row)
return pd.DataFrame(collections)
def upload_file(local_name, resource_id):
print('Uploading file')
headers = {
'X-API-KEY': X_API_KEY
}
response = requests.post(url_api + resource_id + '/upload/', files={'file': open(local_name, 'rb')}, headers=headers)
print('Uploaded file')
print('Uploading metadata')
headers = {
'Content-Type': 'application/json',
'X-API-KEY': X_API_KEY
}
old_data = response.json()
data = {
'published': old_data['last_modified']
}
response = requests.put(url_api + resource_id + '/', data=json.dumps(data), headers=headers)
print('Uploaded metadata')
def save_and_upload(database):
# Save a local copy with date for historical bookkeeping
today = datetime.date.today().strftime('%Y%m%d')
database.to_csv('collections' + today + '.csv', index=False, encoding='UTF-8')
with pd.ExcelWriter('collections' + today + '.xlsx') as writer:
database.to_excel(writer, sheet_name='collections', index=False)
# Upload to server with constant name
database.to_csv('collections.csv', index=False, encoding='UTF-8')
with pd.ExcelWriter('collections.xlsx') as writer:
database.to_excel(writer, sheet_name='collections', index=False)
upload_file('collections.csv', resource_CSV)
upload_file('collections.xlsx', resource_XLSX)
print('Downloading data')
db = build_database()
if db is None:
print('Failed. Main request returned error code.')
else:
    if db.shape[0] < 10:
        print('Failed. Very few collections.')
    else:
        save_and_upload(db)
        print('Done')
|
from tcf_connector.worker_registry_jrpc_impl import WorkerRegistryJRPCImpl
import logging
import unittest
import toml
from os import path, environ
import errno
import secrets
import json
from utils.tcf_types import WorkerType, WorkerStatus
logging.basicConfig(format='%(asctime)s - %(levelname)s - %(message)s', level=logging.INFO)
class TestWorkerRegistryJRPCImpl(unittest.TestCase):
def __init__(self, config_file):
super(TestWorkerRegistryJRPCImpl, self).__init__()
if not path.isfile(config_file):
raise FileNotFoundError("File not found at path: {0}".format(path.realpath(config_file)))
try:
with open(config_file) as fd:
self.__config = toml.load(fd)
except IOError as e:
if e.errno != errno.ENOENT:
raise Exception('Could not open config file: %s',e)
self.__worker_registry_wrapper = WorkerRegistryJRPCImpl(self.__config)
self.__worker_id = secrets.token_hex(32)
self.__worker_type = WorkerType.TEE_SGX
self.__org_id = secrets.token_hex(32)
self.__details = json.dumps({"workOrderSyncUri":"http://worker-order:8008".encode("utf-8").hex()})
self.__app_ids = [secrets.token_hex(32),secrets.token_hex(32)]
def test_worker_register(self):
req_id = 12
logging.info('Calling test_worker_register with\n worker_id %s\n worker_type %d\n details %s\n org_id %s\n app_ids %s\n',
self.__worker_id, self.__worker_type.value, self.__details,self.__org_id, self.__app_ids)
res = self.__worker_registry_wrapper.worker_register(self.__worker_id, self.__worker_type,
self.__org_id, self.__app_ids, self.__details, req_id)
logging.info('Result: %s\n', res)
self.assertEqual(res['id'], req_id, "test_worker_registry Response id doesn't match")
self.assertEqual(res['error']['code'], 0, "WorkerRegistry Response error code doesn't match")
self.assertEqual(res['error']['message'], 'Successfully Registered', "WorkerRegistry Response error message doesn't match")
def test_worker_update(self):
req_id = 13
self.__details = json.dumps(
{
"workOrderSyncUri": "http://worker-order:8008".encode("utf-8").hex(),
"workOrderNotifyUri": "http://worker-notify:8008".encode("utf-8").hex()
})
        logging.info('Calling test_worker_update with\n worker_id %s\n details %s\n',
self.__worker_id, self.__details)
res = self.__worker_registry_wrapper.worker_update(self.__worker_id, self.__details, req_id)
logging.info('Result: %s\n', res)
self.assertEqual(res['id'], req_id, "worker_update Response id doesn't match")
self.assertEqual(res['error']['code'], 0, "worker_update Response error code doesn't match")
self.assertEqual(res['error']['message'], "Successfully Updated", "worker_update Response error message doesn't match")
def test_worker_set_status(self):
req_id = 14
self.__status = WorkerStatus.OFF_LINE
        logging.info('Calling test_worker_set_status with\n worker_id %s\n status %d\n',
self.__worker_id, self.__status.value)
res = self.__worker_registry_wrapper.worker_set_status(self.__worker_id, self.__status, req_id)
logging.info('Result: %s\n', res)
self.assertEqual(res['id'], req_id, "worker_set_status Response id doesn't match")
self.assertEqual(res['error']['code'], 0, "worker_set_status Response error code doesn't match")
self.assertEqual(res['error']['message'], "Successfully Set Status", "worker_set_status Response error message doesn't match")
def test_worker_retrieve(self):
req_id = 15
        logging.info('Calling test_worker_retrieve with\n worker_id %s\n',
self.__worker_id)
res = self.__worker_registry_wrapper.worker_retrieve(self.__worker_id, req_id)
logging.info('Result: %s\n', res)
self.assertEqual(res['id'], req_id, "worker_retrieve Response id doesn't match")
self.assertEqual(res['result']['workerType'], self.__worker_type.value, "worker_retrieve Response result workerType doesn't match")
self.assertEqual(res['result']['organizationId'], self.__org_id, "worker_retrieve Response result organizationId doesn't match")
self.assertEqual(res['result']['applicationTypeId'][0], self.__app_ids[0], "worker_retrieve Response result applicationTypeId[0] doesn't match")
self.assertEqual(res['result']['applicationTypeId'][1], self.__app_ids[1], "worker_retrieve Response result applicationTypeId[1] doesn't match")
self.assertEqual(res['result']['details'], self.__details, "worker_retrieve Response result details doesn't match")
self.assertEqual(res['result']['status'], self.__status.value, "worker_retrieve Response result status doesn't match")
def test_worker_lookup(self):
req_id = 16
        logging.info('Calling test_worker_lookup with\n worker type %d\n org_id %s\n application ids %s\n',
self.__worker_type.value, self.__org_id, self.__app_ids)
res = self.__worker_registry_wrapper.worker_lookup(self.__worker_type, self.__org_id, self.__app_ids, req_id)
logging.info('Result: %s\n', res)
self.assertEqual(res['id'], req_id, "worker_lookup Response id doesn't match")
self.assertEqual(res['result']['totalCount'], 1, "worker_lookup Response totalCount doesn't match")
self.assertEqual(res['result']['lookupTag'], self.__worker_id, "worker_lookup Response lookup tag doesn't match")
self.assertEqual(res['result']['ids'][0], self.__worker_id, "worker_lookup Response worker id doesn't match")
def test_worker_lookup_next(self):
req_id = 17
logging.info('Calling worker_lookup_next with\n worker type %d\n org_id %s\n app_ids %s\n lookUpTag %s\n',
self.__worker_type.value,self.__org_id, self.__app_ids, "sample tag")
res = self.__worker_registry_wrapper.worker_lookup_next(self.__worker_type, self.__org_id, self.__app_ids, "sample tag", req_id)
logging.info('Result: %s\n', res)
self.assertEqual(res['id'], req_id, "worker_lookup_next Response id doesn't match")
"""
self.assertEqual(res['result']['totalCount'], 0, "worker_lookup_next Response totalCount doesn't match")
self.assertEqual(res['result']['lookupTag'], '', "worker_lookup_next Response lookup tag doesn't match")
self.assertEqual(res['result']['ids'][0], '0x0000a3', "worker_lookup_next Response worker id doesn't match")
"""
def main():
logging.info("Running test cases...\n")
tcf_home = environ.get("TCF_HOME", "../../")
test = TestWorkerRegistryJRPCImpl(tcf_home + "/common/tcf_connector/" + "tcf_connector.toml")
test.test_worker_register()
test.test_worker_update()
test.test_worker_set_status()
test.test_worker_retrieve()
test.test_worker_lookup()
test.test_worker_lookup_next()
if __name__ == '__main__':
main()
|
try:
from collections import OrderedDict
except ImportError:
from ordereddict import OrderedDict
from pyramid.renderers import JSONP
class GeoJSON(JSONP):
def __call__(self, info):
def _render(value, system):
request = system.get('request')
# Response should have appropriate Content-Type
response = request.response
ct = response.content_type
if ct == response.default_content_type:
response.content_type = 'application/vnd.geo+json'
# Inspect model definition
geom_fields = {}
model_id = request.matchdict.get('model_id')
if model_id:
definition = request.db.get_model_definition(model_id)
if definition:
geom_fields = self._geomFields(definition)
# Transform records into GeoJSON feature collection
records = value.get('records')
if records is not None:
geojson = dict(type='FeatureCollection', features=[])
for record in records:
feature = self._buildFeature(geom_fields, record)
geojson['features'].append(feature)
value = geojson
jsonp = super(GeoJSON, self).__call__(info)
return jsonp(value, system)
return _render
def _geomFields(self, definition):
"""Returns mapping between definition field names and geometry types
"""
# Supported geometry types
mapping = {'point': 'Point',
                   'line': 'LineString',
'polygon': 'Polygon'}
geom_types = ['geojson'] + list(mapping.keys())
# Gather all geometry fields for this definition
geom_fields = []
for field in definition['fields']:
if field['type'] in geom_types:
geom_fields.append((field['name'],
mapping.get(field['type'],
field['type'])))
return OrderedDict(geom_fields)
def _buildFeature(self, geom_fields, record):
"""Return GeoJSON feature (properties + geometry(ies))
"""
feature = dict(type='Feature')
feature['id'] = record.pop('id', None)
first = True
for name, geomtype in geom_fields.items():
if geomtype == 'geojson':
geometry = record.pop(name)
else:
# Note for future: this won't work for GeometryCollection
coords = record.pop(name)
geometry = dict(type=geomtype, coordinates=coords)
name = 'geometry' if first else name
feature[name] = geometry
first = False
feature['properties'] = record
return feature
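# Hypothetical wiring sketch: like the stock JSONP renderer it extends, this
# renderer would be registered on the Pyramid Configurator (the renderer name
# 'geojson' is an assumption, not part of this module):
#
#   config.add_renderer('geojson', GeoJSON())
#
# after which a view declared with renderer='geojson' that returns
# {'records': [...]} is serialized as a GeoJSON FeatureCollection.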
|
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
import torch
import ignite
def pixelwise_accuracy(num_classes, output_transform=lambda x: x, device=None):
"""Calculates class accuracy
Args:
num_classes (int): number of classes
output_transform (callable, optional): a callable that is used to transform the
output into the form expected by the metric.
Returns:
MetricsLambda
"""
cm = ignite.metrics.ConfusionMatrix(num_classes=num_classes, output_transform=output_transform, device=device)
# Increase floating point precision and pass to CPU
cm = cm.type(torch.DoubleTensor)
pix_cls = ignite.metrics.confusion_matrix.cmAccuracy(cm)
return pix_cls
def class_accuracy(num_classes, output_transform=lambda x: x, device=None):
"""Calculates class accuracy
Args:
num_classes (int): number of classes
output_transform (callable, optional): a callable that is used to transform the
output into the form expected by the metric.
Returns:
MetricsLambda
"""
cm = ignite.metrics.ConfusionMatrix(num_classes=num_classes, output_transform=output_transform, device=device)
# Increase floating point precision and pass to CPU
cm = cm.type(torch.DoubleTensor)
acc_cls = cm.diag() / (cm.sum(dim=1) + 1e-15)
return acc_cls
def mean_class_accuracy(num_classes, output_transform=lambda x: x, device=None):
"""Calculates mean class accuracy
Args:
num_classes (int): number of classes
output_transform (callable, optional): a callable that is used to transform the
output into the form expected by the metric.
Returns:
MetricsLambda
"""
return class_accuracy(num_classes=num_classes, output_transform=output_transform, device=device).mean()
def class_iou(num_classes, output_transform=lambda x: x, device=None, ignore_index=None):
"""Calculates per-class intersection-over-union
Args:
num_classes (int): number of classes
output_transform (callable, optional): a callable that is used to transform the
output into the form expected by the metric.
Returns:
MetricsLambda
"""
cm = ignite.metrics.ConfusionMatrix(num_classes=num_classes, output_transform=output_transform, device=device)
return ignite.metrics.IoU(cm, ignore_index=ignore_index)
def mean_iou(num_classes, output_transform=lambda x: x, device=None, ignore_index=None):
"""Calculates mean intersection-over-union
Args:
num_classes (int): number of classes
output_transform (callable, optional): a callable that is used to transform the
output into the form expected by the metric.
Returns:
MetricsLambda
"""
cm = ignite.metrics.ConfusionMatrix(num_classes=num_classes, output_transform=output_transform, device=device)
return ignite.metrics.mIoU(cm, ignore_index=ignore_index)
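# Minimal usage sketch (assumes an ignite evaluation Engine named `evaluator`
# whose output is the (y_pred, y) pair that ConfusionMatrix expects by
# default; all names below are illustrative):
#
#   miou = mean_iou(num_classes=21, ignore_index=0)
#   miou.attach(evaluator, 'mean_iou')
#   state = evaluator.run(val_loader)
#   print(state.metrics['mean_iou'])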
|
import json
import re
import scrapy
from locations.items import GeojsonPointItem
class StateFarmSpider(scrapy.Spider):
name = "statefarm"
item_attributes = { 'brand': "State Farm" }
allowed_domains = ["statefarm.com"]
download_delay = 0.2
start_urls = [
'https://www.statefarm.com/agent/us',
]
def parse_location(self, response):
name = response.xpath('//*[@id="AgentNameLabelId"]//span[@itemprop="name"]/text()').extract_first()
if name:
name += ' - State Farm Insurance Agent'
lat = response.xpath('//*[@id="agentOfficePrimaryLocLat"]/@value').extract_first()
lon = response.xpath('//*[@id="agentOfficePrimaryLocLong"]/@value').extract_first()
properties = {
'ref': "_".join(response.url.split('/')[-3:]),
'name': name,
'addr_full': response.xpath('normalize-space(//div[@itemtype="http://schema.org/PostalAddress"]//span[@id="locStreetContent_mainLocContent"]/text())').extract_first(),
'city': response.xpath('//div[@itemtype="http://schema.org/PostalAddress"]/div[2]/span/span[1]/text()').extract_first().strip(', '),
'state': response.xpath('//div[@itemtype="http://schema.org/PostalAddress"]/div[2]/span/span[2]/text()').extract_first(),
'postcode': response.xpath('//div[@itemtype="http://schema.org/PostalAddress"]/div[2]/span/span[3]/text()').extract_first(),
'phone': response.xpath('normalize-space(//span[@id="offNumber_mainLocContent"]/span/text())').extract_first(),
'lat': float(lat) if lat else None,
'lon': float(lon) if lon else None,
'website': response.url,
}
yield GeojsonPointItem(**properties)
def parse(self, response):
agents = response.xpath('//div[contains(@id, "agent-details")]')
# agent_sites = response.xpath('//a[contains(text(), "Visit agent site")]/@href').extract()
if agents:
for agent in agents:
agent_site = agent.xpath('.//a[contains(text(), "Visit agent site")]/@href').extract_first()
if not agent_site:
raise Exception('no agent site found')
yield scrapy.Request(response.urljoin(agent_site), callback=self.parse_location)
else:
urls = response.xpath('//li/div/a/@href').extract()
for url in urls:
yield scrapy.Request(response.urljoin(url))
|
import getpass
try:
    p = getpass.getpass()
except Exception as err:
    print('ERROR', err)
else:
    print('You entered', p)
|
# -*- coding: utf-8 -*-
import sys
import re
import requests
import netaddr
import json
from collections import OrderedDict
from ipamcli.libs.phpipam import exception
VLANS = {'22': {'name': 'smev-vipnet-vlan', 'prefix': '10.32.250.0/24'},
'23': {'name': 'uek-vlan', 'prefix': '10.38.33.72/29'},
'24': {'name': 'lpu-vlan', 'prefix': '10.33.80.0/24'},
'25': {'name': 'obr-vipnet-vlan', 'prefix': '91.227.95.96/29'},
'26': {'name': 'minzdrav-vipnet-vlan', 'prefix': '91.227.95.112/29'},
'27': {'name': 'crpsec-vipnet-vlan', 'prefix': '10.32.252.0/24'},
'28': {'name': 'crprmt-vipnet-vlan', 'prefix': '10.33.76.0/24'},
'29': {'name': 'sooz-vipnet-vlan', 'prefix': '10.32.253.0/24'},
'33': {'name': 'dragnet-vipnet-vlan', 'prefix': '10.33.72.0/24'},
'34': {'name': 'zastava-vipnet-vlan', 'prefix': '10.11.26.0/24'},
'54': {'name': 'users-vlan', 'prefix': '172.17.0.0/16'},
'55': {'name': 'voip-vlan', 'prefix': '10.32.8.0/22'},
'100': {'name': 'dmz-rt-vlan', 'prefix': '91.227.94.0/25'},
'102': {'name': 'yb-vpn-vlan', 'prefix': '10.32.14.116/30'},
'201': {'name': 'dmz-vlan', 'prefix': '91.227.93.0/24'},
'203': {'name': 'mgmt-vlan', 'prefix': '10.33.16.0/20'},
'205': {'name': 'infr-vlan', 'prefix': '10.33.64.0/23'},
'206': {'name': 'is-vlan', 'prefix': '10.33.68.0/22'},
'213': {'name': 'azk-vlan', 'prefix': '10.33.66.0/24'},
'219': {'name': 'saperion-vlan', 'prefix': '10.32.14.112/30'},
'221': {'name': 'yb-mgmt-vlan', 'prefix': '10.46.77.0/24'},
'222': {'name': 'vks-vlan', 'prefix': '91.227.94.144/28'},
'223': {'name': 'nessus-vlan', 'prefix': '91.227.94.144/28'},
'224': {'name': 'ex-vlan', 'prefix': '10.33.60.0/24'},
'226': {'name': 'jkh-vlan', 'prefix': '10.33.7.32/27'},
'897': {'name': 'jdoc-db-vlan', 'prefix': '10.38.200.0/24'}
}
def in_dictlist(key, value, my_dictlist):
for this in my_dictlist:
if this[key] == value:
return this
return {}
def checkIP(ip):
a = ip.split('.')
if len(a) != 4:
return False
for x in a:
if not x.isdigit():
return False
i = int(x)
if i < 0 or i > 255:
return False
return netaddr.IPAddress(ip)
def checkMAC(mac):
if re.match('[0-9a-f]{2}([-:])[0-9a-f]{2}(\\1[0-9a-f]{2}){4}$',
mac.lower()):
return True
else:
return False
def get_token(ctx):
try:
r = requests.post('{}/user/'.format(ctx.url),
auth=(ctx.username, ctx.password))
except Exception:
        ctx.logerr('Oops. HTTP API error occurred.')
return
if r.status_code == 500:
ctx.logerr(r.json()['message'])
sys.exit(1)
elif r.status_code == 200 and r.json():
return r.json()['data']['token']
else:
return None
def get_network_mask_by_subnet(subnet):
try:
netmask = str(netaddr.IPNetwork(subnet).netmask)
except Exception:
return None
return netmask
def get_subnet_id(ctx, network):
try:
r = requests.get('{}/subnets/cidr/{}'.format(ctx.url, str(network)),
headers={'token': ctx.token})
except Exception:
        ctx.logerr('Oops. HTTP API error occurred.')
return None
if r.status_code == 401:
ctx.logerr('Wrong authorization token.')
return None
elif r.status_code == 200 and r.json():
network_id = r.json()['data'][0]['id']
return network_id
def get_subnet_by_id(ctx, subnetId):
try:
r = requests.get('{}/subnets/{}/'.format(ctx.url, str(subnetId)),
headers={'token': ctx.token})
except Exception:
        ctx.logerr('Oops. HTTP API error occurred.')
return None
if r.status_code == 401:
ctx.logerr('Wrong authorization token.')
return None
elif r.status_code == 200 and r.json():
subnet = r.json()['data']
return subnet
def get_subnet_by_ip(ctx, ip):
try:
r = requests.get('{}/subnets/overlapping/{}/32'.format(ctx.url, str(ip)),
headers={'token': ctx.token})
except Exception:
        ctx.logerr('Oops. HTTP API error occurred.')
return None
if r.status_code == 401:
ctx.logerr('Wrong authorization token.')
return None
elif r.status_code == 200 and r.json():
subnet = r.json()['data'][0]
return subnet
def get_addresses_from_subnet(ctx, subnet_id):
try:
r = requests.get('{}/subnets/{}/addresses/'.format(ctx.url, subnet_id),
headers={'token': ctx.token})
except Exception:
        ctx.logerr('Oops. HTTP API error occurred.')
return None
if r.status_code == 200 and r.json():
return r.json()['data']
else:
return None
def get_network_prefix_by_subnet(subnet):
try:
prefix = str(netaddr.IPNetwork(subnet).prefixlen)
except Exception:
return None
return prefix
def show_network_addresses(ctx, network, free_only, verbose):
network_id = get_subnet_id(ctx, network)
if network_id:
network_set = netaddr.IPSet(network)
else:
if verbose:
            ctx.logerr('Subnet {} not found.'.format(network))
return False
addresses = get_addresses_from_subnet(ctx, network_id)
result = []
network_set.remove(network.network)
network_set.remove(network.broadcast)
for item in network_set:
phpipam_ip_info = in_dictlist('ip', str(item), addresses)
if phpipam_ip_info:
if not free_only:
if phpipam_ip_info['description']:
phpipam_ip_info['description'] = phpipam_ip_info['description'].replace('\r\n', ' ')[:95]
result.append(OrderedDict([
('Address', phpipam_ip_info['ip']),
('MAC', phpipam_ip_info['mac']),
('FQDN', phpipam_ip_info['hostname']),
('Description', phpipam_ip_info['description']),
('Task', phpipam_ip_info['custom_NOC_TT'])
]))
else:
result.append(OrderedDict([
('Address', str(item)),
('MAC', ''),
('FQDN', ''),
('Description', 'free'),
('Task', '')
]))
return result
def search_by_ip(ctx, ip):
try:
r = requests.get('{}/addresses/search/{}/'.format(ctx.url, ip),
headers={'token': ctx.token})
except Exception:
        ctx.logerr('Oops. HTTP API error occurred.')
return None
if r.status_code == 200 and r.json() and r.json()['success']:
return r.json()['data']
else:
return None
def search_by_id(ctx, id):
try:
r = requests.get('{}/addresses/{}/'.format(ctx.url, id),
headers={'token': ctx.token})
except Exception:
        ctx.logerr('Oops. HTTP API error occurred.')
return None
if r.status_code == 200 and r.json() and r.json()['success']:
return r.json()['data']
else:
return None
def remove_by_id(ctx, id):
try:
r = requests.delete('{}/addresses/{}/'.format(ctx.url, id),
headers={'token': ctx.token})
except Exception:
        ctx.logerr('Oops. HTTP API error occurred.')
return None
if r.status_code == 200 and r.json() and r.json()['success']:
return True
else:
return None
def search_by_hostname(ctx, hostname):
try:
r = requests.get('{}/addresses/search_hostname/{}/'.format(ctx.url, hostname),
headers={'token': ctx.token})
except Exception:
        ctx.logerr('Oops. HTTP API error occurred.')
return None
if r.status_code == 200 and r.json() and r.json()['success']:
return r.json()['data']
else:
return None
def get_first_empty(ctx, network):
network_id = get_subnet_id(ctx, network)
if network_id is None:
return None
try:
r = requests.get('{}/subnets/{}/first_free/'.format(ctx.url, network_id),
headers={'token': ctx.token})
except Exception:
        ctx.logerr('Oops. HTTP API error occurred.')
return None
if r.status_code == 200 and r.json():
ip = r.json()['data']
return ip
else:
return None
def get_last_empty(ctx, network):
    network_id = get_subnet_id(ctx, network)
    if network_id is None:
        return None
    addresses = get_addresses_from_subnet(ctx, network_id)
    network_set = netaddr.IPSet(network)
    network_set.remove(network.network)
    network_set.remove(network.broadcast)
    result = []
    for item in network_set:
        ip_entry_exist = in_dictlist('ip', str(item), addresses)
        if not ip_entry_exist:
            result.append(item)
    return result[-1] if result else None
def add_address(ctx, payload):
try:
r = requests.post('{}/addresses/'.format(ctx.url),
headers={'Content-type': 'application/json', 'token': ctx.token},
data=json.dumps(payload))
except Exception:
        ctx.logerr('Oops. HTTP API error occurred.')
return None
if r.status_code == 201 and r.json() and r.json()['success']:
return r.json()['id']
elif r.status_code == 409 and r.json() and r.json()['message'] == 'IP address already exists':
raise exception.ipamCLIIPExists
else:
return None
def edit_address(ctx, id, payload):
try:
r = requests.patch('{}/addresses/{}/'.format(ctx.url, id),
headers={'Content-type': 'application/json', 'token': ctx.token},
data=json.dumps(payload))
except Exception:
        ctx.logerr('Oops. HTTP API error occurred.')
return None
if r.status_code == 200 and r.json() and r.json()['success']:
return True
else:
return None
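# Hypothetical usage sketch: the functions above expect a ctx object exposing
# url, username, password, token and a logerr() callable (all names below are
# illustrative):
#
#   ctx.token = get_token(ctx)
#   network = netaddr.IPNetwork(VLANS['24']['prefix'])
#   free_ip = get_first_empty(ctx, network)
#   if free_ip:
#       add_address(ctx, {'subnetId': get_subnet_id(ctx, network),
#                         'ip': free_ip,
#                         'hostname': 'host.example.org'})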
|
import os
# Playing music on macOS is not supported, so the flag is hard-coded to False.
mac = False
# A platform check (e.g. os.name == "posix") could be used here instead.
|
import amass
class Command(amass.commands.Command):
is_command = False
def __init__(self):
amass.commands.Command.__init__(self)
self.file = __file__
|