blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 112 | license_type stringclasses 2 values | repo_name stringlengths 5 115 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 777 values | visit_date timestamp[us]date 2015-08-06 10:31:46 2023-09-06 10:44:38 | revision_date timestamp[us]date 1970-01-01 02:38:32 2037-05-03 13:00:00 | committer_date timestamp[us]date 1970-01-01 02:38:32 2023-09-06 01:08:06 | github_id int64 4.92k 681M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 22 values | gha_event_created_at timestamp[us]date 2012-06-04 01:52:49 2023-09-14 21:59:50 ⌀ | gha_created_at timestamp[us]date 2008-05-22 07:58:19 2023-08-21 12:35:19 ⌀ | gha_language stringclasses 149 values | src_encoding stringclasses 26 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 3 10.2M | extension stringclasses 188 values | content stringlengths 3 10.2M | authors listlengths 1 1 | author_id stringlengths 1 132 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
4cba5aaf29add8565a672b7eacd28035fad23519 | 543286f4fdefe79bd149ff6e103a2ea5049f2cf4 | /Exercicios&cursos/eXcript/Aula 10 - Gerenciador de leiaute pack.py | 729776a1ddf43603ea188c61d45cdb88de9d361c | [] | no_license | antonioleitebr1968/Estudos-e-Projetos-Python | fdb0d332cc4f12634b75984bf019ecb314193cc6 | 9c9b20f1c6eabb086b60e3ba1b58132552a84ea6 | refs/heads/master | 2022-04-01T20:03:12.906373 | 2020-02-13T16:20:51 | 2020-02-13T16:20:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 431 | py | from tkinter import *
# Tkinter "pack" layout-manager demo (eXcript course, lesson 10): widgets are
# stacked in the order they are packed, not the order they were created.
janela = Tk()
lb1 = Label(janela, text='label1', bg='green')
lb2 = Label(janela, text='label2', bg='red')
lb3 = Label(janela, text='label3', bg='yellow')
lb4 = Label(janela, text='label4', bg='blue')
# Packing order decides the stacking: label2 on top, then label1, then label3.
lb2.pack()
lb1.pack()
lb3.pack()
lb4.pack(side=BOTTOM)  # pinned to the bottom edge of the window
janela.geometry('400x300+200+200')  # 400x300 px window at screen offset (200, 200)
janela.mainloop()
# Note: the SIDE option of pack() (comments translated from Portuguese)
#TOP == top
#LEFT == left
#RIGHT == right
#BOTTOM == INFERIOR | [
"progmatheusmorais@gmail.com"
] | progmatheusmorais@gmail.com |
42f3e50b0b2eb2a7ded8a894750027dba4d4896a | e71b6d14fbdbc57c7234ca45a47329d7d02fc6f7 | /flask_api/venv/lib/python3.7/site-packages/vsts/npm/v4_1/models/upstream_source_info.py | 21e55cba9de401017666da42d7fc8f6db31cd757 | [] | no_license | u-blavins/secret_sasquatch_society | c36993c738ab29a6a4879bfbeb78a5803f4f2a57 | 0214eadcdfa9b40254e331a6617c50b422212f4c | refs/heads/master | 2020-08-14T00:39:52.948272 | 2020-01-22T13:54:58 | 2020-01-22T13:54:58 | 215,058,646 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,359 | py | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest.serialization import Model
class UpstreamSourceInfo(Model):
    """UpstreamSourceInfo.
    :param id:
    :type id: str
    :param location:
    :type location: str
    :param name:
    :type name: str
    :param source_type:
    :type source_type: object
    """
    # msrest (de)serialization map: attribute name -> wire-format key and msrest
    # type string; note 'source_type' is camelCase ('sourceType') on the wire.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'source_type': {'key': 'sourceType', 'type': 'object'}
    }
    def __init__(self, id=None, location=None, name=None, source_type=None):
        # NOTE: `id` shadows the builtin; left unchanged because this file is
        # generated ("Generated file, DO NOT EDIT" in the header).
        super(UpstreamSourceInfo, self).__init__()
        self.id = id
        self.location = location
        self.name = name
        self.source_type = source_type
| [
"usama.blavins1@gmail.com"
] | usama.blavins1@gmail.com |
c599f1033e7359f995f370ee51498cc0dd147973 | d308fffe3db53b034132fb1ea6242a509f966630 | /pirates/leveleditor/worldData/port_royal_interior_mansion.py | 16b263e14a218f8734b4996a10c0aa6f949ece9a | [
"BSD-3-Clause"
] | permissive | rasheelprogrammer/pirates | 83caac204965b77a1b9c630426588faa01a13391 | 6ca1e7d571c670b0d976f65e608235707b5737e3 | refs/heads/master | 2020-03-18T20:03:28.687123 | 2018-05-28T18:05:25 | 2018-05-28T18:05:25 | 135,193,362 | 3 | 2 | null | null | null | null | UTF-8 | Python | false | false | 1,474 | py | # uncompyle6 version 3.2.0
# Python bytecode 2.4 (62061)
# Decompiled from: Python 2.7.14 (v2.7.14:84471935ed, Sep 16 2017, 20:19:30) [MSC v.1500 32 bit (Intel)]
# Embedded file name: pirates.leveleditor.worldData.port_royal_interior_mansion
from pandac.PandaModules import Point3, VBase3
objectStruct = {'Interact Links': [], 'Objects': {'1168033330.17kmuller0': {'Type': 'Building Interior', 'Name': 'port_royal_interior_mansion', 'AdditionalData': ['interior_mansion'], 'Instanced': True, 'Objects': {'1171325040.86MAsaduzz': {'Type': 'Townsperson', 'Category': 'Cast', 'AnimSet': 'sit_write', 'CustomModel': 'models/char/es_2000', 'Hpr': VBase3(-27.528, 0.0, 0.0), 'Pos': Point3(-0.101, -8.086, -0.028), 'Private Status': 'All', 'Respawns': True, 'Scale': VBase3(1.0, 1.0, 1.0), 'Start State': 'Idle', 'Team': 'Villager'}, '1176937728.0dxschafe': {'Type': 'Interactive Prop', 'Hpr': VBase3(179.866, 0.0, 0.0), 'Objects': {}, 'Pos': Point3(0.274, -9.202, 0.0), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Model': 'models/props/chair_fancy'}, 'interactAble': 'npc', 'interactType': 'sit_write'}}, 'Visual': {'Model': 'models/buildings/interior_mansion_gov'}}}, 'Node Links': [], 'Layers': {}, 'ObjectIds': {'1168033330.17kmuller0': '["Objects"]["1168033330.17kmuller0"]', '1171325040.86MAsaduzz': '["Objects"]["1168033330.17kmuller0"]["Objects"]["1171325040.86MAsaduzz"]', '1176937728.0dxschafe': '["Objects"]["1168033330.17kmuller0"]["Objects"]["1176937728.0dxschafe"]'}} | [
"33942724+itsyaboyrocket@users.noreply.github.com"
] | 33942724+itsyaboyrocket@users.noreply.github.com |
ef588dbdd970cdf3978c52e3416432ac5a91bdc6 | c6e22a6901bc40ba92a0470c6323929368727bbb | /src/virtual_tour/serializers.py | 636293e43ed900ce3f0403d8d5f1ce5f2aa622aa | [] | no_license | iamgaddiel/learners_coner | bdc47c7caac9898ca3a8836f1ad972afa9f88cf8 | fb3ea68de8c02d1f1db6177b7c267a743a0b5a32 | refs/heads/main | 2023-08-04T06:12:08.728355 | 2021-09-13T04:24:42 | 2021-09-13T04:24:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 202 | py | from rest_framework import serializers
from .models import VirtualTour
class VirtualTourSerializer(serializers.ModelSerializer):
    """DRF serializer exposing the VirtualTour model for the REST API."""
    class Meta:
        # '__all__' auto-generates one serializer field per VirtualTour model field.
        model = VirtualTour
        fields = '__all__'
| [
"gaddiel@localhost.localdomain"
] | gaddiel@localhost.localdomain |
ce2e26a49d8a3dcfc7b90e49e4999c830e6b0d4f | 0f835a836f1885f8802db10813603862f8aabf7c | /src/todo/urls.py | 778e7ed416a3458d65f6e5c66a824e6dabaf776c | [] | no_license | yordan-marinov/su_dj_basics | 80eecd75630599f4458c36af16366d547d215d00 | 24cbe5b291c495cbb77a650766fd5689ba191451 | refs/heads/main | 2023-06-06T07:25:50.297776 | 2021-06-22T08:21:20 | 2021-06-22T08:21:20 | 370,289,231 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 332 | py | from django.urls import path
from .views import todo_create, todo_delete, todo_edit, todo_list
# URL routes for the todo app; each `name` is used for reverse() lookups in
# templates and views.
urlpatterns = [
    path("", todo_list, name="todo_list"),  # app root: list all todos
    path("create/", todo_create, name="todo_create"),
    path("edit/<int:pk>/", todo_edit, name="todo_edit"),  # pk = todo primary key
    path("delete/<int:pk>/", todo_delete, name="todo_delete"),
]
| [
"jordanmarinov8@gmail.com"
] | jordanmarinov8@gmail.com |
3500ef3acf09023a8b49b10527ae01d5437a8ccc | 5a1f77b71892745656ec9a47e58a078a49eb787f | /1_Kithgard_Dungeon/052-Kithgard_Gates/kithgard_gates.py | 57be0f04eaee30b7cb49cb7d20dd33a2803f9efa | [
"MIT"
] | permissive | ripssr/Code-Combat | 78776e7e67c033d131e699dfeffb72ca09fd798e | fbda1ac0ae4a2e2cbfce21492a2caec8098f1bef | refs/heads/master | 2020-06-11T20:17:59.817187 | 2019-07-21T09:46:04 | 2019-07-21T09:46:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 174 | py | # You need the Elemental codex 1+ to cast "Haste"
# You need the Emperor's gloves to cast "Chain Lightning"
# CodeCombat level script: buff our movement speed, then walk down and right
# toward the gates.
hero.cast("haste", hero);  # cast "Haste" on ourselves (needs Elemental Codex 1+, per the note above)
hero.moveDown(0.7)
hero.moveRight(2.5)
| [
"katik.hello@gmail.com"
] | katik.hello@gmail.com |
7851674a43efd44e592eebdb0548527abaec7298 | 51f887286aa3bd2c3dbe4c616ad306ce08976441 | /pybind/slxos/v17r_1_01a/interface/ethernet/link_oam_interface/remote_failure/link_fault/__init__.py | e4db3e5baa84ab0f2cd42f54cb3ae5914099cac4 | [
"Apache-2.0"
] | permissive | b2220333/pybind | a8c06460fd66a97a78c243bf144488eb88d7732a | 44c467e71b2b425be63867aba6e6fa28b2cfe7fb | refs/heads/master | 2020-03-18T09:09:29.574226 | 2018-04-03T20:09:50 | 2018-04-03T20:09:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,033 | py |
from operator import attrgetter
import pyangbind.lib.xpathhelper as xpathhelper
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType
from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType
from pyangbind.lib.base import PybindBase
from decimal import Decimal
from bitarray import bitarray
import __builtin__
# NOTE: Python 2 era pyangbind output ('unicode', '__builtin__'); regenerate
# from the YANG model rather than editing by hand.
class link_fault(PybindBase):
  """
  This class was auto-generated by the PythonClass plugin for PYANG
  from YANG module brocade-interface - based on the path /interface/ethernet/link-oam-interface/remote-failure/link-fault. Each member element of
  the container is represented as a class variable - with a specific
  YANG type.
  """
  # __slots__ avoids a per-instance __dict__; '__link_fault_action' is
  # name-mangled to '_link_fault__link_fault_action'.
  __slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_rest_name', '_extmethods', '__link_fault_action',)
  _yang_name = 'link-fault'
  _rest_name = 'link-fault'
  _pybind_generated_by = 'container'
  # Accepts pyangbind plumbing via kwargs (path_helper, extmethods, load) and
  # optionally copies attributes from a single compatible object passed
  # positionally.
  def __init__(self, *args, **kwargs):
    # Resolve the xpath helper: explicit kwarg wins, else inherit from parent.
    path_helper_ = kwargs.pop("path_helper", None)
    if path_helper_ is False:
      self._path_helper = False
    elif path_helper_ is not None and isinstance(path_helper_, xpathhelper.YANGPathHelper):
      self._path_helper = path_helper_
    elif hasattr(self, "_parent"):
      path_helper_ = getattr(self._parent, "_path_helper", False)
      self._path_helper = path_helper_
    else:
      self._path_helper = False
    # Same resolution scheme for extension methods.
    extmethods = kwargs.pop("extmethods", None)
    if extmethods is False:
      self._extmethods = False
    elif extmethods is not None and isinstance(extmethods, dict):
      self._extmethods = extmethods
    elif hasattr(self, "_parent"):
      extmethods = getattr(self._parent, "_extmethods", None)
      self._extmethods = extmethods
    else:
      self._extmethods = False
    # Default value for the 'link-fault-action' leaf: an enumeration whose only
    # legal value is 'block-interface'.
    self.__link_fault_action = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'block-interface': {'value': 1}},), is_leaf=True, yang_name="link-fault-action", rest_name="action", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Configures an action for the event', u'cli-full-no': None, u'alt-name': u'action'}}, namespace='urn:brocade.com:mgmt:brocade-dot3ah', defining_module='brocade-dot3ah', yang_type='action-type', is_config=True)
    load = kwargs.pop("load", None)
    if args:
      if len(args) > 1:
        raise TypeError("cannot create a YANG container with >1 argument")
      # Verify the supplied object carries every element of this container
      # before copying any changed values over.
      all_attr = True
      for e in self._pyangbind_elements:
        if not hasattr(args[0], e):
          all_attr = False
          break
      if not all_attr:
        raise ValueError("Supplied object did not have the correct attributes")
      for e in self._pyangbind_elements:
        nobj = getattr(args[0], e)
        if nobj._changed() is False:
          continue
        setmethod = getattr(self, "_set_%s" % e)
        if load is None:
          setmethod(getattr(args[0], e))
        else:
          setmethod(getattr(args[0], e), load=load)
  # YANG data-tree path of this container (root path used when no parent).
  def _path(self):
    if hasattr(self, "_parent"):
      return self._parent._path()+[self._yang_name]
    else:
      return [u'interface', u'ethernet', u'link-oam-interface', u'remote-failure', u'link-fault']
  # REST API path; segments ('Ethernet', 'link-oam') differ from the YANG path.
  def _rest_path(self):
    if hasattr(self, "_parent"):
      if self._rest_name:
        return self._parent._rest_path()+[self._rest_name]
      else:
        return self._parent._rest_path()
    else:
      return [u'interface', u'Ethernet', u'link-oam', u'remote-failure', u'link-fault']
  # Accessor pair for the 'link-fault-action' leaf; exposed below as the
  # 'link_fault_action' property.
  def _get_link_fault_action(self):
    """
    Getter method for link_fault_action, mapped from YANG variable /interface/ethernet/link_oam_interface/remote_failure/link_fault/link_fault_action (action-type)
    """
    return self.__link_fault_action
  def _set_link_fault_action(self, v, load=False):
    """
    Setter method for link_fault_action, mapped from YANG variable /interface/ethernet/link_oam_interface/remote_failure/link_fault/link_fault_action (action-type)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_link_fault_action is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_link_fault_action() directly.
    """
    if hasattr(v, "_utype"):
      v = v._utype(v)
    # Re-wrap v in the leaf's restricted type; failure means the value is not a
    # legal 'action-type' and is reported with machine-readable detail.
    try:
      t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'block-interface': {'value': 1}},), is_leaf=True, yang_name="link-fault-action", rest_name="action", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Configures an action for the event', u'cli-full-no': None, u'alt-name': u'action'}}, namespace='urn:brocade.com:mgmt:brocade-dot3ah', defining_module='brocade-dot3ah', yang_type='action-type', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """link_fault_action must be of a type compatible with action-type""",
          'defined-type': "brocade-dot3ah:action-type",
          'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'block-interface': {'value': 1}},), is_leaf=True, yang_name="link-fault-action", rest_name="action", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Configures an action for the event', u'cli-full-no': None, u'alt-name': u'action'}}, namespace='urn:brocade.com:mgmt:brocade-dot3ah', defining_module='brocade-dot3ah', yang_type='action-type', is_config=True)""",
        })
    self.__link_fault_action = t
    if hasattr(self, '_set'):
      self._set()
  # Reset the leaf back to its default (unset) state.
  def _unset_link_fault_action(self):
    self.__link_fault_action = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'block-interface': {'value': 1}},), is_leaf=True, yang_name="link-fault-action", rest_name="action", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Configures an action for the event', u'cli-full-no': None, u'alt-name': u'action'}}, namespace='urn:brocade.com:mgmt:brocade-dot3ah', defining_module='brocade-dot3ah', yang_type='action-type', is_config=True)
  # Public property giving attribute-style access to the leaf.
  link_fault_action = __builtin__.property(_get_link_fault_action, _set_link_fault_action)
  _pyangbind_elements = {'link_fault_action': link_fault_action, }
| [
"badaniya@brocade.com"
] | badaniya@brocade.com |
2f515b8a5dbe71e724bfe160a4d7abc563bd68f7 | 3d96cee3f0c986c7195e7677d85e91dc837d8dd4 | /venv/bin/pip3 | 4b76198db216e8fbf6aa3d14a970e17ee8167061 | [] | no_license | dannycrief/full-stack-web-dev-couse | 7faffe1c9e6c39baf03d6ee54f716e4f8b4c8733 | 0b22bc84742d8e78bd6a2e03adfbc44137f3d607 | refs/heads/master | 2023-01-12T09:25:16.378035 | 2021-03-21T16:51:18 | 2021-03-21T16:51:18 | 220,825,261 | 0 | 1 | null | 2023-01-05T12:57:14 | 2019-11-10T17:34:02 | Python | UTF-8 | Python | false | false | 272 | #!/home/skozurak/Projects/full-stack-web-dev-couse/venv/bin/python3.8
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
    # Strip a trailing "-script.pyw" / ".exe" (Windows wrapper launchers) from
    # argv[0] so pip reports a clean program name; no-op elsewhere.
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    # Delegate to pip's CLI entry point and propagate its exit status.
    sys.exit(main())
| [
"step.kozbvb@gmail.com"
] | step.kozbvb@gmail.com | |
60ae92e25652dac7f0639f7d268ef2bde0015f14 | 723a0ff7e88b5c64bc712be8faa7721c40a9be92 | /torch_chemistry/datasets/utils.py | 57cdea4c70245f8240305a7413b23ce0bdc5e732 | [
"MIT"
] | permissive | wengelearning/pytorch_chemistry | b9b3e328dadf7efa9c6304c15dea0cabbfedac50 | 14ca01ab2a30728016ce6c6793f119438a09ade5 | refs/heads/master | 2022-04-05T11:54:32.773305 | 2020-03-01T07:38:22 | 2020-03-01T07:38:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,461 | py | import os
import shutil
from pathlib import Path
from zipfile import ZipFile
from typing import List
import requests
import torch
import torch.nn.functional as F
from ..utils import to_Path
def check_download_file_size(url: str) -> int:
    """Return the size in bytes of the remote file at ``url``.

    Issues a HEAD request and parses the Content-Length response header, so
    nothing is actually downloaded.
    """
    head_response = requests.head(url)
    return int(head_response.headers['content-length'])
def check_local_file_size(filename: str) -> int:
    """Return the on-disk size in bytes of ``filename`` (resolved through to_Path)."""
    return os.stat(to_Path(filename)).st_size
def download(url: str = '', filename: str = '', savedir: str = '.') -> Path:
    """Download ``url`` to ``savedir/filename`` and return the target path.

    The download is skipped when the target file already exists, making the
    function idempotent.  The HTTP response is streamed straight to disk so the
    whole payload is never held in memory.

    Note: the original signature was annotated ``-> int`` but always returned
    the ``Path`` object; the annotation is corrected here.
    """
    savefile = to_Path(savedir) / filename
    if not savefile.exists():
        # stream=True + copyfileobj copies the raw socket stream in chunks.
        with requests.get(url, stream=True) as r:
            with open(savefile, 'wb') as f:
                shutil.copyfileobj(r.raw, f)
    return savefile
def extract_zipfile(zfilename: str, extractdir: str = '.') -> List[str]:
    """Extract every member of ``zfilename`` into ``extractdir``.

    Returns the archive's member names (paths relative to the archive root).
    """
    with ZipFile(zfilename) as archive:
        members = archive.namelist()
        archive.extractall(extractdir)
    return members
def to_sparse(x: torch.tensor, max_size: int = None):
    """Convert a dense tensor ``x`` to a sparse COO tensor.

    Args:
        x: dense input tensor (any dtype, any rank).
        max_size: optional; when given, the result has shape
            ``(max_size, max_size)`` instead of ``x.size()`` (used to embed a
            small adjacency matrix in a larger one).

    Returns:
        A sparse COO tensor holding exactly the non-zero entries of ``x``.

    The original all-zeros guard (``len(indices.shape) == 0``) never fired on
    modern PyTorch, where ``nonzero`` returns a ``(0, ndim)`` tensor; the code
    below handles an all-zero input naturally.  ``torch.sparse_coo_tensor``
    replaces the deprecated ``torch.sparse.*Tensor`` constructors and preserves
    ``x``'s dtype through ``values``.
    """
    indices = torch.nonzero(x).t()  # shape (ndim, nnz); works for nnz == 0
    values = x[tuple(indices[i] for i in range(indices.shape[0]))]
    size = x.size() if max_size is None else (max_size, max_size)
    return torch.sparse_coo_tensor(indices, values, size)
def get_mol_edge_index(mol, edge_types: dict):
    """Build graph connectivity for an RDKit-style molecule.

    Returns a ``(2, 2 * n_bonds)`` long tensor of edge indices and a one-hot
    long tensor of bond-type attributes.  Every bond contributes both
    directions (start->end and end->start), encoding an undirected graph.
    """
    src, dst, bond_kinds = [], [], []
    for bond in mol.GetBonds():
        i, j = bond.GetBeginAtomIdx(), bond.GetEndAtomIdx()
        src.extend((i, j))
        dst.extend((j, i))
        kind = edge_types[bond.GetBondType()]
        bond_kinds.extend((kind, kind))
    edge_index = torch.tensor([src, dst], dtype=torch.long)
    edge_attr = F.one_hot(torch.tensor(bond_kinds).long(),
                          num_classes=len(edge_types)).to(torch.long)
    return edge_index, edge_attr
def to_one_hot(x: torch.tensor, n_classes: int):
    """One-hot encode a 1-D tensor of class indices.

    Returns a float tensor of shape ``(len(x), n_classes)`` with
    ``out[i, x[i]] == 1``.

    The original implementation had two branches: a hand-rolled scatter for
    ``len(x) < n_classes`` and rectangular identity-matrix indexing otherwise;
    the latter silently produced an all-zero row for any index in
    ``[n_classes, len(x))``.  Indexing a square identity matrix gives the same
    result for all valid indices and raises IndexError for invalid ones.
    """
    return torch.eye(n_classes)[x, :]
| [
"kbu94982@gmail.com"
] | kbu94982@gmail.com |
e8810fc163644bf1273a6347c50f9e787114be94 | 0e1e643e864bcb96cf06f14f4cb559b034e114d0 | /Exps_7_v3/doc3d/I_to_M_Gk3_no_pad/pyr_Tcrop255_pad20_jit15/Sob_k31_s001/pyr_2s/L4/step10_a.py | daedb9d78096064ed214f053553e979ac509716f | [] | no_license | KongBOy/kong_model2 | 33a94a9d2be5b0f28f9d479b3744e1d0e0ebd307 | 1af20b168ffccf0d5293a393a40a9fa9519410b2 | refs/heads/master | 2022-10-14T03:09:22.543998 | 2022-10-06T11:33:42 | 2022-10-06T11:33:42 | 242,080,692 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,549 | py | #############################################################################################################################################################################################################
#############################################################################################################################################################################################################
### Add the kong_model2 project root to sys.path (translated from Chinese).
import os
code_exe_path = os.path.realpath(__file__) ### absolute path of the script currently being executed
code_exe_path_element = code_exe_path.split("\\") ### split the (Windows-style) path so we can find which level kong_model2 sits at
code_dir = "\\".join(code_exe_path_element[:-1])
kong_layer = code_exe_path_element.index("kong_model2") ### find which level kong_model2 is at
kong_model2_dir = "\\".join(code_exe_path_element[:kong_layer + 1]) ### locate the kong_model2 root dir
import sys ### add kong_model2 (and this script's dir) to sys.path
sys.path.append(kong_model2_dir)
sys.path.append(code_dir)
# print(__file__.split("\\")[-1])
# print("    code_exe_path:", code_exe_path)
# print("    code_exe_path_element:", code_exe_path_element)
# print("    code_dir:", code_dir)
# print("    kong_layer:", kong_layer)
# print("    kong_model2_dir:", kong_model2_dir)
#############################################################################################################################################################################################################
kong_to_py_layer = len(code_exe_path_element) - 1 - kong_layer ### the -1 converts a length into an index
# print("    kong_to_py_layer:", kong_to_py_layer)
# Build template_dir from the path components between kong_model2 and this script.
if  (kong_to_py_layer == 0): template_dir = ""
elif(kong_to_py_layer == 2): template_dir = code_exe_path_element[kong_layer + 1][0:] ### [7:] would strip the "step1x_" prefix; changed to 0 since meaningful names are no longer stripped
elif(kong_to_py_layer == 3): template_dir = code_exe_path_element[kong_layer + 1][0:] + "/" + code_exe_path_element[kong_layer + 2][0:] ### [5:] would strip the "mask_" prefix (added only because Python modules cannot start with a digit); changed to 0 since the automatic ordering is acceptable
elif(kong_to_py_layer > 3): template_dir = code_exe_path_element[kong_layer + 1][0:] + "/" + code_exe_path_element[kong_layer + 2][0:] + "/" + "/".join(code_exe_path_element[kong_layer + 3: -1])
# print("    template_dir:", template_dir) ### e.g. template_dir: 7_mask_unet/5_os_book_and_paper_have_dtd_hdr_mix_bg_tv_s04_mae
#############################################################################################################################################################################################################
# exp_dir: the parent folder that will hold all result_* directories for this
# experiment family.
exp_dir = template_dir
#############################################################################################################################################################################################################
from step06_a_datas_obj import *
from step09_2side_L4 import *
from step10_a2_loss_info_obj import *
from step10_b2_exp_builder import Exp_builder
# After the star-imports above, drop this script's own directory from sys.path
# and evict any cached step09 modules, so a later sibling script re-imports its
# own step09 variant (presumably needed when several step10 scripts run in one
# interpreter -- TODO confirm).
rm_paths = [path for path in sys.path if code_dir in path]
for rm_path in rm_paths: sys.path.remove(rm_path)
rm_moduless = [module for module in sys.modules if "step09" in module]
for rm_module in rm_moduless: del sys.modules[rm_module]
#############################################################################################################################################################################################################
'''
exp_dir 是 決定 result_dir 的 "上一層"資料夾 名字喔! exp_dir要巢狀也沒問題~
比如:exp_dir = "6_mask_unet/自己命的名字",那 result_dir 就都在:
6_mask_unet/自己命的名字/result_a
6_mask_unet/自己命的名字/result_b
6_mask_unet/自己命的名字/...
'''
use_db_obj = type8_blender_kong_doc3d_in_I_gt_MC
use_loss_obj = [G_sobel_k31_loss_info_builder.set_loss_target("UNet_Mask").copy()] ### z, y, x 順序是看 step07_b_0b_Multi_UNet 來對應的喔
#############################################################
### 為了resul_analyze畫空白的圖,建一個empty的 Exp_builder
empty = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_1__2side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_1__2side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="為了resul_analyze畫空白的圖,建一個empty的 Exp_builder")
#############################################################
ch032_1side_1__2side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_1__2side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_1__2side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_2__2side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_2__2side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_2__2side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_2__2side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_2__2side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_2__2side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_3__2side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_3__2side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_3__2side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
#############################################################
if __name__ == "__main__":
    # NOTE(review): `time` and `start_time` are not defined in this file; they
    # are expected to arrive via one of the star-imports above -- confirm.
    print("build exps cost time:", time.time() - start_time)
    if len(sys.argv) < 2:
        ### Run directly (F5 / `python step10_a.py` with no argument): train one
        ### default experiment instead of falling through to the subprocess path.
        # BUGFIX: the original referenced ch032_1side_1__2side_0, which is never
        # defined in this module and raised NameError; the smallest defined
        # builder is ch032_1side_1__2side_1.
        ch032_1side_1__2side_1.build().run()
        sys.exit()

    ### Invoked from step10_b_subprocess.py, equivalent to running
    ### `python step10_b1_exp_obj_load_and_train_and_test.py "<exp>.build().run()"`.
    # SECURITY: eval() executes an arbitrary expression taken from argv; this is
    # acceptable only because the caller is the project's own subprocess driver
    # supplying trusted input.
    eval(sys.argv[1])
| [
"s89334roy@yahoo.com.tw"
] | s89334roy@yahoo.com.tw |
4e01761c4fbcb7957337a1f92fd2e922997fc09d | 3ca49c828f75d5d4880dbd4b940f93285e505f26 | /resources/icon_src/plot_ot_bx.py | b61d63391cb87a6827508d0a13d7bd36d4fa0726 | [
"Apache-2.0",
"BSD-2-Clause",
"MIT"
] | permissive | liangwang0734/Viscid | fc15c71725f3c55eafe7d91822347f00de1ea6d4 | 41e19fee8576c5e3fa9c758c48731bc25e1db1b9 | refs/heads/master | 2021-08-30T11:32:21.157520 | 2017-06-08T22:58:29 | 2017-06-08T22:58:29 | 48,387,722 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 384 | py | import matplotlib.pyplot as plt
import seaborn as sns
import viscid
from viscid.plot import vpyplot as vlt
f = viscid.load_file('./otico_001.3d.xdmf')
mymap = sns.diverging_palette(28, 240, s=95, l=50, as_cmap=True)
figure = plt.figure(figsize=(14, 10))
g = f.get_grid(time=12)
vlt.plot(g['bx']['z=0'], cmap=mymap, style='contourf', levels=256)
vlt.savefig('OT_bx.png')
plt.show()
| [
"kristofor.maynard@gmail.com"
] | kristofor.maynard@gmail.com |
8a6957b4940d94bee65df73706811ee6ed17110d | 4ae6482f032a0bf185598e1ee607021cae881f7c | /word2vec.py | ced07e8595fa11971641f8138c9a67fd5e2f6265 | [] | no_license | stiero/sick-spence | 074ce903faf0e730b33c3f95f96db27206c5831e | ce47740ab4f63c2fe4ae9871bb7c8f1b4ffaa34a | refs/heads/master | 2021-10-11T16:35:21.356768 | 2019-01-28T17:00:39 | 2019-01-28T17:00:39 | 125,160,717 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,282 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Mar 9 16:31:26 2018
@author: henson
"""
import re
import os
os.chdir("/home/henson/Desktop/conditions")
from gensim.models.word2vec import Word2Vec
from nltk.corpus import stopwords
from nltk import RegexpTokenizer
from nltk.stem.lancaster import LancasterStemmer
stop = set(stopwords.words('english'))
tkr = RegexpTokenizer('[a-zA-Z0-9@]+')
stemmer = LancasterStemmer()
def file_read(file):
fp = open(file, "r")
lines = [tkr.tokenize(line.lower()) for line in fp.readlines()]
#lines = [line for sublist in lines for line in sublist]
#lines = [stemmer.stem(line) for sublist in lines for line in sublist]
#lines = [line for sublist in lines for line in sublist]
#lines = list(set(lines))
regex = re.compile('([^\s\w]|_)+')
cleaned_lines = []
for line in lines:
line_temp = regex.sub('', str(line))
line_temp = tkr.tokenize(line_temp)
#line_temp = line_temp.lower()
cleaned_lines.append(line_temp)
cleaned_lines = list(filter(None, cleaned_lines))
return cleaned_lines
#return lines
file_list = ["fever.txt", "asthma.txt", "chronic_pain.txt", "cold.txt", "cramps.txt",
"depression.txt", "diarrhea.txt", "dizziness.txt", "fatigue.txt",
"headache.txt", "hypertension.txt", "nausea.txt", "rash.txt",
"swelling.txt", "sleepiness.txt"]
full_text = []
for file in file_list:
text = file_read(file)
#print(text)
full_text.append(text)
full_text = [item for sublist in full_text for item in sublist]
vector_size = 500
window_size=5
word2vec = Word2Vec(sentences=full_text,
size=vector_size,
window=window_size,
negative=20,
iter=100,
seed=1000,
)
#Words most similar to fever
word2vec.most_similar("fever")
#Some similarities between conditions
print(word2vec.wv.similarity("fever", "asthma"))
print(word2vec.wv.similarity("fever", "pain"))
print(word2vec.wv.similarity("cold", "fever"))
print(word2vec.wv.similarity("cold", "headache"))
print(word2vec.wv.similarity("fever", "nausea"))
| [
"you@example.com"
] | you@example.com |
79faefd38606f26118f15d3b6254830942092324 | c4c5f22b796269062f618038fdf0158ae7769487 | /python/quick_der/main.pyi | 4cb732180c3b0c851ccdf5c3605021b7b4cc11b2 | [
"BSD-2-Clause"
] | permissive | tonytheodore/quick-der | c5ce28e16c4f1e92b57825d3b083258790172c19 | 1becc1d3286a05b78eee3aa436e607c67ffd34a9 | refs/heads/master | 2021-05-07T13:46:17.488790 | 2017-11-03T15:45:26 | 2017-11-03T15:45:26 | 109,690,235 | 0 | 0 | null | 2017-11-06T12:05:57 | 2017-11-06T12:05:57 | null | UTF-8 | Python | false | false | 5,651 | pyi | # Stubs for quick_der.main (Python 3.6)
#
# NOTE: This dynamically typed stub was automatically generated by stubgen.
from asn1ate.sema import *
from typing import Any, Optional
class dprint:
enable: bool = ...
def __init__(self, s, *args) -> None: ...
def tosym(name): ...
api_prefix: str
dertag2atomsubclass: Any
class QuickDERgeneric:
outfile: Any = ...
comma1: Any = ...
comma0: Any = ...
def __init__(self, outfn, outext) -> None: ...
def write(self, txt): ...
def writeln(self, txt: str = ...): ...
def newcomma(self, comma, firstcomma: str = ...): ...
def comma(self): ...
def getcomma(self): ...
def setcomma(self, comma1, comma0): ...
def close(self): ...
class QuickDER2c(QuickDERgeneric):
to_be_defined: Any = ...
to_be_overlaid: Any = ...
cursor_offset: Any = ...
nested_typerefs: Any = ...
nested_typecuts: Any = ...
semamod: Any = ...
refmods: Any = ...
overlay_funmap: Any = ...
pack_funmap: Any = ...
psub_funmap: Any = ...
issued_typedefs: Any = ...
def __init__(self, semamod, outfn, refmods) -> None: ...
def generate_head(self): ...
def generate_tail(self): ...
def generate_overlay(self): ...
def generate_pack(self): ...
def generate_psub(self): ...
def generate_psub_sub(self, node, subquads, tp, fld): ...
def generate_overlay_node(self, node, tp, fld): ...
def generate_pack_node(self, node, **kwargs): ...
def generate_psub_node(self, node, tp, fld, prim): ...
def overlayValueAssignment(self, node, tp, fld): ...
def packValueAssignment(self, node): ...
def psubValueAssignment(self, node, tp, fld, prim): ...
def overlayTypeAssignment(self, node, tp, fld): ...
def packTypeAssignment(self, node, implicit: bool = ...): ...
def psubTypeAssignment(self, node, tp, fld, prim): ...
def overlayDefinedType(self, node, tp, fld): ...
def packDefinedType(self, node, implicit: bool = ..., outer_tag: Optional[Any] = ...): ...
unit: Any = ...
def psubDefinedType(self, node, tp, fld, prim): ...
def overlaySimpleType(self, node, tp, fld): ...
def packSimpleType(self, node, implicit: bool = ..., outer_tag: Optional[Any] = ...): ...
def psubSimpleType(self, node, tp, fld, prim): ...
def overlayTaggedType(self, node, tp, fld): ...
def packTaggedType(self, node, implicit: bool = ..., outer_tag: Optional[Any] = ...): ...
def packTaggedType_TODO(self, node, implicit: bool = ...): ...
def psubTaggedType(self, node, tp, fld, prim): ...
def overlayConstructedType(self, node, tp, fld, naked: bool = ...): ...
def psubConstructedType(self, node, tp, fld, prim): ...
def packSequenceType(self, node, implicit: bool = ..., outer_tag: str = ...): ...
def packSetType(self, node, implicit: bool = ..., outer_tag: str = ...): ...
def packChoiceType(self, node, implicit: bool = ..., outer_tag: Optional[Any] = ...): ...
def overlayRepeatingStructureType(self, node, tp, fld): ...
def psubRepeatingStructureType(self, node, tp, fld, prim): ...
def packSequenceOfType(self, node, implicit: bool = ..., outer_tag: str = ...): ...
def packSetOfType(self, node, implicit: bool = ..., outer_tag: str = ...): ...
class QuickDER2py(QuickDERgeneric):
cursor_offset: Any = ...
nested_typerefs: Any = ...
nested_typecuts: Any = ...
semamod: Any = ...
refmods: Any = ...
funmap_pytype: Any = ...
def __init__(self, semamod, outfn, refmods) -> None: ...
def comment(self, text): ...
def generate_head(self): ...
def generate_tail(self): ...
def generate_values(self): ...
def pygenValueAssignment(self, node): ...
def pyvalInteger(self, valnode): ...
def pyvalOID(self, valnode): ...
def generate_classes(self): ...
def pygenTypeAssignment(self, node): ...
def generate_pytype(self, node, **subarg): ...
unit: Any = ...
def pytypeDefinedType(self, node, **subarg): ...
def pytypeSimple(self, node, implicit_tag: Optional[Any] = ...): ...
def pytypeTagged(self, node, implicit_tag: Optional[Any] = ...): ...
def pytypeNamedType(self, node, **subarg): ...
def pyhelpConstructedType(self, node): ...
def pytypeChoice(self, node, implicit_tag: Optional[Any] = ...): ...
def pytypeSequence(self, node, implicit_tag: str = ...): ...
def pytypeSet(self, node, implicit_tag: str = ...): ...
def pyhelpRepeatedType(self, node, dertag, recptag): ...
def pytypeSequenceOf(self, node, implicit_tag: str = ...): ...
def pytypeSetOf(self, node, implicit_tag: str = ...): ...
class QuickDER2testdata(QuickDERgeneric):
semamod: Any = ...
refmods: Any = ...
type2tdgen: Any = ...
funmap_tdgen: Any = ...
def __init__(self, semamod, outfn, refmods) -> None: ...
def fetch_one(self, typename, casenr): ...
def fetch_multi(self, typename, testcases): ...
def all_typenames(self): ...
def generate_testdata(self): ...
def process_TypeAssignment(self, node): ...
def generate_tdgen(self, node, **subarg): ...
def tdgenDefinedType(self, node, **subarg): ...
def der_prefixhead(self, tag, body): ...
simple_cases: Any = ...
def tdgenSimple(self, node): ...
def tdgenNamedType(self, node, **subarg): ...
nodeclass2basaltag: Any = ...
def tdgenTagged(self, node, implicit_tag: Optional[Any] = ...): ...
def tdgenChoice(self, node, implicit_tag: Optional[Any] = ...): ...
def tdgenConstructed(self, node, implicit_tag: Optional[Any] = ...): ...
def tdgenRepeated(self, node, **subarg): ...
def main(script_name, script_args): ...
| [
"gijs@pythonic.nl"
] | gijs@pythonic.nl |
cd85fd7a701f085a896dcd9fec6d6c2a12562e45 | d488f052805a87b5c4b124ca93494bc9b78620f7 | /google-cloud-sdk/.install/.backup/lib/googlecloudsdk/calliope/display_info.py | 494c7184d5c3c77b64993d9477d0779c6f41a541 | [
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] | permissive | PacktPublishing/DevOps-Fundamentals | 5ce1fc938db66b420691aa8106ecfb3f9ceb1ace | 60597e831e08325c7e51e8557591917f7c417275 | refs/heads/master | 2023-02-02T04:48:15.346907 | 2023-01-30T08:33:35 | 2023-01-30T08:33:35 | 131,293,311 | 13 | 19 | null | null | null | null | UTF-8 | Python | false | false | 4,805 | py | # Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Resource display info for the Calliope display module."""
from __future__ import absolute_import
from __future__ import unicode_literals
from googlecloudsdk.core.cache import cache_update_ops
class DisplayInfo(object):
"""Display info accumulator for priming Displayer.
"legacy" logic will be dropped when the incremental Command class refactor
is complete.
NOTICE: If you add an attribute:
(1) document it
(2) handle it in AddLowerDisplayInfo()
Attributes:
_cache_updater: A resource_cache.Updater class that will be instantiated
and called to update the cache to reflect the resources returned by the
calling command.
_filter: The default filter string. args.filter takes precedence.
_format: The default format string. args.format takes precedence.
_transforms: The filter/format transforms symbol dict.
_aliases: The resource name alias dict.
"""
def __init__(self):
self._cache_updater = None
self._filter = None
self._format = None
self._transforms = {}
self._aliases = {}
# pylint: disable=redefined-builtin, name matches args.format and --format
def AddLowerDisplayInfo(self, display_info):
"""Add lower precedence display_info to the object.
This method is called by calliope to propagate CLI low precedence parent
info to its high precedence children.
Args:
display_info: The low precedence DisplayInfo object to add.
"""
if not self._cache_updater:
self._cache_updater = display_info.cache_updater
if not self._filter:
self._filter = display_info.filter
if not self._format:
self._format = display_info.format
if display_info.transforms:
transforms = dict(display_info.transforms)
transforms.update(self.transforms)
self._transforms = transforms
if display_info.aliases:
aliases = dict(display_info.aliases)
aliases.update(self._aliases)
self._aliases = aliases
def AddFormat(self, format):
"""Adds a format to the display info, newer info takes precedence.
Args:
format: The default format string. args.format takes precedence.
"""
if format:
self._format = format
def AddFilter(self, filter):
"""Adds a filter to the display info, newer info takes precedence.
Args:
filter: The default filter string. args.filter takes precedence.
"""
if filter:
self._filter = filter
def AddTransforms(self, transforms):
"""Adds transforms to the display info, newer values takes precedence.
Args:
transforms: A filter/format transforms symbol dict.
"""
if transforms:
self._transforms.update(transforms)
def AddUriFunc(self, uri_func):
"""Adds a uri transform to the display info using uri_func.
Args:
uri_func: func(resource), A function that returns the uri for a
resource object.
"""
def _TransformUri(resource, undefined=None):
try:
return uri_func(resource) or undefined
except (AttributeError, TypeError):
return undefined
self.AddTransforms({'uri': _TransformUri})
def AddAliases(self, aliases):
"""Adds aliases to the display info, newer values takes precedence.
Args:
aliases: The resource name alias dict.
"""
if aliases:
self._aliases.update(aliases)
def AddCacheUpdater(self, cache_updater):
"""Adds a cache_updater to the display info, newer values takes precedence.
The cache updater is called to update the resource cache for CreateCommand,
DeleteCommand and ListCommand commands.
Args:
cache_updater: A resource_cache.Updater class that will be instantiated
and called to update the cache to reflect the resources returned by the
calling command. None disables cache update.
"""
self._cache_updater = cache_updater or cache_update_ops.NoCacheUpdater
@property
def cache_updater(self):
return self._cache_updater
@property
def format(self):
return self._format
@property
def filter(self):
return self._filter
@property
def aliases(self):
return self._aliases
@property
def transforms(self):
return self._transforms
| [
"saneetk@packtpub.com"
] | saneetk@packtpub.com |
7c747fb48d6766c4376bc60b101efbfed54ce545 | 1c43338af57ff781d704ff68a0ea79f908bf94d5 | /python/skyhook/op/all_decorate.py | a952bbbdb0796d3e38ddad7183665c1b2abf7ff6 | [
"MIT"
] | permissive | skyhookml/skyhookml | dcde20626bab09843092e82b3160a80dfaa21061 | 5d2d8d3b80db409d44b4a9e4d0d3c495a116fee1 | refs/heads/master | 2023-06-13T03:51:08.343428 | 2021-07-09T18:00:35 | 2021-07-09T18:00:35 | 320,390,825 | 9 | 3 | MIT | 2021-04-28T17:46:26 | 2020-12-10T21:10:07 | Go | UTF-8 | Python | false | false | 1,166 | py | from skyhook.op.op import Operator
import skyhook.common as lib
import skyhook.io
import requests
class AllDecorateOperator(Operator):
def __init__(self, meta_packet):
super(AllDecorateOperator, self).__init__(meta_packet)
# Function must be set after initialization.
self.f = None
def apply(self, task):
# Use LoadData to fetch datas one by one.
# We combine it with metadata to create the input arguments.
items = [item_list[0] for item_list in task['Items']['inputs']]
args = []
for i, item in enumerate(items):
data, metadata = self.read_item(self.inputs[i], item)
args.append({
'Data': data,
'Metadata': metadata,
})
# Run the user-defined function.
outputs = self.f(*args)
if not isinstance(outputs, tuple):
outputs = (outputs,)
# Write each output item.
for i, data in enumerate(outputs):
if isinstance(data, dict) and 'Data' in data:
data, metadata = data['Data'], data['Metadata']
else:
metadata = {}
self.write_item(self.outputs[i], task['Key'], data, metadata)
def all_decorate(f):
def wrap(meta_packet):
op = AllDecorateOperator(meta_packet)
op.f = f
return op
return wrap
| [
"fbastani@perennate.com"
] | fbastani@perennate.com |
c202f4e4b5ee90002f77d6cb85850f520b861325 | 97da505ec0524d7b214764d198ed9b82e79300ed | /pyiem/ncei/ds3505.py | 0d6b22815fecf47180c38187fa78795c87eda84d | [
"MIT"
] | permissive | morganetanu/pyIEM | c035a1706cccff0afed209f14760f2668259667f | 2a38d1de77d056161408e804b5c246b7e6b38056 | refs/heads/master | 2021-08-20T07:10:40.071377 | 2017-11-28T13:41:29 | 2017-11-28T13:41:29 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 42,255 | py | """Implementation of the NCEI DS3505 format
ftp://ftp.ncdc.noaa.gov/pub/data/noaa/ish-format-document.pdf
"""
from __future__ import print_function
import re
import warnings
import datetime
import json
import pytz
from pyiem.datatypes import speed, distance, pressure
from metar.Metar import Metar
from metar.Metar import ParserError as MetarParserError
MISSING_RE = re.compile(r"^\+?\-?9+$")
EQD_RE = re.compile(r"^[QPRCDN][0-9][0-9]$")
QNN_RE = re.compile(r"^[A-Z][0-9][0-9][A-Z ][0-9]$")
DS3505_RE = re.compile(r"""
^(?P<chars>[0-9]{4})
(?P<stationid>......)
(?P<wban>.....)
(?P<yyyymmdd>[0-9]{8})
(?P<hhmi>[0-9]{4})
(?P<srcflag>.)
(?P<lat>[\+\-][0-9]{5})
(?P<lon>[\+\-][0-9]{6})
(?P<report_type>.....)
(?P<elevation>[\+\-][0-9]{4})
(?P<call_id>.....)
(?P<qc_process>....)
(?P<drct>[0-9]{3})
(?P<drct_qc>.)
(?P<wind_code>.)
(?P<wind_speed_mps>[0-9]{4})
(?P<wind_speed_mps_qc>.)
(?P<ceiling_m>[0-9]{5})
(?P<ceiling_m_qc>.)
(?P<ceiling_m_how>.)
(?P<ceiling_m_cavok>.)
(?P<vsby_m>[0-9]{6})
(?P<vsby_m_qc>.)
(?P<vsby_m_variable>.)
(?P<vsby_m_variable_qc>.)
(?P<airtemp_c>[\+\-][0-9]{4})
(?P<airtemp_c_qc>.)
(?P<dewpointtemp_c>[\+\-][0-9]{4})
(?P<dewpointtemp_c_qc>.)
(?P<mslp_hpa>[0-9]{5})
(?P<mslp_hpa_qc>.)
""", re.VERBOSE)
def _tonumeric(val, scale_factor=1.):
"""Convert to what we want"""
if MISSING_RE.match(val):
return None
return float(val) / scale_factor
def _d1000(val):
"""Divide the value by 1000"""
return _tonumeric(val, 1000.)
def _d10(val):
"""Divide the value by 1000"""
return _tonumeric(val, 10.)
def _i10(val):
"""Divide the value by 1000"""
val = _tonumeric(val, 10.)
if val is None:
return val
return int(val)
def _i(val):
"""int"""
val = _tonumeric(val, 1.)
if val is None:
return val
return int(val)
SKY_STATE_CODES = {
'0': 'CLR',
'1': 'FEW',
'2': 'SCT',
'3': 'BKN',
'4': 'OVC',
'5': 'OBS',
'6': 'POB',
'9': '///'}
ADDITIONAL = {
# Hourly Precip
'AA1': [['hours', 2, _i], ['depth', 4, _d10],
['cond_code', 1], ['qc', 1]],
'AA2': [['hours', 2, _i], ['depth', 4, _d10],
['cond_code', 1], ['qc', 1]],
'AA3': [['hours', 2, _i], ['depth', 4, _d10],
['cond_code', 1], ['qc', 1]],
'AA4': [['hours', 2, _i], ['depth', 4, _d10],
['cond_code', 1], ['qc', 1]],
# Monthly Precip
'AB1': [['depth', 5], ['cond_code', 1], ['qc', 1]],
# Precip History
'AC1': [['duration', 1], ['char_code', 1], ['qc', 1]],
# Greatest amount in a month
'AD1': [['depth', 5], ['cond_code', 1], ['date1', 4], ['date2', 4],
['date3', 4], ['qc', 1]],
# Precip number of days
'AE1': [['q01_days', 2], ['q01_days_qc', 1],
['q10_days', 2], ['q10_days_qc', 1],
['q50_days', 2], ['q50_days_qc', 1],
['q100_days', 2], ['q100_days_qc', 1]],
# Precip estimated?
'AG1': [['code', 1], ['depth', 3]],
# Short duration precip
'AH1': [['period', 3], ['depth', 4], ['code', 1], ['enddate', 6],
['qc', 1]],
'AH2': [['period', 3], ['depth', 4], ['code', 1], ['enddate', 6],
['qc', 1]],
'AH3': [['period', 3], ['depth', 4], ['code', 1], ['enddate', 6],
['qc', 1]],
'AH4': [['period', 3], ['depth', 4], ['code', 1], ['enddate', 6],
['qc', 1]],
'AH5': [['period', 3], ['depth', 4], ['code', 1], ['enddate', 6],
['qc', 1]],
'AH6': [['period', 3], ['depth', 4], ['code', 1], ['enddate', 6],
['qc', 1]],
# Short duration precip for month
'AI1': [['period', 3], ['depth', 4], ['code', 1], ['enddate', 6],
['qc', 1]],
'AI2': [['period', 3], ['depth', 4], ['code', 1], ['enddate', 6],
['qc', 1]],
'AI3': [['period', 3], ['depth', 4], ['code', 1], ['enddate', 6],
['qc', 1]],
'AI4': [['period', 3], ['depth', 4], ['code', 1], ['enddate', 6],
['qc', 1]],
'AI5': [['period', 3], ['depth', 4], ['code', 1], ['enddate', 6],
['qc', 1]],
'AI6': [['period', 3], ['depth', 4], ['code', 1], ['enddate', 6],
['qc', 1]],
# Snow depth
'AJ1': [['depth', 4], ['cond_code', 1], ['qc', 1], ['swe', 6],
['swe_cond_code', 1],
['swe_qc', 1]],
# Snow depth month
'AK1': [['depth', 4], ['cond_code', 1], ['dates', 6], ['qc', 1]],
# Snow accumulation
'AL1': [['period', 2], ['depth', 3], ['cond_code', 1], ['qc', 1]],
'AL2': [['period', 2], ['depth', 3], ['cond_code', 1], ['qc', 1]],
'AL3': [['period', 2], ['depth', 3], ['cond_code', 1], ['qc', 1]],
'AL4': [['period', 2], ['depth', 3], ['cond_code', 1], ['qc', 1]],
# Snow greatest in month
'AM1': [['depth', 4], ['cond_code', 1], ['dates1', 4], ['dates2', 4],
['dates3', 4], ['qc', 1]],
# snow for day month?
'AN1': [['period', 3], ['depth', 4], ['cond_code', 1], ['qc', 1]],
# precip occurence
'AO1': [['minutes', 2], ['depth', 4], ['cond_code', 1], ['qc', 1]],
'AO2': [['minutes', 2], ['depth', 4], ['cond_code', 1], ['qc', 1]],
'AO3': [['minutes', 2], ['depth', 4], ['cond_code', 1], ['qc', 1]],
'AO4': [['minutes', 2], ['depth', 4], ['cond_code', 1], ['qc', 1]],
# 15 minute precip
'AP1': [['depth', 4], ['cond_code', 1], ['qc', 1]],
'AP2': [['depth', 4], ['cond_code', 1], ['qc', 1]],
'AP3': [['depth', 4], ['cond_code', 1], ['qc', 1]],
'AP4': [['depth', 4], ['cond_code', 1], ['qc', 1]],
# presentweather
'AT1': [['source', 2], ['type', 2], ['abbr', 4], ['qc', 1]],
'AT2': [['source', 2], ['type', 2], ['abbr', 4], ['qc', 1]],
'AT3': [['source', 2], ['type', 2], ['abbr', 4], ['qc', 1]],
'AT4': [['source', 2], ['type', 2], ['abbr', 4], ['qc', 1]],
'AT5': [['source', 2], ['type', 2], ['abbr', 4], ['qc', 1]],
'AT6': [['source', 2], ['type', 2], ['abbr', 4], ['qc', 1]],
'AT7': [['source', 2], ['type', 2], ['abbr', 4], ['qc', 1]],
'AT8': [['source', 2], ['type', 2], ['abbr', 4], ['qc', 1]],
# present weather intensity
'AU1': [['proximity', 1], ['descriptor', 1], ['precip', 2], ['obscure', 1],
['other', 1], ['combo', 1], ['qc', 1]],
'AU2': [['proximity', 1], ['descriptor', 1], ['precip', 2], ['obscure', 1],
['other', 1], ['combo', 1], ['qc', 1]],
'AU3': [['proximity', 1], ['descriptor', 1], ['precip', 2], ['obscure', 1],
['other', 1], ['combo', 1], ['qc', 1]],
'AU4': [['proximity', 1], ['descriptor', 1], ['precip', 2], ['obscure', 1],
['other', 1], ['combo', 1], ['qc', 1]],
'AU5': [['proximity', 1], ['descriptor', 1], ['precip', 2], ['obscure', 1],
['other', 1], ['combo', 1], ['qc', 1]],
'AU6': [['proximity', 1], ['descriptor', 1], ['precip', 2], ['obscure', 1],
['other', 1], ['combo', 1], ['qc', 1]],
'AU7': [['proximity', 1], ['descriptor', 1], ['precip', 2], ['obscure', 1],
['other', 1], ['combo', 1], ['qc', 1]],
'AU8': [['proximity', 1], ['descriptor', 1], ['precip', 2], ['obscure', 1],
['other', 1], ['combo', 1], ['qc', 1]],
'AU9': [['proximity', 1], ['descriptor', 1], ['precip', 2], ['obscure', 1],
['other', 1], ['combo', 1], ['qc', 1]],
# Automated weather
'AW1': [['cond_code', 2], ['qc', 1]],
'AW2': [['cond_code', 2], ['qc', 1]],
'AW3': [['cond_code', 2], ['qc', 1]],
'AW4': [['cond_code', 2], ['qc', 1]],
# Past Weather
'AX1': [['cond_code', 2], ['qc', 1], ['period', 2], ['period_qc', 1]],
'AX2': [['cond_code', 2], ['qc', 1], ['period', 2], ['period_qc', 1]],
'AX3': [['cond_code', 2], ['qc', 1], ['period', 2], ['period_qc', 1]],
'AX4': [['cond_code', 2], ['qc', 1], ['period', 2], ['period_qc', 1]],
'AX5': [['cond_code', 2], ['qc', 1], ['period', 2], ['period_qc', 1]],
'AX6': [['cond_code', 2], ['qc', 1], ['period', 2], ['period_qc', 1]],
# Past weather
'AY1': [['cond_code', 1], ['qc', 1], ['period', 2], ['period_qc', 1]],
'AY2': [['cond_code', 1], ['qc', 1], ['period', 2], ['period_qc', 1]],
# Past weather automated
'AZ1': [['cond_code', 1], ['qc', 1], ['period', 2], ['period_qc', 1]],
'AZ2': [['cond_code', 1], ['qc', 1], ['period', 2], ['period_qc', 1]],
# CRN Secondary Precip
'CB1': [['minutes', 2], ['depth', 6], ['qc', 1], ['precip_flag', 1]],
'CB2': [['minutes', 2], ['depth', 6], ['qc', 1], ['precip_flag', 1]],
# CRN, Fan Speed
'CF1': [['speed', 4], ['qc', 1], ['speed_flag', 1]],
'CF2': [['speed', 4], ['qc', 1], ['speed_flag', 1]],
'CF3': [['speed', 4], ['qc', 1], ['speed_flag', 1]],
# CRN, subhour precip
'CG1': [['depth', 6], ['qc', 1], ['depth_flag', 1]],
'CG2': [['depth', 6], ['qc', 1], ['depth_flag', 1]],
'CG3': [['depth', 6], ['qc', 1], ['depth_flag', 1]],
# CRN, rh
'CH1': [['minutes', 2], ['tmpc', 5], ['tmpc_qc', 1], ['tmpc_flag', 1],
['avg_rh', 4], ['qc', 1], ['avg_rh_flag', 1]],
'CH2': [['minutes', 2], ['tmpc', 5], ['tmpc_qc', 1], ['tmpc_flag', 1],
['avg_rh', 4], ['qc', 1], ['avg_rh_flag', 1]],
# CRN, rh
'CI1': [['min_rh_temp', 5], ['min_rh_temp_qc', 1], ['min_rh_temp_flag', 1],
['max_rh_temp', 5], ['max_rh_temp_qc', 1], ['max_rh_temp_flag', 1],
['std_rh_temp', 5], ['std_rh_temp_qc', 1], ['std_rh_temp_flag', 1],
['std_rh', 5], ['std_rh_qc', 1], ['std_rh_flag', 1]],
# CRN, battery voltage
'CN1': [['batvol', 4], ['batvol_qc', 1], ['batvol_flag', 1],
['batvol_fl', 4], ['batvol_fl_qc', 1], ['batvol_fl_flag', 1],
['batvol_dl', 4], ['batvol_dl_qc', 1], ['batvol_dl_flag', 1]],
# CRN, misc diagnostics
'CN2': [['tranel', 5], ['tranel_qc', 1], ['tranel_flag', 1],
['tinlet_max', 5], ['tinlet_max_qc', 1], ['trinlet_max_flag', 1],
['opendoor_tm', 2], ['opendoor_tm_qc', 1],
['opendoor_tm_flag', 1]],
# CRN, secondary diagnostic
'CN3': [['refresavg', 6], ['refresavg_qc', 1], ['refresavg_flag', 1],
['dsignature', 6], ['dsignature__qc', 1], ['dsignature_flag', 1]],
# CRN, secondary hourly diagnostic
'CN4': [['heater_flag', 1], ['heater_flag_code', 1],
['heater_flag_code2', 1],
['doorflag', 1], ['doorflag_code', 1], ['doorflag_code2', 1],
['fortrans', 1], ['fortrans_code', 1], ['fortrans_code2', 1],
['refltrans', 3], ['refltrans_code', 1], ['refltrans_code2', 1]],
# CRN, metadata
'CO1': [['climat_division', 2], ['lst_conversion', 3]],
'CO2': [['elementid', 3], ['time_offset', 5]],
'CO3': [['elementid', 3], ['time_offset', 5]],
'CO4': [['elementid', 3], ['time_offset', 5]],
'CO5': [['elementid', 3], ['time_offset', 5]],
'CO6': [['elementid', 3], ['time_offset', 5]],
'CO7': [['elementid', 3], ['time_offset', 5]],
'CO8': [['elementid', 3], ['time_offset', 5]],
'CO9': [['elementid', 3], ['time_offset', 5]],
# CRN, control section
'CR1': [['dl_vn', 5], ['dl_vn_qc', 1], ['dl_vn_flag', 1]],
# CRN, sub-hourly temperature
'CT1': [['avg_temp', 5], ['avg_temp_qc', 1], ['avg_temp_flag', 1]],
'CT2': [['avg_temp', 5], ['avg_temp_qc', 1], ['avg_temp_flag', 1]],
'CT3': [['avg_temp', 5], ['avg_temp_qc', 1], ['avg_temp_flag', 1]],
# CRN, colocated temp sensors
'CU1': [['avg_temp', 5], ['avg_temp_qc', 1], ['avg_temp_flag', 1],
['temp_std', 4], ['temp_std_qc', 1], ['temp_std_flag', 1]],
'CU2': [['avg_temp', 5], ['avg_temp_qc', 1], ['avg_temp_flag', 1],
['temp_std', 4], ['temp_std_qc', 1], ['temp_std_flag', 1]],
'CU3': [['avg_temp', 5], ['avg_temp_qc', 1], ['avg_temp_flag', 1],
['temp_std', 4], ['temp_std_qc', 1], ['temp_std_flag', 1]],
# CRN, hourly temp extreme
'CV1': [['temp_min', 5], ['temp_min_qc', 1], ['temp_min_flag', 1],
['temp_min_time', 4], ['temp_min_time_qc', 1],
['temp_min_time_flag', 1],
['temp_max', 5], ['temp_max_qc', 1], ['temp_max_flag', 1],
['temp_max_time', 4], ['temp_max_time_qc', 1],
['temp_max_time_flag', 1]],
'CV2': [['temp_min', 5], ['temp_min_qc', 1], ['temp_min_flag', 1],
['temp_min_time', 4], ['temp_min_time_qc', 1],
['temp_min_time_flag', 1],
['temp_max', 5], ['temp_max_qc', 1], ['temp_max_flag', 1],
['temp_max_time', 4], ['temp_max_time_qc', 1],
['temp_max_time_flag', 1]],
'CV3': [['temp_min', 5], ['temp_min_qc', 1], ['temp_min_flag', 1],
['temp_min_time', 4], ['temp_min_time_qc', 1],
['temp_min_time_flag', 1],
['temp_max', 5], ['temp_max_qc', 1], ['temp_max_flag', 1],
['temp_max_time', 4], ['temp_max_time_qc', 1],
['temp_max_time_flag', 1]],
# CRN, subhourly wetness
'CW1': [['wet1', 5], ['wet1_qc', 1], ['wet1_flag', 1],
['wet2', 5], ['wet2_qc', 1], ['wet2_flag', 1]],
# CRN, vibrating wire summary
'CX1': [['precipitation', 6], ['precip_qc', 1], ['precip_flag', 1],
['freq_avg', 4], ['freq_avg_qc', 1], ['freq_avg_flag', 1],
['freq_min', 4], ['freq_min_qc', 1], ['freq_min_flag', 1],
['freq_max', 4], ['freq_max_qc', 1], ['freq_max_flag', 1]],
'CX2': [['precipitation', 6], ['precip_qc', 1], ['precip_flag', 1],
['freq_avg', 4], ['freq_avg_qc', 1], ['freq_avg_flag', 1],
['freq_min', 4], ['freq_min_qc', 1], ['freq_min_flag', 1],
['freq_max', 4], ['freq_max_qc', 1], ['freq_max_flag', 1]],
'CX3': [['precipitation', 6], ['precip_qc', 1], ['precip_flag', 1],
['freq_avg', 4], ['freq_avg_qc', 1], ['freq_avg_flag', 1],
['freq_min', 4], ['freq_min_qc', 1], ['freq_min_flag', 1],
['freq_max', 4], ['freq_max_qc', 1], ['freq_max_flag', 1]],
# Visual Runway
'ED1': [['angle', 2], ['runway', 1], ['visibility', 4],
['visibility_qc', 1]],
# Sky coverage
'GA1': [['coverage', 2], ['coverage_qc', 1], ['height', 6, _tonumeric],
['height_qc', 1],
['type', 2], ['type_qc', 1]],
'GA2': [['coverage', 2], ['coverage_qc', 1], ['height', 6, _tonumeric],
['height_qc', 1],
['type', 2], ['type_qc', 1]],
'GA3': [['coverage', 2], ['coverage_qc', 1], ['height', 6, _tonumeric],
['height_qc', 1],
['type', 2], ['type_qc', 1]],
'GA4': [['coverage', 2], ['coverage_qc', 1], ['height', 6, _tonumeric],
['height_qc', 1],
['type', 2], ['type_qc', 1]],
'GA5': [['coverage', 2], ['coverage_qc', 1], ['height', 6, _tonumeric],
['height_qc', 1],
['type', 2], ['type_qc', 1]],
'GA6': [['coverage', 2], ['coverage_qc', 1], ['height', 6, _tonumeric],
['height_qc', 1],
['type', 2], ['type_qc', 1]],
# sky cover summation
'GD1': [['state_code', 1], ['state_code2', 2], ['state_qc', 1],
['height', 6, _tonumeric], ['height_qc', 1], ['height_char', 1]],
'GD2': [['state_code', 1], ['state_code2', 2], ['state_qc', 1],
['height', 6, _tonumeric], ['height_qc', 1], ['height_char', 1]],
'GD3': [['state_code', 1], ['state_code2', 2], ['state_qc', 1],
['height', 6, _tonumeric], ['height_qc', 1], ['height_char', 1]],
'GD4': [['state_code', 1], ['state_code2', 2], ['state_qc', 1],
['height', 6, _tonumeric], ['height_qc', 1], ['height_char', 1]],
'GD5': [['state_code', 1], ['state_code2', 2], ['state_qc', 1],
['height', 6, _tonumeric], ['height_qc', 1], ['height_char', 1]],
'GD6': [['state_code', 1], ['state_code2', 2], ['state_qc', 1],
['height', 6, _tonumeric], ['height_qc', 1], ['height_char', 1]],
# sky coverage identifier
'GE1': [['convective', 1], ['vertical_datum', 6], ['height', 6],
['lower_range', 6]],
# Sky coverage
'GF1': [['total', 2], ['opaque', 2], ['coverage_qc', 1],
['lowest_coverage', 2], ['lowest_coverage_qc', 1],
['lowest_genus', 2], ['lowest_genus_code', 1],
['lowest_height', 5], ['lowest_height_qc', 1],
['mid_genus', 2], ['mid_genus_qc', 1],
['high_genus', 2], ['high_genus_qc', 1]],
# below station cloud ID
'GG1': [['coverage_code', 2], ['coverage_qc', 1], ['height', 5],
['height_qc', 1], ['type_code', 2], ['type_code_qc', 1],
['top_code', 2], ['top_code_qc', 1]],
'GG2': [['coverage_code', 2], ['coverage_qc', 1], ['height', 5],
['height_qc', 1], ['type_code', 2], ['type_code_qc', 1],
['top_code', 2], ['top_code_qc', 1]],
'GG3': [['coverage_code', 2], ['coverage_qc', 1], ['height', 5],
['height_qc', 1], ['type_code', 2], ['type_code_qc', 1],
['top_code', 2], ['top_code_qc', 1]],
'GG4': [['coverage_code', 2], ['coverage_qc', 1], ['height', 5],
['height_qc', 1], ['type_code', 2], ['type_code_qc', 1],
['top_code', 2], ['top_code_qc', 1]],
'GG5': [['coverage_code', 2], ['coverage_qc', 1], ['height', 5],
['height_qc', 1], ['type_code', 2], ['type_code_qc', 1],
['top_code', 2], ['top_code_qc', 1]],
'GG6': [['coverage_code', 2], ['coverage_qc', 1], ['height', 5],
['height_qc', 1], ['type_code', 2], ['type_code_qc', 1],
['top_code', 2], ['top_code_qc', 1]],
# Solar Radiation
'GH1': [['solarrad', 5], ['solarrad_qc', 1], ['solarrad_flag', 1],
['solarrad_min', 5], ['solarrad_min_qc', 1],
['solarrad_min_flag', 1],
['solarrad_max', 5], ['solarrad_max_qc', 1],
['solarrad_max_flag', 1],
['solarrad_std', 5], ['solarrad_std_qc', 1],
['solarrad_std_flag', 1]],
# Sunshine
'GJ1': [['duration', 4], ['duration_qc', 1]],
# sunhine
'GK1': [['percent', 3], ['percent_qc', 1]],
# sunshine for month
'GL1': [['duration', 5], ['duration_qc', 1]],
# solar irradiance
'GM1': [['time', 4], ['global_irradiance', 4],
['global_irradiance_flag', 2],
['global_irradiance_qc', 1],
['direct_irradiance', 4], ['direct_irradiance_flag', 2],
['direct_irradiance_qc', 1],
['diffuse_irradiance', 4], ['diffuse_irradiance_flag', 2],
['diffuse_irradiance_qc', 1],
['uvb_irradiance', 4], ['uvb_irradiance_flag', 2],
['uvb_irradiance_qc', 1]],
# solar radiation
'GN1': [['period', 4], ['upwelling_global', 4], ['upwelling_global_qc', 1],
['downwelling_thermal', 4], ['downwelling_thermal_qc', 1],
['upwelling_thermal', 4], ['upwelling_thermal_qc', 1],
['par', 4], ['par_qc', 4],
['solar_zenith', 3], ['solar_zenith_qc', 1]],
# Net Solar
'GO1': [['time', 4], ['net_solar', 4], ['net_solar_qc', 1],
['net_infrared', 4], ['net_infrared_qc', 1],
['net_radiation', 4], ['net_radiation_qc', 1]],
# Modelled irradiance
'GP1': [['time', 4], ['global_horizontal', 4],
['global_horizontal_flag', 2],
['global_horizontal_uncertainty', 3],
['direct_normal', 4], ['direct_normal_flag', 2],
['direct_normal_uncertainty', 3],
['diffuse_horizontal', 4], ['diffuse_horizontal_flag', 2],
['diffuse_horizontal_uncertainty', 3]],
# hourly solar angle
'GQ1': [['time', 4], ['zenith_angle', 4], ['zenith_angle_qc', 1],
['azimuth_angle', 4], ['azimuth_angle_qc', 1]],
# hourly extraterrestrial rad
'GR1': [['time', 4], ['horizontal', 4], ['horizontal_qc', 1],
['normal', 4], ['normal_qc', 1]],
# Hail data
'HL1': [['size', 3], ['size_qc', 1]],
# Ground Surface
'IA1': [['code', 2], ['code_qc', 1]],
# Ground Surface Min temp
'IA2': [['period', 3], ['min_tmpc', 5], ['min_tempc_qc', 1]],
# Hourly surface temperature
'IB1': [['surftemp', 5], ['surftemp_qc', 1], ['surftemp_flag', 1],
['surftemp_min', 5], ['surftemp_min_qc', 1],
['surftemp_min_flag', 1],
['surftemp_max', 5], ['surftemp_max_qc', 1],
['surftemp_max_flag', 1],
['surftemp_std', 4], ['surftemp_std_qc', 1],
['surftemp_std_flag', 1]],
# Hourly Surface
'IB2': [['surftemp_sb', 5], ['surftemp_sb_qc', 1], ['surftemp_sb_flag', 1],
['surftemp_sb_std', 4], ['surftemp_sb_std_qc', 1],
['surftemp_sb_std_flag', 1]],
# Ground surface obs
'IC1': [['hours', 2], ['wind_movement', 4], ['wind_movement_code', 1],
['wind_movement_flag', 1], ['evaporation', 3],
['evaporation_code', 1],
['evaporation_qc', 1], ['max_pan_tmpc', 4],
['max_pan_tmpc_code', 1],
['max_pan_tmpc_qc', 1], ['min_pan_tmpc', 4],
['min_pan_tmpc_code', 1],
['min_pan_tmpc_qc', 1]],
# Temperature extremes
'KA1': [['hours', 3, _i10], ['code', 1], ['tmpc', 5, _d10], ['qc', 1]],
'KA2': [['hours', 3, _i10], ['code', 1], ['tmpc', 5, _d10], ['qc', 1]],
'KA3': [['hours', 3, _i10], ['code', 1], ['tmpc', 5, _d10], ['qc', 1]],
'KA4': [['hours', 3, _i10], ['code', 1], ['tmpc', 5, _d10], ['qc', 1]],
# average air temp
'KB1': [['hours', 3, _i10], ['code', 1], ['tmpc', 5, _d10], ['qc', 1]],
'KB2': [['hours', 3, _i10], ['code', 1], ['tmpc', 5, _d10], ['qc', 1]],
'KB3': [['hours', 3, _i10], ['code', 1], ['tmpc', 5, _d10], ['qc', 1]],
'KB4': [['hours', 3, _i10], ['code', 1], ['tmpc', 5, _d10], ['qc', 1]],
# extreme air temp
'KC1': [['month_code', 1], ['cond_code', 1], ['tmpc', 5], ['dates', 6],
['tmpc_qc', 1]],
'KC2': [['month_code', 1], ['cond_code', 1], ['tmpc', 5], ['dates', 6],
['tmpc_qc', 1]],
# heating/cooling degree days
'KD1': [['period', 3], ['code', 1], ['value', 4], ['qc', 1]],
'KD2': [['period', 3], ['code', 1], ['value', 4], ['qc', 1]],
# extreme temperatures, number of days
'KE1': [['days32', 2], ['days32_code', 1],
['days90', 2], ['days90_code', 1],
['daysmin32', 2], ['daysmin32_code', 1],
['daysmin0', 2], ['daysmin0_code', 1]],
# Hourly calc temp
'KF1': [['temp', 5], ['temp_qc', 1]],
# average dewpoint
'KG1': [['period', 3], ['code', 1], ['dewpoint', 5], ['dewpoint_code', 1],
['dewpoint_qc', 1]],
'KG2': [['period', 3], ['code', 1], ['dewpoint', 5], ['dewpoint_code', 1],
['dewpoint_qc', 1]],
# pressure
'MA1': [['altimeter', 5, _d10], ['altimeter_code', 1],
['station_pressure', 5, _d10],
['station_pressure_code', 1]],
# Pressure Tendency
'MD1': [['code', 1], ['code_qc', 1],
['threehour', 3, _d10], ['threehour_qc', 1],
['24hour', 4, _d10], ['24hour_qc', 1]],
# geopotential
'ME1': [['level_code', 1], ['height', 4], ['height_qc', 1]],
# SLP
'MF1': [['pressure', 5], ['pressure_qc', 1], ['pressure_day', 5],
['pressure_day_qc', 1]],
# Pressure
'MG1': [['avg_pressure', 5], ['avg_pressure_qc', 1],
['min_pressure', 5], ['min_pressure_qc', 1]],
# Pressure for the month
'MH1': [['avg_pressure', 5], ['avg_pressure_qc', 1],
['avg_slp', 5], ['avg_slp_qc', 1]],
# Pressure for the month
'MK1': [['max_pressure', 5], ['max_pressure_datetime', 6],
['max_pressure_qc', 1],
['min_pressure', 5], ['min_pressure_datetime', 6],
['min_pressure_qc', 1]],
# Present Weather
'MV1': [['code', 2], ['code_qc', 1]],
'MV2': [['code', 2], ['code_qc', 1]],
'MV3': [['code', 2], ['code_qc', 1]],
'MV4': [['code', 2], ['code_qc', 1]],
'MV5': [['code', 2], ['code_qc', 1]],
'MV6': [['code', 2], ['code_qc', 1]],
'MV7': [['code', 2], ['code_qc', 1]],
# Present Weather Manual
'MW1': [['code', 2], ['qc', 1]],
'MW2': [['code', 2], ['qc', 1]],
'MW3': [['code', 2], ['qc', 1]],
'MW4': [['code', 2], ['qc', 1]],
'MW5': [['code', 2], ['qc', 1]],
'MW6': [['code', 2], ['qc', 1]],
'MW7': [['code', 2], ['qc', 1]],
# Supplemental Wind
'OA1': [['code', 1], ['period', 2], ['smps', 4], ['qc', 1]],
'OA2': [['code', 1], ['period', 2], ['smps', 4], ['qc', 1]],
'OA3': [['code', 1], ['period', 2], ['smps', 4], ['qc', 1]],
# hourly subhourly wind
'OB1': [['period', 4], ['wind_max', 4], ['wind_max_qc', 1],
['wind_max_flag', 1], ['wind_max_drct', 3],
['wind_max_drct_qc', 1], ['wind_max_drct_flag', 1],
['wind_std', 5], ['wind_std_qc', 1], ['wind_std_flag', 1],
['wind_dir_std', 5], ['wind_dir_std_qc', 1],
['wind_dir_std_flag', 1]],
'OB2': [['period', 4], ['wind_max', 4], ['wind_max_qc', 1],
['wind_max_flag', 1], ['wind_max_drct', 3],
['wind_max_drct_qc', 1], ['wind_max_drct_flag', 1],
['wind_std', 5], ['wind_std_qc', 1], ['wind_std_flag', 1],
['wind_dir_std', 5], ['wind_dir_std_qc', 1],
['wind_dir_std_flag', 1]],
# Wind gust
'OC1': [['speed', 4, _d10], ['speed_qc', 1]],
# Supplementary Wind
'OD1': [['code', 1], ['hours', 2], ['speed', 4], ['speed_qc', 1],
['direction', 3]],
'OD2': [['code', 1], ['hours', 2], ['speed', 4], ['speed_qc', 1],
['direction', 3]],
'OD3': [['code', 1], ['hours', 2], ['speed', 4], ['speed_qc', 1],
['direction', 3]],
# Wind summary
'OE1': [['code', 1], ['period', 2], ['speed', 5], ['direction', 3],
['time', 4], ['qc', 1]],
'OE2': [['code', 1], ['period', 2], ['speed', 5], ['direction', 3],
['time', 4], ['qc', 1]],
'OE3': [['code', 1], ['period', 2], ['speed', 5], ['direction', 3],
['time', 4], ['qc', 1]],
# relative humidity
'RH1': [['hours', 3], ['code', 1], ['percentage', 3], ['derived', 1],
['qc', 1]],
'RH2': [['hours', 3], ['code', 1], ['percentage', 3], ['derived', 1],
['qc', 1]],
'RH3': [['hours', 3], ['code', 1], ['percentage', 3], ['derived', 1],
['qc', 1]],
# Sea Surface temp
'SA1': [['tmpc', 4], ['qc', 1]],
# Soil temperature
'ST1': [['type', 1], ['tmpc', 5], ['qc', 1], ['depth', 4],
['depth_qc', 1], ['cover', 2], ['cover_qc', 1],
['subplot', 1], ['subplot_qc', 1]],
# Wave
'UA1': [['method', 1], ['period', 2], ['height', 3], ['height_qc', 1],
['state', 2], ['state_qc', 1]],
# Wave swell
'UG1': [['seconds', 2], ['height', 3], ['direction', 3],
['swell_qc', 1]],
'UG2': [['seconds', 2], ['height', 3], ['direction', 3],
['swell_qc', 1]],
# Ice Accretion
'WA1': [['source_code', 1], ['thickness', 3], ['tendency_code', 1],
['qc', 1]],
# Surface Ice
'WD1': [['bearing_code', 2], ['concentration_rate', 3],
['non_uniform', 2], ['position_code', 2],
['ship_relative', 1], ['penetration_code', 1],
['ice_trend', 1], ['development_code', 2],
['growler', 1], ['gbb', 3], ['iceberg', 3],
['qc', 1]],
# Water Ice
'WG1': [['bearing', 2], ['edge_distance', 2], ['orientation', 2],
['formation_type', 2], ['navigation_effect', 2],
['qc', 1]],
# water level
'WJ1': [['thickness', 3],
['discharge', 5], ['ice', 2], ['ice2', 2], ['stage', 5],
['slush', 1], ['water_level_code', 1]],
}
# Label constant for sea level pressure in inches (referenced elsewhere in
# this module; usage is not visible in this chunk)
SLP = 'Sea Level PressureIn'
# Extracts the unparsable group names from a MetarParserError message so
# that process_metar() can strip them from the METAR and retry the parse
ERROR_RE = re.compile("Unparsed groups in body '(?P<msg>.*)' while processing")
def vsbyfmt(val):
    """Format a visibility value (miles) into METAR-style fractions.

    Values are bucketed into the conventional METAR fraction strings
    (1/8, 1/4, ... 2 1/2); anything larger is reported as a whole number.
    A value rounding to exactly zero yields the integer 0.
    """
    val = round(val, 3)
    if val == 0:
        return 0
    # (upper bound, label) pairs, checked in ascending order
    buckets = (
        (0.125, "1/8"),
        (0.25, "1/4"),
        (0.375, "3/8"),
        (0.5, "1/2"),
        (1.1, "1"),
        (1.25, "1 1/4"),
        (1.6, "1 1/2"),
        (2.1, "2"),
        (2.6, "2 1/2"),
    )
    for limit, label in buckets:
        if val <= limit:
            return label
    return "%.0f" % (val,)
class OB(object):
    ''' hacky representation of the database schema '''
    # station identifier
    station = None
    # observation timestamp (set from the caller-provided valid time)
    valid = None
    # air temperature [F]
    tmpf = None
    # dew point temperature [F]
    dwpf = None
    # wind direction [deg]
    drct = None
    # wind speed [knots]
    sknt = None
    # altimeter [inches]
    alti = None
    # wind gust [knots]
    gust = None
    # visibility [statute miles]
    vsby = None
    # sky coverage codes for up to four layers
    skyc1 = None
    skyc2 = None
    skyc3 = None
    skyc4 = None
    # sky layer heights [ft] for layers 1-3
    skyl1 = None
    skyl2 = None
    skyl3 = None
    # raw METAR string (truncated to 254 chars by process_metar)
    metar = None
    # sky layer height [ft] for layer 4
    skyl4 = None
    # 3, 6 and 24 hour precipitation [inch]
    p03i = None
    p06i = None
    p24i = None
    # 6 and 24 hour temperature extremes [F]
    max_tmpf_6hr = None
    min_tmpf_6hr = None
    max_tmpf_24hr = None
    min_tmpf_24hr = None
    # mean sea level pressure [mb]
    mslp = None
    # 1 hour precipitation [inch]
    p01i = None
    # present weather codes, comma separated, truncated to 24 chars
    presentwx = None
def process_metar(mstr, now):
    """Parse a raw METAR string into an ``OB`` observation object.

    The metar library is tried repeatedly: when it raises a
    ``MetarParserError`` naming unparsed groups, those groups are removed
    from the string and the parse retried, until it either succeeds or no
    further progress can be made.

    Args:
        mstr (str): raw METAR text
        now (datetime): timestamp assigned to the observation and used to
            resolve the METAR day/hour into a month/year

    Returns:
        OB or None: the parsed observation, or None when parsing fails
    """
    mtr = None
    while mtr is None:
        try:
            mtr = Metar(mstr, now.month, now.year)
        except MetarParserError as exp:
            try:
                msg = str(exp)
            except Exception:
                # stringifying the exception itself can blow up inside the
                # metar library; nothing more we can do with this ob
                return None
            tokens = ERROR_RE.findall(msg)
            orig_mstr = mstr
            if tokens:
                # strip each offending group and retry the parse
                for token in tokens[0].split():
                    mstr = mstr.replace(" %s" % (token, ), "")
                if orig_mstr == mstr:
                    # nothing was removed, retrying would loop forever
                    print("Can't fix badly formatted metar: " + mstr)
                    return None
            else:
                print("MetarParserError: "+msg)
                return None
        except Exception as exp:
            print("Double Fail: %s %s" % (mstr, exp))
            return None
    if mtr is None or mtr.time is None:
        return None
    ob = OB()
    ob.metar = mstr[:254]
    ob.valid = now
    if mtr.temp:
        ob.tmpf = mtr.temp.value("F")
    if mtr.dewpt:
        ob.dwpf = mtr.dewpt.value("F")
    if mtr.wind_speed:
        ob.sknt = mtr.wind_speed.value("KT")
    if mtr.wind_gust:
        ob.gust = mtr.wind_gust.value("KT")
    if mtr.wind_dir and mtr.wind_dir.value() != "VRB":
        ob.drct = mtr.wind_dir.value()
    if mtr.vis:
        ob.vsby = mtr.vis.value("SM")
    # see pull request #38
    if mtr.press and mtr.press != mtr.press_sea_level:
        ob.alti = mtr.press.value("IN")
    if mtr.press_sea_level:
        ob.mslp = mtr.press_sea_level.value("MB")
    if mtr.precip_1hr:
        ob.p01i = mtr.precip_1hr.value("IN")
    # Sky coverage and layer heights, up to four layers
    for i, (cover, height, _) in enumerate(mtr.sky):
        setattr(ob, 'skyc%s' % (i + 1, ), cover)
        if height is not None:
            setattr(ob, 'skyl%s' % (i + 1, ), height.value("FT"))
    if mtr.max_temp_6hr:
        ob.max_tmpf_6hr = mtr.max_temp_6hr.value("F")
    if mtr.min_temp_6hr:
        ob.min_tmpf_6hr = mtr.min_temp_6hr.value("F")
    if mtr.max_temp_24hr:
        ob.max_tmpf_24hr = mtr.max_temp_24hr.value("F")
    if mtr.min_temp_24hr:
        # BUG FIX: previously assigned to ob.min_tmpf_6hr, clobbering the
        # 6 hour minimum and leaving min_tmpf_24hr always unset
        ob.min_tmpf_24hr = mtr.min_temp_24hr.value("F")
    if mtr.precip_3hr:
        ob.p03i = mtr.precip_3hr.value("IN")
    if mtr.precip_6hr:
        ob.p06i = mtr.precip_6hr.value("IN")
    if mtr.precip_24hr:
        ob.p24i = mtr.precip_24hr.value("IN")
    # Present weather: join each group's non-None parts, comma separated,
    # truncated to the database column width
    if mtr.weather:
        pwx = ["".join(a for a in x if a is not None) for x in mtr.weather]
        ob.presentwx = (",".join(pwx))[:24]
    return ob
def sql(txn, stid, data):
    """Persist what data we have to the IEM schema database.

    The IEM database's atomic data is based on parsing of the METAR
    product, so to avoid conflicts the METAR format again drives the
    values used for the database insert.

    Args:
        txn (cursor): database transaction
        stid (str): station identifier to use with the database
        data (dict): what we got from previous parsing

    Returns:
        int or None: number of rows inserted
    """
    # Which METAR source to use?  For a US site the embedded remark is
    # likely the best choice, otherwise fall back to the generated one.
    metar = data['extra'].get('REM', {}).get('MET', '')
    if len(metar) > 20 and (len(stid) == 3 or stid[0] == 'P'):
        # Split off the cruft
        metar = metar.strip()
        metar = metar.replace(";", " ")
        metar = metar.replace("METAR ", "")
        metar = metar.replace("COR ", "")
        metar = metar.rstrip("=")
    else:
        metar = data['metar']
    # observations are partitioned into per-year tables
    table = "t%s" % (data['valid'].year, )
    ob = process_metar(metar, data['valid'])
    if ob is None:
        return
    # keep 4-char non-K identifiers as-is, otherwise use the last 3 chars
    if not (len(stid) == 4 and stid[0] != 'K'):
        stid = stid[-3:]
    _sql = """
    INSERT into """ + table + """ (station, valid,
    tmpf, dwpf, vsby, drct, sknt, gust, p01i, alti, skyc1, skyc2,
    skyc3, skyc4, skyl1, skyl2, skyl3, skyl4, metar, mslp,
    presentwx, p03i, p06i, p24i, max_tmpf_6hr, max_tmpf_24hr,
    min_tmpf_6hr, min_tmpf_24hr, report_type)
    values (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s,
    %s, %s, %s, %s, %s,%s,%s, %s, %s, %s, %s, %s, %s, %s, %s, 2)
    RETURNING valid
    """
    args = (stid, ob.valid, ob.tmpf, ob.dwpf, ob.vsby, ob.drct,
            ob.sknt, ob.gust, ob.p01i, ob.alti, ob.skyc1, ob.skyc2,
            ob.skyc3, ob.skyc4, ob.skyl1, ob.skyl2, ob.skyl3,
            ob.skyl4, metar,
            ob.mslp, ob.presentwx, ob.p03i,
            ob.p06i, ob.p24i, ob.max_tmpf_6hr, ob.max_tmpf_24hr,
            ob.min_tmpf_6hr, ob.min_tmpf_24hr)
    try:
        txn.execute(_sql, args)
    except Exception as _exp:
        # dump the offending metar/args for debugging, then re-raise
        print(metar)
        print(args)
        raise
    return txn.rowcount
def gen_metar(data):
    """Convert our parsed dictionary into a METAR string.

    Builds a best-effort METAR (wind, visibility, present weather, clouds,
    temperature/dewpoint, altimeter, plus a RMK section with precipitation,
    SLP, T-group, 4-group and 3-hour pressure tendency) and stores it under
    ``data['metar']``.

    Args:
        data (dict): parsed observation, including the ``extra`` groups
            produced by ``parse_extra``.
    """
    # header: call sign, day-hour-minute, AUTO marker
    mtr = "%s %sZ AUTO " % (data['call_id'], data['valid'].strftime("%d%H%M"))
    # wind direction
    if data.get('wind_code') == 'C':
        # calm winds
        mtr += "00000KT "
    elif (data.get('drct_qc') in ["1", "5"] and
          data['wind_speed_mps'] is not None):
        if data['drct'] is None:
            mtr += "////"
        else:
            mtr += "%03.0f" % (data['drct'], )
        kts = speed(data['wind_speed_mps'], 'MPS').value('KT')
        mtr += "%02.0f" % (kts, )
        # OC1 carries a wind gust, appended as Gxx
        if 'OC1' in data['extra']:
            val = data['extra']['OC1'].get('speed', 0)
            if val > 0:
                mtr += "G%02.0f" % (speed(val, 'MPS').value('KT'), )
        mtr += 'KT '
    # vis (meters -> statute miles, METAR fraction formatting)
    if data['vsby_m'] is not None:
        val = distance(data['vsby_m'], 'M').value('MI')
        mtr += "%sSM " % (vsbyfmt(val), )
    # Present Weather Time
    combocode = ""
    for code in ['AU1', 'AU2', 'AU3', 'AU4', 'AU5', 'AU6', 'AU7', 'AU8',
                 'AU9']:
        if code not in data['extra']:
            continue
        val = data['extra'][code]
        if val['combo'] == "1":  # lone
            if val['obscure'] == "1":
                mtr += "BR "
        elif val['combo'] == '2':  # start of dual code
            if val['descriptor'] == '7':
                combocode = "TS"
        elif val['combo'] == '3':  # end of dual code
            if val['proximity'] == '3' and val['precip'] == '02':
                mtr += "+%sRA " % (combocode, )
                combocode = ""
    # Clouds: GD groups map to coverage code plus height in hundreds of feet
    for code in ['GD1', 'GD2', 'GD3', 'GD4', 'GD5', 'GD6']:
        if code not in data['extra']:
            continue
        val = data['extra'][code]
        skycode = SKY_STATE_CODES[val['state_code']]
        height = val['height']
        if skycode == 'CLR':
            mtr += "CLR "
        elif height is None:
            continue
        else:
            hft = distance(height, 'M').value('FT') / 100.
            mtr += "%s%03.0f " % (skycode, hft)
    # temperature; tgroup is the higher precision RMK T-group
    tgroup = None
    if (data.get('airtemp_c_qc') not in ["2", "3"] and
            data['airtemp_c'] is not None):
        tmpc = data['airtemp_c']
        dwpc = data['dewpointtemp_c']
        # "M" prefix denotes a negative value in METAR
        mtr += "%s%02.0f/" % ("M" if tmpc < 0 else "", abs(tmpc))
        if dwpc is not None:
            mtr += "%s%02.0f" % ("M" if dwpc < 0 else "", abs(dwpc))
            tgroup = "T%s%03i%s%03i" % ("1" if tmpc < 0 else "0",
                                        abs(tmpc) * 10.,
                                        "1" if dwpc < 0 else "0",
                                        abs(dwpc) * 10.)
        mtr += " "
    # altimeter: hPa -> inches, encoded without decimal point
    if ('MA1' in data['extra'] and
            data['extra']['MA1']['altimeter'] is not None):
        altimeter = pressure(data['extra']['MA1']['altimeter'], 'HPA').value(
            "IN")
        mtr += "A%4.0f " % (altimeter * 100, )
    rmk = []
    # precipitation groups: AA groups hold hours + depth in mm
    for code in ['AA1', 'AA2', 'AA3', 'AA4']:
        if code not in data['extra']:
            continue
        hours = data['extra'][code]['hours']
        depth = data['extra'][code]['depth']
        if hours is None or depth is None or hours == 12:
            continue
        elif depth == 0 and data['extra'][code]['cond_code'] != '2':
            # zero depth is only reported for trace (cond_code '2')
            continue
        elif hours in [3, 6]:
            prefix = "6"
        elif hours == 24:
            prefix = "7"
        elif hours == 1:
            prefix = "P"
        else:
            warnings.warn("Unknown precip hours %s" % (hours, ))
            continue
        amount = distance(depth, 'MM').value('IN')
        rmk.append("%s%04.0f" % (prefix, amount * 100))
    # SLP group: tenths of hPa, last three digits only
    if data['mslp_hpa'] is not None:
        rmk.append("SLP%03.0f" % (data['mslp_hpa'] * 10 % 1000, ))
    if tgroup is not None:
        rmk.append(tgroup)
    # temperature groups (1/2 six hour extremes, 4-group 24 hour extremes)
    group4 = {'M': '////', 'N': '////'}
    for code in ['KA1', 'KA2', 'KA3', 'KA4']:
        if code not in data['extra']:
            continue
        val = data['extra'][code]
        hours = val['hours']
        typ = val['code']
        tmpc = val['tmpc']
        if tmpc is None:
            continue
        if hours is None or hours == 12:
            continue
        elif hours == 6 and typ == 'M':
            prefix = "1"
        elif hours == 6 and typ == 'N':
            prefix = "2"
        elif hours == 24:
            group4[typ] = "%s%03i" % ("1" if tmpc < 0 else "0",
                                      abs(tmpc) * 10)
            continue
        else:
            warnings.warn("Unknown temperature hours %s typ: %s" % (hours,
                                                                    typ))
            continue
        rmk.append("%s%s%03i" % (prefix, "1" if tmpc < 0 else "0",
                                 abs(tmpc) * 10))
    if group4['M'] != '////' or group4['N'] != '////':
        rmk.append("4%(M)s%(N)s" % group4)
    # 3-hour pressure tendency
    if ('MD1' in data['extra'] and
            data['extra']['MD1']['threehour'] is not None):
        rmk.append("5%s%03i" % (data['extra']['MD1']['code'],
                                data['extra']['MD1']['threehour'] * 10))
    # tag the product so downstream consumers know its origin
    rmk.append("IEM_DS3505")
    mtr += "RMK %s " % (" ".join(rmk), )
    data['metar'] = mtr.strip()
def parser(msg, call_id, add_metar=False):
    """Parse the message (single line) into a dict.

    Args:
        msg (str): the single line of data to parse into a dict
        call_id (str): hard coded call_id as the data can't be trusted, sigh
        add_metar (bool,optional): should a METAR be generated? Default: False

    Returns:
        dict or None
    """
    match = DS3505_RE.match(msg)
    if match is None:
        return None
    data = match.groupdict()
    # timestamp is split across two captured fields; recombine as UTC
    stamp = "%s %s" % (data['yyyymmdd'], data['hhmi'])
    data['valid'] = datetime.datetime.strptime(
        stamp, '%Y%m%d %H%M').replace(tzinfo=pytz.utc)
    data['call_id'] = call_id
    # numeric fields are fixed-point encoded; convert by scale
    for key in ('lat', 'lon'):
        data[key] = _d1000(data[key])
    for key in ('wind_speed_mps', 'airtemp_c', 'dewpointtemp_c', 'mslp_hpa'):
        data[key] = _d10(data[key])
    for key in ('drct', 'ceiling_m', 'vsby_m', 'elevation'):
        data[key] = _tonumeric(data[key])
    # optional data sections start at byte offset 105
    data['extra'] = {}
    parse_extra(data, msg[105:])
    if add_metar:
        try:
            gen_metar(data)
        except Exception as _exp:
            # dump the parsed payload to aid debugging, then re-raise
            print(json.dumps(data, indent=True, sort_keys=True, default=str))
            raise
    return data
def parse_extra(data, extra):
    """Parse the additional data fields.

    Walks the variable-length "additional data" portion of a DS3505 record
    with a byte cursor (``pos``), decoding each 3-char section identifier
    and its fixed-width payload into ``data['extra']``.

    Args:
        data (dict): parsed record; results are stored under ``data['extra']``
        extra (str): the raw additional-data substring (record bytes 105+)

    Raises:
        Exception: when an unknown section code is encountered.
    """
    pos = 0
    while pos < len(extra):
        # each section starts with a 3 character identifier
        code = extra[pos:pos+3]
        pos += 3
        if code == 'ADD':
            # marker that additional data follows; carries no payload
            continue
        if code == 'QNN':
            # QNN: repeated 5-char element ids, each with a 6-char value
            data['extra']['QNN'] = {}
            code = extra[pos:pos+5]
            while QNN_RE.match(code):
                pos += 5
                data['extra']['QNN'][code] = extra[pos:pos+6]
                pos += 6
                code = extra[pos:pos+5]
            continue
        if code == 'REM':
            # REM: remark sections, each a 3-char type + 3-char length + body
            data['extra']['REM'] = {}
            code = extra[pos:pos+3]
            while code in ['SYN', 'AWY', 'MET', 'SOD', 'SOM', 'HPD']:
                pos += 3
                sz = int(extra[pos:pos+3])
                pos += 3
                data['extra']['REM'][code] = extra[pos:pos+int(sz)]
                pos += sz
                code = extra[pos:pos+3]
            continue
        if code == 'EQD':
            # EQD: equipment data, fixed 13-char payload per element
            data['extra']['EQD'] = {}
            code = extra[pos:pos+3]
            while EQD_RE.match(code):
                pos += 3
                data['extra']['EQD'][code] = extra[pos:pos+13]
                pos += 13
                code = extra[pos:pos+3]
            continue
        if code not in ADDITIONAL:
            raise Exception(("Unaccounted for %s\n"
                             "remaining '%s'\n"
                             "extra: '%s'") % (code, extra[pos:], extra))
        # generic section: ADDITIONAL maps code -> list of
        # [field_name, width] or [field_name, width, converter]
        data['extra'][code] = dict()
        for token in ADDITIONAL[code]:
            if len(token) == 3:
                # third element is a conversion callable (e.g. _d10)
                data['extra'][code][token[0]] = token[2](
                    extra[pos:pos+token[1]])
            else:
                data['extra'][code][token[0]] = extra[pos:pos+token[1]]
            pos += token[1]
| [
"akrherz@iastate.edu"
] | akrherz@iastate.edu |
c1287a336f7f5e7e784c31b5a260209c471f871f | 846a7668ac964632bdb6db639ab381be11c13b77 | /android/tools/test/connectivity/acts/tests/google/wifi/WifiScannerBssidTest.py | e91c449924d208fc24a570a69d77cfff1e06e0dc | [] | no_license | BPI-SINOVOIP/BPI-A64-Android8 | f2900965e96fd6f2a28ced68af668a858b15ebe1 | 744c72c133b9bf5d2e9efe0ab33e01e6e51d5743 | refs/heads/master | 2023-05-21T08:02:23.364495 | 2020-07-15T11:27:51 | 2020-07-15T11:27:51 | 143,945,191 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 22,870 | py | #!/usr/bin/env python3.4
#
# Copyright 2016 - The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import itertools
import queue
from acts import asserts
from acts import base_test
from acts import utils
from acts.test_decorators import test_tracker_info
from acts.test_utils.wifi import wifi_test_utils as wutils
# Seconds to wait for an onFound/onLost bssid tracking event
BSSID_EVENT_WAIT = 30
# Prefixes used to build SL4A event names for bssid tracking and scans
BSSID_EVENT_TAG = "WifiScannerBssid"
SCAN_EVENT_TAG = "WifiScannerScan"
SCANTIME = 10000  # ms; framework supports only 10s as minimum scan interval
class WifiScannerBssidError(Exception):
    """Exception type for WifiScanner bssid tracking test failures."""
    pass
class WifiScannerBssidTest(base_test.BaseTestClass):
def __init__(self, controllers):
base_test.BaseTestClass.__init__(self, controllers)
# A list of all test cases to be executed in this class.
self.tests = ("test_wifi_track_bssid_sanity",
"test_wifi_track_bssid_found",
"test_wifi_track_bssid_lost",
"test_wifi_track_bssid_for_2g_while_scanning_5g_channels",
"test_wifi_track_bssid_for_5g_while_scanning_2g_channels",)
def setup_class(self):
self.default_scan_setting = {
"band": wutils.WifiEnums.WIFI_BAND_BOTH_WITH_DFS,
"periodInMs": SCANTIME,
"reportEvents": wutils.WifiEnums.REPORT_EVENT_AFTER_EACH_SCAN,
'numBssidsPerScan': 32
}
self.leeway = 5
self.stime_channel = 47 #dwell time plus 2ms
self.dut = self.android_devices[0]
wutils.wifi_test_device_init(self.dut)
self.attenuators = wutils.group_attenuators(self.attenuators)
asserts.assert_true(self.dut.droid.wifiIsScannerSupported(),
"Device %s doesn't support WifiScanner, abort." %
self.dut.model)
"""It will setup the required dependencies and fetch the user params from
config file"""
self.attenuators[0].set_atten(0)
self.attenuators[1].set_atten(0)
req_params = ("bssid_2g", "bssid_5g", "bssid_dfs", "attenuator_id",
"max_bugreports")
self.wifi_chs = wutils.WifiChannelUS(self.dut.model)
self.unpack_userparams(req_params, two_ap_testbed=False)
def teardown_class(self):
BaseTestClass.teardown_test(self)
self.log.debug("Shut down all wifi scanner activities.")
self.dut.droid.wifiScannerShutdown()
def on_fail(self, test_name, begin_time):
if self.max_bugreports > 0:
self.dut.take_bug_report(test_name, begin_time)
self.max_bugreports -= 1
""" Helper Functions Begin """
    def fetch_scan_result(self, scan_idx, scan_setting):
        """Fetch the scan result for the provided listener index.

        This function calculates the time required for scanning based on the
        scan setting, waits for the scan result event, and on triggering of
        the event processes the scan result.

        Args:
            scan_idx: Index of the scan listener.
            scan_setting: Setting used for starting the scan.

        Returns:
            scan_results: if scan result available.
        """
        #generating event wait time from scan setting plus leeway
        self.log.debug(scan_setting)
        scan_time, scan_channels = wutils.get_scan_time_and_channels(
            self.wifi_chs, scan_setting, self.stime_channel)
        scan_time += scan_setting['periodInMs'
                                  ] #add scan period delay for next cycle
        if scan_setting[
                "reportEvents"] == wutils.WifiEnums.REPORT_EVENT_AFTER_EACH_SCAN:
            waittime = int(scan_time / 1000) + self.leeway
        else:
            # buffered results: wait for the full cache window plus scan time
            time_cache = scan_setting['periodInMs'] * 10 #default cache
            waittime = int((time_cache + scan_time) / 1000) + self.leeway
        event_name = "%s%sonResults" % (SCAN_EVENT_TAG, scan_idx)
        self.log.info("Waiting for the scan result event %s", event_name)
        event = self.dut.ed.pop_event(event_name, waittime)
        results = event["data"]["Results"]
        # only return when the first result bucket actually carries scan data
        if len(results) > 0 and "ScanResults" in results[0]:
            return results[0]["ScanResults"]
    def start_scan_and_validate_environment(self, scan_setting,
                                            bssid_settings):
        """Validate the environment for the test using the current scan
        result for the provided settings.

        This function starts the scan for the given setting and fails the
        test if any Bssid of interest already shows up in the scan result
        (meaning the attenuated AP is still visible).

        Args:
            scan_setting: Setting used for starting the scan.
            bssid_settings: list of bssid settings.

        Returns:
            True, if bssid not found in scan result.
        """
        try:
            data = wutils.start_wifi_background_scan(self.dut, scan_setting)
            # remember listener index so later helpers/finally can stop it
            self.scan_idx = data["Index"]
            results = self.fetch_scan_result(self.scan_idx, scan_setting)
            self.log.debug("scan result %s.", results)
            asserts.assert_true(results,
                                "Device is not able to fetch the scan results")
            for result in results:
                for bssid_setting in bssid_settings:
                    if bssid_setting[wutils.WifiEnums.BSSID_KEY] == result[
                            wutils.WifiEnums.BSSID_KEY]:
                        asserts.fail(("Test environment is not valid: Bssid %s"
                                      "already exist in current scan results")
                                     % result[wutils.WifiEnums.BSSID_KEY])
        except queue.Empty as error:
            # no scan result event arrived in time; stop the scan, then fail
            self.dut.droid.wifiScannerStopBackgroundScan(self.scan_idx)
            raise AssertionError(
                "OnResult event did not triggered for scanner\n%s" % error)
def check_bssid_in_found_result(self, bssid_settings, found_results):
"""look for any tracked bssid in reported result of found bssids.
Args:
bssid_settings:Setting used for tracking bssids.
found_results: Result reported in found event.
Returns:
True if bssid is present in result.
"""
for bssid_setting in bssid_settings:
for found_result in found_results:
if found_result[wutils.WifiEnums.BSSID_KEY] == bssid_setting[
wutils.WifiEnums.BSSID_KEY]:
return
asserts.fail("Test fail because Bssid %s is not found in event results"
% bssid_settings)
    def track_bssid_with_vaild_scan_for_found(self, track_setting):
        """Common logic for tracking a bssid for Found event.

        1. Starts Wifi Scanner bssid tracking for interested bssids in
           track_setting.
        2. Start Wifi Scanner scan with default scan settings.
        3. Validate the environment to check AP is not in range.
        4. Attenuate the signal to make AP in range.
        5. Verified that onFound event is triggered for interested bssids in
           track setting.

        Args:
            track_setting: Setting for bssid tracking.

        Returns:
            True if found event occur for interested BSSID.
        """
        # push the AP out of range before tracking begins
        self.attenuators[self.attenuator_id].set_atten(90)
        data = wutils.start_wifi_track_bssid(self.dut, track_setting)
        idx = data["Index"]
        self.start_scan_and_validate_environment(self.default_scan_setting,
                                                 track_setting["bssidInfos"])
        try:
            # bring the AP back into range, which should trigger onFound
            self.attenuators[self.attenuator_id].set_atten(0)
            event_name = "%s%sonFound" % (BSSID_EVENT_TAG, idx)
            self.log.info("Waiting for the BSSID event %s", event_name)
            event = self.dut.ed.pop_event(event_name, BSSID_EVENT_WAIT)
            self.log.debug(event)
            self.check_bssid_in_found_result(track_setting["bssidInfos"],
                                             event["data"]["Results"])
        except queue.Empty as error:
            self.log.error(error)
            # log scan result for debugging
            results = self.fetch_scan_result(self.scan_idx,
                                             self.default_scan_setting)
            self.log.debug("scan result %s", results)
            raise AssertionError("Event %s did not triggered for %s\n%s" %
                                 (event_name, track_setting["bssidInfos"],
                                  error))
        finally:
            # always stop the scan and the bssid tracking session
            self.dut.droid.wifiScannerStopBackgroundScan(self.scan_idx)
            self.dut.droid.wifiScannerStopTrackingBssids(idx)
    def track_bssid_with_vaild_scan_for_lost(self, track_setting):
        """Common logic for tracking a bssid for Lost event.

        1. Start Wifi Scanner scan with default scan settings.
        2. Validate the environment to check AP is not in range.
        3. Starts Wifi Scanner bssid tracking for interested bssids in
           track_setting.
        4. Attenuate the signal to make Bssids in range.
        5. Verified that onFound event is triggered for interested bssids in
           track setting.
        6. Attenuate the signal to make Bssids out of range.
        7. Verified that onLost event is triggered.

        Args:
            track_setting: Setting for bssid tracking.
            scan_setting: Setting used for starting the scan.

        Returns:
            True if Lost event occur for interested BSSID.
        """
        # push the AP out of range before validating the environment
        self.attenuators[self.attenuator_id].set_atten(90)
        self.start_scan_and_validate_environment(self.default_scan_setting,
                                                 track_setting["bssidInfos"])
        idx = None
        found = False  # NOTE(review): unused local, kept for reference
        try:
            data = wutils.start_wifi_track_bssid(self.dut, track_setting)
            idx = data["Index"]
            self.attenuators[self.attenuator_id].set_atten(0)
            #onFound event should be occurre before tracking for onLost event
            event_name = "%s%sonFound" % (BSSID_EVENT_TAG, idx)
            self.log.info("Waiting for the BSSID event %s", event_name)
            event = self.dut.ed.pop_event(event_name, BSSID_EVENT_WAIT)
            self.log.debug(event)
            self.check_bssid_in_found_result(track_setting["bssidInfos"],
                                             event["data"]["Results"])
            # attenuate again so the bssid disappears from scans
            self.attenuators[self.attenuator_id].set_atten(90)
            # log scan result for debugging
            for i in range(1, track_setting["apLostThreshold"]):
                results = self.fetch_scan_result(self.scan_idx,
                                                 self.default_scan_setting)
                self.log.debug("scan result %s %s", i, results)
            event_name = "%s%sonLost" % (BSSID_EVENT_TAG, idx)
            self.log.info("Waiting for the BSSID event %s", event_name)
            event = self.dut.ed.pop_event(event_name, BSSID_EVENT_WAIT)
            self.log.debug(event)
        except queue.Empty as error:
            raise AssertionError("Event %s did not triggered for %s\n%s" %
                                 (event_name, track_setting["bssidInfos"],
                                  error))
        finally:
            self.dut.droid.wifiScannerStopBackgroundScan(self.scan_idx)
            # idx may be None if tracking never started
            if idx:
                self.dut.droid.wifiScannerStopTrackingBssids(idx)
def wifi_generate_track_bssid_settings(self, isLost):
"""Generates all the combinations of different track setting parameters.
Returns:
A list of dictionaries each representing a set of track settings.
"""
bssids = [[self.bssid_2g], [self.bssid_5g],
[self.bssid_2g, self.bssid_5g]]
if self.dut.model != "hammerhead" or not self.two_ap_testbed:
bssids.append([self.bssid_dfs])
if isLost:
apthreshold = (3, 5)
else:
apthreshold = (1, )
# Create track setting strings based on the combinations
setting_combinations = list(itertools.product(bssids, apthreshold))
# Create scan setting strings based on the combinations
track_settings = []
for combo in setting_combinations:
s = {}
s["bssidInfos"] = combo[0]
s["apLostThreshold"] = combo[1]
track_settings.append(s)
return track_settings
def track_setting_to_string(self, track_setting):
"""Convert track setting to string for Bssids in that"""
string = ""
for bssid_setting in track_setting:
string += bssid_setting[wutils.WifiEnums.BSSID_KEY]
string += "_"
return string
def combineBssids(self, *track_settings):
"""Combine bssids in the track_settings to one list"""
bssids = []
for track_setting in track_settings:
bssids.extend(track_setting["bssidInfos"])
return bssids
""" Helper Functions End """
""" Tests Begin """
    @test_tracker_info(uuid="599a30b8-73ad-4314-a245-7ec58fc7e74b")
    def test_wifi_track_bssid_found(self):
        """Test bssid track for event found with a list of different settings.

        1. Starts Wifi Scanner bssid tracking for interested bssids in
           track_setting.
        2. Start Wifi Scanner scan with default scan settings.
        3. Validate the environment to check AP is not in range.
        4. Attenuate the signal to make AP in range.
        5. Verified that onFound event is triggered for interested bssids in
           track setting.
        """
        track_settings = self.wifi_generate_track_bssid_settings(False)
        # name each generated sub-case after its bssids and threshold
        name_func = lambda track_setting: "test_wifi_track_found_bssidInfos_%sapLostThreshold_%s" % (self.track_setting_to_string(track_setting["bssidInfos"]), track_setting["apLostThreshold"])
        failed = self.run_generated_testcases(
            self.track_bssid_with_vaild_scan_for_found,
            track_settings,
            name_func=name_func)
        asserts.assert_false(
            failed, "Track bssid found failed with these bssids: %s" % failed)
    @test_tracker_info(uuid="7ebd4b61-c408-45b3-b9b6-098753d46aa7")
    def test_wifi_track_bssid_lost(self):
        """Test bssid track for event lost with a list of different settings.

        1. Start Wifi Scanner scan with default scan settings.
        2. Validate the environment to check AP is not in range.
        3. Starts Wifi Scanner bssid tracking for interested bssids in
           track_setting.
        4. Attenuate the signal to make Bssids in range.
        5. Verified that onFound event is triggered for interested bssids in
           track setting.
        6. Attenuate the signal to make Bssids out of range.
        7. Verified that onLost event is triggered.
        """
        track_settings = self.wifi_generate_track_bssid_settings(True)
        # name each generated sub-case after its bssids and threshold
        name_func = lambda track_setting: "test_wifi_track_lost_bssidInfos_%sapLostThreshold_%s" % (self.track_setting_to_string(track_setting["bssidInfos"]), track_setting["apLostThreshold"])
        failed = self.run_generated_testcases(
            self.track_bssid_with_vaild_scan_for_lost,
            track_settings,
            name_func=name_func)
        asserts.assert_false(
            failed, "Track bssid lost failed with these bssids: %s" % failed)
def test_wifi_track_bssid_sanity(self):
"""Test bssid track for event found and lost with default settings.
1. Start WifiScanner scan for default scan settings.
2. Start Bssid track for "bssid_2g" AP.
3. Attenuate the signal to move in AP range.
4. Verify that onFound event occur.
5. Attenuate the signal to move out of range
6. Verify that onLost event occur.
"""
track_setting = {"bssidInfos": [self.bssid_2g], "apLostThreshold": 3}
self.track_bssid_with_vaild_scan_for_lost(track_setting)
    def test_wifi_track_bssid_for_2g_while_scanning_5g_channels(self):
        """Test bssid track for 2g bssids while scanning 5g channels.

         1. Starts Wifi Scanner bssid tracking for 2g bssids in track_setting.
         2. Start Wifi Scanner scan for 5G Band only.
         3. Validate the environment to check AP is not in range.
         4. Attenuate the signal to make AP in range.
         5. Verified that onFound event isn't triggered for 2g bssids.
        """
        # Attenuate the signal so the AP starts out of range.
        self.attenuators[self.attenuator_id].set_atten(90)
        scan_setting = {"band": wutils.WifiEnums.WIFI_BAND_5_GHZ,
                        "periodInMs": SCANTIME,
                        "reportEvents":
                        wutils.WifiEnums.REPORT_EVENT_AFTER_EACH_SCAN,
                        "numBssidsPerScan": 32}
        # Track a 2G bssid while the background scan only covers 5G channels.
        track_setting = {"bssidInfos": [self.bssid_2g], "apLostThreshold": 3}
        self.start_scan_and_validate_environment(scan_setting,
                                                 track_setting["bssidInfos"])
        idx = None
        try:
            data = wutils.start_wifi_track_bssid(self.dut, track_setting)
            idx = data["Index"]
            # Bring the AP back into range; the onFound event should NOT fire
            # because the scanned band does not include the tracked bssid.
            self.attenuators[self.attenuator_id].set_atten(0)
            event_name = "%s%sonFound" % (BSSID_EVENT_TAG, idx)
            self.log.info("Waiting for the BSSID event %s", event_name)
            #waiting for 2x time to make sure
            event = self.dut.ed.pop_event(event_name, BSSID_EVENT_WAIT * 2)
            self.log.debug(event)
            # NOTE(review): if the event unexpectedly fires, this only logs the
            # result — the test does not explicitly fail here; confirm intent.
            self.check_bssid_in_found_result(track_setting["bssidInfos"],
                                             event["data"]["Results"])
        except queue.Empty as error:
            # Timeout is the expected (passing) outcome for this scenario.
            self.log.info(
                "As excepted event didn't occurred with different scan setting")
        finally:
            # Always stop the background scan and, if registered, the tracker.
            self.dut.droid.wifiScannerStopBackgroundScan(self.scan_idx)
            if idx:
                self.dut.droid.wifiScannerStopTrackingBssids(idx)
    def test_wifi_track_bssid_for_5g_while_scanning_2g_channels(self):
        """Test bssid track for 5g bssids while scanning 2g channels.

         1. Starts Wifi Scanner bssid tracking for 5g bssids in track_setting.
         2. Start Wifi Scanner scan for 2G Band only.
         3. Validate the environment to check AP is not in range.
         4. Attenuate the signal to make AP in range.
         5. Verified that onFound event isn't triggered for 5g bssids.
        """
        # Attenuate the signal so the AP starts out of range.
        self.attenuators[self.attenuator_id].set_atten(90)
        scan_setting = {"band": wutils.WifiEnums.WIFI_BAND_24_GHZ,
                        "periodInMs": SCANTIME,
                        "reportEvents":
                        wutils.WifiEnums.REPORT_EVENT_AFTER_EACH_SCAN,
                        "numBssidsPerScan": 32}
        # Track a 5G bssid while the background scan only covers 2.4G channels.
        track_setting = {"bssidInfos": [self.bssid_5g], "apLostThreshold": 3}
        # NOTE(review): tracking is registered before environment validation and
        # outside the try block — if start_scan_and_validate_environment raises,
        # the tracker is never stopped (cf. the 2g variant above, which
        # registers inside the try).
        data = wutils.start_wifi_track_bssid(self.dut, track_setting)
        idx = data["Index"]
        self.start_scan_and_validate_environment(scan_setting,
                                                 track_setting["bssidInfos"])
        try:
            # Bring the AP into range; onFound should NOT fire on this band.
            self.attenuators[self.attenuator_id].set_atten(0)
            event_name = "%s%sonFound" % (BSSID_EVENT_TAG, idx)
            self.log.info("Waiting for the BSSID event %s", event_name)
            #waiting for 2x time to make sure
            event = self.dut.ed.pop_event(event_name, BSSID_EVENT_WAIT * 2)
            self.log.debug(event)
            self.check_bssid_in_found_result(track_setting["bssidInfos"],
                                             event["data"]["Results"])
        except queue.Empty as error:
            # Timeout is the expected (passing) outcome for this scenario.
            self.log.info(
                "As excepted event didn't occurred with different scan setting")
        finally:
            self.dut.droid.wifiScannerStopBackgroundScan(self.scan_idx)
            if idx:
                self.dut.droid.wifiScannerStopTrackingBssids(idx)
def test_wifi_tracking_bssid_multi_listeners_found(self):
"""Test bssid tracking for multiple listeners
1. Start BSSID tracking for 5g bssids
2. Start BSSID tracking for 2g bssids
3. Start WifiScanner scan on both bands.
4. Valid the environment and check the APs are not in range.
5. Attenuate the signal to make the APs in range.
6. Verify onFound event triggered on both APs.
"""
# Attenuate the signal to make APs invisible.
self.attenuators[self.attenuator_id].set_atten(90)
scan_setting = { "band": WifiEnums.WIFI_BAND_BOTH_WITH_DFS,
"periodInMs": SCANTIME,
"reportEvents": WifiEnums.REPORT_EVENT_AFTER_EACH_SCAN,
"numBssidsPerScan": 32}
track_setting_5g = {"bssidInfos":[self.bssid_5g], "apLostThreshold":3}
data_5g = start_wifi_track_bssid(self.dut, track_setting_5g)
idx_5g = data_5g["Index"]
track_setting_2g = {"bssidInfos":[self.bssid_2g], "apLostThreshold":3}
data_2g = start_wifi_track_bssid(self.dut, track_setting_2g)
idx_2g = data_2g["Index"]
valid_env = self.start_scan_and_validate_environment(
scan_setting, self.combineBssids(track_setting_5g, track_setting_2g))
try:
asserts.assert_true(valid_env,
"Test environment is not valid, AP is in range")
self.attenuators[self.attenuator_id].set_atten(0)
event_name = "{}{}{}{}onFound".format(BSSID_EVENT_TAG, idx_5g, BSSID_EVENT_TAG, idx_2g)
self.log.info("Waiting for the BSSID event {}".format(event_name))
#waiting for 2x time to make sure
event = self.dut.ed.pop_event(event_name, BSSID_EVENT_WAIT * 2)
self.log.debug(event)
found = self.check_bssid_in_found_result(
self.combineBssids(track_setting_5g, track_setting_2g),
event["data"]["Results"])
asserts.assert_true(found,
"Test failed because Bssid onFound event is not triggered")
finally:
self.dut.droid.wifiScannerStopBackgroundScan(self.scan_idx)
if idx_5g:
self.dut.droid.wifiScannerStopTrackingBssids(idx_5g)
if idx_2g:
self.dut.droid.wifiScannerStopTrackingBssids(idx_2g);
""" Tests End """
| [
"mingxin.android@gmail.com"
] | mingxin.android@gmail.com |
058ef9a85453f91db61408ad2be56cfd3f100d38 | 3534f9bf3b7e3f397bc5e80736fc50561dec95d1 | /activitystreams_test.py | ef2c511de267d5818a0e7e425a3f69a170ed3eb0 | [
"LicenseRef-scancode-public-domain"
] | permissive | rhiaro/activitystreams-unofficial | c6ece0e341b8b7e7addeb276e323a92cadde707c | cfaf4f82c44ceab857d0a4f52e0247ff78c73b72 | refs/heads/master | 2021-01-16T20:05:58.942272 | 2015-03-14T05:48:08 | 2015-03-14T05:48:08 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,739 | py | """Unit tests for activitystreams.py.
"""
__author__ = ['Ryan Barrett <activitystreams@ryanb.org>']
import copy
import json
import activitystreams
import facebook_test
import instagram_test
from oauth_dropins.webutil import testutil
import source
import twitter_test
class FakeSource(source.Source):
    """Minimal Source test double; accepts and ignores all constructor args."""

    def __init__(self, **kwargs):
        # Deliberately skip source.Source.__init__ so tests need no real config.
        pass
class HandlerTest(testutil.HandlerTest):
  """End-to-end tests for the activitystreams WSGI handler.

  Uses mox record/replay: each test records the expected
  FakeSource.get_activities_response call, then drives the handler via
  activitystreams.application.get_response.
  """

  # Canned activities returned by the stubbed source.
  activities = [{'foo': 'bar'}]

  def setUp(self):
    super(HandlerTest, self).setUp()
    self.reset()

  def reset(self):
    """Re-arm mox and point the handler at FakeSource."""
    self.mox.UnsetStubs()
    self.mox.ResetAll()
    activitystreams.SOURCE = FakeSource
    self.mox.StubOutWithMock(FakeSource, 'get_activities_response')

  def get_response(self, url, *args, **kwargs):
    """Record the expected source call, then fetch `url` from the handler."""
    start_index = kwargs.setdefault('start_index', 0)
    kwargs.setdefault('count', activitystreams.ITEMS_PER_PAGE)

    FakeSource.get_activities_response(*args, **kwargs).AndReturn({
        'startIndex': start_index,
        'itemsPerPage': 1,
        'totalResults': 9,
        'items': self.activities,
        'filtered': False,
        'sorted': False,
        'updatedSince': False,
        })
    self.mox.ReplayAll()

    return activitystreams.application.get_response(url)

  def check_request(self, url, *args, **kwargs):
    """Fetch `url` and assert the canonical JSON envelope comes back."""
    resp = self.get_response(url, *args, **kwargs)
    self.assertEquals(200, resp.status_int)
    self.assert_equals({
        'startIndex': int(kwargs.get('start_index', 0)),
        'itemsPerPage': 1,
        'totalResults': 9,
        'items': [{'foo': 'bar'}],
        'filtered': False,
        'sorted': False,
        'updatedSince': False,
        },
      json.loads(resp.body))

  def test_all_defaults(self):
    self.check_request('/')

  def test_me(self):
    self.check_request('/@me', None)

  def test_user_id(self):
    self.check_request('/123/', '123')

  def test_all(self):
    self.check_request('/123/@all/', '123', None)

  def test_friends(self):
    self.check_request('/123/@friends/', '123', None)

  def test_self(self):
    self.check_request('/123/@self/', '123', '@self')

  def test_group_id(self):
    self.check_request('/123/456', '123', '456')

  def test_app(self):
    self.check_request('/123/456/@app/', '123', '456', None)

  def test_app_id(self):
    self.check_request('/123/456/789/', '123', '456', '789')

  def test_activity_id(self):
    self.check_request('/123/456/789/000/', '123', '456', '789', '000')

  def test_defaults_and_activity_id(self):
    self.check_request('/@me/@all/@app/000/', None, None, None, '000')

  def test_json_format(self):
    self.check_request('/@me/?format=json', None)

  def test_xml_format(self):
    resp = self.get_response('?format=xml')
    self.assertEquals(200, resp.status_int)
    self.assert_multiline_equals("""\
<?xml version="1.0" encoding="UTF-8"?>
<response>
<items>
<foo>bar</foo>
</items>
<itemsPerPage>1</itemsPerPage>
<updatedSince>False</updatedSince>
<startIndex>0</startIndex>
<sorted>False</sorted>
<filtered>False</filtered>
<totalResults>9</totalResults>
</response>
""", resp.body)

  def test_atom_format(self):
    # Exercise each provider module's canned ACTOR/ACTIVITY/ATOM fixtures.
    for test_module in facebook_test, instagram_test, twitter_test:
      self.reset()
      self.mox.StubOutWithMock(FakeSource, 'get_actor')
      FakeSource.get_actor(None).AndReturn(test_module.ACTOR)
      self.activities = [copy.deepcopy(test_module.ACTIVITY)]
      # include access_token param to check that it gets stripped
      resp = self.get_response('?format=atom&access_token=foo&a=b')
      self.assertEquals(200, resp.status_int)
      self.assert_multiline_equals(
        test_module.ATOM % {'request_url': 'http://localhost',
                            'host_url': 'http://localhost/'},
        resp.body)

  def test_unknown_format(self):
    resp = activitystreams.application.get_response('?format=bad')
    self.assertEquals(400, resp.status_int)

  def test_bad_start_index(self):
    resp = activitystreams.application.get_response('?startIndex=foo')
    self.assertEquals(400, resp.status_int)

  def test_bad_count(self):
    resp = activitystreams.application.get_response('?count=-1')
    self.assertEquals(400, resp.status_int)

  def test_start_index(self):
    # Requested start index shrinks the count passed to the source.
    expected_count = activitystreams.ITEMS_PER_PAGE - 2
    self.check_request('?startIndex=2', start_index=2, count=expected_count)

  def test_count(self):
    self.check_request('?count=3', count=3)

  def test_start_index_and_count(self):
    self.check_request('?startIndex=4&count=5', start_index=4, count=5)

  def test_count_greater_than_items_per_page(self):
    # Count is capped at the handler's page size.
    self.check_request('?count=999', count=activitystreams.ITEMS_PER_PAGE)

  # TODO: move to facebook and/or twitter since they do implementation
  # def test_start_index_count_zero(self):
  #   self.check_request('?startIndex=0&count=0', self.ACTIVITIES)

  # def test_start_index(self):
  #   self.check_request('?startIndex=1&count=0', self.ACTIVITIES[1:])
  #   self.check_request('?startIndex=2&count=0', self.ACTIVITIES[2:])

  # def test_count_past_end(self):
  #   self.check_request('?startIndex=0&count=10', self.ACTIVITIES)
  #   self.check_request('?startIndex=1&count=10', self.ACTIVITIES[1:])

  # def test_start_index_past_end(self):
  #   self.check_request('?startIndex=10&count=0', [])
  #   self.check_request('?startIndex=10&count=10', [])

  # def test_start_index_subtracts_from_count(self):
  #   try:
  #     orig_items_per_page = activitystreams.ITEMS_PER_PAGE
  #     activitystreams.ITEMS_PER_PAGE = 2
  #     self.check_request('?startIndex=1&count=0', self.ACTIVITIES[1:2])
  #   finally:
  #     activitystreams.ITEMS_PER_PAGE = orig_items_per_page

  # def test_start_index_and_count(self):
  #   self.check_request('?startIndex=1&count=1', [self.ACTIVITIES[1]])
| [
"git@ryanb.org"
] | git@ryanb.org |
658d0a11036050ce40cb2311da2ac7539607f050 | 0502750293383c6dae2aaf4013717d9c83f52c62 | /exercism/python/archive/prime-factors/prime_factors.py | 4dca4fefddd4177d80ad4fc02d69cc52c0c0c9cb | [] | no_license | sebito91/challenges | fcfb680e7fc1abfa9fea9cd5f108c42795da4679 | b4f2d3b7f8b7c78f02b67d67d4bcb7fad2b7e284 | refs/heads/master | 2023-07-08T15:43:42.850679 | 2023-06-26T19:38:51 | 2023-06-26T19:38:51 | 117,160,720 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 585 | py | """ Module to return the set of prime factors for a number """
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import math
def prime_factors(number):
    """Return the prime factors of ``number`` in ascending order.

    Factors are repeated according to multiplicity, e.g. 12 -> [2, 2, 3].
    Numbers below 2 have no prime factors and yield an empty list.

    Fixes over the previous version:
      * ``xrange`` (Python 2 only) replaced by an explicit trial-divisor loop.
      * ``//=`` instead of ``/=`` so factors stay ``int`` (true division made
        them floats on Python 3).
      * Guard ``number < 2`` prevents an infinite loop on 0.
      * Bound ``divisor * divisor <= number`` shrinks with ``number`` instead
        of using a sqrt computed once up front.
    """
    if number < 2:
        return []
    thelist = []
    # Strip all factors of 2 first so the main loop can step by 2.
    while number % 2 == 0:
        number //= 2
        thelist.append(2)
    divisor = 3
    while divisor * divisor <= number:
        while number % divisor == 0:
            number //= divisor
            thelist.append(divisor)
        divisor += 2
    # Whatever remains above 1 is itself prime.
    if number > 2:
        thelist.append(number)
    return thelist
| [
"sebito91@gmail.com"
] | sebito91@gmail.com |
8eb96542eb2443f15fcdbd700049efb6b975ab53 | aaa204ad7f134b526593c785eaa739bff9fc4d2a | /airflow/api_connexion/schemas/dag_warning_schema.py | 211f251e7d26e6255de19ef603116ddec9d3bef6 | [
"Apache-2.0",
"BSD-3-Clause",
"MIT"
] | permissive | cfei18/incubator-airflow | 913b40efa3d9f1fdfc5e299ce2693492c9a92dd4 | ffb2078eb5546420864229cdc6ee361f89cab7bd | refs/heads/master | 2022-09-28T14:44:04.250367 | 2022-09-19T16:50:23 | 2022-09-19T16:50:23 | 88,665,367 | 0 | 1 | Apache-2.0 | 2021-02-05T16:29:42 | 2017-04-18T20:00:03 | Python | UTF-8 | Python | false | false | 1,735 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import NamedTuple
from marshmallow import Schema, fields
from marshmallow_sqlalchemy import SQLAlchemySchema, auto_field
from airflow.models.dagwarning import DagWarning
class DagWarningSchema(SQLAlchemySchema):
    """DAG warning schema (serializes a single DagWarning row)."""

    class Meta:
        """Meta"""

        model = DagWarning

    # dag_id is read-only in API responses.
    dag_id = auto_field(data_key="dag_id", dump_only=True)
    warning_type = auto_field()
    message = auto_field()
    timestamp = auto_field(format="iso")
class DagWarningCollection(NamedTuple):
    """List of dag warnings with metadata (total count for pagination)."""

    dag_warnings: list[DagWarning]
    total_entries: int
class DagWarningCollectionSchema(Schema):
    """DAG warning collection schema (list of warnings plus total_entries)."""

    dag_warnings = fields.List(fields.Nested(DagWarningSchema))
    total_entries = fields.Int()
dag_warning_schema = DagWarningSchema()
dag_warning_collection_schema = DagWarningCollectionSchema()
| [
"noreply@github.com"
] | cfei18.noreply@github.com |
2c0ff3e8c8731cd7fa9acc4cbadcc1f28978387e | 45dca4f728d6d7ce2f8ba4d711aaa9983871740a | /tensorpack/dataflow/imgaug/meta.py | 1093bf047a28b33b5a1b851bc7caf01e145c3870 | [
"Apache-2.0"
] | permissive | mtoto/tensorpack | 5de9f0d87f34f70527cdea1ddeb4006f8a503ca7 | 552f8b2690f369cb30dc29e07535dda7aa68f5b1 | refs/heads/master | 2020-03-15T08:03:26.843432 | 2018-05-02T22:04:49 | 2018-05-02T22:04:49 | 132,042,806 | 0 | 0 | Apache-2.0 | 2018-05-03T19:52:39 | 2018-05-03T19:52:38 | null | UTF-8 | Python | false | false | 4,607 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# File: meta.py
from .base import ImageAugmentor
__all__ = ['RandomChooseAug', 'MapImage', 'Identity', 'RandomApplyAug',
'RandomOrderAug']
class Identity(ImageAugmentor):
    """ A no-op augmentor """

    def _augment(self, img, _):
        # Pass the image through unchanged; augment params are ignored.
        return img
class RandomApplyAug(ImageAugmentor):
    """ Randomly apply the augmentor with a probability.
        Otherwise do nothing
    """

    def __init__(self, aug, prob):
        """
        Args:
            aug (ImageAugmentor): an augmentor
            prob (float): the probability
        """
        self._init(locals())
        super(RandomApplyAug, self).__init__()

    def _get_augment_params(self, img):
        # Params are a (applied?, inner_params) pair so _augment/_augment_coords
        # can replay the same decision deterministically.
        p = self.rng.rand()
        if p < self.prob:
            prm = self.aug._get_augment_params(img)
            return (True, prm)
        else:
            return (False, None)

    def _augment_return_params(self, img):
        # Draws its own coin (same rng) and applies the inner augmentor in one
        # step, returning the augmented image plus replayable params.
        p = self.rng.rand()
        if p < self.prob:
            img, prms = self.aug._augment_return_params(img)
            return img, (True, prms)
        else:
            return img, (False, None)

    def reset_state(self):
        # Reset both our own rng and the wrapped augmentor's.
        super(RandomApplyAug, self).reset_state()
        self.aug.reset_state()

    def _augment(self, img, prm):
        if not prm[0]:
            return img
        else:
            return self.aug._augment(img, prm[1])

    def _augment_coords(self, coords, prm):
        if not prm[0]:
            return coords
        else:
            return self.aug._augment_coords(coords, prm[1])
class RandomChooseAug(ImageAugmentor):
    """ Randomly choose one from a list of augmentors """

    def __init__(self, aug_lists):
        """
        Args:
            aug_lists (list): list of augmentors, or list of (augmentor, probability) tuples
        """
        if isinstance(aug_lists[0], (tuple, list)):
            # (augmentor, prob) tuples: split into parallel lists.
            prob = [k[1] for k in aug_lists]
            aug_lists = [k[0] for k in aug_lists]
            self._init(locals())
        else:
            # Plain augmentor list: choose uniformly.
            prob = [1.0 / len(aug_lists)] * len(aug_lists)
            self._init(locals())
        super(RandomChooseAug, self).__init__()

    def reset_state(self):
        super(RandomChooseAug, self).reset_state()
        for a in self.aug_lists:
            a.reset_state()

    def _get_augment_params(self, img):
        # Params are (chosen index, chosen augmentor's params) so the choice
        # can be replayed on both image and coordinates.
        aug_idx = self.rng.choice(len(self.aug_lists), p=self.prob)
        aug_prm = self.aug_lists[aug_idx]._get_augment_params(img)
        return aug_idx, aug_prm

    def _augment(self, img, prm):
        idx, prm = prm
        return self.aug_lists[idx]._augment(img, prm)

    def _augment_coords(self, coords, prm):
        idx, prm = prm
        return self.aug_lists[idx]._augment_coords(coords, prm)
class RandomOrderAug(ImageAugmentor):
    """
    Apply the augmentors with randomized order.
    """

    def __init__(self, aug_lists):
        """
        Args:
            aug_lists (list): list of augmentors.
                The augmentors are assumed to not change the shape of images.
        """
        self._init(locals())
        super(RandomOrderAug, self).__init__()

    def reset_state(self):
        super(RandomOrderAug, self).reset_state()
        for a in self.aug_lists:
            a.reset_state()

    def _get_augment_params(self, img):
        # Note: If augmentors change the shape of image, get_augment_param might not work
        # All augmentors should only rely on the shape of image
        idxs = self.rng.permutation(len(self.aug_lists))
        prms = [self.aug_lists[k]._get_augment_params(img)
                for k in range(len(self.aug_lists))]
        return idxs, prms

    def _augment(self, img, prm):
        # Apply each augmentor in the permuted order, feeding each one the
        # output of the previous.
        idxs, prms = prm
        for k in idxs:
            img = self.aug_lists[k]._augment(img, prms[k])
        return img

    def _augment_coords(self, coords, prm):
        # BUG FIX: accumulate into `coords`. The previous version assigned
        # each result to `img` and always passed the *original* coords to
        # every augmentor, so only the last augmentor's coordinate transform
        # took effect — inconsistent with _augment above.
        idxs, prms = prm
        for k in idxs:
            coords = self.aug_lists[k]._augment_coords(coords, prms[k])
        return coords
class MapImage(ImageAugmentor):
    """
    Map the image array by a function.
    """

    def __init__(self, func, coord_func=None):
        """
        Args:
            func: a function which takes an image array and return an augmented one
            coord_func: optional function mapping a coordinate array to an
                augmented one; if omitted, coordinate augmentation is
                unsupported and raises NotImplementedError.
        """
        super(MapImage, self).__init__()
        self.func = func
        self.coord_func = coord_func

    def _augment(self, img, _):
        return self.func(img)

    def _augment_coords(self, coords, _):
        if self.coord_func is None:
            raise NotImplementedError
        return self.coord_func(coords)
| [
"ppwwyyxxc@gmail.com"
] | ppwwyyxxc@gmail.com |
9e6e98bcaf65a333c3415b95e9d1b93b57bb6ab8 | 60e4baae4d6b323b3d3b656df3a7b0ea3ca40ef2 | /project/apps/community/migrations/0006_comment_created_date.py | 72662e4eb6a332438ac5fac1c3c7c801c8f017f5 | [] | no_license | Burzhun/Big-django-project | a03a61a15ee75f49324ad7ea51372b6b013d1650 | 1a71f974b7b5399a45862711b5f858c0d4af50d2 | refs/heads/master | 2020-04-11T00:16:06.211039 | 2018-12-11T19:13:38 | 2018-12-11T19:13:38 | 161,381,283 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 572 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.10 on 2018-04-04 19:34
from __future__ import unicode_literals
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
    # Adds an indexed `created_date` column to community.Comment; existing rows
    # get timezone.now as the default at migration time.

    dependencies = [
        ('community', '0005_comment_expert_answer'),
    ]

    operations = [
        migrations.AddField(
            model_name='comment',
            name='created_date',
            field=models.DateTimeField(db_index=True, default=django.utils.timezone.now, verbose_name='Дата создания'),
        ),
    ]
| [
"burjunov@yandex.ru"
] | burjunov@yandex.ru |
ba5c486c32a7be995fe3c55dd4486130ed1a3c95 | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2842/60580/316589.py | 47764e1e08d51f730408ee0f95aa554ff5fc9f29 | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 320 | py | n = int(input())
array = [0] * n
for i in range(n):
temp = int(input())
if temp == -1:
array[i] = -1
else:
array[i] = temp - 1
count = []
for i in range(n):
j = i
temp = 0
while array[j] != -1:
temp = temp + 1
j = array[j]
count.append(temp)
print(max(count))
| [
"1069583789@qq.com"
] | 1069583789@qq.com |
01fab021b8daed78539e04be4929281ae7d2f142 | 2e682fd72e3feaa70e3f7bf2a3b83c50d783ec02 | /PyTorch/built-in/nlp/Bert-text-classification_for_PyTorch/transformers/examples/research_projects/wav2vec2/run_pretrain.py | 248f32443f048806366100153b40a928ba52bf5d | [
"Apache-2.0",
"GPL-1.0-or-later",
"BSD-2-Clause",
"MIT",
"BSD-3-Clause",
"LicenseRef-scancode-generic-cla",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | Ascend/ModelZoo-PyTorch | 4c89414b9e2582cef9926d4670108a090c839d2d | 92acc188d3a0f634de58463b6676e70df83ef808 | refs/heads/master | 2023-07-19T12:40:00.512853 | 2023-07-17T02:48:18 | 2023-07-17T02:48:18 | 483,502,469 | 23 | 6 | Apache-2.0 | 2022-10-15T09:29:12 | 2022-04-20T04:11:18 | Python | UTF-8 | Python | false | false | 15,660 | py | #!/usr/bin/env python3
import logging
import sys
from dataclasses import dataclass, field
from typing import Any, Dict, List, Optional, Union
import torch
from datasets import DatasetDict, load_dataset
from packaging import version
from torch import nn
import librosa
from transformers import (
HfArgumentParser,
Trainer,
TrainingArguments,
Wav2Vec2Config,
Wav2Vec2FeatureExtractor,
Wav2Vec2ForPreTraining,
is_apex_available,
trainer_utils,
)
from transformers.models.wav2vec2.modeling_wav2vec2 import _compute_mask_indices
if is_apex_available():
from apex import amp
if version.parse(torch.__version__) >= version.parse("1.6"):
_is_native_amp_available = True
from torch.cuda.amp import autocast
logger = logging.getLogger(__name__)
@dataclass
class ModelArguments:
    """
    Arguments pertaining to which model/config/tokenizer we are going to fine-tune from.
    """

    model_name_or_path: str = field(
        metadata={"help": "Path to pretrained model or model identifier from huggingface.co/models"}
    )
    cache_dir: Optional[str] = field(
        default=None,
        metadata={"help": "Where do you want to store the pretrained models downloaded from huggingface.co"},
    )
    freeze_feature_extractor: Optional[bool] = field(
        default=True, metadata={"help": "Whether to freeze the feature extractor layers of the model."}
    )
    verbose_logging: Optional[bool] = field(
        default=False,
        metadata={"help": "Whether to log verbose messages or not."},
    )
    # The Gumbel-softmax temperature anneals from max_gumbel_temperature down
    # to min_gumbel_temperature by gumbel_temperature_decay per update step
    # (see Wav2Vec2PreTrainer.training_step below).
    max_gumbel_temperature: Optional[float] = field(
        default=2.0, metadata={"help": "Maximum temperature for gumbel softmax."}
    )
    min_gumbel_temperature: Optional[float] = field(
        default=0.5, metadata={"help": "Minimum temperature for gumbel softmax."}
    )
    gumbel_temperature_decay: Optional[float] = field(
        default=0.999995, metadata={"help": "Decay of gumbel temperature during training."}
    )
def configure_logger(model_args: ModelArguments, training_args: TrainingArguments):
    """Set up stdout logging and pick this module's log level.

    DEBUG when verbose logging is requested, INFO on the main process,
    and WARNING on all other (non-main) processes.
    """
    logging.basicConfig(
        format="%(asctime)s - %(levelname)s - %(name)s - %(message)s",
        datefmt="%m/%d/%Y %H:%M:%S",
        handlers=[logging.StreamHandler(sys.stdout)],
    )

    if model_args.verbose_logging:
        level = logging.DEBUG
    elif trainer_utils.is_main_process(training_args.local_rank):
        level = logging.INFO
    else:
        level = logging.WARNING
    logger.setLevel(level)
@dataclass
class DataTrainingArguments:
    """
    Arguments pertaining to what data we are going to input our model for training and eval.

    Using `HfArgumentParser` we can turn this class
    into argparse arguments to be able to specify them on
    the command line.
    """

    # NOTE(review): annotated `str` but defaulted to None — the script relies on
    # the caller always passing --dataset_name; `Optional[str]` would be truer.
    dataset_name: str = field(
        default=None, metadata={"help": "The name of the dataset to use (via the datasets library)."}
    )
    dataset_config_name: Optional[str] = field(
        default=None, metadata={"help": "The configuration name of the dataset to use (via the datasets library)."}
    )
    train_split_name: Optional[str] = field(
        default="train",
        metadata={
            "help": "The name of the training data set split to use (via the datasets library). Defaults to 'train'"
        },
    )
    validation_split_name: Optional[str] = field(
        default="validation",
        metadata={
            "help": "The name of the validation data set split to use (via the datasets library). Defaults to 'validation'"
        },
    )
    speech_file_column: Optional[str] = field(
        default="file",
        metadata={"help": "Column in the dataset that contains speech file path. Defaults to 'file'"},
    )
    overwrite_cache: bool = field(
        default=False, metadata={"help": "Overwrite the cached preprocessed datasets or not."}
    )
    # Used when the dataset has no validation split: this % of train is held out.
    validation_split_percentage: Optional[int] = field(
        default=1,
        metadata={
            "help": "The percentage of the train set used as validation set in case there's no validation split"
        },
    )
    preprocessing_num_workers: Optional[int] = field(
        default=None,
        metadata={"help": "The number of processes to use for the preprocessing."},
    )
    max_duration_in_seconds: Optional[float] = field(
        default=20.0, metadata={"help": "Filter audio files that are longer than `max_duration_in_seconds` seconds"}
    )
@dataclass
class DataCollatorForWav2Vec2Pretraining:
    """
    Data collator that will dynamically pad the inputs received and prepare masked indices
    for self-supervised pretraining.

    Args:
        model (:class:`~transformers.Wav2Vec2ForPreTraining`):
            The Wav2Vec2 model used for pretraining. The data collator needs to have access
            to config and ``_get_feat_extract_output_lengths`` function for correct padding.
        feature_extractor (:class:`~transformers.Wav2Vec2FeatureExtractor`):
            The processor used for proccessing the data.
        padding (:obj:`bool`, :obj:`str` or :class:`~transformers.tokenization_utils_base.PaddingStrategy`, `optional`, defaults to :obj:`True`):
            Select a strategy to pad the returned sequences (according to the model's padding side and padding index)
            among:

            * :obj:`True` or :obj:`'longest'`: Pad to the longest sequence in the batch (or no padding if only a single
              sequence if provided).
            * :obj:`'max_length'`: Pad to a maximum length specified with the argument :obj:`max_length` or to the
              maximum acceptable input length for the model if that argument is not provided.
            * :obj:`False` or :obj:`'do_not_pad'` (default): No padding (i.e., can output a batch with sequences of
              different lengths).
        max_length (:obj:`int`, `optional`):
            Maximum length of the ``input_values`` of the returned list and optionally padding length (see above).
        pad_to_multiple_of (:obj:`int`, `optional`):
            If set will pad the sequence to a multiple of the provided value.

            This is especially useful to enable the use of Tensor Cores on NVIDIA hardware with compute capability >=
            7.5 (Volta).
    """

    model: Wav2Vec2ForPreTraining
    feature_extractor: Wav2Vec2FeatureExtractor
    padding: Union[bool, str] = "longest"
    pad_to_multiple_of: Optional[int] = None
    max_length: Optional[int] = None

    def __call__(self, features: List[Dict[str, Union[List[int], torch.Tensor]]]) -> Dict[str, torch.Tensor]:
        # reformat list to dict and set to pytorch format
        batch = self.feature_extractor.pad(
            features,
            max_length=self.max_length,
            padding=self.padding,
            pad_to_multiple_of=self.pad_to_multiple_of,
            return_tensors="pt",
        )
        # Sequence length after the model's convolutional feature extractor.
        mask_indices_seq_length = self.model._get_feat_extract_output_lengths(batch["input_values"].shape[-1])

        batch_size = batch["input_values"].shape[0]

        # make sure that no loss is computed on padded inputs
        # NOTE(review): if batch["attention_mask"] is None, `attention_mask`
        # below is never bound and the _compute_mask_indices call would raise
        # NameError — confirm the feature extractor always returns a mask here.
        if batch["attention_mask"] is not None:
            # compute real output lengths according to convolution formula
            output_lengths = self.model._get_feat_extract_output_lengths(batch["attention_mask"].sum(-1)).to(
                torch.long
            )

            attention_mask = torch.zeros(
                (batch_size, mask_indices_seq_length), dtype=torch.long, device=batch["input_values"].device
            )

            # these two operations makes sure that all values
            # before the output lengths indices are attended to
            attention_mask[
                (torch.arange(attention_mask.shape[0], device=batch["input_values"].device), output_lengths - 1)
            ] = 1
            attention_mask = attention_mask.flip([-1]).cumsum(-1).flip([-1]).bool()

        # sample randomly masked indices
        batch["mask_time_indices"] = _compute_mask_indices(
            (batch_size, mask_indices_seq_length),
            self.model.config.mask_time_prob,
            self.model.config.mask_time_length,
            device=batch["input_values"].device,
            attention_mask=attention_mask,
            min_masks=2,
        )

        return batch
class Wav2Vec2PreTrainer(Trainer):
    """
    Subclassed :class:`~transformers.Trainer` for Wav2Vec2-like pretraining. Trainer can decay gumbel softmax temperature during training.
    """

    def __init__(self, *args, max_gumbel_temp=1, min_gumbel_temp=0, gumbel_temp_decay=1.0, **kwargs):
        super().__init__(*args, **kwargs)
        # Counts completed training steps; drives the temperature decay below.
        self.num_update_step = 0
        self.max_gumbel_temp = max_gumbel_temp
        self.min_gumbel_temp = min_gumbel_temp
        self.gumbel_temp_decay = gumbel_temp_decay

    def training_step(self, model: nn.Module, inputs: Dict[str, Union[torch.Tensor, Any]]) -> torch.Tensor:
        """
        Perform a training step on a batch of inputs.

        Subclass and override to inject custom behavior.

        Args:
            model (:obj:`nn.Module`):
                The model to train.
            inputs (:obj:`Dict[str, Union[torch.Tensor, Any]]`):
                The inputs and targets of the model.

                The dictionary will be unpacked before being fed to the model. Most models expect the targets under the
                argument :obj:`labels`. Check your model's documentation for all accepted arguments.

        Return:
            :obj:`torch.Tensor`: The tensor with training loss on this batch.
        """
        model.train()
        inputs = self._prepare_inputs(inputs)

        if self.use_amp:
            with autocast():
                loss = self.compute_loss(model, inputs)
        else:
            loss = self.compute_loss(model, inputs)

        # Under DataParallel/deepspeed the real model lives at model.module.
        if self.args.n_gpu > 1 or self.deepspeed:
            if model.module.config.ctc_loss_reduction == "mean":
                loss = loss.mean()
            elif model.module.config.ctc_loss_reduction == "sum":
                # Normalize by the number of masked timesteps in the batch.
                loss = loss.sum() / (inputs["mask_time_indices"]).sum()
            else:
                # NOTE(review): this message reads model.config, but in this
                # branch the config is accessed via model.module above —
                # likely should be model.module.config here too.
                raise ValueError(f"{model.config.ctc_loss_reduction} is not valid. Choose one of ['mean', 'sum']")

        if self.args.gradient_accumulation_steps > 1:
            loss = loss / self.args.gradient_accumulation_steps

        if self.use_amp:
            self.scaler.scale(loss).backward()
        elif self.use_apex:
            with amp.scale_loss(loss, self.optimizer) as scaled_loss:
                scaled_loss.backward()
        elif self.deepspeed:
            self.deepspeed.backward(loss)
        else:
            loss.backward()

        self.num_update_step += 1
        # make sure gumbel softmax temperature is decayed
        if self.args.n_gpu > 1 or self.deepspeed:
            model.module.set_gumbel_temperature(
                max(self.max_gumbel_temp * self.gumbel_temp_decay**self.num_update_step, self.min_gumbel_temp)
            )
        else:
            model.set_gumbel_temperature(
                max(self.max_gumbel_temp * self.gumbel_temp_decay**self.num_update_step, self.min_gumbel_temp)
            )

        return loss.detach()
def main():
    """Entry point: parse args, prepare the audio dataset, and run wav2vec2 pretraining.

    Pipeline: parse dataclass args -> load dataset from the hub -> carve out
    train/validation splits -> decode audio with librosa -> filter over-long
    clips -> normalize into BatchFeatures -> build the model and trainer ->
    train.  Raises ValueError if the checkpoint is not a stable-layer-norm
    architecture (the only one pretraining supports here).
    """
    # See all possible arguments in src/transformers/training_args.py
    # or by passing the --help flag to this script.
    # We now keep distinct sets of args, for a cleaner separation of concerns.
    parser = HfArgumentParser((ModelArguments, DataTrainingArguments, TrainingArguments))
    model_args, data_args, training_args = parser.parse_args_into_dataclasses()
    configure_logger(model_args, training_args)
    # Downloading and loading a dataset from the hub.
    datasets = load_dataset(data_args.dataset_name, data_args.dataset_config_name, cache_dir=model_args.cache_dir)
    if "validation" not in datasets.keys():
        # make sure only "validation" and "train" keys remain"
        # No validation split exists: slice the first validation_split_percentage
        # of the train split off as validation.
        datasets = DatasetDict()
        datasets["validation"] = load_dataset(
            data_args.dataset_name,
            data_args.dataset_config_name,
            split=f"{data_args.train_split_name}[:{data_args.validation_split_percentage}%]",
            cache_dir=model_args.cache_dir,
        )
        datasets["train"] = load_dataset(
            data_args.dataset_name,
            data_args.dataset_config_name,
            split=f"{data_args.train_split_name}[{data_args.validation_split_percentage}%:]",
            cache_dir=model_args.cache_dir,
        )
    else:
        # make sure only "validation" and "train" keys remain"
        # A validation split exists: reload just the two splits we need so any
        # extra splits (e.g. "test") are dropped.
        datasets = DatasetDict()
        datasets["validation"] = load_dataset(
            data_args.dataset_name,
            data_args.dataset_config_name,
            split="validation",
            cache_dir=model_args.cache_dir,
        )
        datasets["train"] = load_dataset(
            data_args.dataset_name,
            data_args.dataset_config_name,
            split=f"{data_args.train_split_name}",
            cache_dir=model_args.cache_dir,
        )
    # only normalized-inputs-training is supported
    feature_extractor = Wav2Vec2FeatureExtractor.from_pretrained(
        model_args.model_name_or_path, cache_dir=model_args.cache_dir, do_normalize=True
    )
    def prepare_dataset(batch):
        # check that all files have the correct sampling rate
        # librosa resamples to the extractor's rate; the second return value
        # (the actual sample rate) is discarded.
        batch["speech"], _ = librosa.load(batch[data_args.speech_file_column], sr=feature_extractor.sampling_rate)
        return batch
    # load audio files into numpy arrays
    vectorized_datasets = datasets.map(
        prepare_dataset, num_proc=data_args.preprocessing_num_workers, remove_columns=datasets["train"].column_names
    )
    # filter audio files that are too long
    vectorized_datasets = vectorized_datasets.filter(
        lambda data: len(data["speech"]) < int(data_args.max_duration_in_seconds * feature_extractor.sampling_rate)
    )
    def normalize(batch):
        return feature_extractor(batch["speech"], sampling_rate=feature_extractor.sampling_rate)
    # normalize and transform to `BatchFeatures`
    vectorized_datasets = vectorized_datasets.map(
        normalize,
        batched=True,
        num_proc=data_args.preprocessing_num_workers,
        load_from_cache_file=not data_args.overwrite_cache,
        remove_columns=vectorized_datasets["train"].column_names,
    )
    # pretraining is only supported for "newer" stable layer norm architecture
    # apply_spec_augment has to be True, mask_feature_prob has to be 0.0
    config = Wav2Vec2Config.from_pretrained(
        model_args.model_name_or_path,
        cache_dir=model_args.cache_dir,
        gradient_checkpointing=training_args.gradient_checkpointing,
    )
    if not config.do_stable_layer_norm or config.feat_extract_norm != "layer":
        raise ValueError(
            "PreTraining is only supported for ``config.do_stable_layer_norm=True`` and ``config.feat_extract_norm='layer'"
        )
    model = Wav2Vec2ForPreTraining(config)
    data_collator = DataCollatorForWav2Vec2Pretraining(model=model, feature_extractor=feature_extractor)
    trainer = Wav2Vec2PreTrainer(
        model=model,
        data_collator=data_collator,
        args=training_args,
        train_dataset=vectorized_datasets["train"],
        eval_dataset=vectorized_datasets["validation"],
        tokenizer=feature_extractor,
        max_gumbel_temp=model_args.max_gumbel_temperature,
        min_gumbel_temp=model_args.min_gumbel_temperature,
        gumbel_temp_decay=model_args.gumbel_temperature_decay,
    )
    trainer.train()
# Script entry point: only run training when executed directly, not on import.
if __name__ == "__main__":
    main()
| [
"dongwenbo6@huawei.com"
] | dongwenbo6@huawei.com |
076fd4d7d5a1e94588a1ea2e3116e2703caf79b6 | 76133934b1dd287273a9bfa0c801d10d08a21b21 | /test/functional/p2p-segwit.py | d8e6705f46da4080475fea8cbda29924f01beb88 | [
"MIT"
] | permissive | kenfmcoin/kenfmcoin | d8783b34fcb3ae01067e8d1b33e3a73e3b82b1f9 | 1fa48487593233f2066757dc54f48b2349e2d9db | refs/heads/master | 2020-03-10T17:53:31.569229 | 2018-04-14T12:28:55 | 2018-04-14T12:28:55 | 129,511,375 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 90,130 | py | #!/usr/bin/env python3
# Copyright (c) 2016 The KenFMcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test segwit transactions and blocks on P2P network."""
from test_framework.mininode import *
from test_framework.test_framework import KenFMcoinTestFramework
from test_framework.util import *
from test_framework.script import *
from test_framework.blocktools import create_block, create_coinbase, add_witness_commitment, get_witness_script, WITNESS_COMMITMENT_HEADER
from test_framework.key import CECKey, CPubKey
import time
import random
from binascii import hexlify
# The versionbit bit used to signal activation of SegWit
VB_WITNESS_BIT = 1
VB_PERIOD = 144
VB_ACTIVATION_THRESHOLD = 108
VB_TOP_BITS = 0x20000000
MAX_SIGOP_COST = 80000
# Calculate the virtual size of a witness block:
# (base + witness/4)
def get_virtual_size(witness_block):
    """Return the BIP141 virtual size of *witness_block*.

    Virtual size is base_size + witness_size/4 rounded up, i.e.
    ceil((3*base_size + total_size) / 4).
    """
    stripped_size = len(witness_block.serialize())
    full_size = len(witness_block.serialize(with_witness=True))
    # Adding 3 before the integer division rounds the result up.
    return int((3 * stripped_size + full_size + 3) / 4)
class TestNode(NodeConnCB):
    """P2P test peer: records getdata requests and offers segwit test helpers."""
    def __init__(self):
        super().__init__()
        # Set of inv hashes the peer under test has requested from us.
        self.getdataset = set()
    def on_getdata(self, conn, message):
        # Remember every hash the node asks for, for later inspection.
        for inv in message.inv:
            self.getdataset.add(inv.hash)
    def announce_tx_and_wait_for_getdata(self, tx, timeout=60):
        """Inv a transaction and block until the node requests it (or timeout)."""
        with mininode_lock:
            # Clear stale getdata so the wait below sees only the new request.
            self.last_message.pop("getdata", None)
        self.send_message(msg_inv(inv=[CInv(1, tx.sha256)]))
        self.wait_for_getdata(timeout)
    def announce_block_and_wait_for_getdata(self, block, use_header, timeout=60):
        """Announce a block via header or inv and block until it is requested."""
        with mininode_lock:
            self.last_message.pop("getdata", None)
            self.last_message.pop("getheaders", None)
        msg = msg_headers()
        msg.headers = [ CBlockHeader(block) ]
        if use_header:
            self.send_message(msg)
        else:
            # inv first; the node replies with getheaders, then we send the header.
            self.send_message(msg_inv(inv=[CInv(2, block.sha256)]))
            self.wait_for_getheaders()
            self.send_message(msg)
        self.wait_for_getdata()
    def request_block(self, blockhash, inv_type, timeout=60):
        """Fetch a block by hash using the given inv type and return it."""
        with mininode_lock:
            self.last_message.pop("block", None)
        self.send_message(msg_getdata(inv=[CInv(inv_type, blockhash)]))
        self.wait_for_block(blockhash, timeout)
        return self.last_message["block"].block
    def test_transaction_acceptance(self, tx, with_witness, accepted, reason=None):
        """Send a tx (with or without witness) and assert mempool acceptance."""
        tx_message = msg_tx(tx)
        if with_witness:
            tx_message = msg_witness_tx(tx)
        self.send_message(tx_message)
        self.sync_with_ping()
        assert_equal(tx.hash in self.connection.rpc.getrawmempool(), accepted)
        if (reason != None and not accepted):
            # Check the rejection reason as well.
            with mininode_lock:
                assert_equal(self.last_message["reject"].reason, reason)
    # Test whether a witness block had the correct effect on the tip
    def test_witness_block(self, block, accepted, with_witness=True):
        """Send a block (with or without witness data) and assert tip advance."""
        if with_witness:
            self.send_message(msg_witness_block(block))
        else:
            self.send_message(msg_block(block))
        self.sync_with_ping()
        assert_equal(self.connection.rpc.getbestblockhash() == block.hash, accepted)
# Used to keep track of anyone-can-spend outputs that we can use in the tests
class UTXO(object):
    """A spendable output: txid (as int), output index, and value in satoshis."""

    def __init__(self, sha256, n, nValue):
        # Keep the outpoint (txid, index) together with its value so later
        # tests can build spending transactions without querying the node.
        self.sha256, self.n, self.nValue = sha256, n, nValue
# Helper for getting the script associated with a P2PKH
def GetP2PKHScript(pubkeyhash):
    """Return the canonical P2PKH scriptPubKey for *pubkeyhash*."""
    # OP_DUP OP_HASH160 <pubkeyhash> OP_EQUALVERIFY OP_CHECKSIG
    ops = [CScriptOp(OP_DUP), CScriptOp(OP_HASH160)]
    ops.append(pubkeyhash)
    ops.extend([CScriptOp(OP_EQUALVERIFY), CScriptOp(OP_CHECKSIG)])
    return CScript(ops)
# Add signature for a P2PK witness program.
def sign_P2PK_witness_input(script, txTo, inIdx, hashtype, value, key):
    """Sign input *inIdx* of *txTo* against a P2PK witness *script*, in place.

    Computes the segwit v1 sighash, signs with *key*, appends the hashtype
    byte, and installs [signature, script] as the input's witness stack.
    """
    sighash = SegwitVersion1SignatureHash(script, txTo, inIdx, hashtype, value)
    der_sig = key.sign(sighash) + chr(hashtype).encode('latin-1')
    txTo.wit.vtxinwit[inIdx].scriptWitness.stack = [der_sig, script]
    txTo.rehash()
class SegWitTest(KenFMcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 3
self.extra_args = [["-whitelist=127.0.0.1"], ["-whitelist=127.0.0.1", "-acceptnonstdtxn=0"], ["-whitelist=127.0.0.1", "-vbparams=segwit:0:0"]]
def setup_network(self):
self.setup_nodes()
connect_nodes(self.nodes[0], 1)
connect_nodes(self.nodes[0], 2)
self.sync_all()
''' Helpers '''
# Build a block on top of node0's tip.
def build_next_block(self, nVersion=4):
tip = self.nodes[0].getbestblockhash()
height = self.nodes[0].getblockcount() + 1
block_time = self.nodes[0].getblockheader(tip)["mediantime"] + 1
block = create_block(int(tip, 16), create_coinbase(height), block_time)
block.nVersion = nVersion
block.rehash()
return block
# Adds list of transactions to block, adds witness commitment, then solves.
def update_witness_block_with_transactions(self, block, tx_list, nonce=0):
block.vtx.extend(tx_list)
add_witness_commitment(block, nonce)
block.solve()
return
''' Individual tests '''
    def test_witness_services(self):
        """Check that the peer connection advertises the NODE_WITNESS service bit."""
        self.log.info("Verifying NODE_WITNESS service bit")
        assert((self.test_node.connection.nServices & NODE_WITNESS) != 0)
# See if sending a regular transaction works, and create a utxo
# to use in later tests.
    def test_non_witness_transaction(self):
        """Mine/mature a coinbase, spend it with a plain tx, and stash the new UTXO."""
        # Mine a block with an anyone-can-spend coinbase,
        # let it mature, then try to spend it.
        self.log.info("Testing non-witness transaction")
        block = self.build_next_block(nVersion=1)
        block.solve()
        self.test_node.send_message(msg_block(block))
        self.test_node.sync_with_ping() # make sure the block was processed
        txid = block.vtx[0].sha256
        self.nodes[0].generate(99) # let the block mature
        # Create a transaction that spends the coinbase
        tx = CTransaction()
        tx.vin.append(CTxIn(COutPoint(txid, 0), b""))
        tx.vout.append(CTxOut(49*100000000, CScript([OP_TRUE])))
        tx.calc_sha256()
        # Check that serializing it with or without witness is the same
        # This is a sanity check of our testing framework.
        assert_equal(msg_tx(tx).serialize(), msg_witness_tx(tx).serialize())
        self.test_node.send_message(msg_witness_tx(tx))
        self.test_node.sync_with_ping() # make sure the tx was processed
        assert(tx.hash in self.nodes[0].getrawmempool())
        # Save this transaction for later
        self.utxo.append(UTXO(tx.sha256, 0, 49*100000000))
        self.nodes[0].generate(1)
# Verify that blocks with witnesses are rejected before activation.
    def test_unnecessary_witness_before_segwit_activation(self):
        """Blocks/txs carrying witness data before activation must be rejected
        without being permanently marked bad or poisoning the reject cache."""
        self.log.info("Testing behavior of unnecessary witnesses")
        # For now, rely on earlier tests to have created at least one utxo for
        # us to use
        assert(len(self.utxo) > 0)
        assert(get_bip9_status(self.nodes[0], 'segwit')['status'] != 'active')
        tx = CTransaction()
        tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
        tx.vout.append(CTxOut(self.utxo[0].nValue-1000, CScript([OP_TRUE])))
        tx.wit.vtxinwit.append(CTxInWitness())
        tx.wit.vtxinwit[0].scriptWitness.stack = [CScript([CScriptNum(1)])]
        # Verify the hash with witness differs from the txid
        # (otherwise our testing framework must be broken!)
        tx.rehash()
        assert(tx.sha256 != tx.calc_sha256(with_witness=True))
        # Construct a segwit-signaling block that includes the transaction.
        block = self.build_next_block(nVersion=(VB_TOP_BITS|(1 << VB_WITNESS_BIT)))
        self.update_witness_block_with_transactions(block, [tx])
        # Sending witness data before activation is not allowed (anti-spam
        # rule).
        self.test_node.test_witness_block(block, accepted=False)
        # TODO: fix synchronization so we can test reject reason
        # Right now, kenfmcoind delays sending reject messages for blocks
        # until the future, making synchronization here difficult.
        #assert_equal(self.test_node.last_message["reject"].reason, "unexpected-witness")
        # But it should not be permanently marked bad...
        # Resend without witness information.
        self.test_node.send_message(msg_block(block))
        self.test_node.sync_with_ping()
        assert_equal(self.nodes[0].getbestblockhash(), block.hash)
        sync_blocks(self.nodes)
        # Create a p2sh output -- this is so we can pass the standardness
        # rules (an anyone-can-spend OP_TRUE would be rejected, if not wrapped
        # in P2SH).
        p2sh_program = CScript([OP_TRUE])
        p2sh_pubkey = hash160(p2sh_program)
        scriptPubKey = CScript([OP_HASH160, p2sh_pubkey, OP_EQUAL])
        # Now check that unnecessary witnesses can't be used to blind a node
        # to a transaction, eg by violating standardness checks.
        tx2 = CTransaction()
        tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b""))
        tx2.vout.append(CTxOut(tx.vout[0].nValue-1000, scriptPubKey))
        tx2.rehash()
        self.test_node.test_transaction_acceptance(tx2, False, True)
        self.nodes[0].generate(1)
        sync_blocks(self.nodes)
        # We'll add an unnecessary witness to this transaction that would cause
        # it to be non-standard, to test that violating policy with a witness before
        # segwit activation doesn't blind a node to a transaction.  Transactions
        # rejected for having a witness before segwit activation shouldn't be added
        # to the rejection cache.
        tx3 = CTransaction()
        tx3.vin.append(CTxIn(COutPoint(tx2.sha256, 0), CScript([p2sh_program])))
        tx3.vout.append(CTxOut(tx2.vout[0].nValue-1000, scriptPubKey))
        tx3.wit.vtxinwit.append(CTxInWitness())
        tx3.wit.vtxinwit[0].scriptWitness.stack = [b'a'*400000]
        tx3.rehash()
        # Note that this should be rejected for the premature witness reason,
        # rather than a policy check, since segwit hasn't activated yet.
        self.std_node.test_transaction_acceptance(tx3, True, False, b'no-witness-yet')
        # If we send without witness, it should be accepted.
        self.std_node.test_transaction_acceptance(tx3, False, True)
        # Now create a new anyone-can-spend utxo for the next test.
        tx4 = CTransaction()
        tx4.vin.append(CTxIn(COutPoint(tx3.sha256, 0), CScript([p2sh_program])))
        tx4.vout.append(CTxOut(tx3.vout[0].nValue-1000, CScript([OP_TRUE])))
        tx4.rehash()
        self.test_node.test_transaction_acceptance(tx3, False, True)
        self.test_node.test_transaction_acceptance(tx4, False, True)
        self.nodes[0].generate(1)
        sync_blocks(self.nodes)
        # Update our utxo list; we spent the first entry.
        self.utxo.pop(0)
        self.utxo.append(UTXO(tx4.sha256, 0, tx4.vout[0].nValue))
# Mine enough blocks for segwit's vb state to be 'started'.
    def advance_to_segwit_started(self):
        """Mine to the end of the first BIP9 period so segwit's state is 'started'."""
        height = self.nodes[0].getblockcount()
        # Will need to rewrite the tests here if we are past the first period
        assert(height < VB_PERIOD - 1)
        # Genesis block is 'defined'.
        assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], 'defined')
        # Advance to end of period, status should now be 'started'
        self.nodes[0].generate(VB_PERIOD-height-1)
        assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], 'started')
# Mine enough blocks to lock in segwit, but don't activate.
# TODO: we could verify that lockin only happens at the right threshold of
# signalling blocks, rather than just at the right period boundary.
    def advance_to_segwit_lockin(self):
        """Mine one full signalling period so segwit's BIP9 state becomes 'locked_in'."""
        height = self.nodes[0].getblockcount()
        assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], 'started')
        # Advance to end of period, and verify lock-in happens at the end
        self.nodes[0].generate(VB_PERIOD-1)
        height = self.nodes[0].getblockcount()
        assert((height % VB_PERIOD) == VB_PERIOD - 2)
        assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], 'started')
        self.nodes[0].generate(1)
        assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], 'locked_in')
# Mine enough blocks to activate segwit.
# TODO: we could verify that activation only happens at the right threshold
# of signalling blocks, rather than just at the right period boundary.
    def advance_to_segwit_active(self):
        """Mine out the locked-in period so segwit's BIP9 state becomes 'active'."""
        assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], 'locked_in')
        height = self.nodes[0].getblockcount()
        self.nodes[0].generate(VB_PERIOD - (height%VB_PERIOD) - 2)
        assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], 'locked_in')
        self.nodes[0].generate(1)
        assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], 'active')
# This test can only be run after segwit has activated
    def test_witness_commitments(self):
        """Exercise witness commitment validation: correct commitments, tweaked
        nonces, misplaced/extra commitment outputs, and commitment-free blocks."""
        self.log.info("Testing witness commitments")
        # First try a correct witness commitment.
        block = self.build_next_block()
        add_witness_commitment(block)
        block.solve()
        # Test the test -- witness serialization should be different
        assert(msg_witness_block(block).serialize() != msg_block(block).serialize())
        # This empty block should be valid.
        self.test_node.test_witness_block(block, accepted=True)
        # Try to tweak the nonce
        block_2 = self.build_next_block()
        add_witness_commitment(block_2, nonce=28)
        block_2.solve()
        # The commitment should have changed!
        assert(block_2.vtx[0].vout[-1] != block.vtx[0].vout[-1])
        # This should also be valid.
        self.test_node.test_witness_block(block_2, accepted=True)
        # Now test commitments with actual transactions
        assert (len(self.utxo) > 0)
        tx = CTransaction()
        tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
        # Let's construct a witness program
        witness_program = CScript([OP_TRUE])
        witness_hash = sha256(witness_program)
        scriptPubKey = CScript([OP_0, witness_hash])
        tx.vout.append(CTxOut(self.utxo[0].nValue-1000, scriptPubKey))
        tx.rehash()
        # tx2 will spend tx1, and send back to a regular anyone-can-spend address
        tx2 = CTransaction()
        tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b""))
        tx2.vout.append(CTxOut(tx.vout[0].nValue-1000, witness_program))
        tx2.wit.vtxinwit.append(CTxInWitness())
        tx2.wit.vtxinwit[0].scriptWitness.stack = [witness_program]
        tx2.rehash()
        block_3 = self.build_next_block()
        self.update_witness_block_with_transactions(block_3, [tx, tx2], nonce=1)
        # Add an extra OP_RETURN output that matches the witness commitment template,
        # even though it has extra data after the incorrect commitment.
        # This block should fail.
        block_3.vtx[0].vout.append(CTxOut(0, CScript([OP_RETURN, WITNESS_COMMITMENT_HEADER + ser_uint256(2), 10])))
        block_3.vtx[0].rehash()
        block_3.hashMerkleRoot = block_3.calc_merkle_root()
        block_3.rehash()
        block_3.solve()
        self.test_node.test_witness_block(block_3, accepted=False)
        # Add a different commitment with different nonce, but in the
        # right location, and with some funds burned(!).
        # This should succeed (nValue shouldn't affect finding the
        # witness commitment).
        add_witness_commitment(block_3, nonce=0)
        block_3.vtx[0].vout[0].nValue -= 1
        block_3.vtx[0].vout[-1].nValue += 1
        block_3.vtx[0].rehash()
        block_3.hashMerkleRoot = block_3.calc_merkle_root()
        block_3.rehash()
        assert(len(block_3.vtx[0].vout) == 4) # 3 OP_returns
        block_3.solve()
        self.test_node.test_witness_block(block_3, accepted=True)
        # Finally test that a block with no witness transactions can
        # omit the commitment.
        block_4 = self.build_next_block()
        tx3 = CTransaction()
        tx3.vin.append(CTxIn(COutPoint(tx2.sha256, 0), b""))
        tx3.vout.append(CTxOut(tx.vout[0].nValue-1000, witness_program))
        tx3.rehash()
        block_4.vtx.append(tx3)
        block_4.hashMerkleRoot = block_4.calc_merkle_root()
        block_4.solve()
        self.test_node.test_witness_block(block_4, with_witness=False, accepted=True)
        # Update available utxo's for use in later test.
        self.utxo.pop(0)
        self.utxo.append(UTXO(tx3.sha256, 0, tx3.vout[0].nValue))
    def test_block_malleability(self):
        """Oversized or wrong-nonce witness data must not get a block's hash
        permanently marked invalid (the witness is malleable, the hash is not)."""
        self.log.info("Testing witness block malleability")
        # Make sure that a block that has too big a virtual size
        # because of a too-large coinbase witness is not permanently
        # marked bad.
        block = self.build_next_block()
        add_witness_commitment(block)
        block.solve()
        block.vtx[0].wit.vtxinwit[0].scriptWitness.stack.append(b'a'*5000000)
        assert(get_virtual_size(block) > MAX_BLOCK_BASE_SIZE)
        # We can't send over the p2p network, because this is too big to relay
        # TODO: repeat this test with a block that can be relayed
        self.nodes[0].submitblock(bytes_to_hex_str(block.serialize(True)))
        assert(self.nodes[0].getbestblockhash() != block.hash)
        block.vtx[0].wit.vtxinwit[0].scriptWitness.stack.pop()
        assert(get_virtual_size(block) < MAX_BLOCK_BASE_SIZE)
        self.nodes[0].submitblock(bytes_to_hex_str(block.serialize(True)))
        assert(self.nodes[0].getbestblockhash() == block.hash)
        # Now make sure that malleating the witness nonce doesn't
        # result in a block permanently marked bad.
        block = self.build_next_block()
        add_witness_commitment(block)
        block.solve()
        # Change the nonce -- should not cause the block to be permanently
        # failed
        block.vtx[0].wit.vtxinwit[0].scriptWitness.stack = [ ser_uint256(1) ]
        self.test_node.test_witness_block(block, accepted=False)
        # Changing the witness nonce doesn't change the block hash
        block.vtx[0].wit.vtxinwit[0].scriptWitness.stack = [ ser_uint256(0) ]
        self.test_node.test_witness_block(block, accepted=True)
    def test_witness_block_size(self):
        """Build a block exactly one virtual byte over the limit (rejected),
        then shrink it by one byte (accepted)."""
        self.log.info("Testing witness block size limit")
        # TODO: Test that non-witness carrying blocks can't exceed 1MB
        # Skipping this test for now; this is covered in p2p-fullblocktest.py
        # Test that witness-bearing blocks are limited at ceil(base + wit/4) <= 1MB.
        block = self.build_next_block()
        assert(len(self.utxo) > 0)
        # Create a P2WSH transaction.
        # The witness program will be a bunch of OP_2DROP's, followed by OP_TRUE.
        # This should give us plenty of room to tweak the spending tx's
        # virtual size.
        NUM_DROPS = 200 # 201 max ops per script!
        NUM_OUTPUTS = 50
        witness_program = CScript([OP_2DROP]*NUM_DROPS + [OP_TRUE])
        witness_hash = uint256_from_str(sha256(witness_program))
        scriptPubKey = CScript([OP_0, ser_uint256(witness_hash)])
        prevout = COutPoint(self.utxo[0].sha256, self.utxo[0].n)
        value = self.utxo[0].nValue
        parent_tx = CTransaction()
        parent_tx.vin.append(CTxIn(prevout, b""))
        child_value = int(value/NUM_OUTPUTS)
        for i in range(NUM_OUTPUTS):
            parent_tx.vout.append(CTxOut(child_value, scriptPubKey))
        parent_tx.vout[0].nValue -= 50000
        assert(parent_tx.vout[0].nValue > 0)
        parent_tx.rehash()
        child_tx = CTransaction()
        for i in range(NUM_OUTPUTS):
            child_tx.vin.append(CTxIn(COutPoint(parent_tx.sha256, i), b""))
        child_tx.vout = [CTxOut(value - 100000, CScript([OP_TRUE]))]
        for i in range(NUM_OUTPUTS):
            child_tx.wit.vtxinwit.append(CTxInWitness())
            child_tx.wit.vtxinwit[-1].scriptWitness.stack = [b'a'*195]*(2*NUM_DROPS) + [witness_program]
        child_tx.rehash()
        self.update_witness_block_with_transactions(block, [parent_tx, child_tx])
        vsize = get_virtual_size(block)
        additional_bytes = (MAX_BLOCK_BASE_SIZE - vsize)*4
        i = 0
        while additional_bytes > 0:
            # Add some more bytes to each input until we hit MAX_BLOCK_BASE_SIZE+1
            extra_bytes = min(additional_bytes+1, 55)
            block.vtx[-1].wit.vtxinwit[int(i/(2*NUM_DROPS))].scriptWitness.stack[i%(2*NUM_DROPS)] = b'a'*(195+extra_bytes)
            additional_bytes -= extra_bytes
            i += 1
        block.vtx[0].vout.pop() # Remove old commitment
        add_witness_commitment(block)
        block.solve()
        vsize = get_virtual_size(block)
        assert_equal(vsize, MAX_BLOCK_BASE_SIZE + 1)
        # Make sure that our test case would exceed the old max-network-message
        # limit
        assert(len(block.serialize(True)) > 2*1024*1024)
        self.test_node.test_witness_block(block, accepted=False)
        # Now resize the second transaction to make the block fit.
        cur_length = len(block.vtx[-1].wit.vtxinwit[0].scriptWitness.stack[0])
        block.vtx[-1].wit.vtxinwit[0].scriptWitness.stack[0] = b'a'*(cur_length-1)
        block.vtx[0].vout.pop()
        add_witness_commitment(block)
        block.solve()
        assert(get_virtual_size(block) == MAX_BLOCK_BASE_SIZE)
        self.test_node.test_witness_block(block, accepted=True)
        # Update available utxo's
        self.utxo.pop(0)
        self.utxo.append(UTXO(block.vtx[-1].sha256, 0, block.vtx[-1].vout[0].nValue))
# submitblock will try to add the nonce automatically, so that mining
# software doesn't need to worry about doing so itself.
    def test_submit_block(self):
        """submitblock should auto-fill only the default witness nonce, never a
        custom nonce or a missing commitment for witness-bearing blocks."""
        block = self.build_next_block()
        # Try using a custom nonce and then don't supply it.
        # This shouldn't possibly work.
        add_witness_commitment(block, nonce=1)
        block.vtx[0].wit = CTxWitness() # drop the nonce
        block.solve()
        self.nodes[0].submitblock(bytes_to_hex_str(block.serialize(True)))
        assert(self.nodes[0].getbestblockhash() != block.hash)
        # Now redo commitment with the standard nonce, but let kenfmcoind fill it in.
        add_witness_commitment(block, nonce=0)
        block.vtx[0].wit = CTxWitness()
        block.solve()
        self.nodes[0].submitblock(bytes_to_hex_str(block.serialize(True)))
        assert_equal(self.nodes[0].getbestblockhash(), block.hash)
        # This time, add a tx with non-empty witness, but don't supply
        # the commitment.
        block_2 = self.build_next_block()
        add_witness_commitment(block_2)
        block_2.solve()
        # Drop commitment and nonce -- submitblock should not fill in.
        block_2.vtx[0].vout.pop()
        block_2.vtx[0].wit = CTxWitness()
        self.nodes[0].submitblock(bytes_to_hex_str(block_2.serialize(True)))
        # Tip should not advance!
        assert(self.nodes[0].getbestblockhash() != block_2.hash)
# Consensus tests of extra witness data in a transaction.
    def test_extra_witness_data(self):
        """Consensus: extra witness stack items or scriptSig data on witness
        inputs invalidate a block; extras on non-witness inputs are fine."""
        self.log.info("Testing extra witness data in tx")
        assert(len(self.utxo) > 0)
        block = self.build_next_block()
        witness_program = CScript([OP_DROP, OP_TRUE])
        witness_hash = sha256(witness_program)
        scriptPubKey = CScript([OP_0, witness_hash])
        # First try extra witness data on a tx that doesn't require a witness
        tx = CTransaction()
        tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
        tx.vout.append(CTxOut(self.utxo[0].nValue-2000, scriptPubKey))
        tx.vout.append(CTxOut(1000, CScript([OP_TRUE]))) # non-witness output
        tx.wit.vtxinwit.append(CTxInWitness())
        tx.wit.vtxinwit[0].scriptWitness.stack = [CScript([])]
        tx.rehash()
        self.update_witness_block_with_transactions(block, [tx])
        # Extra witness data should not be allowed.
        self.test_node.test_witness_block(block, accepted=False)
        # Try extra signature data. Ok if we're not spending a witness output.
        block.vtx[1].wit.vtxinwit = []
        block.vtx[1].vin[0].scriptSig = CScript([OP_0])
        block.vtx[1].rehash()
        add_witness_commitment(block)
        block.solve()
        self.test_node.test_witness_block(block, accepted=True)
        # Now try extra witness/signature data on an input that DOES require a
        # witness
        tx2 = CTransaction()
        tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b"")) # witness output
        tx2.vin.append(CTxIn(COutPoint(tx.sha256, 1), b"")) # non-witness
        tx2.vout.append(CTxOut(tx.vout[0].nValue, CScript([OP_TRUE])))
        tx2.wit.vtxinwit.extend([CTxInWitness(), CTxInWitness()])
        tx2.wit.vtxinwit[0].scriptWitness.stack = [ CScript([CScriptNum(1)]), CScript([CScriptNum(1)]), witness_program ]
        tx2.wit.vtxinwit[1].scriptWitness.stack = [ CScript([OP_TRUE]) ]
        block = self.build_next_block()
        self.update_witness_block_with_transactions(block, [tx2])
        # This has extra witness data, so it should fail.
        self.test_node.test_witness_block(block, accepted=False)
        # Now get rid of the extra witness, but add extra scriptSig data
        tx2.vin[0].scriptSig = CScript([OP_TRUE])
        tx2.vin[1].scriptSig = CScript([OP_TRUE])
        tx2.wit.vtxinwit[0].scriptWitness.stack.pop(0)
        tx2.wit.vtxinwit[1].scriptWitness.stack = []
        tx2.rehash()
        add_witness_commitment(block)
        block.solve()
        # This has extra signature data for a witness input, so it should fail.
        self.test_node.test_witness_block(block, accepted=False)
        # Now get rid of the extra scriptsig on the witness input, and verify
        # success (even with extra scriptsig data in the non-witness input)
        tx2.vin[0].scriptSig = b""
        tx2.rehash()
        add_witness_commitment(block)
        block.solve()
        self.test_node.test_witness_block(block, accepted=True)
        # Update utxo for later tests
        self.utxo.pop(0)
        self.utxo.append(UTXO(tx2.sha256, 0, tx2.vout[0].nValue))
    def test_max_witness_push_length(self):
        ''' Should only allow up to 520 byte pushes in witness stack '''
        self.log.info("Testing maximum witness push size")
        MAX_SCRIPT_ELEMENT_SIZE = 520
        assert(len(self.utxo))
        block = self.build_next_block()
        witness_program = CScript([OP_DROP, OP_TRUE])
        witness_hash = sha256(witness_program)
        scriptPubKey = CScript([OP_0, witness_hash])
        # tx funds a P2WSH output; tx2 spends it with an oversized stack element.
        tx = CTransaction()
        tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
        tx.vout.append(CTxOut(self.utxo[0].nValue-1000, scriptPubKey))
        tx.rehash()
        tx2 = CTransaction()
        tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b""))
        tx2.vout.append(CTxOut(tx.vout[0].nValue-1000, CScript([OP_TRUE])))
        tx2.wit.vtxinwit.append(CTxInWitness())
        # First try a 521-byte stack element
        tx2.wit.vtxinwit[0].scriptWitness.stack = [ b'a'*(MAX_SCRIPT_ELEMENT_SIZE+1), witness_program ]
        tx2.rehash()
        self.update_witness_block_with_transactions(block, [tx, tx2])
        self.test_node.test_witness_block(block, accepted=False)
        # Now reduce the length of the stack element
        tx2.wit.vtxinwit[0].scriptWitness.stack[0] = b'a'*(MAX_SCRIPT_ELEMENT_SIZE)
        add_witness_commitment(block)
        block.solve()
        self.test_node.test_witness_block(block, accepted=True)
        # Update the utxo for later tests
        self.utxo.pop()
        self.utxo.append(UTXO(tx2.sha256, 0, tx2.vout[0].nValue))
    def test_max_witness_program_length(self):
        """A witness program over 10,000 bytes can be created but not spent."""
        # Can create witness outputs that are long, but can't be greater than
        # 10k bytes to successfully spend
        self.log.info("Testing maximum witness program length")
        assert(len(self.utxo))
        MAX_PROGRAM_LENGTH = 10000
        # This program is 19 max pushes (9937 bytes), then 64 more opcode-bytes.
        long_witness_program = CScript([b'a'*520]*19 + [OP_DROP]*63 + [OP_TRUE])
        assert(len(long_witness_program) == MAX_PROGRAM_LENGTH+1)
        long_witness_hash = sha256(long_witness_program)
        long_scriptPubKey = CScript([OP_0, long_witness_hash])
        block = self.build_next_block()
        tx = CTransaction()
        tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
        tx.vout.append(CTxOut(self.utxo[0].nValue-1000, long_scriptPubKey))
        tx.rehash()
        tx2 = CTransaction()
        tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b""))
        tx2.vout.append(CTxOut(tx.vout[0].nValue-1000, CScript([OP_TRUE])))
        tx2.wit.vtxinwit.append(CTxInWitness())
        tx2.wit.vtxinwit[0].scriptWitness.stack = [b'a']*44 + [long_witness_program]
        tx2.rehash()
        self.update_witness_block_with_transactions(block, [tx, tx2])
        self.test_node.test_witness_block(block, accepted=False)
        # Try again with one less byte in the witness program
        witness_program = CScript([b'a'*520]*19 + [OP_DROP]*62 + [OP_TRUE])
        assert(len(witness_program) == MAX_PROGRAM_LENGTH)
        witness_hash = sha256(witness_program)
        scriptPubKey = CScript([OP_0, witness_hash])
        tx.vout[0] = CTxOut(tx.vout[0].nValue, scriptPubKey)
        tx.rehash()
        tx2.vin[0].prevout.hash = tx.sha256
        tx2.wit.vtxinwit[0].scriptWitness.stack = [b'a']*43 + [witness_program]
        tx2.rehash()
        block.vtx = [block.vtx[0]]
        self.update_witness_block_with_transactions(block, [tx, tx2])
        self.test_node.test_witness_block(block, accepted=True)
        self.utxo.pop()
        self.utxo.append(UTXO(tx2.sha256, 0, tx2.vout[0].nValue))
    def test_witness_input_length(self):
        ''' Ensure that vin length must match vtxinwit length '''
        self.log.info("Testing witness input length")
        assert(len(self.utxo))
        witness_program = CScript([OP_DROP, OP_TRUE])
        witness_hash = sha256(witness_program)
        scriptPubKey = CScript([OP_0, witness_hash])
        # Create a transaction that splits our utxo into many outputs
        tx = CTransaction()
        tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
        nValue = self.utxo[0].nValue
        for i in range(10):
            tx.vout.append(CTxOut(int(nValue/10), scriptPubKey))
        tx.vout[0].nValue -= 1000
        assert(tx.vout[0].nValue >= 0)
        block = self.build_next_block()
        self.update_witness_block_with_transactions(block, [tx])
        self.test_node.test_witness_block(block, accepted=True)
        # Try various ways to spend tx that should all break.
        # This "broken" transaction serializer will not normalize
        # the length of vtxinwit.
        class BrokenCTransaction(CTransaction):
            # Serializes the witness vector at whatever length it actually has,
            # letting us send mismatched vin/vtxinwit lengths over the wire.
            def serialize_with_witness(self):
                flags = 0
                if not self.wit.is_null():
                    flags |= 1
                r = b""
                r += struct.pack("<i", self.nVersion)
                if flags:
                    dummy = []
                    r += ser_vector(dummy)
                    r += struct.pack("<B", flags)
                r += ser_vector(self.vin)
                r += ser_vector(self.vout)
                if flags & 1:
                    r += self.wit.serialize()
                r += struct.pack("<I", self.nLockTime)
                return r
        tx2 = BrokenCTransaction()
        for i in range(10):
            tx2.vin.append(CTxIn(COutPoint(tx.sha256, i), b""))
        tx2.vout.append(CTxOut(nValue-3000, CScript([OP_TRUE])))
        # First try using a too long vtxinwit
        for i in range(11):
            tx2.wit.vtxinwit.append(CTxInWitness())
            tx2.wit.vtxinwit[i].scriptWitness.stack = [b'a', witness_program]
        block = self.build_next_block()
        self.update_witness_block_with_transactions(block, [tx2])
        self.test_node.test_witness_block(block, accepted=False)
        # Now try using a too short vtxinwit
        tx2.wit.vtxinwit.pop()
        tx2.wit.vtxinwit.pop()
        block.vtx = [block.vtx[0]]
        self.update_witness_block_with_transactions(block, [tx2])
        self.test_node.test_witness_block(block, accepted=False)
        # Now make one of the intermediate witnesses be incorrect
        tx2.wit.vtxinwit.append(CTxInWitness())
        tx2.wit.vtxinwit[-1].scriptWitness.stack = [b'a', witness_program]
        tx2.wit.vtxinwit[5].scriptWitness.stack = [ witness_program ]
        block.vtx = [block.vtx[0]]
        self.update_witness_block_with_transactions(block, [tx2])
        self.test_node.test_witness_block(block, accepted=False)
        # Fix the broken witness and the block should be accepted.
        tx2.wit.vtxinwit[5].scriptWitness.stack = [b'a', witness_program]
        block.vtx = [block.vtx[0]]
        self.update_witness_block_with_transactions(block, [tx2])
        self.test_node.test_witness_block(block, accepted=True)
        self.utxo.pop()
        self.utxo.append(UTXO(tx2.sha256, 0, tx2.vout[0].nValue))
def test_witness_tx_relay_before_segwit_activation(self):
self.log.info("Testing relay of witness transactions")
# Generate a transaction that doesn't require a witness, but send it
# with a witness. Should be rejected for premature-witness, but should
# not be added to recently rejected list.
assert(len(self.utxo))
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
tx.vout.append(CTxOut(self.utxo[0].nValue-1000, CScript([OP_TRUE])))
tx.wit.vtxinwit.append(CTxInWitness())
tx.wit.vtxinwit[0].scriptWitness.stack = [ b'a' ]
tx.rehash()
tx_hash = tx.sha256
tx_value = tx.vout[0].nValue
# Verify that if a peer doesn't set nServices to include NODE_WITNESS,
# the getdata is just for the non-witness portion.
self.old_node.announce_tx_and_wait_for_getdata(tx)
assert(self.old_node.last_message["getdata"].inv[0].type == 1)
# Since we haven't delivered the tx yet, inv'ing the same tx from
# a witness transaction ought not result in a getdata.
try:
self.test_node.announce_tx_and_wait_for_getdata(tx, timeout=2)
self.log.error("Error: duplicate tx getdata!")
assert(False)
except AssertionError as e:
pass
# Delivering this transaction with witness should fail (no matter who
# its from)
assert_equal(len(self.nodes[0].getrawmempool()), 0)
assert_equal(len(self.nodes[1].getrawmempool()), 0)
self.old_node.test_transaction_acceptance(tx, with_witness=True, accepted=False)
self.test_node.test_transaction_acceptance(tx, with_witness=True, accepted=False)
# But eliminating the witness should fix it
self.test_node.test_transaction_acceptance(tx, with_witness=False, accepted=True)
# Cleanup: mine the first transaction and update utxo
self.nodes[0].generate(1)
assert_equal(len(self.nodes[0].getrawmempool()), 0)
self.utxo.pop(0)
self.utxo.append(UTXO(tx_hash, 0, tx_value))
# After segwit activates, verify that mempool:
# - rejects transactions with unnecessary/extra witnesses
# - accepts transactions with valid witnesses
# and that witness transactions are relayed to non-upgraded peers.
    def test_tx_relay_after_segwit_activation(self):
        """Verify mempool/relay behavior of witness transactions post-activation.

        Checks that: extra/unnecessary witnesses are rejected; stripping the
        witness fixes acceptance; over-standard-size witnesses are rejected
        with 'tx-size' without entering the reject filter; witness txs are
        still announced to non-upgraded peers; and getrawtransaction reports
        correct hash/size/vsize/txinwitness fields.
        """
        self.log.info("Testing relay of witness transactions")
        # Generate a transaction that doesn't require a witness, but send it
        # with a witness.  Should be rejected because we can't use a witness
        # when spending a non-witness output.
        assert(len(self.utxo))
        tx = CTransaction()
        tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
        tx.vout.append(CTxOut(self.utxo[0].nValue-1000, CScript([OP_TRUE])))
        tx.wit.vtxinwit.append(CTxInWitness())
        tx.wit.vtxinwit[0].scriptWitness.stack = [ b'a' ]
        tx.rehash()
        tx_hash = tx.sha256
        # Verify that unnecessary witnesses are rejected.
        self.test_node.announce_tx_and_wait_for_getdata(tx)
        assert_equal(len(self.nodes[0].getrawmempool()), 0)
        self.test_node.test_transaction_acceptance(tx, with_witness=True, accepted=False)
        # Verify that removing the witness succeeds.
        self.test_node.announce_tx_and_wait_for_getdata(tx)
        self.test_node.test_transaction_acceptance(tx, with_witness=False, accepted=True)
        # Now try to add extra witness data to a valid witness tx.
        witness_program = CScript([OP_TRUE])
        witness_hash = sha256(witness_program)
        scriptPubKey = CScript([OP_0, witness_hash])
        # tx2 funds a P2WSH output; tx3 will spend it with various witnesses.
        tx2 = CTransaction()
        tx2.vin.append(CTxIn(COutPoint(tx_hash, 0), b""))
        tx2.vout.append(CTxOut(tx.vout[0].nValue-1000, scriptPubKey))
        tx2.rehash()
        tx3 = CTransaction()
        tx3.vin.append(CTxIn(COutPoint(tx2.sha256, 0), b""))
        tx3.wit.vtxinwit.append(CTxInWitness())
        # Add too-large for IsStandard witness and check that it does not enter reject filter
        p2sh_program = CScript([OP_TRUE])
        p2sh_pubkey = hash160(p2sh_program)
        # 400kB push blows past the standardness size limit.
        witness_program2 = CScript([b'a'*400000])
        tx3.vout.append(CTxOut(tx2.vout[0].nValue-1000, CScript([OP_HASH160, p2sh_pubkey, OP_EQUAL])))
        tx3.wit.vtxinwit[0].scriptWitness.stack = [witness_program2]
        tx3.rehash()
        # Node will not be blinded to the transaction: announcing it twice and
        # getting 'tx-size' twice proves it was not added to the reject filter.
        self.std_node.announce_tx_and_wait_for_getdata(tx3)
        self.std_node.test_transaction_acceptance(tx3, True, False, b'tx-size')
        self.std_node.announce_tx_and_wait_for_getdata(tx3)
        self.std_node.test_transaction_acceptance(tx3, True, False, b'tx-size')
        # Remove witness stuffing, instead add extra witness push on stack
        tx3.vout[0] = CTxOut(tx2.vout[0].nValue-1000, CScript([OP_TRUE]))
        tx3.wit.vtxinwit[0].scriptWitness.stack = [CScript([CScriptNum(1)]), witness_program ]
        tx3.rehash()
        self.test_node.test_transaction_acceptance(tx2, with_witness=True, accepted=True)
        self.test_node.test_transaction_acceptance(tx3, with_witness=True, accepted=False)
        # Get rid of the extra witness, and verify acceptance.
        tx3.wit.vtxinwit[0].scriptWitness.stack = [ witness_program ]
        # Also check that old_node gets a tx announcement, even though this is
        # a witness transaction.
        self.old_node.wait_for_inv([CInv(1, tx2.sha256)]) # wait until tx2 was inv'ed
        self.test_node.test_transaction_acceptance(tx3, with_witness=True, accepted=True)
        self.old_node.wait_for_inv([CInv(1, tx3.sha256)])
        # Test that getrawtransaction returns correct witness information
        # hash, size, vsize
        raw_tx = self.nodes[0].getrawtransaction(tx3.hash, 1)
        assert_equal(int(raw_tx["hash"], 16), tx3.calc_sha256(True))
        assert_equal(raw_tx["size"], len(tx3.serialize_with_witness()))
        # vsize = ceil(weight / 4), computed from the two serializations.
        vsize = (len(tx3.serialize_with_witness()) + 3*len(tx3.serialize_without_witness()) + 3) / 4
        assert_equal(raw_tx["vsize"], vsize)
        assert_equal(len(raw_tx["vin"][0]["txinwitness"]), 1)
        assert_equal(raw_tx["vin"][0]["txinwitness"][0], hexlify(witness_program).decode('ascii'))
        assert(vsize != raw_tx["size"])
        # Cleanup: mine the transactions and update utxo for next test
        self.nodes[0].generate(1)
        assert_equal(len(self.nodes[0].getrawmempool()), 0)
        self.utxo.pop(0)
        self.utxo.append(UTXO(tx3.sha256, 0, tx3.vout[0].nValue))
# Test that block requests to NODE_WITNESS peer are with MSG_WITNESS_FLAG
# This is true regardless of segwit activation.
# Also test that we don't ask for blocks from unupgraded peers
    def test_block_relay(self, segwit_activated):
        """Verify block getdata behavior around witness blocks.

        A NODE_WITNESS peer's getdata requests must carry MSG_WITNESS_FLAG
        regardless of activation. Before activation, witness and non-witness
        block fetches must serialize identically; after, they must differ and
        getblock RPC must return the witness serialization. Also verifies we
        don't direct-fetch blocks from unupgraded peers.
        """
        self.log.info("Testing block relay")
        blocktype = 2|MSG_WITNESS_FLAG
        # test_node has set NODE_WITNESS, so all getdata requests should be for
        # witness blocks.
        # Test announcing a block via inv results in a getdata, and that
        # announcing a version 4 or random VB block with a header results in a getdata
        block1 = self.build_next_block()
        block1.solve()
        self.test_node.announce_block_and_wait_for_getdata(block1, use_header=False)
        assert(self.test_node.last_message["getdata"].inv[0].type == blocktype)
        self.test_node.test_witness_block(block1, True)
        block2 = self.build_next_block(nVersion=4)
        block2.solve()
        self.test_node.announce_block_and_wait_for_getdata(block2, use_header=True)
        assert(self.test_node.last_message["getdata"].inv[0].type == blocktype)
        self.test_node.test_witness_block(block2, True)
        block3 = self.build_next_block(nVersion=(VB_TOP_BITS | (1<<15)))
        block3.solve()
        self.test_node.announce_block_and_wait_for_getdata(block3, use_header=True)
        assert(self.test_node.last_message["getdata"].inv[0].type == blocktype)
        self.test_node.test_witness_block(block3, True)
        # Check that we can getdata for witness blocks or regular blocks,
        # and the right thing happens.
        if segwit_activated == False:
            # Before activation, we should be able to request old blocks with
            # or without witness, and they should be the same.
            chain_height = self.nodes[0].getblockcount()
            # Pick 10 random blocks on main chain, and verify that getdata's
            # for MSG_BLOCK, MSG_WITNESS_BLOCK, and rpc getblock() are equal.
            all_heights = list(range(chain_height+1))
            random.shuffle(all_heights)
            all_heights = all_heights[0:10]
            for height in all_heights:
                block_hash = self.nodes[0].getblockhash(height)
                rpc_block = self.nodes[0].getblock(block_hash, False)
                block_hash = int(block_hash, 16)
                block = self.test_node.request_block(block_hash, 2)
                wit_block = self.test_node.request_block(block_hash, 2|MSG_WITNESS_FLAG)
                assert_equal(block.serialize(True), wit_block.serialize(True))
                assert_equal(block.serialize(), hex_str_to_bytes(rpc_block))
        else:
            # After activation, witness blocks and non-witness blocks should
            # be different.  Verify rpc getblock() returns witness blocks, while
            # getdata respects the requested type.
            block = self.build_next_block()
            self.update_witness_block_with_transactions(block, [])
            # This gives us a witness commitment.
            assert(len(block.vtx[0].wit.vtxinwit) == 1)
            assert(len(block.vtx[0].wit.vtxinwit[0].scriptWitness.stack) == 1)
            self.test_node.test_witness_block(block, accepted=True)
            # Now try to retrieve it...
            rpc_block = self.nodes[0].getblock(block.hash, False)
            non_wit_block = self.test_node.request_block(block.sha256, 2)
            wit_block = self.test_node.request_block(block.sha256, 2|MSG_WITNESS_FLAG)
            assert_equal(wit_block.serialize(True), hex_str_to_bytes(rpc_block))
            assert_equal(wit_block.serialize(False), non_wit_block.serialize())
            assert_equal(wit_block.serialize(True), block.serialize(True))
            # Test size, vsize, weight
            rpc_details = self.nodes[0].getblock(block.hash, True)
            assert_equal(rpc_details["size"], len(block.serialize(True)))
            assert_equal(rpc_details["strippedsize"], len(block.serialize(False)))
            # weight = 3 * stripped size + total size.
            weight = 3*len(block.serialize(False)) + len(block.serialize(True))
            assert_equal(rpc_details["weight"], weight)
            # Upgraded node should not ask for blocks from unupgraded
            block4 = self.build_next_block(nVersion=4)
            block4.solve()
            self.old_node.getdataset = set()
            # Blocks can be requested via direct-fetch (immediately upon processing the announcement)
            # or via parallel download (with an indeterminate delay from processing the announcement)
            # so to test that a block is NOT requested, we could guess a time period to sleep for,
            # and then check. We can avoid the sleep() by taking advantage of transaction getdata's
            # being processed after block getdata's, and announce a transaction as well,
            # and then check to see if that particular getdata has been received.
            # Since 0.14, inv's will only be responded to with a getheaders, so send a header
            # to announce this block.
            msg = msg_headers()
            msg.headers = [ CBlockHeader(block4) ]
            self.old_node.send_message(msg)
            self.old_node.announce_tx_and_wait_for_getdata(block4.vtx[0])
            assert(block4.sha256 not in self.old_node.getdataset)
# V0 segwit outputs should be standard after activation, but not before.
    def test_standardness_v0(self, segwit_activated):
        """Check that v0 segwit outputs are standard only after activation.

        Funds a P2SH output first (so spending it passes standardness), then
        submits transactions creating/spending v0 P2WSH outputs to the
        standardness-enforcing node, expecting acceptance iff
        `segwit_activated`. Leaves a fresh entry in self.utxo for later tests.
        """
        self.log.info("Testing standardness of v0 outputs (%s activation)" % ("after" if segwit_activated else "before"))
        assert(len(self.utxo))
        witness_program = CScript([OP_TRUE])
        witness_hash = sha256(witness_program)
        scriptPubKey = CScript([OP_0, witness_hash])
        p2sh_pubkey = hash160(witness_program)
        p2sh_scriptPubKey = CScript([OP_HASH160, p2sh_pubkey, OP_EQUAL])
        # First prepare a p2sh output (so that spending it will pass standardness)
        p2sh_tx = CTransaction()
        p2sh_tx.vin = [CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b"")]
        p2sh_tx.vout = [CTxOut(self.utxo[0].nValue-1000, p2sh_scriptPubKey)]
        p2sh_tx.rehash()
        # Mine it on test_node to create the confirmed output.
        self.test_node.test_transaction_acceptance(p2sh_tx, with_witness=True, accepted=True)
        self.nodes[0].generate(1)
        sync_blocks(self.nodes)
        # Now test standardness of v0 P2WSH outputs.
        # Start by creating a transaction with two outputs.
        tx = CTransaction()
        tx.vin = [CTxIn(COutPoint(p2sh_tx.sha256, 0), CScript([witness_program]))]
        tx.vout = [CTxOut(p2sh_tx.vout[0].nValue-10000, scriptPubKey)]
        tx.vout.append(CTxOut(8000, scriptPubKey)) # Might burn this later
        tx.rehash()
        self.std_node.test_transaction_acceptance(tx, with_witness=True, accepted=segwit_activated)
        # Now create something that looks like a P2PKH output. This won't be spendable.
        scriptPubKey = CScript([OP_0, hash160(witness_hash)])
        tx2 = CTransaction()
        if segwit_activated:
            # if tx was accepted, then we spend the second output.
            tx2.vin = [CTxIn(COutPoint(tx.sha256, 1), b"")]
            tx2.vout = [CTxOut(7000, scriptPubKey)]
            tx2.wit.vtxinwit.append(CTxInWitness())
            tx2.wit.vtxinwit[0].scriptWitness.stack = [witness_program]
        else:
            # if tx wasn't accepted, we just re-spend the p2sh output we started with.
            tx2.vin = [CTxIn(COutPoint(p2sh_tx.sha256, 0), CScript([witness_program]))]
            tx2.vout = [CTxOut(p2sh_tx.vout[0].nValue-1000, scriptPubKey)]
        tx2.rehash()
        self.std_node.test_transaction_acceptance(tx2, with_witness=True, accepted=segwit_activated)
        # Now update self.utxo for later tests.
        tx3 = CTransaction()
        if segwit_activated:
            # tx and tx2 were both accepted.  Don't bother trying to reclaim the
            # P2PKH output; just send tx's first output back to an anyone-can-spend.
            sync_mempools([self.nodes[0], self.nodes[1]])
            tx3.vin = [CTxIn(COutPoint(tx.sha256, 0), b"")]
            tx3.vout = [CTxOut(tx.vout[0].nValue-1000, CScript([OP_TRUE]))]
            tx3.wit.vtxinwit.append(CTxInWitness())
            tx3.wit.vtxinwit[0].scriptWitness.stack = [witness_program]
            tx3.rehash()
            self.test_node.test_transaction_acceptance(tx3, with_witness=True, accepted=True)
        else:
            # tx and tx2 didn't go anywhere; just clean up the p2sh_tx output.
            tx3.vin = [CTxIn(COutPoint(p2sh_tx.sha256, 0), CScript([witness_program]))]
            tx3.vout = [CTxOut(p2sh_tx.vout[0].nValue-1000, witness_program)]
            tx3.rehash()
            self.test_node.test_transaction_acceptance(tx3, with_witness=True, accepted=True)
        self.nodes[0].generate(1)
        sync_blocks(self.nodes)
        self.utxo.pop(0)
        self.utxo.append(UTXO(tx3.sha256, 0, tx3.vout[0].nValue))
        assert_equal(len(self.nodes[1].getrawmempool()), 0)
# Verify that future segwit upgraded transactions are non-standard,
# but valid in blocks. Can run this before and after segwit activation.
    def test_segwit_versions(self):
        """Check standardness/consensus for witness versions 0-16.

        Creating outputs for future witness versions (OP_1..OP_16) is
        non-standard but consensus-valid; spending such outputs is rejected by
        policy ("reserved for soft-fork upgrades") but valid in a block.
        """
        self.log.info("Testing standardness/consensus for segwit versions (0-16)")
        assert(len(self.utxo))
        NUM_TESTS = 17 # will test OP_0, OP1, ..., OP_16
        # Split a single utxo into enough outputs for all version tests.
        if (len(self.utxo) < NUM_TESTS):
            tx = CTransaction()
            tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
            split_value = (self.utxo[0].nValue - 4000) // NUM_TESTS
            for i in range(NUM_TESTS):
                tx.vout.append(CTxOut(split_value, CScript([OP_TRUE])))
            tx.rehash()
            block = self.build_next_block()
            self.update_witness_block_with_transactions(block, [tx])
            self.test_node.test_witness_block(block, accepted=True)
            self.utxo.pop(0)
            for i in range(NUM_TESTS):
                self.utxo.append(UTXO(tx.sha256, i, split_value))
        sync_blocks(self.nodes)
        temp_utxo = []
        tx = CTransaction()
        count = 0
        witness_program = CScript([OP_TRUE])
        witness_hash = sha256(witness_program)
        assert_equal(len(self.nodes[1].getrawmempool()), 0)
        for version in list(range(OP_1, OP_16+1)) + [OP_0]:
            count += 1
            # First try to spend to a future version segwit scriptPubKey.
            scriptPubKey = CScript([CScriptOp(version), witness_hash])
            tx.vin = [CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b"")]
            tx.vout = [CTxOut(self.utxo[0].nValue-1000, scriptPubKey)]
            tx.rehash()
            # Non-standard on the policy node, accepted on the permissive one.
            self.std_node.test_transaction_acceptance(tx, with_witness=True, accepted=False)
            self.test_node.test_transaction_acceptance(tx, with_witness=True, accepted=True)
            self.utxo.pop(0)
            temp_utxo.append(UTXO(tx.sha256, 0, tx.vout[0].nValue))
        self.nodes[0].generate(1) # Mine all the transactions
        sync_blocks(self.nodes)
        assert(len(self.nodes[0].getrawmempool()) == 0)
        # Finally, verify that version 0 -> version 1 transactions
        # are non-standard
        scriptPubKey = CScript([CScriptOp(OP_1), witness_hash])
        tx2 = CTransaction()
        tx2.vin = [CTxIn(COutPoint(tx.sha256, 0), b"")]
        tx2.vout = [CTxOut(tx.vout[0].nValue-1000, scriptPubKey)]
        tx2.wit.vtxinwit.append(CTxInWitness())
        tx2.wit.vtxinwit[0].scriptWitness.stack = [ witness_program ]
        tx2.rehash()
        # Gets accepted to test_node, because standardness of outputs isn't
        # checked with fRequireStandard
        self.test_node.test_transaction_acceptance(tx2, with_witness=True, accepted=True)
        self.std_node.test_transaction_acceptance(tx2, with_witness=True, accepted=False)
        temp_utxo.pop() # last entry in temp_utxo was the output we just spent
        temp_utxo.append(UTXO(tx2.sha256, 0, tx2.vout[0].nValue))
        # Spend everything in temp_utxo back to an OP_TRUE output.
        tx3 = CTransaction()
        total_value = 0
        for i in temp_utxo:
            tx3.vin.append(CTxIn(COutPoint(i.sha256, i.n), b""))
            tx3.wit.vtxinwit.append(CTxInWitness())
            total_value += i.nValue
        tx3.wit.vtxinwit[-1].scriptWitness.stack = [witness_program]
        tx3.vout.append(CTxOut(total_value - 1000, CScript([OP_TRUE])))
        tx3.rehash()
        # Spending a higher version witness output is not allowed by policy,
        # even with fRequireStandard=false.
        self.test_node.test_transaction_acceptance(tx3, with_witness=True, accepted=False)
        self.test_node.sync_with_ping()
        with mininode_lock:
            assert(b"reserved for soft-fork upgrades" in self.test_node.last_message["reject"].reason)
        # Building a block with the transaction must be valid, however.
        block = self.build_next_block()
        self.update_witness_block_with_transactions(block, [tx2, tx3])
        self.test_node.test_witness_block(block, accepted=True)
        sync_blocks(self.nodes)
        # Add utxo to our list
        self.utxo.append(UTXO(tx3.sha256, 0, tx3.vout[0].nValue))
def test_premature_coinbase_witness_spend(self):
self.log.info("Testing premature coinbase witness spend")
block = self.build_next_block()
# Change the output of the block to be a witness output.
witness_program = CScript([OP_TRUE])
witness_hash = sha256(witness_program)
scriptPubKey = CScript([OP_0, witness_hash])
block.vtx[0].vout[0].scriptPubKey = scriptPubKey
# This next line will rehash the coinbase and update the merkle
# root, and solve.
self.update_witness_block_with_transactions(block, [])
self.test_node.test_witness_block(block, accepted=True)
spend_tx = CTransaction()
spend_tx.vin = [CTxIn(COutPoint(block.vtx[0].sha256, 0), b"")]
spend_tx.vout = [CTxOut(block.vtx[0].vout[0].nValue, witness_program)]
spend_tx.wit.vtxinwit.append(CTxInWitness())
spend_tx.wit.vtxinwit[0].scriptWitness.stack = [ witness_program ]
spend_tx.rehash()
# Now test a premature spend.
self.nodes[0].generate(98)
sync_blocks(self.nodes)
block2 = self.build_next_block()
self.update_witness_block_with_transactions(block2, [spend_tx])
self.test_node.test_witness_block(block2, accepted=False)
# Advancing one more block should allow the spend.
self.nodes[0].generate(1)
block2 = self.build_next_block()
self.update_witness_block_with_transactions(block2, [spend_tx])
self.test_node.test_witness_block(block2, accepted=True)
sync_blocks(self.nodes)
    def test_signature_version_1(self):
        """Exercise the segwit (version 1) signature hash algorithm.

        Covers: each hashtype with too-large/too-small/correct input values;
        NUM_TESTS rounds of random multi-input/multi-output spends with random
        hashtypes (aiming to hit SIGHASH_SINGLE with input index >= number of
        outputs); witness v0 P2WPKH spends (signature must be in the witness,
        not the scriptSig); and finally consolidates utxos back into self.utxo.
        """
        self.log.info("Testing segwit signature hash version 1")
        key = CECKey()
        key.set_secretbytes(b"9")
        pubkey = CPubKey(key.get_pubkey())
        witness_program = CScript([pubkey, CScriptOp(OP_CHECKSIG)])
        witness_hash = sha256(witness_program)
        scriptPubKey = CScript([OP_0, witness_hash])
        # First create a witness output for use in the tests.
        assert(len(self.utxo))
        tx = CTransaction()
        tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
        tx.vout.append(CTxOut(self.utxo[0].nValue-1000, scriptPubKey))
        tx.rehash()
        self.test_node.test_transaction_acceptance(tx, with_witness=True, accepted=True)
        # Mine this transaction in preparation for following tests.
        block = self.build_next_block()
        self.update_witness_block_with_transactions(block, [tx])
        self.test_node.test_witness_block(block, accepted=True)
        sync_blocks(self.nodes)
        self.utxo.pop(0)
        # Test each hashtype
        prev_utxo = UTXO(tx.sha256, 0, tx.vout[0].nValue)
        for sigflag in [ 0, SIGHASH_ANYONECANPAY ]:
            for hashtype in [SIGHASH_ALL, SIGHASH_NONE, SIGHASH_SINGLE]:
                # Rebinds only the loop-local name; the literal list above is
                # re-evaluated each outer iteration, so this is safe.
                hashtype |= sigflag
                block = self.build_next_block()
                tx = CTransaction()
                tx.vin.append(CTxIn(COutPoint(prev_utxo.sha256, prev_utxo.n), b""))
                tx.vout.append(CTxOut(prev_utxo.nValue - 1000, scriptPubKey))
                tx.wit.vtxinwit.append(CTxInWitness())
                # Too-large input value
                sign_P2PK_witness_input(witness_program, tx, 0, hashtype, prev_utxo.nValue+1, key)
                self.update_witness_block_with_transactions(block, [tx])
                self.test_node.test_witness_block(block, accepted=False)
                # Too-small input value
                sign_P2PK_witness_input(witness_program, tx, 0, hashtype, prev_utxo.nValue-1, key)
                block.vtx.pop() # remove last tx
                self.update_witness_block_with_transactions(block, [tx])
                self.test_node.test_witness_block(block, accepted=False)
                # Now try correct value
                sign_P2PK_witness_input(witness_program, tx, 0, hashtype, prev_utxo.nValue, key)
                block.vtx.pop()
                self.update_witness_block_with_transactions(block, [tx])
                self.test_node.test_witness_block(block, accepted=True)
                prev_utxo = UTXO(tx.sha256, 0, tx.vout[0].nValue)
        # Test combinations of signature hashes.
        # Split the utxo into a lot of outputs.
        # Randomly choose up to 10 to spend, sign with different hashtypes, and
        # output to a random number of outputs.  Repeat NUM_TESTS times.
        # Ensure that we've tested a situation where we use SIGHASH_SINGLE with
        # an input index > number of outputs.
        NUM_TESTS = 500
        temp_utxos = []
        tx = CTransaction()
        tx.vin.append(CTxIn(COutPoint(prev_utxo.sha256, prev_utxo.n), b""))
        split_value = prev_utxo.nValue // NUM_TESTS
        for i in range(NUM_TESTS):
            tx.vout.append(CTxOut(split_value, scriptPubKey))
        tx.wit.vtxinwit.append(CTxInWitness())
        sign_P2PK_witness_input(witness_program, tx, 0, SIGHASH_ALL, prev_utxo.nValue, key)
        for i in range(NUM_TESTS):
            temp_utxos.append(UTXO(tx.sha256, i, split_value))
        block = self.build_next_block()
        self.update_witness_block_with_transactions(block, [tx])
        self.test_node.test_witness_block(block, accepted=True)
        block = self.build_next_block()
        used_sighash_single_out_of_bounds = False
        # NOTE(review): the inner loops below reuse the name `i`, shadowing
        # the outer loop variable; benign because the for statement rebinds
        # `i` from the range at the top of each outer iteration.
        for i in range(NUM_TESTS):
            # Ping regularly to keep the connection alive
            if (not i % 100):
                self.test_node.sync_with_ping()
            # Choose random number of inputs to use.
            num_inputs = random.randint(1, 10)
            # Create a slight bias for producing more utxos
            num_outputs = random.randint(1, 11)
            random.shuffle(temp_utxos)
            assert(len(temp_utxos) > num_inputs)
            tx = CTransaction()
            total_value = 0
            for i in range(num_inputs):
                tx.vin.append(CTxIn(COutPoint(temp_utxos[i].sha256, temp_utxos[i].n), b""))
                tx.wit.vtxinwit.append(CTxInWitness())
                total_value += temp_utxos[i].nValue
            split_value = total_value // num_outputs
            for i in range(num_outputs):
                tx.vout.append(CTxOut(split_value, scriptPubKey))
            for i in range(num_inputs):
                # Now try to sign each input, using a random hashtype.
                anyonecanpay = 0
                if random.randint(0, 1):
                    anyonecanpay = SIGHASH_ANYONECANPAY
                hashtype = random.randint(1, 3) | anyonecanpay
                sign_P2PK_witness_input(witness_program, tx, i, hashtype, temp_utxos[i].nValue, key)
                if (hashtype == SIGHASH_SINGLE and i >= num_outputs):
                    used_sighash_single_out_of_bounds = True
            tx.rehash()
            for i in range(num_outputs):
                temp_utxos.append(UTXO(tx.sha256, i, split_value))
            temp_utxos = temp_utxos[num_inputs:]
            block.vtx.append(tx)
            # Test the block periodically, if we're close to maxblocksize
            if (get_virtual_size(block) > MAX_BLOCK_BASE_SIZE - 1000):
                self.update_witness_block_with_transactions(block, [])
                self.test_node.test_witness_block(block, accepted=True)
                block = self.build_next_block()
        if (not used_sighash_single_out_of_bounds):
            self.log.info("WARNING: this test run didn't attempt SIGHASH_SINGLE with out-of-bounds index value")
        # Test the transactions we've added to the block
        if (len(block.vtx) > 1):
            self.update_witness_block_with_transactions(block, [])
            self.test_node.test_witness_block(block, accepted=True)
        # Now test witness version 0 P2PKH transactions
        pubkeyhash = hash160(pubkey)
        scriptPKH = CScript([OP_0, pubkeyhash])
        tx = CTransaction()
        tx.vin.append(CTxIn(COutPoint(temp_utxos[0].sha256, temp_utxos[0].n), b""))
        tx.vout.append(CTxOut(temp_utxos[0].nValue, scriptPKH))
        tx.wit.vtxinwit.append(CTxInWitness())
        sign_P2PK_witness_input(witness_program, tx, 0, SIGHASH_ALL, temp_utxos[0].nValue, key)
        tx2 = CTransaction()
        tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b""))
        tx2.vout.append(CTxOut(tx.vout[0].nValue, CScript([OP_TRUE])))
        script = GetP2PKHScript(pubkeyhash)
        sig_hash = SegwitVersion1SignatureHash(script, tx2, 0, SIGHASH_ALL, tx.vout[0].nValue)
        signature = key.sign(sig_hash) + b'\x01' # 0x1 is SIGHASH_ALL
        # Check that we can't have a scriptSig
        tx2.vin[0].scriptSig = CScript([signature, pubkey])
        block = self.build_next_block()
        self.update_witness_block_with_transactions(block, [tx, tx2])
        self.test_node.test_witness_block(block, accepted=False)
        # Move the signature to the witness.
        block.vtx.pop()
        tx2.wit.vtxinwit.append(CTxInWitness())
        tx2.wit.vtxinwit[0].scriptWitness.stack = [signature, pubkey]
        tx2.vin[0].scriptSig = b""
        tx2.rehash()
        self.update_witness_block_with_transactions(block, [tx2])
        self.test_node.test_witness_block(block, accepted=True)
        temp_utxos.pop(0)
        # Update self.utxos for later tests. Just spend everything in
        # temp_utxos to a corresponding entry in self.utxos
        tx = CTransaction()
        index = 0
        for i in temp_utxos:
            # Just spend to our usual anyone-can-spend output
            # Use SIGHASH_SINGLE|SIGHASH_ANYONECANPAY so we can build up
            # the signatures as we go.
            tx.vin.append(CTxIn(COutPoint(i.sha256, i.n), b""))
            tx.vout.append(CTxOut(i.nValue, CScript([OP_TRUE])))
            tx.wit.vtxinwit.append(CTxInWitness())
            sign_P2PK_witness_input(witness_program, tx, index, SIGHASH_SINGLE|SIGHASH_ANYONECANPAY, i.nValue, key)
            index += 1
        block = self.build_next_block()
        self.update_witness_block_with_transactions(block, [tx])
        self.test_node.test_witness_block(block, accepted=True)
        for i in range(len(tx.vout)):
            self.utxo.append(UTXO(tx.sha256, i, tx.vout[i].nValue))
# Test P2SH wrapped witness programs.
    def test_p2sh_witness(self, segwit_activated):
        """Test spending of P2SH-wrapped witness (P2SH-P2WSH) outputs.

        Funds a P2SH output whose redeem script is a P2WSH program, then
        verifies the spend fails without a witness (pre- and post-activation)
        and succeeds in the mempool only after activation; block validity is
        checked with/without witness data according to `segwit_activated`.
        """
        self.log.info("Testing P2SH witness transactions")
        assert(len(self.utxo))
        # Prepare the p2sh-wrapped witness output
        witness_program = CScript([OP_DROP, OP_TRUE])
        witness_hash = sha256(witness_program)
        p2wsh_pubkey = CScript([OP_0, witness_hash])
        p2sh_witness_hash = hash160(p2wsh_pubkey)
        scriptPubKey = CScript([OP_HASH160, p2sh_witness_hash, OP_EQUAL])
        scriptSig = CScript([p2wsh_pubkey]) # a push of the redeem script
        # Fund the P2SH output
        tx = CTransaction()
        tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
        tx.vout.append(CTxOut(self.utxo[0].nValue-1000, scriptPubKey))
        tx.rehash()
        # Verify mempool acceptance and block validity
        self.test_node.test_transaction_acceptance(tx, with_witness=False, accepted=True)
        block = self.build_next_block()
        self.update_witness_block_with_transactions(block, [tx])
        self.test_node.test_witness_block(block, accepted=True, with_witness=segwit_activated)
        sync_blocks(self.nodes)
        # Now test attempts to spend the output.
        spend_tx = CTransaction()
        spend_tx.vin.append(CTxIn(COutPoint(tx.sha256, 0), scriptSig))
        spend_tx.vout.append(CTxOut(tx.vout[0].nValue-1000, CScript([OP_TRUE])))
        spend_tx.rehash()
        # This transaction should not be accepted into the mempool pre- or
        # post-segwit.  Mempool acceptance will use SCRIPT_VERIFY_WITNESS which
        # will require a witness to spend a witness program regardless of
        # segwit activation.  Note that older kenfmcoind's that are not
        # segwit-aware would also reject this for failing CLEANSTACK.
        self.test_node.test_transaction_acceptance(spend_tx, with_witness=False, accepted=False)
        # Try to put the witness script in the scriptSig, should also fail.
        spend_tx.vin[0].scriptSig = CScript([p2wsh_pubkey, b'a'])
        spend_tx.rehash()
        self.test_node.test_transaction_acceptance(spend_tx, with_witness=False, accepted=False)
        # Now put the witness script in the witness, should succeed after
        # segwit activates.
        spend_tx.vin[0].scriptSig = scriptSig
        spend_tx.rehash()
        spend_tx.wit.vtxinwit.append(CTxInWitness())
        spend_tx.wit.vtxinwit[0].scriptWitness.stack = [ b'a', witness_program ]
        # Verify mempool acceptance
        self.test_node.test_transaction_acceptance(spend_tx, with_witness=True, accepted=segwit_activated)
        block = self.build_next_block()
        self.update_witness_block_with_transactions(block, [spend_tx])
        # If we're before activation, then sending this without witnesses
        # should be valid.  If we're after activation, then sending this with
        # witnesses should be valid.
        if segwit_activated:
            self.test_node.test_witness_block(block, accepted=True)
        else:
            self.test_node.test_witness_block(block, accepted=True, with_witness=False)
        # Update self.utxo
        self.utxo.pop(0)
        self.utxo.append(UTXO(spend_tx.sha256, 0, spend_tx.vout[0].nValue))
# Test the behavior of starting up a segwit-aware node after the softfork
# has activated. As segwit requires different block data than pre-segwit
# nodes would have stored, this requires special handling.
# To enable this test, pass --oldbinary=<path-to-pre-segwit-kenfmcoind> to
# the test.
def test_upgrade_after_activation(self, node_id):
self.log.info("Testing software upgrade after softfork activation")
assert(node_id != 0) # node0 is assumed to be a segwit-active kenfmcoind
# Make sure the nodes are all up
sync_blocks(self.nodes)
# Restart with the new binary
self.stop_node(node_id)
self.start_node(node_id, extra_args=[])
connect_nodes(self.nodes[0], node_id)
sync_blocks(self.nodes)
# Make sure that this peer thinks segwit has activated.
assert(get_bip9_status(self.nodes[node_id], 'segwit')['status'] == "active")
# Make sure this peers blocks match those of node0.
height = self.nodes[node_id].getblockcount()
while height >= 0:
block_hash = self.nodes[node_id].getblockhash(height)
assert_equal(block_hash, self.nodes[0].getblockhash(height))
assert_equal(self.nodes[0].getblock(block_hash), self.nodes[node_id].getblock(block_hash))
height -= 1
    def test_witness_sigops(self):
        '''Ensure sigop counting is correct inside witnesses.

        Builds witness programs whose spends land just over / exactly at the
        block sigop cost limit (MAX_SIGOP_COST) and checks block acceptance
        flips accordingly, including legacy sigops in outputs contributing to
        the same budget.
        '''
        self.log.info("Testing sigops limit")
        assert(len(self.utxo))
        # Keep this under MAX_OPS_PER_SCRIPT (201)
        witness_program = CScript([OP_TRUE, OP_IF, OP_TRUE, OP_ELSE] + [OP_CHECKMULTISIG]*5 + [OP_CHECKSIG]*193 + [OP_ENDIF])
        witness_hash = sha256(witness_program)
        scriptPubKey = CScript([OP_0, witness_hash])
        # 5 CHECKMULTISIGs count 20 sigops each, plus 193 CHECKSIGs.
        sigops_per_script = 20*5 + 193*1
        # We'll produce 2 extra outputs, one with a program that would take us
        # over max sig ops, and one with a program that would exactly reach max
        # sig ops
        outputs = (MAX_SIGOP_COST // sigops_per_script) + 2
        extra_sigops_available = MAX_SIGOP_COST % sigops_per_script
        # We chose the number of checkmultisigs/checksigs to make this work:
        assert(extra_sigops_available < 100) # steer clear of MAX_OPS_PER_SCRIPT
        # This script, when spent with the first
        # N(=MAX_SIGOP_COST//sigops_per_script) outputs of our transaction,
        # would push us just over the block sigop limit.
        witness_program_toomany = CScript([OP_TRUE, OP_IF, OP_TRUE, OP_ELSE] + [OP_CHECKSIG]*(extra_sigops_available + 1) + [OP_ENDIF])
        witness_hash_toomany = sha256(witness_program_toomany)
        scriptPubKey_toomany = CScript([OP_0, witness_hash_toomany])
        # If we spend this script instead, we would exactly reach our sigop
        # limit (for witness sigops).
        witness_program_justright = CScript([OP_TRUE, OP_IF, OP_TRUE, OP_ELSE] + [OP_CHECKSIG]*(extra_sigops_available) + [OP_ENDIF])
        witness_hash_justright = sha256(witness_program_justright)
        scriptPubKey_justright = CScript([OP_0, witness_hash_justright])
        # First split our available utxo into a bunch of outputs
        split_value = self.utxo[0].nValue // outputs
        tx = CTransaction()
        tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
        for i in range(outputs):
            tx.vout.append(CTxOut(split_value, scriptPubKey))
        tx.vout[-2].scriptPubKey = scriptPubKey_toomany
        tx.vout[-1].scriptPubKey = scriptPubKey_justright
        tx.rehash()
        block_1 = self.build_next_block()
        self.update_witness_block_with_transactions(block_1, [tx])
        self.test_node.test_witness_block(block_1, accepted=True)
        tx2 = CTransaction()
        # If we try to spend the first n-1 outputs from tx, that should be
        # too many sigops.
        total_value = 0
        for i in range(outputs-1):
            tx2.vin.append(CTxIn(COutPoint(tx.sha256, i), b""))
            tx2.wit.vtxinwit.append(CTxInWitness())
            tx2.wit.vtxinwit[-1].scriptWitness.stack = [ witness_program ]
            total_value += tx.vout[i].nValue
        # Last input spends the "toomany" output, pushing us over the limit.
        tx2.wit.vtxinwit[-1].scriptWitness.stack = [ witness_program_toomany ]
        tx2.vout.append(CTxOut(total_value, CScript([OP_TRUE])))
        tx2.rehash()
        block_2 = self.build_next_block()
        self.update_witness_block_with_transactions(block_2, [tx2])
        self.test_node.test_witness_block(block_2, accepted=False)
        # Try dropping the last input in tx2, and add an output that has
        # too many sigops (contributing to legacy sigop count).
        checksig_count = (extra_sigops_available // 4) + 1
        scriptPubKey_checksigs = CScript([OP_CHECKSIG]*checksig_count)
        tx2.vout.append(CTxOut(0, scriptPubKey_checksigs))
        tx2.vin.pop()
        tx2.wit.vtxinwit.pop()
        tx2.vout[0].nValue -= tx.vout[-2].nValue
        tx2.rehash()
        block_3 = self.build_next_block()
        self.update_witness_block_with_transactions(block_3, [tx2])
        self.test_node.test_witness_block(block_3, accepted=False)
        # If we drop the last checksig in this output, the tx should succeed.
        block_4 = self.build_next_block()
        tx2.vout[-1].scriptPubKey = CScript([OP_CHECKSIG]*(checksig_count-1))
        tx2.rehash()
        self.update_witness_block_with_transactions(block_4, [tx2])
        self.test_node.test_witness_block(block_4, accepted=True)
        # Reset the tip back down for the next test
        sync_blocks(self.nodes)
        for x in self.nodes:
            x.invalidateblock(block_4.hash)
        # Try replacing the last input of tx2 to be spending the last
        # output of tx
        block_5 = self.build_next_block()
        tx2.vout.pop()
        tx2.vin.append(CTxIn(COutPoint(tx.sha256, outputs-1), b""))
        tx2.wit.vtxinwit.append(CTxInWitness())
        tx2.wit.vtxinwit[-1].scriptWitness.stack = [ witness_program_justright ]
        tx2.rehash()
        self.update_witness_block_with_transactions(block_5, [tx2])
        self.test_node.test_witness_block(block_5, accepted=True)
        # TODO: test p2sh sigop counting
def test_getblocktemplate_before_lockin(self):
    """Check getblocktemplate's segwit versionbit/commitment handling.

    Before lockin: only the segwit-aware node (node0) signals the witness
    versionbit, and neither node reports a 'default_witness_commitment'
    unless the "segwit" rule is explicitly requested.
    """
    self.log.info("Testing getblocktemplate setting of segwit versionbit (before lockin)")
    # Node0 is segwit aware, node2 is not.
    for node in [self.nodes[0], self.nodes[2]]:
        gbt_results = node.getblocktemplate()
        block_version = gbt_results['version']
        # If we're not indicating segwit support, we will still be
        # signalling for segwit activation.
        assert_equal((block_version & (1 << VB_WITNESS_BIT) != 0), node == self.nodes[0])
        # If we don't specify the segwit rule, then we won't get a default
        # commitment.
        assert('default_witness_commitment' not in gbt_results)
    # Workaround:
    # Can either change the tip, or change the mempool and wait 5 seconds
    # to trigger a recomputation of getblocktemplate.
    txid = int(self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1), 16)
    # Using mocktime lets us avoid sleep()
    sync_mempools(self.nodes)
    self.nodes[0].setmocktime(int(time.time())+10)
    self.nodes[2].setmocktime(int(time.time())+10)
    for node in [self.nodes[0], self.nodes[2]]:
        gbt_results = node.getblocktemplate({"rules" : ["segwit"]})
        block_version = gbt_results['version']
        if node == self.nodes[2]:
            # If this is a non-segwit node, we should still not get a witness
            # commitment, nor a version bit signalling segwit.
            assert_equal(block_version & (1 << VB_WITNESS_BIT), 0)
            assert('default_witness_commitment' not in gbt_results)
        else:
            # For segwit-aware nodes, check the version bit and the witness
            # commitment are correct.
            assert(block_version & (1 << VB_WITNESS_BIT) != 0)
            assert('default_witness_commitment' in gbt_results)
            witness_commitment = gbt_results['default_witness_commitment']
            # Check that default_witness_commitment is present.
            # Witness merkle root is computed over [coinbase wtxid (0), txid].
            witness_root = CBlock.get_merkle_root([ser_uint256(0),
                                                   ser_uint256(txid)])
            script = get_witness_script(witness_root, 0)
            assert_equal(witness_commitment, bytes_to_hex_str(script))
    # undo mocktime
    self.nodes[0].setmocktime(0)
    self.nodes[2].setmocktime(0)
# Uncompressed pubkeys are no longer supported in default relay policy,
# but (for now) are still valid in blocks.
def test_uncompressed_pubkey(self):
    """Spend segwit outputs signed with an uncompressed pubkey.

    Each segwit spend type (P2WPKH, P2WSH, P2SH-P2WSH) should be rejected
    by default relay policy but accepted by consensus when mined; a plain
    pre-segwit P2PKH spend with the same key remains fully standard.
    """
    self.log.info("Testing uncompressed pubkeys")
    # Segwit transactions using uncompressed pubkeys are not accepted
    # under default policy, but should still pass consensus.
    key = CECKey()
    key.set_secretbytes(b"9")
    key.set_compressed(False)
    pubkey = CPubKey(key.get_pubkey())
    assert_equal(len(pubkey), 65) # This should be an uncompressed pubkey
    assert(len(self.utxo) > 0)
    utxo = self.utxo.pop(0)
    # Test 1: P2WPKH
    # First create a P2WPKH output that uses an uncompressed pubkey
    pubkeyhash = hash160(pubkey)
    scriptPKH = CScript([OP_0, pubkeyhash])
    tx = CTransaction()
    tx.vin.append(CTxIn(COutPoint(utxo.sha256, utxo.n), b""))
    tx.vout.append(CTxOut(utxo.nValue-1000, scriptPKH))
    tx.rehash()
    # Confirm it in a block.
    block = self.build_next_block()
    self.update_witness_block_with_transactions(block, [tx])
    self.test_node.test_witness_block(block, accepted=True)
    # Now try to spend it. Send it to a P2WSH output, which we'll
    # use in the next test.
    witness_program = CScript([pubkey, CScriptOp(OP_CHECKSIG)])
    witness_hash = sha256(witness_program)
    scriptWSH = CScript([OP_0, witness_hash])
    tx2 = CTransaction()
    tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b""))
    tx2.vout.append(CTxOut(tx.vout[0].nValue-1000, scriptWSH))
    script = GetP2PKHScript(pubkeyhash)
    sig_hash = SegwitVersion1SignatureHash(script, tx2, 0, SIGHASH_ALL, tx.vout[0].nValue)
    signature = key.sign(sig_hash) + b'\x01' # 0x1 is SIGHASH_ALL
    tx2.wit.vtxinwit.append(CTxInWitness())
    tx2.wit.vtxinwit[0].scriptWitness.stack = [ signature, pubkey ]
    tx2.rehash()
    # Should fail policy test.
    self.test_node.test_transaction_acceptance(tx2, True, False, b'non-mandatory-script-verify-flag (Using non-compressed keys in segwit)')
    # But passes consensus.
    block = self.build_next_block()
    self.update_witness_block_with_transactions(block, [tx2])
    self.test_node.test_witness_block(block, accepted=True)
    # Test 2: P2WSH
    # Try to spend the P2WSH output created in last test.
    # Send it to a P2SH(P2WSH) output, which we'll use in the next test.
    p2sh_witness_hash = hash160(scriptWSH)
    scriptP2SH = CScript([OP_HASH160, p2sh_witness_hash, OP_EQUAL])
    scriptSig = CScript([scriptWSH])
    tx3 = CTransaction()
    tx3.vin.append(CTxIn(COutPoint(tx2.sha256, 0), b""))
    tx3.vout.append(CTxOut(tx2.vout[0].nValue-1000, scriptP2SH))
    tx3.wit.vtxinwit.append(CTxInWitness())
    sign_P2PK_witness_input(witness_program, tx3, 0, SIGHASH_ALL, tx2.vout[0].nValue, key)
    # Should fail policy test.
    self.test_node.test_transaction_acceptance(tx3, True, False, b'non-mandatory-script-verify-flag (Using non-compressed keys in segwit)')
    # But passes consensus.
    block = self.build_next_block()
    self.update_witness_block_with_transactions(block, [tx3])
    self.test_node.test_witness_block(block, accepted=True)
    # Test 3: P2SH(P2WSH)
    # Try to spend the P2SH output created in the last test.
    # Send it to a P2PKH output, which we'll use in the next test.
    scriptPubKey = GetP2PKHScript(pubkeyhash)
    tx4 = CTransaction()
    tx4.vin.append(CTxIn(COutPoint(tx3.sha256, 0), scriptSig))
    tx4.vout.append(CTxOut(tx3.vout[0].nValue-1000, scriptPubKey))
    tx4.wit.vtxinwit.append(CTxInWitness())
    sign_P2PK_witness_input(witness_program, tx4, 0, SIGHASH_ALL, tx3.vout[0].nValue, key)
    # Should fail policy test.
    self.test_node.test_transaction_acceptance(tx4, True, False, b'non-mandatory-script-verify-flag (Using non-compressed keys in segwit)')
    block = self.build_next_block()
    self.update_witness_block_with_transactions(block, [tx4])
    self.test_node.test_witness_block(block, accepted=True)
    # Test 4: Uncompressed pubkeys should still be valid in non-segwit
    # transactions.
    tx5 = CTransaction()
    tx5.vin.append(CTxIn(COutPoint(tx4.sha256, 0), b""))
    tx5.vout.append(CTxOut(tx4.vout[0].nValue-1000, CScript([OP_TRUE])))
    (sig_hash, err) = SignatureHash(scriptPubKey, tx5, 0, SIGHASH_ALL)
    signature = key.sign(sig_hash) + b'\x01' # 0x1 is SIGHASH_ALL
    tx5.vin[0].scriptSig = CScript([signature, pubkey])
    tx5.rehash()
    # Should pass policy and consensus.
    self.test_node.test_transaction_acceptance(tx5, True, True)
    block = self.build_next_block()
    self.update_witness_block_with_transactions(block, [tx5])
    self.test_node.test_witness_block(block, accepted=True)
    # Leave the spendable output for later tests.
    self.utxo.append(UTXO(tx5.sha256, 0, tx5.vout[0].nValue))
def test_non_standard_witness(self):
    """Check standardness limits on P2WSH witnesses.

    Limits exercised (for both native P2WSH and P2SH-P2WSH spends):
    at most 100 stack items excluding the witnessScript, stack elements
    at most 80 bytes, witnessScript at most 3600 bytes. The
    standardness-enforcing node (std_node) must reject violations while
    the non-standard node (test_node) accepts them.
    """
    self.log.info("Testing detection of non-standard P2WSH witness")
    pad = chr(1).encode('latin-1')
    # Create scripts for tests
    scripts = []
    scripts.append(CScript([OP_DROP] * 100))
    scripts.append(CScript([OP_DROP] * 99))
    scripts.append(CScript([pad * 59] * 59 + [OP_DROP] * 60))
    scripts.append(CScript([pad * 59] * 59 + [OP_DROP] * 61))
    p2wsh_scripts = []
    assert(len(self.utxo))
    tx = CTransaction()
    tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
    # For each script, generate a pair of P2WSH and P2SH-P2WSH output.
    outputvalue = (self.utxo[0].nValue - 1000) // (len(scripts) * 2)
    for i in scripts:
        p2wsh = CScript([OP_0, sha256(i)])
        p2sh = hash160(p2wsh)
        p2wsh_scripts.append(p2wsh)
        tx.vout.append(CTxOut(outputvalue, p2wsh))
        tx.vout.append(CTxOut(outputvalue, CScript([OP_HASH160, p2sh, OP_EQUAL])))
    tx.rehash()
    txid = tx.sha256
    self.test_node.test_transaction_acceptance(tx, with_witness=False, accepted=True)
    self.nodes[0].generate(1)
    sync_blocks(self.nodes)
    # Creating transactions for tests
    p2wsh_txs = []
    p2sh_txs = []
    for i in range(len(scripts)):
        p2wsh_tx = CTransaction()
        p2wsh_tx.vin.append(CTxIn(COutPoint(txid,i*2)))
        p2wsh_tx.vout.append(CTxOut(outputvalue - 5000, CScript([OP_0, hash160(hex_str_to_bytes(""))])))
        p2wsh_tx.wit.vtxinwit.append(CTxInWitness())
        p2wsh_tx.rehash()
        p2wsh_txs.append(p2wsh_tx)
        p2sh_tx = CTransaction()
        p2sh_tx.vin.append(CTxIn(COutPoint(txid,i*2+1), CScript([p2wsh_scripts[i]])))
        p2sh_tx.vout.append(CTxOut(outputvalue - 5000, CScript([OP_0, hash160(hex_str_to_bytes(""))])))
        p2sh_tx.wit.vtxinwit.append(CTxInWitness())
        p2sh_tx.rehash()
        p2sh_txs.append(p2sh_tx)
    # Testing native P2WSH
    # Witness stack size, excluding witnessScript, over 100 is non-standard
    p2wsh_txs[0].wit.vtxinwit[0].scriptWitness.stack = [pad] * 101 + [scripts[0]]
    self.std_node.test_transaction_acceptance(p2wsh_txs[0], True, False, b'bad-witness-nonstandard')
    # Non-standard nodes should accept
    self.test_node.test_transaction_acceptance(p2wsh_txs[0], True, True)
    # Stack element size over 80 bytes is non-standard
    p2wsh_txs[1].wit.vtxinwit[0].scriptWitness.stack = [pad * 81] * 100 + [scripts[1]]
    self.std_node.test_transaction_acceptance(p2wsh_txs[1], True, False, b'bad-witness-nonstandard')
    # Non-standard nodes should accept
    self.test_node.test_transaction_acceptance(p2wsh_txs[1], True, True)
    # Standard nodes should accept if element size is not over 80 bytes
    p2wsh_txs[1].wit.vtxinwit[0].scriptWitness.stack = [pad * 80] * 100 + [scripts[1]]
    self.std_node.test_transaction_acceptance(p2wsh_txs[1], True, True)
    # witnessScript size at 3600 bytes is standard
    p2wsh_txs[2].wit.vtxinwit[0].scriptWitness.stack = [pad, pad, scripts[2]]
    self.test_node.test_transaction_acceptance(p2wsh_txs[2], True, True)
    self.std_node.test_transaction_acceptance(p2wsh_txs[2], True, True)
    # witnessScript size at 3601 bytes is non-standard
    p2wsh_txs[3].wit.vtxinwit[0].scriptWitness.stack = [pad, pad, pad, scripts[3]]
    self.std_node.test_transaction_acceptance(p2wsh_txs[3], True, False, b'bad-witness-nonstandard')
    # Non-standard nodes should accept
    self.test_node.test_transaction_acceptance(p2wsh_txs[3], True, True)
    # Repeating the same tests with P2SH-P2WSH
    p2sh_txs[0].wit.vtxinwit[0].scriptWitness.stack = [pad] * 101 + [scripts[0]]
    self.std_node.test_transaction_acceptance(p2sh_txs[0], True, False, b'bad-witness-nonstandard')
    self.test_node.test_transaction_acceptance(p2sh_txs[0], True, True)
    p2sh_txs[1].wit.vtxinwit[0].scriptWitness.stack = [pad * 81] * 100 + [scripts[1]]
    self.std_node.test_transaction_acceptance(p2sh_txs[1], True, False, b'bad-witness-nonstandard')
    self.test_node.test_transaction_acceptance(p2sh_txs[1], True, True)
    p2sh_txs[1].wit.vtxinwit[0].scriptWitness.stack = [pad * 80] * 100 + [scripts[1]]
    self.std_node.test_transaction_acceptance(p2sh_txs[1], True, True)
    p2sh_txs[2].wit.vtxinwit[0].scriptWitness.stack = [pad, pad, scripts[2]]
    self.test_node.test_transaction_acceptance(p2sh_txs[2], True, True)
    self.std_node.test_transaction_acceptance(p2sh_txs[2], True, True)
    p2sh_txs[3].wit.vtxinwit[0].scriptWitness.stack = [pad, pad, pad, scripts[3]]
    self.std_node.test_transaction_acceptance(p2sh_txs[3], True, False, b'bad-witness-nonstandard')
    self.test_node.test_transaction_acceptance(p2sh_txs[3], True, True)
    self.nodes[0].generate(1) # Mine and clean up the mempool of non-standard node
    # Valid but non-standard transactions in a block should be accepted by standard node
    sync_blocks(self.nodes)
    assert_equal(len(self.nodes[0].getrawmempool()), 0)
    assert_equal(len(self.nodes[1].getrawmempool()), 0)
    self.utxo.pop(0)
def run_test(self):
    """Drive the full segwit test sequence.

    Runs pre-activation checks, then advances through the versionbits
    deployment states (started -> lockin -> active), re-running the
    relevant sub-tests at each stage.
    """
    # Setup the p2p connections and start up the network thread.
    self.test_node = TestNode() # sets NODE_WITNESS|NODE_NETWORK
    self.old_node = TestNode() # only NODE_NETWORK
    self.std_node = TestNode() # for testing node1 (fRequireStandard=true)
    self.p2p_connections = [self.test_node, self.old_node]
    self.connections = []
    self.connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], self.test_node, services=NODE_NETWORK|NODE_WITNESS))
    self.connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], self.old_node, services=NODE_NETWORK))
    self.connections.append(NodeConn('127.0.0.1', p2p_port(1), self.nodes[1], self.std_node, services=NODE_NETWORK|NODE_WITNESS))
    self.test_node.add_connection(self.connections[0])
    self.old_node.add_connection(self.connections[1])
    self.std_node.add_connection(self.connections[2])
    NetworkThread().start() # Start up network handling in another thread
    # Keep a place to store utxo's that can be used in later tests
    self.utxo = []
    # Test logic begins here
    self.test_node.wait_for_verack()
    self.log.info("Starting tests before segwit lock in:")
    self.test_witness_services() # Verifies NODE_WITNESS
    self.test_non_witness_transaction() # non-witness tx's are accepted
    self.test_unnecessary_witness_before_segwit_activation()
    self.test_block_relay(segwit_activated=False)
    # Advance to segwit being 'started'
    self.advance_to_segwit_started()
    sync_blocks(self.nodes)
    self.test_getblocktemplate_before_lockin()
    sync_blocks(self.nodes)
    # At lockin, nothing should change.
    self.log.info("Testing behavior post lockin, pre-activation")
    self.advance_to_segwit_lockin()
    # Retest unnecessary witnesses
    self.test_unnecessary_witness_before_segwit_activation()
    self.test_witness_tx_relay_before_segwit_activation()
    self.test_block_relay(segwit_activated=False)
    self.test_p2sh_witness(segwit_activated=False)
    self.test_standardness_v0(segwit_activated=False)
    sync_blocks(self.nodes)
    # Now activate segwit
    self.log.info("Testing behavior after segwit activation")
    self.advance_to_segwit_active()
    sync_blocks(self.nodes)
    # Test P2SH witness handling again
    self.test_p2sh_witness(segwit_activated=True)
    self.test_witness_commitments()
    self.test_block_malleability()
    self.test_witness_block_size()
    self.test_submit_block()
    self.test_extra_witness_data()
    self.test_max_witness_push_length()
    self.test_max_witness_program_length()
    self.test_witness_input_length()
    self.test_block_relay(segwit_activated=True)
    self.test_tx_relay_after_segwit_activation()
    self.test_standardness_v0(segwit_activated=True)
    self.test_segwit_versions()
    self.test_premature_coinbase_witness_spend()
    self.test_uncompressed_pubkey()
    self.test_signature_version_1()
    self.test_non_standard_witness()
    sync_blocks(self.nodes)
    self.test_upgrade_after_activation(node_id=2)
    self.test_witness_sigops()
# Script entry point: run the full segwit functional test suite.
if __name__ == '__main__':
    SegWitTest().main()
| [
"37983255+spineinhalb@users.noreply.github.com"
] | 37983255+spineinhalb@users.noreply.github.com |
238ccfd4362df93c4e34f8f29a13d1dee0422ea8 | 8c2de4da068ba3ed3ce1adf0a113877385b7783c | /hyperion/diarization/__init__.py | 494ef35caac655fc51985be54a762d30647364a0 | [
"Apache-2.0"
] | permissive | hyperion-ml/hyperion | a024c718c4552ba3a03aae2c2ca1b8674eaebc76 | c4c9eee0acab1ba572843373245da12d00dfffaa | refs/heads/master | 2023-08-28T22:28:37.624139 | 2022-03-25T16:28:08 | 2022-03-25T16:28:08 | 175,275,679 | 55 | 20 | Apache-2.0 | 2023-09-13T15:35:46 | 2019-03-12T18:40:19 | Python | UTF-8 | Python | false | false | 173 | py | """
Copyright 2018 Johns Hopkins University (Author: Jesus Villalba)
Apache 2.0 (http://www.apache.org/licenses/LICENSE-2.0)
"""
from .diar_ahc_plda import DiarAHCPLDA
| [
"jesus.antonio.villalba@gmail.com"
] | jesus.antonio.villalba@gmail.com |
3f7929b0a938ce5579e05271c8c032a63b81cf4a | 0b636a0a7bfcb534a3f00cf6c38b54ad8d050639 | /AtCoder/ABC144D.py | ce77a9178f2c9434acc53d00097e590d80101e9a | [] | no_license | RRFHOUDEN/competitive-programming | 23be02f55ab67f7d03ccec32f291e136770fc113 | 582ed5c5d5e9797fc952ee1d569ef72f8bf8ef48 | refs/heads/master | 2022-10-19T21:53:26.952049 | 2020-06-13T11:44:07 | 2020-06-13T11:44:07 | 258,940,723 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 323 | py | import fractions
# AtCoder ABC144 D - Water Bottle
# An a x a x b rectangular tank holds x cm^3 of water; print the maximum
# angle (in degrees) the tank can be tilted before water spills.
import math


def max_tilt_angle(a, b, x):
    """Return the maximum tilt angle in degrees for tank base side *a*,
    height *b* and water volume *x*.

    Two geometric cases: when the water still covers the whole base, the
    surface cuts the two vertical walls; otherwise it cuts the base and
    one wall.
    """
    # NOTE: the original used fractions.atan/fractions.degrees, which do
    # not exist (the 'fractions' module has no trig functions) and raised
    # AttributeError at runtime; math.atan/math.degrees are correct.
    if x >= a * a * b / 2:
        # Water covers the whole base.
        return math.degrees(math.atan(2 * (a * a * b - x) / (a * a * a)))
    # Water covers only part of the base.
    return 90 - math.degrees(math.atan(2 * x / (a * b * b)))


if __name__ == '__main__':
    a, b, x = map(int, input().split())
    print(max_tilt_angle(a, b, x))
"keisuke.toyodano@gmail.com"
] | keisuke.toyodano@gmail.com |
48c589047491fc3c841360dce7a399720757723f | ace15f9c55e5c5ba6fee8cb8c660bd4832a5a632 | /Tests.py | 9163da2526a4affad9387dae6d154ca6aa385e2c | [] | no_license | CodeProgress/Minesweeper | dcdfc82a73821c0c1f6c70c2302ec78621727b7f | ef67892e880de542547fdeea462bda2789eca296 | refs/heads/master | 2021-01-18T23:16:25.588515 | 2021-01-10T21:06:52 | 2021-01-10T21:06:52 | 87,101,320 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,659 | py | import unittest
import Minesweeper
class Tests(unittest.TestCase):
    """Unit tests for the Minesweeper game: board construction, parameter
    and turn input validation, uncover mechanics, and win/loss detection.
    Deterministic boards are created by overriding the random mine layout.
    """

    def setUp(self):
        # Fresh 12x12 game with 20 randomly placed mines before each test.
        self.size_of_board = 12
        self.num_mines = 20
        self.Game = Minesweeper.Game((self.size_of_board, self.num_mines))

    def tearDown(self):
        self.Game = None

    def reset_board_with_new_mine_locations(self, new_mine_locations):
        """Replace the random mine layout with a fixed one so that exact
        board contents can be asserted."""
        board_obj = self.Game.board
        board_obj.mine_locations = new_mine_locations
        board_obj.board = board_obj.empty_board()
        board_obj.populate_board_with_all_cells()

    def expected_error_for_bad_turn_input(self, expected_error, bad_inputs):
        """Assert that each string in *bad_inputs* raises *expected_error*
        when validated as a row/col turn."""
        for bad_input in bad_inputs:
            self.assertRaises(expected_error,
                              lambda: self.Game.turn_input_validator._validate_row_col(bad_input))

    @staticmethod
    def get_board_str(board):
        """Render the 2-D board as the newline-terminated, space-separated
        string format used by the expected-board literals below."""
        output = ""
        for row in board:
            output += ' '.join([str(x) for x in row]) + "\n"
        return output

    def initialize_non_random_game(self, size, num_mines, mine_locations):
        """Create a *size* x *size* game with a deterministic mine layout."""
        assert num_mines == len(mine_locations)
        self.Game = Minesweeper.Game((size, num_mines))
        self.reset_board_with_new_mine_locations(mine_locations)

    def test_board_size(self):
        expected = self.size_of_board
        num_rows = len(self.Game.board.board)
        num_cols = len(self.Game.board.board[0])
        self.assertEqual(expected, num_rows)
        self.assertEqual(expected, num_cols)

    def test_parameter_input_validator(self):
        # Grid size must be within the validator's accepted range.
        validator = self.Game.param_input_validator
        size_too_small = -1
        size_too_big = 31
        self.assertRaises(ValueError,
                          lambda: validator._validate_size_of_grid(size_too_small))
        self.assertRaises(ValueError,
                          lambda: validator._validate_size_of_grid(size_too_big))
        # Mine count must be between 0 and size*size inclusive.
        size_of_board = 20
        too_many_mines = 401
        too_few_mines = -1
        self.assertRaises(ValueError,
                          lambda: validator._validate_num_mines(too_many_mines, size_of_board))
        self.assertRaises(ValueError,
                          lambda: validator._validate_num_mines(too_few_mines, size_of_board))
        valid_size = 20
        valid_num_mines = 400
        self.assertEqual(valid_size, validator._validate_size_of_grid(valid_size))
        self.assertEqual(valid_num_mines, validator._validate_num_mines(valid_num_mines, valid_size))

    def test_turn_input_validator(self):
        too_few_args = "10"
        too_many_args = "1 2 3"
        too_many_characters = "1 2 3 4 5 6"
        off_board_negative = "-1 0"
        off_board_positive = "20 50"
        non_ints = "a b"
        valid_input = "10 11"
        self.expected_error_for_bad_turn_input(Minesweeper.WrongNumberOfArguments, [too_few_args, too_many_args])
        self.expected_error_for_bad_turn_input(ValueError,
                                               [off_board_negative, off_board_positive, non_ints, too_many_characters])
        self.assertEqual((10, 11), self.Game.turn_input_validator._validate_row_col(valid_input))

    def test_is_cell_on_board(self):
        # Corners are on-board; any negative or >= size coordinate is not.
        self.assertTrue(self.Game.board.is_cell_on_board(0, 0))
        self.assertTrue(self.Game.board.is_cell_on_board(self.size_of_board-1, self.size_of_board-1))
        self.assertFalse(self.Game.board.is_cell_on_board(5, -1))
        self.assertFalse(self.Game.board.is_cell_on_board(-1, 5))
        self.assertFalse(self.Game.board.is_cell_on_board(-1, -5))
        self.assertFalse(self.Game.board.is_cell_on_board(self.size_of_board, 5))
        self.assertFalse(self.Game.board.is_cell_on_board(5, self.size_of_board))
        self.assertFalse(self.Game.board.is_cell_on_board(self.size_of_board, self.size_of_board))

    def test_initial_board_layout(self):
        # regression test
        mine_locations = {(7, 3), (6, 9), (9, 1), (10, 17), (3, 0), (11, 2), (19, 4), (18, 4), (8, 7),
                          (0, 0), (14, 0), (5, 1), (15, 19), (12, 9), (15, 12), (9, 0), (6, 7), (15, 11),
                          (12, 13), (7, 19), (12, 4), (15, 18), (1, 10), (10, 0), (15, 17), (1, 1),
                          (10, 9), (6, 4), (5, 4), (6, 18), (8, 2), (19, 6), (16, 4), (17, 10), (1, 13),
                          (12, 7), (19, 17), (16, 12), (3, 5), (10, 13), (1, 12), (5, 16), (8, 12),
                          (3, 18), (10, 14), (4, 3), (1, 7), (11, 15), (3, 4), (8, 4)}
        self.initialize_non_random_game(20, 50, mine_locations)
        self.assertFalse(self.Game.is_game_over())
        self.assertFalse(self.Game.is_game_won)
        self.Game.board.uncover_all_cells()
        expected_board = "M 2 1 . . . 1 1 1 1 1 2 2 2 1 . . . . .\n" + \
                         "2 M 1 . . . 1 M 1 1 M 2 M M 1 . . . . .\n" + \
                         "2 2 1 1 2 2 2 1 1 1 1 2 2 2 1 . . 1 1 1\n" + \
                         "M 1 1 2 M M 1 . . . . . . . . . . 1 M 1\n" + \
                         "2 2 2 M 4 3 1 . . . . . . . . 1 1 2 1 1\n" + \
                         "1 M 2 3 M 2 1 1 2 1 1 . . . . 1 M 2 1 1\n" + \
                         "1 1 2 3 M 2 1 M 2 M 1 . . . . 1 1 2 M 2\n" + \
                         ". 1 2 M 3 2 2 2 3 1 1 1 1 1 . . . 1 2 M\n" + \
                         "2 3 M 3 M 1 1 M 1 . . 1 M 1 . . . . 1 1\n" + \
                         "M M 2 2 1 1 1 1 2 1 1 1 2 3 2 1 1 1 1 .\n" + \
                         "M 4 2 1 . . . . 1 M 1 . 1 M M 2 2 M 1 .\n" + \
                         "1 2 M 2 1 1 1 1 3 2 2 . 2 3 4 M 2 1 1 .\n" + \
                         ". 1 1 2 M 1 1 M 2 M 1 . 1 M 2 1 1 . . .\n" + \
                         "1 1 . 1 1 1 1 1 2 1 1 . 1 1 1 . . . . .\n" + \
                         "M 1 . . . . . . . . 1 2 2 1 . . 1 2 3 2\n" + \
                         "1 1 . 1 1 1 . . . . 1 M M 2 . . 1 M M M\n" + \
                         ". . . 1 M 1 . . . 1 2 4 M 2 . . 1 2 3 2\n" + \
                         ". . . 2 2 2 . . . 1 M 2 1 1 . . . . . .\n" + \
                         ". . . 2 M 3 1 1 . 1 1 1 . . . . 1 1 1 .\n" + \
                         ". . . 2 M 3 M 1 . . . . . . . . 1 M 1 .\n"
        self.assertEqual(expected_board, self.get_board_str(self.Game.board.board))

    def test_uncovering_mine(self):
        # Hitting a mine on the first move ends (and loses) the game.
        mine_locations = {(0, 0), (1, 1), (2, 2)}
        self.initialize_non_random_game(6, 3, mine_locations)
        self.Game.update_board((0, 0))
        self.assertTrue(self.Game.is_game_over())
        self.assertFalse(self.Game.is_game_won)
        expected_board = "M X X X X X\n" + \
                         "X X X X X X\n" + \
                         "X X X X X X\n" + \
                         "X X X X X X\n" + \
                         "X X X X X X\n" + \
                         "X X X X X X\n"
        self.assertEqual(expected_board, self.get_board_str(self.Game.board.board))
        self.Game.board.uncover_all_cells()
        expected_board = "M 2 1 . . .\n" + \
                         "2 M 2 1 . .\n" + \
                         "1 2 M 1 . .\n" + \
                         ". 1 1 1 . .\n" + \
                         ". . . . . .\n" + \
                         ". . . . . .\n"
        self.assertEqual(expected_board, self.get_board_str(self.Game.board.board))

    def test_uncovering_empty_cell(self):
        # Uncovering an empty cell flood-fills the surrounding empty region.
        mine_locations = {(0, 0), (1, 1), (2, 2)}
        self.initialize_non_random_game(6, 3, mine_locations)
        self.Game.update_board((4, 4))
        self.assertFalse(self.Game.is_game_over())
        self.assertFalse(self.Game.is_game_won)
        expected_board = "X X 1 . . .\n" + \
                         "X X 2 1 . .\n" + \
                         "1 2 X 1 . .\n" + \
                         ". 1 1 1 . .\n" + \
                         ". . . . . .\n" + \
                         ". . . . . .\n"
        self.assertEqual(expected_board, self.get_board_str(self.Game.board.board))

    def test_uncovering_all_safecells(self):
        # Uncovering every non-mine cell wins the game.
        mine_locations = {(0, 0), (1, 1), (2, 2)}
        self.initialize_non_random_game(6, 3, mine_locations)
        self.Game.update_board((4, 4))
        self.Game.update_board((1, 0))
        self.assertFalse(self.Game.is_game_over())
        self.assertFalse(self.Game.is_game_won)
        expected_board = "X X 1 . . .\n" + \
                         "2 X 2 1 . .\n" + \
                         "1 2 X 1 . .\n" + \
                         ". 1 1 1 . .\n" + \
                         ". . . . . .\n" + \
                         ". . . . . .\n"
        self.assertEqual(expected_board, self.get_board_str(self.Game.board.board))
        self.Game.update_board((0, 1))
        self.assertTrue(self.Game.is_game_over())
        self.assertTrue(self.Game.is_game_won)
        expected_board = "X 2 1 . . .\n" + \
                         "2 X 2 1 . .\n" + \
                         "1 2 X 1 . .\n" + \
                         ". 1 1 1 . .\n" + \
                         ". . . . . .\n" + \
                         ". . . . . .\n"
        self.assertEqual(expected_board, self.get_board_str(self.Game.board.board))
        self.Game.board.uncover_all_cells()
        expected_board = "M 2 1 . . .\n" + \
                         "2 M 2 1 . .\n" + \
                         "1 2 M 1 . .\n" + \
                         ". 1 1 1 . .\n" + \
                         ". . . . . .\n" + \
                         ". . . . . .\n"
        self.assertEqual(expected_board, self.get_board_str(self.Game.board.board))

    def test_zero_dimensional_game(self):
        # A 0x0 board is trivially won immediately.
        # NOTE(review): {} is an empty dict, not a set; it works here only
        # because len({}) == 0 -- set() would be clearer. Left unchanged.
        mine_locations = {}
        self.initialize_non_random_game(0, 0, mine_locations)
        self.assertTrue(self.Game.is_game_over())
        self.assertTrue(self.Game.is_game_won)

    def test_size_one_with_one_mine(self):
        # A 1x1 board that is all mine has no safe cells: instantly won.
        mine_locations = {(0, 0)}
        self.initialize_non_random_game(1, 1, mine_locations)
        self.assertTrue(self.Game.is_game_over())
        self.assertTrue(self.Game.is_game_won)
        expected_board = "X\n"
        self.assertEqual(expected_board, self.get_board_str(self.Game.board.board))

    def test_size_one_with_zero_mines(self):
        # A 1x1 board with no mines is won by uncovering the single cell.
        mine_locations = {}
        self.initialize_non_random_game(1, 0, mine_locations)
        self.assertFalse(self.Game.is_game_over())
        self.assertFalse(self.Game.is_game_won)
        expected_board = "X\n"
        self.assertEqual(expected_board, self.get_board_str(self.Game.board.board))
        self.Game.update_board((0, 0))
        self.assertTrue(self.Game.is_game_over())
        self.assertTrue(self.Game.is_game_won)
        expected_board = ".\n"
        self.assertEqual(expected_board, self.get_board_str(self.Game.board.board))
"codeprogress1@gmail.com"
] | codeprogress1@gmail.com |
5fcbf86c132748a1b7bc70226d49500fc35ae18b | 08d40b7ebae801667de433ed0b43d9c8cb3e4915 | /banana/bids.py | 253924c898d2b824773ceb0b0e6f4c452e518d76 | [
"Apache-2.0"
] | permissive | amrka/banana | 79e64f2ca89d4769360aec67edc21e51c8d41195 | 4776df93059c93932ac02171bb004200d8e3aa1a | refs/heads/master | 2020-04-11T08:17:10.803452 | 2018-12-12T02:58:03 | 2018-12-12T02:58:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 15,158 | py | from arcana.exceptions import (
ArcanaSelectorMissingMatchError, ArcanaUsageError)
from banana.exceptions import BananaUsageError
from arcana.data.selector import FilesetSelector
from arcana.data.item import Fileset
from arcana.data.file_format import FileFormat
from arcana.utils import split_extension
import os
import os.path as op
import stat
import logging
from bids.layout import BIDSLayout
from arcana.repository import DirectoryRepository
import banana.file_format # @UnusedImport
logger = logging.getLogger('arcana')
class BidsRepository(DirectoryRepository):
    """
    A repository class for BIDS datasets

    Parameters
    ----------
    root_dir : str
        The path to the root of the BidsRepository
    """

    type = 'bids'

    def __init__(self, root_dir, **kwargs):
        # Depth of 2 matches the BIDS subject/session directory hierarchy.
        DirectoryRepository.__init__(self, root_dir, depth=2, **kwargs)
        self._layout = BIDSLayout(root_dir)

    @property
    def root_dir(self):
        return self._root_dir

    @property
    def layout(self):
        # The pybids BIDSLayout used to index the dataset.
        return self._layout

    def __repr__(self):
        return "BidsRepository(root_dir='{}')".format(self.root_dir)

    def __hash__(self):
        return super().__hash__()

    def find_data(self, subject_ids=None, visit_ids=None):
        """
        Return subject and session information for a project in the local
        repository

        Parameters
        ----------
        subject_ids : list(str)
            List of subject IDs with which to filter the tree with. If None
            all are returned
        visit_ids : list(str)
            List of visit IDs with which to filter the tree with. If None all
            are returned

        Returns
        -------
        filesets : list(Fileset)
            Primary BIDS filesets plus any derived filesets
        fields : list(Field)
            Derived fields found under 'derivatives'
        records : list(Record)
            Provenance records found under 'derivatives'
        """
        # BUGFIX: the loop below previously reassigned the 'subject_ids' and
        # 'visit_ids' *parameters*, so (a) the caller's filters were never
        # applied to BIDS items and (b) the super().find_data() call at the
        # end received the entities of the last BIDS item instead of the
        # caller's arguments. Distinct local names fix both problems.
        filesets = []
        all_subjects = self.layout.get_subjects()
        all_visits = self.layout.get_sessions()
        for item in self.layout.get(return_type='object'):
            if not hasattr(item, 'entities') or not item.entities.get('type',
                                                                      False):
                logger.warning("Skipping unrecognised file '{}' in BIDS tree"
                               .format(op.join(item.dirname, item.filename)))
                continue  # Ignore hidden file
            try:
                item_subject_ids = [item.entities['subject']]
            except KeyError:
                # If item exists in top-levels of in the directory structure
                # it is inferred to exist for all subjects in the tree
                item_subject_ids = all_subjects
            try:
                item_visit_ids = [item.entities['session']]
            except KeyError:
                # If item exists in top-levels of in the directory structure
                # it is inferred to exist for all visits in the tree
                item_visit_ids = all_visits
            for subject_id in item_subject_ids:
                if subject_ids is not None and subject_id not in subject_ids:
                    continue  # Excluded by the caller's subject filter
                for visit_id in item_visit_ids:
                    if visit_ids is not None and visit_id not in visit_ids:
                        continue  # Excluded by the caller's visit filter
                    filesets.append(BidsFileset(
                        path=op.join(item.dirname, item.filename),
                        type=item.entities['type'],
                        subject_id=subject_id, visit_id=visit_id,
                        repository=self,
                        modality=item.entities.get('modality', None),
                        task=item.entities.get('task', None)))
        # Get derived filesets, fields and records using the same method using
        # the method in the DirectoryRepository base class
        derived_filesets, fields, records = super().find_data(
            subject_ids=subject_ids, visit_ids=visit_ids)
        filesets.extend(derived_filesets)
        return filesets, fields, records

    def fileset_path(self, item, fname=None):
        """Return the path at which the *derived* fileset *item* should be
        stored (derivatives/<study>/sub-<id>/sess-<id>/<fname>), creating
        the session directory if necessary."""
        if not item.derived:
            raise ArcanaUsageError(
                "Can only get automatically get path to derived filesets not "
                "{}".format(item))
        if fname is None:
            fname = item.fname
        # Summary (per-study/per-visit) data uses the SUMMARY_NAME
        # placeholder in place of a concrete subject/visit ID.
        if item.subject_id is not None:
            subject_id = item.subject_id
        else:
            subject_id = self.SUMMARY_NAME
        if item.visit_id is not None:
            visit_id = item.visit_id
        else:
            visit_id = self.SUMMARY_NAME
        sess_dir = op.join(self.root_dir, 'derivatives', item.from_study,
                           'sub-{}'.format(subject_id),
                           'sess-{}'.format(visit_id))
        # Make session dir if required
        if not op.exists(sess_dir):
            os.makedirs(sess_dir, stat.S_IRWXU | stat.S_IRWXG)
        return op.join(sess_dir, fname)

    def _extract_ids_from_path(self, path_parts, *args, **kwargs):  # @UnusedVariable @IgnorePep8
        """Parse a 'derivatives/<study>/sub-<id>/sess-<id>' relative path
        into (subject_id, visit_id, from_study); return None for paths that
        are not derived-data session directories."""
        if len(path_parts) != 4 or path_parts[0] != 'derivatives':
            return None
        from_study, subj, sess = path_parts[1:]
        subj_id = subj[len('sub-'):]
        visit_id = sess[len('sess-'):]
        return subj_id, visit_id, from_study
class BaseBidsFileset(object):
    """Mixin carrying the BIDS-specific attributes (type, modality, task)
    shared by BIDS fileset and selector classes."""

    # BIDS-native filesets are primary (acquired) data, never derivatives.
    derived = False

    def __init__(self, type, modality, task):  # @ReservedAssignment
        self._type = type
        self._modality = modality
        self._task = task

    def __eq__(self, other):
        # Compare attribute-by-attribute, short-circuiting on first mismatch.
        if self.type != other.type:
            return False
        if self.task != other.task:
            return False
        return self.modality == other.modality

    def __hash__(self):
        combined = 0
        for attr in (self.type, self.task, self.modality):
            combined ^= hash(attr)
        return combined

    def initkwargs(self):
        """Return the keyword arguments needed to reconstruct this object."""
        return {
            'type': self.type,
            'task': self.task,
            'modality': self.modality,
        }

    @property
    def type(self):
        return self._type

    @property
    def modality(self):
        return self._modality

    @property
    def task(self):
        return self._task
class BidsFileset(Fileset, BaseBidsFileset):
    """
    A representation of a fileset within the repository.

    Parameters
    ----------
    path : str
        The path to the fileset on the local system; the fileset's name and
        file format are derived from its basename and extension
    type : str
        The BIDS 'type' entity of the fileset (e.g. 'bold', 'T1w')
    subject_id : int | str | None
        The id of the subject which the fileset belongs to
    visit_id : int | str | None
        The id of the visit which the fileset belongs to
    repository : BaseRepository
        The repository which the fileset is stored
    modality : str
        The BIDS modality
    task : str
        The BIDS task
    checksums : dict[str, str]
        A checksums of all files within the fileset in a dictionary sorted by
        relative file paths
    """

    def __init__(self, path, type, subject_id, visit_id, repository,  # @ReservedAssignment @IgnorePep8
                 modality=None, task=None, checksums=None):
        Fileset.__init__(
            self,
            name=op.basename(path),
            # File format is inferred from the path's extension
            format=FileFormat.by_ext(split_extension(path)[1]),
            frequency='per_session',
            path=path,
            subject_id=subject_id,
            visit_id=visit_id,
            repository=repository,
            checksums=checksums)
        BaseBidsFileset.__init__(self, type, modality, task)

    def __repr__(self):
        return ("{}(type={}, task={}, modality={}, format={}, subj={}, vis={})"
                .format(self.__class__.__name__, self.type, self.task,
                        self.modality, self.format.name, self.subject_id,
                        self.visit_id))
class BidsSelector(FilesetSelector, BaseBidsFileset):
    """
    A match object for matching filesets from their BIDS attributes and file
    format. If any of the provided attributes are None, then that attribute
    is omitted from the match

    Parameters
    ----------
    spec_name : str
        Name of the spec to match
    type : str
        Type of the fileset
    format : FileFormat
        The file format of the fileset to match
    task : str
        The task the fileset belongs to
    modality : str
        Modality of the filesets
    """

    def __init__(self, spec_name, type, format=None, task=None, modality=None,  # @ReservedAssignment @IgnorePep8
                 **kwargs):
        FilesetSelector.__init__(
            self, spec_name, pattern=None, format=format, frequency='per_session',  # @ReservedAssignment @IgnorePep8
            **kwargs)
        BaseBidsFileset.__init__(self, type, modality, task)

    def _filtered_matches(self, node):
        """Return the BIDS filesets in *node* whose type/modality/task/format
        match this selector (None attributes match anything); raise
        ArcanaSelectorMissingMatchError if nothing matches."""
        matches = [
            f for f in node.filesets
            if (isinstance(f, BidsFileset) and
                self.type == f.type and
                (self.modality is None or self.modality == f.modality) and
                (self.task is None or self.task == f.task) and
                (self.format is None or self.format == f.format))]
        if not matches:
            raise ArcanaSelectorMissingMatchError(
                "No BIDS filesets for {} match {} found:\n{}"
                .format(node, self, '\n'.join(str(f) for f in node.filesets)))
        return matches

    def __repr__(self):
        return ("{}(spec_name='{}', type={}, format={}, modality={}, task={})"
                .format(
                    self.__class__.__name__,
                    self.spec_name,
                    self.type,
                    self._format.name if self._format is not None else None,
                    self.modality, self.task))

    def __eq__(self, other):
        return (FilesetSelector.__eq__(self, other) and
                BaseBidsFileset.__eq__(self, other))

    def __hash__(self):
        return (FilesetSelector.__hash__(self) ^
                BaseBidsFileset.__hash__(self))

    def initkwargs(self):
        dct = FilesetSelector.initkwargs(self)
        dct.update(BaseBidsFileset.initkwargs(self))
        return dct

    def _check_args(self):
        pass  # Disable check for either pattern or ID in base class

    @BaseBidsFileset.task.setter
    def task(self, task):
        # Allow the task to be assigned after construction while reusing
        # the getter defined on BaseBidsFileset.
        self._task = task
class BidsAssociatedSelector(FilesetSelector):
    """
    A match object for matching BIDS filesets that are associated with
    another BIDS filesets (e.g. field-maps, bvecs, bvals)

    Parameters
    ----------
    name : str
        Name of the associated fileset
    primary : BidsSelector
        A selector to select the primary fileset which the associated fileset
        is associated with
    association : str
        The name of the association between the fileset to match and the
        primary fileset, can be one of 'bvec', 'bval', 'phase1', 'phase2',
        'phasediff', 'epi' or 'fieldmap'
    fieldmap_order : int
        If there are more than one field-maps associated with the primary
        fileset, which one to return
    """

    VALID_ASSOCIATIONS = ('bvec', 'bval', 'phase1', 'phase2', 'phasediff',
                          'epi', 'fieldmap')

    def __init__(self, spec_name, primary, association, format=None,  # @ReservedAssignment @IgnorePep8
                 fieldmap_order=0, **kwargs):
        FilesetSelector.__init__(self, spec_name, format,
                                 frequency='per_session', **kwargs)
        self._primary = primary
        if association not in self.VALID_ASSOCIATIONS:
            raise BananaUsageError(
                "Invalid association '{}' passed to BidsAssociatedSelector, "
                "can be one of '{}'".format(
                    association, "', '".join(self.VALID_ASSOCIATIONS)))
        self._association = association
        self._fieldmap_order = fieldmap_order

    def __eq__(self, other):
        # BUGFIX: previously compared `self.fieldmap_type`, an attribute that
        # does not exist on this class; compare `fieldmap_order` instead,
        # consistently with __hash__ and initkwargs.
        return (FilesetSelector.__eq__(self, other) and
                self.primary == other.primary and
                self.format == other.format and
                self.association == other.association and
                self.fieldmap_order == other.fieldmap_order)

    def __hash__(self):
        return (FilesetSelector.__hash__(self) ^
                hash(self.primary) ^
                hash(self.format) ^
                hash(self.association) ^
                hash(self.fieldmap_order))

    def initkwargs(self):
        # Kwargs needed to reconstruct this selector via its constructor.
        dct = FilesetSelector.initkwargs(self)
        dct['primary'] = self.primary
        # BUGFIX: 'format' was erroneously set to self.primary, which would
        # have passed the primary selector where a file format is expected.
        dct['format'] = self._format
        dct['association'] = self.association
        dct['fieldmap_order'] = self.fieldmap_order
        return dct

    def __repr__(self):
        return ("{}(spec_name={}, primary={}, format={}, association={}, "
                "fieldmap_order={})".format(
                    type(self).__name__,
                    self.spec_name, self.primary,
                    self._format.name if self._format is not None else None,
                    self.association, self.fieldmap_order))

    def bind(self, study, spec_name=None, **kwargs):
        # We need to access a bound primary selector when matching the
        # associated selector so we set the bound version temporarily to
        # self._primary before winding it back after we have done the bind
        unbound_primary = self._primary
        self._primary = self._primary.bind(study, **kwargs)
        bound = super().bind(study, spec_name=spec_name, **kwargs)
        self._primary = unbound_primary
        return bound

    @property
    def primary(self):
        return self._primary

    @property
    def association(self):
        return self._association

    @property
    def fieldmap_order(self):
        return self._fieldmap_order

    @property
    def task(self):
        # The task is delegated to (and stored on) the primary selector.
        return self.primary.task

    @task.setter
    def task(self, task):
        self.primary.task = task

    def match_node(self, node):
        """Find the fileset associated with this selector's primary match in
        *node* and return it as a ``Fileset``.

        Raises ``ArcanaSelectorMissingMatchError`` if no associated field-map
        exists or `fieldmap_order` is out of range.
        """
        primary_match = self.primary.match_node(node)
        layout = self.primary.repository.layout
        if self.association == 'bvec':
            path = layout.get_bvec(primary_match.path)
        elif self.association == 'bval':
            path = layout.get_bval(primary_match.path)
        else:
            # All remaining associations are field-map variants.
            fieldmaps = layout.get_fieldmap(primary_match.path,
                                            return_list=True)
            if not len(fieldmaps):
                raise ArcanaSelectorMissingMatchError(
                    "No matches for field-map '{}' associated with {}"
                    .format(self.association, primary_match))
            try:
                fieldmap = fieldmaps[self.fieldmap_order]
            except IndexError:
                raise ArcanaSelectorMissingMatchError(
                    "Index of field-map ({}) is outside range of available "
                    "field-maps ({}) for {}"
                    .format(self.fieldmap_order,
                            ', '.join(str(f) for f in fieldmaps),
                            primary_match))
            path = fieldmap[self.association]
        return Fileset.from_path(path, format=self._format,
                                 repository=self.primary.repository,
                                 subject_id=node.subject_id,
                                 visit_id=node.visit_id)
| [
"tom.g.close@gmail.com"
] | tom.g.close@gmail.com |
da631c41d70c1d7f33b03220272f3b4868cae56e | f445450ac693b466ca20b42f1ac82071d32dd991 | /generated_tempdir_2019_09_15_163300/generated_part009169.py | 278cb18a9c17ec793e71dc502195fe855036b2a7 | [] | no_license | Upabjojr/rubi_generated | 76e43cbafe70b4e1516fb761cabd9e5257691374 | cd35e9e51722b04fb159ada3d5811d62a423e429 | refs/heads/master | 2020-07-25T17:26:19.227918 | 2019-09-15T15:41:48 | 2019-09-15T15:41:48 | 208,357,412 | 4 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,516 | py | from sympy.abc import *
from matchpy.matching.many_to_one import CommutativeMatcher
from matchpy import *
from matchpy.utils import VariableWithCount
from collections import deque
from multiset import Multiset
from sympy.integrals.rubi.constraints import *
from sympy.integrals.rubi.utility_function import *
from sympy.integrals.rubi.rules.miscellaneous_integration import *
from sympy import *
class CommutativeMatcher110400(CommutativeMatcher):
    # NOTE: auto-generated matcher (matchpy code generation for SymPy's Rubi
    # integration rules). The numeric state comments below come from the
    # generator; do not edit the state machine by hand.
    _instance = None
    patterns = {
        0: (0, Multiset({}), [
            (VariableWithCount('i3.1.2.1.0', 1, 1, None), Mul),
            (VariableWithCount('i3.1.2.1.0_1', 1, 1, S(1)), Mul)
        ]),
        1: (1, Multiset({0: 1}), [
            (VariableWithCount('i3.1.2.1.0', 1, 1, S(1)), Mul)
        ])
    }
    subjects = {}
    subjects_by_id = {}
    bipartite = BipartiteGraph()
    associative = Mul
    max_optional_count = 1
    anonymous_patterns = set()

    def __init__(self):
        self.add_subject(None)

    @staticmethod
    def get():
        # Lazily create and cache the singleton matcher instance.
        if CommutativeMatcher110400._instance is None:
            CommutativeMatcher110400._instance = CommutativeMatcher110400()
        return CommutativeMatcher110400._instance

    @staticmethod
    def get_match_iter(subject):
        # Generator over (pattern_index, substitution) pairs; here it matches
        # subjects of the shape x**2 (pattern 0).
        subjects = deque([subject]) if subject is not None else deque()
        subst0 = Substitution()
        # State 110399
        if len(subjects) >= 1 and isinstance(subjects[0], Pow):
            tmp1 = subjects.popleft()
            subjects2 = deque(tmp1._args)
            # State 110742
            if len(subjects2) >= 1:
                tmp3 = subjects2.popleft()
                subst1 = Substitution(subst0)
                try:
                    subst1.try_add_variable('i3.1.2.1.1', tmp3)
                except ValueError:
                    pass
                else:
                    pass
                    # State 110743
                    if len(subjects2) >= 1 and subjects2[0] == Integer(2):
                        tmp5 = subjects2.popleft()
                        # State 110744
                        if len(subjects2) == 0:
                            pass
                            # State 110745
                            if len(subjects) == 0:
                                pass
                                # 0: x**2
                                yield 0, subst1
                        subjects2.appendleft(tmp5)
                subjects2.appendleft(tmp3)
            subjects.appendleft(tmp1)
        return
        yield
from collections import deque | [
"franz.bonazzi@gmail.com"
] | franz.bonazzi@gmail.com |
62ec0f10af3cd0254445d9c25cfa5e46cd0b104b | 6b03517ab54bc5dfd7464bd680e4d6a0df054dbc | /src/bot/settings.py | 446d8f863c3cd1dffd0a5cd9f4cde9d4da4bc8d2 | [] | no_license | mike0sv/telegram-bot-django-template-project | 88eba6ef485dc603161cf6110f32149509eacc2b | 43fd7cfa1d54887274a6ee0e6f56c4d9cdd1cb10 | refs/heads/master | 2021-06-24T10:05:58.092449 | 2019-09-26T15:52:55 | 2019-09-26T15:52:55 | 168,697,668 | 0 | 1 | null | 2021-06-10T21:10:30 | 2019-02-01T12:57:41 | Python | UTF-8 | Python | false | false | 4,436 | py | """
Django settings for bot project.
Generated by 'django-admin startproject' using Django 2.1.5.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.1/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'syd-@wanon*sja0%d6ztc%l@dfkjv8mkgx*#&mtve@(_0nj2q8'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'backend'
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'bot.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'bot.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
DOCKER_ENV = os.environ.get('DOCKER', 'false') == 'true'
if DOCKER_ENV:
DATABASES['default'] = {
'ENGINE': 'django.db.backends.postgresql',
'NAME': 'tgbot',
'PASSWORD': 'postgres',
'USER': 'postgres',
'HOST': 'postgres'
}
# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'console': {
'class': 'logging.StreamHandler',
}
},
'loggers': {
'django.request': {
'handlers': ['console'],
'level': os.getenv('DJANGO_LOG_LEVEL', 'INFO'),
},
'bot': {
'handlers': ['console'],
'level': 'INFO',
}
},
}
# Internationalization
# https://docs.djangoproject.com/en/2.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.1/howto/static-files/
STATIC_URL = '/static/'
def env(name, default=None, type=None):  # noqa: A002 - `type` kept for backward compat
    """Read the environment variable *name*.

    Falls back to *default* when the variable is unset. If *type* is given
    (a callable such as ``int`` or a boolean parser), the resolved value is
    passed through it before being returned.

    BUGFIX: previously the converter was applied even when the resolved value
    was ``None`` (variable unset and no default), which raised for most
    converters; ``None`` is now returned unchanged in that case.
    """
    value = os.environ.get(name, default)
    if type is not None and value is not None:
        return type(value)
    return value
DEFAULT_BOT_TOKEN = env('BOT_TOKEN', None)
TG_USE_PROXY = env('TG_USE_PROXY', 'True', type=lambda x: x == 'True')
TG_PROXY_ADDRESS = env('TG_PROXY_ADDRESS')
TG_PROXY_USERNAME = env('TG_PROXY_USERNAME')
TG_PROXY_PASSWORD = env('TG_PROXY_PASSWORD')
TG_WEBHOOK_CERT_KEY = os.environ.get('BOT_CERT_KEY', 'cert/private.key')
TG_WEBHOOK_CERT_PEM = os.environ.get('BOT_CERT_PEM', 'cert/cert.pem')
TG_WEBHOOK = os.environ.get('BOT_WEBHOOK') == 'true'
TG_WEBHOOK_PORT = int(os.environ.get('BOT_WEBHOOK_PORT', 5555))
| [
"mike0sv@gmail.com"
] | mike0sv@gmail.com |
7e1835c0f76b6c713b30174e560aeb5c4b77e5bd | bc9f66258575dd5c8f36f5ad3d9dfdcb3670897d | /lib/surface/compute/networks/delete.py | 39d2c060fc886ce025072ddf7996fe34279c33f2 | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | google-cloud-sdk-unofficial/google-cloud-sdk | 05fbb473d629195f25887fc5bfaa712f2cbc0a24 | 392abf004b16203030e6efd2f0af24db7c8d669e | refs/heads/master | 2023-08-31T05:40:41.317697 | 2023-08-23T18:23:16 | 2023-08-23T18:23:16 | 335,182,594 | 9 | 2 | NOASSERTION | 2022-10-29T20:49:13 | 2021-02-02T05:47:30 | Python | UTF-8 | Python | false | false | 2,389 | py | # -*- coding: utf-8 -*- #
# Copyright 2014 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command for deleting networks."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.compute import base_classes
from googlecloudsdk.api_lib.compute import utils
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.compute import flags as compute_flags
from googlecloudsdk.command_lib.compute.networks import flags
class Delete(base.DeleteCommand):
  r"""Delete Compute Engine networks.

  *{command}* deletes one or more Compute Engine
  networks. Networks can only be deleted when no other resources
  (e.g., virtual machine instances) refer to them.

  ## EXAMPLES

  To delete a network with the name 'network-name', run:

    $ {command} network-name

  To delete two networks with the names 'network-name1' and 'network-name2',
  run:

    $ {command} network-name1 network-name2
  """

  # Populated in Args(); holds the network-name resource argument so Run()
  # can resolve the user-supplied names into resource references.
  NETWORK_ARG = None

  @staticmethod
  def Args(parser):
    """Registers the command's positional args and completion hooks."""
    Delete.NETWORK_ARG = flags.NetworkArgument(plural=True)
    Delete.NETWORK_ARG.AddArgument(parser, operation_type='delete')
    parser.display_info.AddCacheUpdater(flags.NetworksCompleter)

  def Run(self, args):
    """Deletes the requested networks.

    Args:
      args: argparse.Namespace, the parsed command-line arguments.

    Returns:
      The responses from issuing one Delete request per network.
    """
    holder = base_classes.ComputeApiHolder(self.ReleaseTrack())
    client = holder.client

    network_refs = Delete.NETWORK_ARG.ResolveAsResource(
        args,
        holder.resources,
        scope_lister=compute_flags.GetDefaultScopeLister(client))

    # Interactive confirmation before anything is deleted.
    utils.PromptForDeletion(network_refs)

    # Batch one Delete request per network and issue them together.
    requests = []
    for network_ref in network_refs:
      requests.append((client.apitools_client.networks, 'Delete',
                       client.messages.ComputeNetworksDeleteRequest(
                           **network_ref.AsDict())))

    return client.MakeRequests(requests)
| [
"cloudsdk.mirror@gmail.com"
] | cloudsdk.mirror@gmail.com |
84f22aec0e44660430ea57649264e25e7d9cd0ac | 67325192c1e528a39d457f11e61b480d68826708 | /mods/mcpython/Item/stone_brick.py | d89a7eefa7b49fc05f922501e114e4cddf3480be | [
"MIT"
] | permissive | vashistaarav1611/mcpython-a-minecraft-clone-in-python | 5851b377b54fd2b28c106112c7b18f397b71ab50 | c16cd66f319efdeec4130e1a43f5a857caf1ea13 | refs/heads/master | 2023-02-01T22:48:51.787106 | 2020-12-21T15:02:25 | 2020-12-21T15:02:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 225 | py | from .Item import *
class StoneBrick(Item):
def getName(self):
return "minecraft:stone_brick"
def getTexturFile(self):
return "./assets/textures/items/stonebrick.png"
handler.register(StoneBrick)
| [
"baulukas1301@googlemail.com"
] | baulukas1301@googlemail.com |
2a47b827129883aed64529d397c3c7dfc99df3af | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2876/61020/297101.py | 9989eb94aabc4644e48460e350427804c5b184bd | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,201 | py | import itertools
import copy
def indexes_of(a_list_, ele):
    """Return the positions in *a_list_* where *ele* occurs, in order."""
    return [pos for pos, item in enumerate(a_list_) if item == ele]
def is_done(a_list):
    """Return True iff no three consecutive elements of *a_list* form the
    forbidden pattern ['1', '0', '1'].

    Sequences of length two or less are trivially done.
    """
    length = len(a_list)
    if length <= 2:
        return True
    # Slide a width-3 window over the sequence instead of recursing.
    return all(a_list[i:i + 3] != ['1', '0', '1'] for i in range(length - 2))
def min_switch(a_list):
    """Return the minimum number of '1' entries that must be switched to '0'
    so that the sequence contains no '1','0','1' window (per ``is_done``).

    Returns 0 when the sequence is already done.
    """
    if is_done(a_list):
        return 0
    indexes_of_1 = indexes_of(a_list, '1')
    # Try flipping every subset of '1' positions, smallest subsets first, so
    # the first success is guaranteed minimal.
    # BUGFIX: the original always generated combinations of size 2 regardless
    # of the loop counter, wrote integer 0 instead of the string '0', and
    # could fall off the end returning None.
    for flip_count in range(1, len(indexes_of_1) + 1):
        for indexes in itertools.combinations(indexes_of_1, flip_count):
            candidate = copy.copy(a_list)
            for index in indexes:
                candidate[index] = '0'
            if is_done(candidate):
                return flip_count
    # Unreachable in practice: flipping every '1' always succeeds.
    return len(indexes_of_1)
# First input line is ignored (e.g. an element count); the second line holds
# the space-separated '0'/'1' tokens to analyse.
trash = input()
nums = input().split()
print(min_switch(nums))
# test1
# import copy
#
# a = ['0', '1', '2']
# b = copy.copy(a)
#
# print(a == b)
# print(a is b)
#
# b[1] = '4'
# print(a)
# print(b)
# test2
# import copy
#
# a = ['0', '1', '2']
# b = copy.deepcopy(a)
# print(a == b)
# print(a is b)
| [
"1069583789@qq.com"
] | 1069583789@qq.com |
10e2468f2d86c9659123463bdfb409ef99344a45 | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2812/60651/233659.py | e0225c2f2d256ddec8d6adca5ebbca99843180af | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 281 | py | inlist=input().split()
position=int(inlist[0])
n=int(inlist[1])
molist=[]
for i in range(n):
innum=int(input())
mo=innum%position
if mo not in molist:
molist.append(mo)
else:
print(i+1)
break
if(len(molist)==n):
print(-1)
| [
"1069583789@qq.com"
] | 1069583789@qq.com |
07d064421aac823d34841bf839142fc4565a719a | 252a65fc43c2484fb78ac60e7694ff7ce43068cf | /python_way.py | f74a53eaa0b63745f744b7434d94cb9c2806e7a0 | [] | no_license | fandrefh/curso-python-django | 558f26f16576147e4a89506c709466354081167c | 8f74eb3d4d3dfda98b1493bce9dba479c689fe08 | refs/heads/master | 2020-04-28T22:29:41.769622 | 2015-07-25T21:12:53 | 2015-07-25T21:12:53 | 38,125,699 | 1 | 2 | null | null | null | null | UTF-8 | Python | false | false | 56 | py | with open('numeros.txt', 'r') as f:
print(f.read())
| [
"fandrefh@gmail.com"
] | fandrefh@gmail.com |
ad71c973ebccb1a08a32fdb6c07b5344bd636377 | 5ada50387ed5998d292011031b5f3739ac20414b | /find_bridges/python/find_bridges_1.py | 90648ab138b60265117445e2e51bc4b590e7fc32 | [] | no_license | bigWaitForItOh/Graph-Algorithms | 84484230588d3d81584d4d3feb6c231ea77e84b0 | 011163856d60be89c602044c705c835af47b3b95 | refs/heads/master | 2021-01-17T10:21:43.900771 | 2016-05-22T19:01:38 | 2016-05-22T19:01:38 | 44,433,628 | 7 | 2 | null | 2016-05-22T19:01:38 | 2015-10-17T10:54:27 | Python | UTF-8 | Python | false | false | 1,454 | py | ##################################################################################################################################
#Program to find Bridges in a simple, unweighted, undirected graph.
#Time Complexity: O (E*(V + E))
#E = number of edges, V = number of Nodes
#For each edge in the graph, it removes the edge and explores the new graph using Breadth First Search to check if the graph is still connected.
#It then inserts the removed edge back into the graph
##################################################################################################################################
def is_connected(graph):
    """Return True iff the undirected *graph* (dict: node -> set of
    neighbours) is connected.

    Performs a stack-based traversal from an arbitrary start node; assumes
    the graph has at least one node.
    """
    queue, visited = [list(graph.keys())[0]], set()
    while queue:
        current = queue.pop()
        visited.add(current)
        for neighbour in graph[current]:
            if neighbour not in visited and neighbour not in queue:
                queue.append(neighbour)
    return len(graph) == len(visited)


def find_bridges(graph):
    """Return the set of bridge edges of *graph*.

    Each edge is removed in turn and the graph re-checked for connectivity;
    the graph is restored afterwards. Each bridge appears in both
    orientations, e.g. ('C', 'D') and ('D', 'C').
    """
    bridges = set()
    for node in graph:
        # BUGFIX: iterate over a snapshot of the neighbour set. The original
        # removed and re-added elements of graph[node] while iterating that
        # same set, which is undefined behaviour for set iteration in
        # Python 3 (re-adding can rehash, skipping or revisiting elements).
        for neighbour in list(graph[node]):
            graph[node].remove(neighbour)
            graph[neighbour].remove(node)
            if not is_connected(graph):
                bridges.add((node, neighbour))
            graph[node].add(neighbour)
            graph[neighbour].add(node)
    return bridges
graph = {
'A' : set (['B','C']),
'B' : set (['A','C']),
'C' : set (['A','B','D','F']),
'D' : set (['C','E']),
'E' : set (['D']),
'F' : set (['C'])
};
print (find_bridges (graph));
| [
"duaraghav8@gmail.com"
] | duaraghav8@gmail.com |
6053f2bbd3c07939d3ae090def23183a8179fae1 | f37c8a91ba8f18db083cf4dcb7d74cee6b84e444 | /pynion/filesystem/_filetypes/basefile.py | 51482cc137768351b0c4b566255392fd7a571a68 | [
"MIT"
] | permissive | jaumebonet/pynion | 6818d764cd1dbb9cbb53d665837adab828e8eac7 | 4052aee023df7206c2e56a28c361d4a5a445a6d4 | refs/heads/master | 2021-01-10T08:56:24.968686 | 2016-02-16T18:43:31 | 2016-02-16T18:43:31 | 50,191,510 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 6,770 | py | import os
import json
import pathlib
from ... import Manager
from ...metaclass import Multiton
from ...errors.fe import FileOpenError as FOE
from ...errors.fe import FileWrongRequestedActionError as FWR
m = Manager()
class BaseFile(object):
    """
    The **BaseFile** :py:class:`pynion.Multiton` is a file management object
    created directly through the py:class:`pynion.File` factory.

    It specifically manages regular files.

    Allows the with statement in read files.
    """
    __metaclass__ = Multiton

    # Attribute name used by the Multiton metaclass to identify instances.
    _IDENTIFIER = 'file_name'

    def __init__(self, file_name, action):
        self.fname = pathlib.Path(file_name)
        self.action = action
        self._fd = None
        self._pattern = None

    ##############
    # ATTRIBUTES #
    ##############
    @property
    def full(self):
        """
        :return: Full path of the file
        :rtype: str
        """
        try:
            return str(self.fname.resolve())
        # BUGFIX: narrowed from a bare ``except:`` (which also swallowed
        # KeyboardInterrupt/SystemExit). resolve() can fail e.g. for paths
        # that do not exist; fall back to a plain absolute path.
        except Exception:
            return os.path.abspath(str(self.fname))

    @property
    def dir(self):
        """
        :return: Full path containing directory
        :rtype: str
        """
        return str(self.fname.resolve().parent)

    @property
    def last_dir(self):
        """
        :return: Name of the containing directory
        :rtype: str
        """
        return str(self.fname.resolve().parent.name)

    @property
    def name(self):
        """
        :return: Name of the file
        :rtype: str
        """
        return str(self.fname.name)

    @property
    def prefix(self):
        """
        :return: Name of the file without extension
        :rtype: str
        """
        return str(self.fname.stem)

    @property
    def first_prefix(self):
        """
        :return: Name of the first section of the file
        :rtype: str
        """
        return self.name.split('.')[0]

    @property
    def extension(self):
        """
        :return: Name of the file's extension
        :rtype: str
        """
        return str(self.fname.suffix)

    @property
    def extensions(self):
        """
        :return: List of all the sections of the file name except the first one.
        :rtype: list
        """
        return self.fname.suffixes

    @property
    def descriptor(self):
        """
        :return: Descriptor of the stored file
        :rtype: str
        """
        return self._fd

    @property
    def size(self):
        """
        :return: File size
        :rtype: str
        """
        return self.fname.stat().st_size

    @property
    def pattern(self):
        """
        :return: Dictionary with the pattern assigned sections of the file name.
        :rtype: dict
        """
        if self._pattern is None:
            return None
        pattern = {}
        for p in self._pattern:
            pattern[p] = self.__dict__[p]
        return pattern

    ############
    # BOOLEANS #
    ############
    @property
    def is_open(self):
        """
        :return: Check if the file descriptor is open
        :rtype: bool
        """
        return self._fd is not None

    @property
    def is_to_write(self):
        """
        :return: Check if the file is set to write
        :rtype: bool
        """
        return self.action in set(['w', 'a'])

    @property
    def is_to_read(self):
        """
        :return: Check if the file is set to read
        :rtype: bool
        """
        return self.action in set(['r'])

    ###########
    # METHODS #
    ###########
    def relative_to(self, path=None):
        """
        :param str path: Path to which the relative path is required
            (defaults to the current working directory at call time).
        :return: Actual path relative to the query path
        :rtype: str
        """
        # BUGFIX: the default was ``pathlib.Path.cwd()`` in the signature,
        # which is evaluated once at class-definition time and therefore
        # captured the import-time working directory.
        if path is None:
            path = pathlib.Path.cwd()
        return self.fname.relative_to(path)

    ####################
    # METHODS: ON FILE #
    ####################
    def open(self):
        """
        Open the file in the previously defined action type.

        :rtype: self
        """
        if self.is_open:
            return self
        self._fd = open(self.full, self.action)
        return self

    def read(self):
        """
        :raise: :py:class:`pynion.errors.fe.FileWrongRequestedActionError` if
            opened in write mode.

        :rtype: File Descriptor
        """
        self._check_action('r')
        return self._fd

    def readline(self):
        """
        :raise: :py:class:`pynion.errors.fe.FileWrongRequestedActionError` if
            opened in write mode.

        :return: One line of the file.
        :rtype: str
        """
        self._check_action('r')
        return self._fd.readline()

    def readJSON(self, encoding = 'utf-8'):
        """
        Retrieve all data in file as a JSON dictionary.

        :param str encoding: Encoding read format (default: utf-8)
        :raise: :py:class:`pynion.errors.fe.FileWrongRequestedActionError` if
            opened in write mode.

        :rtype: dict
        """
        # NOTE(review): the descriptor is opened here and deliberately left
        # open (callers may close it); confirm this is intended behaviour.
        d = []
        self.open()
        for l in self.read():
            d.append(l.strip())
        return json.loads(''.join(d), encoding=encoding)

    def write(self, line, encoding = None):
        """
        Write to the file

        :param str line: Content to write
        :param str encoding: Encoding format (use utf-8, for example, if needs
            to print greek characters)
        :raise: :py:class:`pynion.errors.fe.FileWrongRequestedActionError` if
            opened in read mode.
        """
        self._check_action('w')
        if encoding is None:
            self._fd.write(line)
        else:
            self._fd.write(line.encode(encoding))

    def flush(self):
        """
        :raise: :py:class:`pynion.errors.fe.FileWrongRequestedActionError` if
            opened in read mode.
        """
        self._check_action('w')
        self._fd.flush()

    def close(self):
        """
        Close the file.
        """
        self._fd.close()
        self._fd = None

    ###################
    # PRIVATE METHODS #
    ###################
    def _check_action(self, call_method):
        # Validate that the file is open and that the requested operation
        # ('r' or 'w') is compatible with the mode it was opened in.
        if not self.is_open:
            raise FOE(self.full, self.action)
        if call_method == 'r' and self.is_to_write:
            raise FWR(self.full, self.action)
        elif call_method == 'w' and self.is_to_read:
            raise FWR(self.full, self.action)

    def __enter__(self):
        self.open()
        return self.read()

    def __exit__(self, exc_type, exc_value, exc_traceback):
        # Parameters renamed from (type, value, traceback) to avoid shadowing
        # builtins; the context-manager protocol passes them positionally.
        self.close()

    #################
    # MAGIC METHODS #
    #################
    def __str__(self):
        return self.full

    def __repr__(self):
        return '<{0}: {1.full}>'.format(self.__class__.__name__, self)
"jaume.bonet@gmail.com"
] | jaume.bonet@gmail.com |
c03060aef8d7988b9b7fed6a37ca78c20e19f07c | 0e5658deaa630a603a7134847518408c09e3a6d0 | /vendor/riffyn-sdk/test/test_connections_body.py | ed2ddaf90178c82802ccc979934e76aa1efa8f67 | [] | no_license | jace-ys/lab-automation | cb0d0d2b88ec64e235cffca8bbf556b22c55ab1e | 27be3a942b111404844f29aa9a0dd957b7fde459 | refs/heads/master | 2023-06-01T20:30:55.557975 | 2021-06-16T07:46:20 | 2021-06-16T08:15:38 | 297,329,484 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,639 | py | # coding: utf-8
"""
Riffyn Nexus REST API V1
## Vocabulary Before you begin, please familiarize yourself with our [Glossary of Terms](https://help.riffyn.com/hc/en-us/articles/360045503694). ## Getting Started If you'd like to play around with the API, there are several free GUI tools that will allow you to send requests and receive responses. We suggest using the free app [Postman](https://www.getpostman.com/). ## Authentication Begin with a call the [authenticate](#/authentication/authenticate) endpoint using [HTTP Basic authentication](https://en.wikipedia.org/wiki/Basic_access_authentication) with your `username` and `password` to retrieve either an API Key or an Access Token. For example: curl -X POST -u '<username>' https://api.app.riffyn.com/v1/auth -v You may then use either the API Key or the accessToken for all future requests to the API. For example: curl -H 'access-token: <ACCESS_TOKEN>' https://api.app.riffyn.com/v1/units -v curl -H 'api-key: <API_KEY>' https://api.app.riffyn.com/v1/units -v The tokens' values will be either in the message returned by the `/authenticate` endpoint or in the createApiKey `/auth/api-key` or CreateAccesToken `/auth/access-token` endpoints. The API Key will remain valid until it is deauthorized by revoking it through the Security Settings in the Riffyn Nexus App UI. The API Key is best for running scripts and longer lasting interactions with the API. The Access Token will expire automatically and is best suited to granting applications short term access to the Riffyn Nexus API. Make your requests by sending the HTTP header `api-key: $API_KEY`, or `access-token: $ACCESS_TOKEN`. In Postman, add your preferred token to the headers under the Headers tab for any request other than the original request to `/authenticate`. If you are enrolled in MultiFactor Authentication (MFA) the `status` returned by the `/authenticate` endpoint will be `MFA_REQUIRED`. 
A `passCode`, a `stateToken`, and a `factorId` must be passed to the [/verify](#/authentication/verify) endpoint to complete the authentication process and achieve the `SUCCESS` status. MFA must be managed in the Riffyn Nexus App UI. ## Paging and Sorting The majority of endpoints that return a list of data support paging and sorting through the use of three properties, `limit`, `offset`, and `sort`. Please see the list of query parameters, displayed below each endpoint's code examples, to see if paging or sorting is supported for that specific endpoint. Certain endpoints return data that's added frequently, like resources. As a result, you may want filter results on either the maximum or minimum creation timestamp. This will prevent rows from shifting their position from the top of the list, as you scroll though subsequent pages of a multi-page response. Before querying for the first page, store the current date-time (in memory, a database, a file...). On subsequent pages you *may* include the `before` query parameter, to limit the results to records created before that date-time. E.g. before loading page one, you store the current date time of `2016-10-31T22:00:00Z` (ISO date format). Later, when generating the URL for page two, you *could* limit the results by including the query parameter `before=1477951200000` (epoch timestamp). ## Postman endpoint examples There is a YAML file with the examples of the request on Riffyn Nexus API [Click here](/v1/collection) to get the file. If you don't know how to import the collection file, [here](https://learning.postman.com/docs/postman/collections/data-formats/#importing-postman-data) are the steps. ## Client SDKs You may write your own API client, or you may use one of ours. [Click here](/v1/clients) to select your programming language and download an API client. # noqa: E501
OpenAPI spec version: 4.2.0
Contact: support@riffyn.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import riffyn_nexus_sdk_v1
from riffyn_nexus_sdk_v1.models.connections_body import ConnectionsBody # noqa: E501
from riffyn_nexus_sdk_v1.rest import ApiException
class TestConnectionsBody(unittest.TestCase):
    """ConnectionsBody unit test stubs"""

    def setUp(self):
        """No fixtures are required for these stubs."""

    def tearDown(self):
        """Nothing to clean up."""

    def testConnectionsBody(self):
        """Test ConnectionsBody"""
        # FIXME: construct object with mandatory attributes with example values
        # model = riffyn_nexus_sdk_v1.models.connections_body.ConnectionsBody()  # noqa: E501
if __name__ == '__main__':
unittest.main()
| [
"jaceys.tan@gmail.com"
] | jaceys.tan@gmail.com |
aca5fbe139d058a03bd335abccdd53ee66eaa77f | b66eb4f3253ba20aa6d4777f3df5c3e787fd79bc | /devil/devil/android/device_utils.py | 8685017197e16107d8ab73657146f82f1b34b6cf | [
"BSD-3-Clause"
] | permissive | ezart/catapult | 26aa11172a7f8c34a4fcd8e51541ac394f8c5fb4 | d3a10a0399c14a92d5ffb737c303a03fbfccb98e | refs/heads/master | 2020-12-05T02:00:03.056415 | 2020-01-03T22:40:44 | 2020-01-03T23:46:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 132,962 | py | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Provides a variety of device interactions based on adb.
Eventually, this will be based on adb_wrapper.
"""
# pylint: disable=unused-argument
import calendar
import collections
import contextlib
import fnmatch
import json
import logging
import math
import os
import posixpath
import pprint
import random
import re
import shutil
import stat
import sys
import tempfile
import time
import threading
import uuid
from devil import base_error
from devil import devil_env
from devil.utils import cmd_helper
from devil.android import apk_helper
from devil.android import device_signal
from devil.android import decorators
from devil.android import device_errors
from devil.android import device_temp_file
from devil.android import install_commands
from devil.android import logcat_monitor
from devil.android import md5sum
from devil.android.sdk import adb_wrapper
from devil.android.sdk import intent
from devil.android.sdk import keyevent
from devil.android.sdk import version_codes
from devil.utils import host_utils
from devil.utils import parallelizer
from devil.utils import reraiser_thread
from devil.utils import timeout_retry
from devil.utils import zip_utils
from py_utils import tempfile_ext
try:
from devil.utils import reset_usb
except ImportError:
# Fail silently if we can't import reset_usb. We're likely on windows.
reset_usb = None
logger = logging.getLogger(__name__)
_DEFAULT_TIMEOUT = 30
_DEFAULT_RETRIES = 3
# A sentinel object for default values
# TODO(jbudorick): revisit how default values are handled by
# the timeout_retry decorators.
DEFAULT = object()
# A sentinel object to require that calls to RunShellCommand force running the
# command with su even if the device has been rooted. To use, pass into the
# as_root param.
_FORCE_SU = object()
_RECURSIVE_DIRECTORY_LIST_SCRIPT = """
function list_subdirs() {
for f in "$1"/* ;
do
if [ -d "$f" ] ;
then
if [ "$f" == "." ] || [ "$f" == ".." ] ;
then
continue ;
fi ;
echo "$f" ;
list_subdirs "$f" ;
fi ;
done ;
} ;
list_subdirs %s
"""
_RESTART_ADBD_SCRIPT = """
trap '' HUP
trap '' TERM
trap '' PIPE
function restart() {
stop adbd
start adbd
}
restart &
"""
# Not all permissions can be set.
_PERMISSIONS_BLACKLIST_RE = re.compile('|'.join(fnmatch.translate(p) for p in [
'android.permission.ACCESS_LOCATION_EXTRA_COMMANDS',
'android.permission.ACCESS_MOCK_LOCATION',
'android.permission.ACCESS_NETWORK_STATE',
'android.permission.ACCESS_NOTIFICATION_POLICY',
'android.permission.ACCESS_VR_STATE',
'android.permission.ACCESS_WIFI_STATE',
'android.permission.AUTHENTICATE_ACCOUNTS',
'android.permission.BLUETOOTH',
'android.permission.BLUETOOTH_ADMIN',
'android.permission.BROADCAST_STICKY',
'android.permission.CHANGE_NETWORK_STATE',
'android.permission.CHANGE_WIFI_MULTICAST_STATE',
'android.permission.CHANGE_WIFI_STATE',
'android.permission.DISABLE_KEYGUARD',
'android.permission.DOWNLOAD_WITHOUT_NOTIFICATION',
'android.permission.EXPAND_STATUS_BAR',
'android.permission.FOREGROUND_SERVICE',
'android.permission.GET_PACKAGE_SIZE',
'android.permission.INSTALL_SHORTCUT',
'android.permission.INJECT_EVENTS',
'android.permission.INTERNET',
'android.permission.KILL_BACKGROUND_PROCESSES',
'android.permission.MANAGE_ACCOUNTS',
'android.permission.MODIFY_AUDIO_SETTINGS',
'android.permission.NFC',
'android.permission.READ_SYNC_SETTINGS',
'android.permission.READ_SYNC_STATS',
'android.permission.RECEIVE_BOOT_COMPLETED',
'android.permission.RECORD_VIDEO',
'android.permission.REORDER_TASKS',
'android.permission.REQUEST_INSTALL_PACKAGES',
'android.permission.RESTRICTED_VR_ACCESS',
'android.permission.RUN_INSTRUMENTATION',
'android.permission.SET_ALARM',
'android.permission.SET_TIME_ZONE',
'android.permission.SET_WALLPAPER',
'android.permission.SET_WALLPAPER_HINTS',
'android.permission.TRANSMIT_IR',
'android.permission.USE_CREDENTIALS',
'android.permission.USE_FINGERPRINT',
'android.permission.VIBRATE',
'android.permission.WAKE_LOCK',
'android.permission.WRITE_SYNC_SETTINGS',
'com.android.browser.permission.READ_HISTORY_BOOKMARKS',
'com.android.browser.permission.WRITE_HISTORY_BOOKMARKS',
'com.android.launcher.permission.INSTALL_SHORTCUT',
'com.chrome.permission.DEVICE_EXTRAS',
'com.google.android.apps.now.CURRENT_ACCOUNT_ACCESS',
'com.google.android.c2dm.permission.RECEIVE',
'com.google.android.providers.gsf.permission.READ_GSERVICES',
'com.google.vr.vrcore.permission.VRCORE_INTERNAL',
'com.sec.enterprise.knox.MDM_CONTENT_PROVIDER',
'*.permission.C2D_MESSAGE',
'*.permission.READ_WRITE_BOOKMARK_FOLDERS',
'*.TOS_ACKED',
]))
_SHELL_OUTPUT_SEPARATOR = '~X~'
_PERMISSIONS_EXCEPTION_RE = re.compile(
r'java\.lang\.\w+Exception: .*$', re.MULTILINE)
_CURRENT_FOCUS_CRASH_RE = re.compile(
r'\s*mCurrentFocus.*Application (Error|Not Responding): (\S+)}')
_GETPROP_RE = re.compile(r'\[(.*?)\]: \[(.*?)\]')
# Regex to parse the long (-l) output of 'ls' command, c.f.
# https://github.com/landley/toybox/blob/master/toys/posix/ls.c#L446
_LONG_LS_OUTPUT_RE = re.compile(
r'(?P<st_mode>[\w-]{10})\s+' # File permissions
r'(?:(?P<st_nlink>\d+)\s+)?' # Number of links (optional)
r'(?P<st_owner>\w+)\s+' # Name of owner
r'(?P<st_group>\w+)\s+' # Group of owner
r'(?:' # Either ...
r'(?P<st_rdev_major>\d+),\s+' # Device major, and
r'(?P<st_rdev_minor>\d+)\s+' # Device minor
r'|' # .. or
r'(?P<st_size>\d+)\s+' # Size in bytes
r')?' # .. or nothing
r'(?P<st_mtime>\d{4}-\d\d-\d\d \d\d:\d\d)\s+' # Modification date/time
r'(?P<filename>.+?)' # File name
r'(?: -> (?P<symbolic_link_to>.+))?' # Symbolic link (optional)
r'$' # End of string
)
_LS_DATE_FORMAT = '%Y-%m-%d %H:%M'
_FILE_MODE_RE = re.compile(r'[dbclps-](?:[r-][w-][xSs-]){2}[r-][w-][xTt-]$')
_FILE_MODE_KIND = {
'd': stat.S_IFDIR, 'b': stat.S_IFBLK, 'c': stat.S_IFCHR,
'l': stat.S_IFLNK, 'p': stat.S_IFIFO, 's': stat.S_IFSOCK,
'-': stat.S_IFREG}
_FILE_MODE_PERMS = [
stat.S_IRUSR, stat.S_IWUSR, stat.S_IXUSR,
stat.S_IRGRP, stat.S_IWGRP, stat.S_IXGRP,
stat.S_IROTH, stat.S_IWOTH, stat.S_IXOTH,
]
_FILE_MODE_SPECIAL = [
('s', stat.S_ISUID),
('s', stat.S_ISGID),
('t', stat.S_ISVTX),
]
_PS_COLUMNS = {
'pid': 1,
'ppid': 2,
'name': -1
}
_SELINUX_MODE = {
'enforcing': True,
'permissive': False,
'disabled': None
}
# Some devices require different logic for checking if root is necessary
_SPECIAL_ROOT_DEVICE_LIST = [
'marlin', # Pixel XL
'sailfish', # Pixel
'taimen', # Pixel 2 XL
'vega', # Lenovo Mirage Solo
'walleye', # Pixel 2
'crosshatch', # Pixel 3 XL
'blueline', # Pixel 3
'sargo', # Pixel 3a
'bonito', # Pixel 3a XL
'sdk_goog3_x86', # Crow emulator
]
_SPECIAL_ROOT_DEVICE_LIST += ['aosp_%s' % _d for _d in
_SPECIAL_ROOT_DEVICE_LIST]
_IMEI_RE = re.compile(r' Device ID = (.+)$')
# The following regex is used to match result parcels like:
"""
Result: Parcel(
0x00000000: 00000000 0000000f 00350033 00360033 '........3.5.3.6.'
0x00000010: 00360032 00370030 00300032 00300039 '2.6.0.7.2.0.9.0.'
0x00000020: 00380033 00000039 '3.8.9... ')
"""
_PARCEL_RESULT_RE = re.compile(
r'0x[0-9a-f]{8}\: (?:[0-9a-f]{8}\s+){1,4}\'(.{16})\'')
_EBUSY_RE = re.compile(
r'mkdir failed for ([^,]*), Device or resource busy')
# http://bit.ly/2WLZhUF added a timeout to adb wait-for-device. We sometimes
# want to wait longer than the implicit call within adb root allows.
_WAIT_FOR_DEVICE_TIMEOUT_STR = 'timeout expired while waiting for device'
_WEBVIEW_SYSUPDATE_CURRENT_PKG_RE = re.compile(
r'Current WebView package.*:.*\(([a-z.]*),')
_WEBVIEW_SYSUPDATE_NULL_PKG_RE = re.compile(
r'Current WebView package is null')
_WEBVIEW_SYSUPDATE_FALLBACK_LOGIC_RE = re.compile(
r'Fallback logic enabled: (true|false)')
_WEBVIEW_SYSUPDATE_PACKAGE_INSTALLED_RE = re.compile(
r'(?:Valid|Invalid) package\s+(\S+)\s+\(.*\),?\s+(.*)$')
_WEBVIEW_SYSUPDATE_PACKAGE_NOT_INSTALLED_RE = re.compile(
r'(\S+)\s+(is NOT installed\.)')
_WEBVIEW_SYSUPDATE_MIN_VERSION_CODE = re.compile(
r'Minimum WebView version code: (\d+)')
_GOOGLE_FEATURES_RE = re.compile(r'^\s*com\.google\.')
PS_COLUMNS = ('name', 'pid', 'ppid')
ProcessInfo = collections.namedtuple('ProcessInfo', PS_COLUMNS)
@decorators.WithExplicitTimeoutAndRetries(
    _DEFAULT_TIMEOUT, _DEFAULT_RETRIES)
def GetAVDs():
  """Returns a list of Android Virtual Devices.

  Invokes the SDK's 'android list avd' tool and scrapes the 'Name:' lines
  from its output.

  Returns:
    A list containing the configured AVDs.
  """
  android_tool = os.path.join(
      devil_env.config.LocalPath('android_sdk'), 'tools', 'android')
  output = cmd_helper.GetCmdOutput([android_tool, 'list', 'avd'])
  avds = []
  for line in output.splitlines():
    if 'Name:' not in line:
      continue
    key, _, value = line.partition(':')
    if key.strip() == 'Name':
      avds.append(value.strip())
  return avds
@decorators.WithExplicitTimeoutAndRetries(
    _DEFAULT_TIMEOUT, _DEFAULT_RETRIES)
def RestartServer():
  """Restarts the adb server.

  Kills the running server (best effort), then starts a fresh one and waits
  until it reports itself online.

  Raises:
    CommandFailedError if we fail to kill or restart the server.
  """
  adb_wrapper.AdbWrapper.KillServer()
  if not timeout_retry.WaitFor(
      lambda: not adb_wrapper.AdbWrapper.IsServerOnline(),
      wait_period=1, max_tries=5):
    # TODO(crbug.com/442319): Switch this to raise an exception if we
    # figure out why sometimes not all adb servers on bots get killed.
    logger.warning('Failed to kill adb server')
  adb_wrapper.AdbWrapper.StartServer()
  if not timeout_retry.WaitFor(
      adb_wrapper.AdbWrapper.IsServerOnline, wait_period=1, max_tries=5):
    raise device_errors.CommandFailedError('Failed to start adb server')
def _ParseModeString(mode_str):
  """Parse a mode string, e.g. 'drwxrwxrwx', into a st_mode value.

  Effectively the reverse of |mode_to_string| in, e.g.:
  https://github.com/landley/toybox/blob/master/lib/lib.c#L896

  Args:
    mode_str: A ten-character mode string as printed by 'ls -l'.

  Returns:
    An integer st_mode value (file-kind bits | permission bits).

  Raises:
    ValueError: If |mode_str| does not look like a mode string.
  """
  if not _FILE_MODE_RE.match(mode_str):
    # Interpolate with %: previously the format string and the value were
    # passed to ValueError as two separate args, so the offending mode
    # string was never rendered into the message.
    raise ValueError('Unexpected file mode %r' % mode_str)
  mode = _FILE_MODE_KIND[mode_str[0]]
  for c, flag in zip(mode_str[1:], _FILE_MODE_PERMS):
    # Lowercase chars (r/w/x/s/t) set the permission bit; '-' and uppercase
    # (S/T = special bit without execute) leave it unset.
    if c != '-' and c.islower():
      mode |= flag
  for c, (t, flag) in zip(mode_str[3::3], _FILE_MODE_SPECIAL):
    # Both cases set the special bit: 's'/'S' -> setuid/setgid,
    # 't'/'T' -> sticky.
    if c.lower() == t:
      mode |= flag
  return mode
def _GetTimeStamp():
"""Return a basic ISO 8601 time stamp with the current local time."""
return time.strftime('%Y%m%dT%H%M%S', time.localtime())
def _JoinLines(lines):
# makes sure that the last line is also terminated, and is more memory
# efficient than first appending an end-line to each line and then joining
# all of them together.
return ''.join(s for line in lines for s in (line, '\n'))
def _CreateAdbWrapper(device):
  """Coerces |device| into an AdbWrapper.

  Existing AdbWrapper instances are passed through unchanged; anything else
  (a serial string) is wrapped in a new AdbWrapper.
  """
  if isinstance(device, adb_wrapper.AdbWrapper):
    return device
  return adb_wrapper.AdbWrapper(device)
def _FormatPartialOutputError(output):
  """Formats (possibly partial) command output for use in an error message.

  Long output (more than 11 lines) is abbreviated to its first and last five
  lines with a '<snip>' marker in between.

  Args:
    output: Either a string of raw output or an iterable of output lines.

  Returns:
    A multi-line message string.
  """
  lines = output.splitlines() if isinstance(output, basestring) else output
  message = ['Partial output found:']
  if len(lines) > 11:
    message.extend('- %s' % line for line in lines[:5])
    # Must be append, not extend: extend() iterates the string and would add
    # each character of '<snip>' as its own line.
    message.append('<snip>')
    message.extend('- %s' % line for line in lines[-5:])
  else:
    message.extend('- %s' % line for line in lines)
  return '\n'.join(message)
_PushableComponents = collections.namedtuple(
'_PushableComponents', ('host', 'device', 'collapse'))
def _IterPushableComponents(host_path, device_path):
"""Yields a sequence of paths that can be pushed directly via adb push.
`adb push` doesn't currently handle pushing directories that contain
symlinks: https://bit.ly/2pMBlW5
To circumvent this issue, we get the smallest set of files and/or
directories that can be pushed without attempting to push a directory
that contains a symlink.
This function does so by recursing through |host_path|. Each call
yields 3-tuples that include the smallest set of (host, device) path pairs
that can be passed to adb push and a bool indicating whether the parent
directory can be pushed -- i.e., if True, the host path is neither a
symlink nor a directory that contains a symlink.
Args:
host_path: an absolute path of a file or directory on the host
device_path: an absolute path of a file or directory on the device
Yields:
3-tuples containing
host (str): the host path, with symlinks dereferenced
device (str): the device path
collapse (bool): whether this entity permits its parent to be pushed
in its entirety. (Parents need permission from all child entities
in order to be pushed in their entirety.)
"""
if os.path.isfile(host_path):
yield _PushableComponents(
os.path.realpath(host_path), device_path,
not os.path.islink(host_path))
else:
components = []
for child in os.listdir(host_path):
components.extend(
_IterPushableComponents(
os.path.join(host_path, child),
posixpath.join(device_path, child)))
if all(c.collapse for c in components):
yield _PushableComponents(
os.path.realpath(host_path), device_path,
not os.path.islink(host_path))
else:
for c in components:
yield c
class DeviceUtils(object):
  # Limits on a single adb shell invocation; presumably longer commands and
  # outputs are chunked by the helpers that consult these — TODO confirm.
  _MAX_ADB_COMMAND_LENGTH = 512
  _MAX_ADB_OUTPUT_LENGTH = 32768
  # Matches 'dumpsys window' output when a launcher activity has focus.
  _LAUNCHER_FOCUSED_RE = re.compile(
      r'\s*mCurrentFocus.*(Launcher|launcher).*')
  # Shell identifier: letters/digits/underscore, not starting with a digit.
  _VALID_SHELL_VARIABLE = re.compile('^[a-zA-Z_][a-zA-Z0-9_]*$')
  LOCAL_PROPERTIES_PATH = posixpath.join('/', 'data', 'local.prop')
  # Property in /data/local.prop that controls Java assertions.
  JAVA_ASSERT_PROPERTY = 'dalvik.vm.enableassertions'
  def __init__(self, device, enable_device_files_cache=False,
               default_timeout=_DEFAULT_TIMEOUT,
               default_retries=_DEFAULT_RETRIES):
    """DeviceUtils constructor.
    Args:
      device: Either a device serial, an existing AdbWrapper instance, or an
        an existing AndroidCommands instance.
      enable_device_files_cache: For PushChangedFiles(), cache checksums of
        pushed files rather than recomputing them on a subsequent call.
      default_timeout: An integer containing the default number of seconds to
        wait for an operation to complete if no explicit value is provided.
      default_retries: An integer containing the default number or times an
        operation should be retried on failure if no explicit value is provided.
    """
    self.adb = None
    if isinstance(device, basestring):
      self.adb = _CreateAdbWrapper(device)
    elif isinstance(device, adb_wrapper.AdbWrapper):
      self.adb = device
    else:
      raise ValueError('Unsupported device value: %r' % device)
    self._commands_installed = None
    self._default_timeout = default_timeout
    self._default_retries = default_retries
    self._enable_device_files_cache = enable_device_files_cache
    self._cache = {}
    self._client_caches = {}
    # RLock: cache maintenance helpers may re-enter while already holding
    # the lock — TODO confirm against ClearCache callers.
    self._cache_lock = threading.RLock()
    # The timeout/retry decorators look these attributes up by name.
    assert hasattr(self, decorators.DEFAULT_TIMEOUT_ATTR)
    assert hasattr(self, decorators.DEFAULT_RETRIES_ATTR)
    self.ClearCache()
  @property
  def serial(self):
    """Returns the device serial."""
    return self.adb.GetDeviceSerial()
  def __eq__(self, other):
    """Checks whether |other| refers to the same device as |self|.
    Args:
      other: The object to compare to. This can be a basestring, an instance
        of adb_wrapper.AdbWrapper, or an instance of DeviceUtils.
    Returns:
      Whether |other| refers to the same device as |self|.
    """
    # str() of any supported type yields the device serial (see __str__).
    return self.serial == str(other)
  def __lt__(self, other):
    """Compares two instances of DeviceUtils.
    This merely compares their serial numbers.
    Args:
      other: The instance of DeviceUtils to compare to.
    Returns:
      Whether |self| is less than |other|.
    """
    return self.serial < other.serial
  def __str__(self):
    """Returns the device serial."""
    return self.serial
@decorators.WithTimeoutAndRetriesFromInstance()
def IsOnline(self, timeout=None, retries=None):
"""Checks whether the device is online.
Args:
timeout: timeout in seconds
retries: number of retries
Returns:
True if the device is online, False otherwise.
Raises:
CommandTimeoutError on timeout.
"""
try:
return self.adb.GetState() == 'device'
except base_error.BaseError as exc:
logger.info('Failed to get state: %s', exc)
return False
  @decorators.WithTimeoutAndRetriesFromInstance()
  def HasRoot(self, timeout=None, retries=None):
    """Checks whether or not adbd has root privileges.
    A device is considered to have root if all commands are implicitly run
    with elevated privileges, i.e. without having to use "su" to run them.
    Note that some devices do not allow this implicit privilige elevation,
    but _can_ run commands as root just fine when done explicitly with "su".
    To check if your device can run commands with elevated privileges at all
    use:
      device.HasRoot() or device.NeedsSU()
    Luckily, for the most part you don't need to worry about this and using
    RunShellCommand(cmd, as_root=True) will figure out for you the right
    command incantation to run with elevated privileges.
    Args:
      timeout: timeout in seconds
      retries: number of retries
    Returns:
      True if adbd has root privileges, False otherwise.
    Raises:
      CommandTimeoutError on timeout.
      DeviceUnreachableError on missing device.
    """
    try:
      if self.build_type == 'eng':
        # 'eng' builds have root enabled by default and the adb session cannot
        # be unrooted.
        return True
      if self.product_name in _SPECIAL_ROOT_DEVICE_LIST:
        # These devices report root state via this property instead.
        return self.GetProp('service.adb.root') == '1'
      # If adbd runs as root, listing /root succeeds without su.
      self.RunShellCommand(['ls', '/root'], check_return=True)
      return True
    except device_errors.AdbCommandFailedError:
      return False
  def NeedsSU(self, timeout=DEFAULT, retries=DEFAULT):
    """Checks whether 'su' is needed to access protected resources.
    Args:
      timeout: timeout in seconds
      retries: number of retries
    Returns:
      True if 'su' is available on the device and is needed to to access
        protected resources; False otherwise if either 'su' is not available
        (e.g. because the device has a user build), or not needed (because adbd
        already has root privileges).
    Raises:
      CommandTimeoutError on timeout.
      DeviceUnreachableError on missing device.
    """
    if 'needs_su' not in self._cache:
      # Succeeds iff 'su ls /root' works while a plain 'ls /root' fails,
      # i.e. su exists AND elevation is actually required.
      cmd = '%s && ! ls /root' % self._Su('ls /root')
      if self.product_name in _SPECIAL_ROOT_DEVICE_LIST:
        if self.HasRoot():
          self._cache['needs_su'] = False
          return False
        # On these devices the /root probe is not usable; just check that
        # 'su' (and 'which' itself) exist.
        cmd = 'which which && which su'
      try:
        self.RunShellCommand(cmd, shell=True, check_return=True,
            timeout=self._default_timeout if timeout is DEFAULT else timeout,
            retries=self._default_retries if retries is DEFAULT else retries)
        self._cache['needs_su'] = True
      except device_errors.AdbCommandFailedError:
        self._cache['needs_su'] = False
    return self._cache['needs_su']
def _Su(self, command):
if self.build_version_sdk >= version_codes.MARSHMALLOW:
return 'su 0 %s' % command
return 'su -c %s' % command
  @decorators.WithTimeoutAndRetriesFromInstance()
  def EnableRoot(self, timeout=None, retries=None):
    """Restarts adbd with root privileges.
    Args:
      timeout: timeout in seconds
      retries: number of retries
    Raises:
      CommandFailedError if root could not be enabled.
      CommandTimeoutError on timeout.
    """
    # Root state is about to change, so the cached 'needs_su' probe result
    # is no longer valid.
    if 'needs_su' in self._cache:
      del self._cache['needs_su']
    try:
      self.adb.Root()
    except device_errors.AdbCommandFailedError as e:
      if self.IsUserBuild():
        raise device_errors.CommandFailedError(
            'Unable to root device with user build.', str(self))
      elif e.output and _WAIT_FOR_DEVICE_TIMEOUT_STR in e.output:
        # adb 1.0.41 added a call to wait-for-device *inside* root
        # with a timeout that can be too short in some cases.
        # If we hit that timeout, ignore it & do our own wait below.
        pass
      else:
        raise  # Failed probably due to some other reason.
    def device_online_with_root():
      try:
        self.adb.WaitForDevice()
        return self.HasRoot()
      except (device_errors.AdbCommandFailedError,
              device_errors.DeviceUnreachableError):
        return False
    timeout_retry.WaitFor(device_online_with_root, wait_period=1)
  @decorators.WithTimeoutAndRetriesFromInstance()
  def IsUserBuild(self, timeout=None, retries=None):
    """Checks whether or not the device is running a user build.
    Args:
      timeout: timeout in seconds
      retries: number of retries
    Returns:
      True if the device is running a user build, False otherwise (i.e. if
        it's running a userdebug build).
    Raises:
      CommandTimeoutError on timeout.
      DeviceUnreachableError on missing device.
    """
    return self.build_type == 'user'
  @decorators.WithTimeoutAndRetriesFromInstance()
  def GetExternalStoragePath(self, timeout=None, retries=None):
    """Get the device's path to its SD card.
    Args:
      timeout: timeout in seconds
      retries: number of retries
    Returns:
      The device's path to its SD card.
    Raises:
      CommandFailedError if the external storage path could not be determined.
      CommandTimeoutError on timeout.
      DeviceUnreachableError on missing device.
    """
    # 'external_storage' is populated by _EnsureCacheInitialized().
    self._EnsureCacheInitialized()
    if not self._cache['external_storage']:
      raise device_errors.CommandFailedError('$EXTERNAL_STORAGE is not set',
                                             str(self))
    return self._cache['external_storage']
  @decorators.WithTimeoutAndRetriesFromInstance()
  def GetIMEI(self, timeout=None, retries=None):
    """Get the device's IMEI.
    Args:
      timeout: timeout in seconds
      retries: number of retries
    Returns:
      The device's IMEI.
    Raises:
      AdbCommandFailedError on error
    """
    if self._cache.get('imei') is not None:
      return self._cache.get('imei')
    if self.build_version_sdk < 21:
      # Pre-Lollipop: dumpsys prints the IMEI on a plain 'Device ID =' line.
      out = self.RunShellCommand(['dumpsys', 'iphonesubinfo'],
                                 raw_output=True, check_return=True)
      if out:
        match = re.search(_IMEI_RE, out)
        if match:
          self._cache['imei'] = match.group(1)
          return self._cache['imei']
    else:
      # Lollipop+: read the binder parcel hex dump and stitch the IMEI
      # digits back together from its ASCII column.
      out = self.RunShellCommand(['service', 'call', 'iphonesubinfo', '1'],
                                 check_return=True)
      if out:
        imei = ''
        for line in out:
          match = re.search(_PARCEL_RESULT_RE, line)
          if match:
            imei = imei + match.group(1)
        # The parcel ASCII column pads with '.' between characters.
        imei = imei.replace('.', '').strip()
        if imei:
          self._cache['imei'] = imei
          return self._cache['imei']
    raise device_errors.CommandFailedError('Unable to fetch IMEI.')
@decorators.WithTimeoutAndRetriesFromInstance()
def IsApplicationInstalled(self, package, timeout=None, retries=None):
"""Determines whether a particular package is installed on the device.
Args:
package: Name of the package.
Returns:
True if the application is installed, False otherwise.
"""
# `pm list packages` allows matching substrings, but we want exact matches
# only.
matching_packages = self.RunShellCommand(
['pm', 'list', 'packages', package], check_return=True)
desired_line = 'package:' + package
return desired_line in matching_packages
  @decorators.WithTimeoutAndRetriesFromInstance()
  def GetApplicationPaths(self, package, timeout=None, retries=None):
    """Get the paths of the installed apks on the device for the given package.
    Args:
      package: Name of the package.
    Returns:
      List of paths to the apks on the device for the given package.
    """
    # Thin timeout/retry wrapper; the real work (and caching) lives in
    # _GetApplicationPathsInternal.
    return self._GetApplicationPathsInternal(package)
  def _GetApplicationPathsInternal(self, package, skip_cache=False):
    """Returns the on-device apk paths for |package|, with optional caching.
    Args:
      package: Name of the package.
      skip_cache: If True, always query the device via 'pm path'.
    """
    cached_result = self._cache['package_apk_paths'].get(package)
    if cached_result is not None and not skip_cache:
      if package in self._cache['package_apk_paths_to_verify']:
        self._cache['package_apk_paths_to_verify'].remove(package)
        # Don't verify an app that is not thought to be installed. We are
        # concerned only with apps we think are installed having been
        # uninstalled manually.
        if cached_result and not self.PathExists(cached_result):
          cached_result = None
          self._cache['package_apk_checksums'].pop(package, 0)
      if cached_result is not None:
        # Return a copy so callers cannot mutate the cached list.
        return list(cached_result)
    # 'pm path' is liable to incorrectly exit with a nonzero number starting
    # in Lollipop.
    # TODO(jbudorick): Check if this is fixed as new Android versions are
    # released to put an upper bound on this.
    should_check_return = (self.build_version_sdk < version_codes.LOLLIPOP)
    output = self.RunShellCommand(
        ['pm', 'path', package], check_return=should_check_return)
    apks = []
    bad_output = False
    for line in output:
      if line.startswith('package:'):
        apks.append(line[len('package:'):])
      elif line.startswith('WARNING:'):
        continue
      else:
        bad_output = True  # Unexpected line in output.
    if not apks and output:
      if bad_output:
        raise device_errors.CommandFailedError(
            'Unexpected pm path output: %r' % '\n'.join(output), str(self))
      else:
        logger.warning('pm returned no paths but the following warnings:')
        for line in output:
          logger.warning('- %s', line)
    self._cache['package_apk_paths'][package] = list(apks)
    return apks
@decorators.WithTimeoutAndRetriesFromInstance()
def GetApplicationVersion(self, package, timeout=None, retries=None):
"""Get the version name of a package installed on the device.
Args:
package: Name of the package.
Returns:
A string with the version name or None if the package is not found
on the device.
"""
output = self.RunShellCommand(
['dumpsys', 'package', package], check_return=True)
if not output:
return None
for line in output:
line = line.strip()
if line.startswith('versionName='):
return line[len('versionName='):]
raise device_errors.CommandFailedError(
'Version name for %s not found on dumpsys output' % package, str(self))
@decorators.WithTimeoutAndRetriesFromInstance()
def GetPackageArchitecture(self, package, timeout=None, retries=None):
"""Get the architecture of a package installed on the device.
Args:
package: Name of the package.
Returns:
A string with the architecture, or None if the package is missing.
"""
lines = self._GetDumpsysOutput(['package', package], 'primaryCpuAbi')
if lines:
_, _, package_arch = lines[-1].partition('=')
return package_arch.strip()
return None
@decorators.WithTimeoutAndRetriesFromInstance()
def GetApplicationDataDirectory(self, package, timeout=None, retries=None):
"""Get the data directory on the device for the given package.
Args:
package: Name of the package.
Returns:
The package's data directory.
Raises:
CommandFailedError if the package's data directory can't be found,
whether because it's not installed or otherwise.
"""
output = self._RunPipedShellCommand(
'pm dump %s | grep dataDir=' % cmd_helper.SingleQuote(package))
for line in output:
_, _, dataDir = line.partition('dataDir=')
if dataDir:
return dataDir
raise device_errors.CommandFailedError(
'Could not find data directory for %s', package)
  @decorators.WithTimeoutAndRetriesFromInstance()
  def GetSecurityContextForPackage(self, package, encrypted=False, timeout=None,
                                   retries=None):
    """Gets the SELinux security context for the given package.
    Args:
      package: Name of the package.
      encrypted: Whether to check in the encrypted data directory
        (/data/user_de/0/) or the unencrypted data directory (/data/data/).
    Returns:
      The package's security context as a string, or None if not found.
    """
    directory = '/data/user_de/0/' if encrypted else '/data/data/'
    for line in self.RunShellCommand(['ls', '-Z', directory],
                                     as_root=True, check_return=True):
      split_line = line.split()
      # ls -Z output differs between Android versions, but the package is
      # always last and the context always starts with "u:object"
      if split_line[-1] == package:
        for column in split_line:
          if column.startswith('u:object'):
            return column
    return None
  def TakeBugReport(self, path, timeout=60*5, retries=None):
    """Takes a bug report and dumps it to the specified path.
    This doesn't use adb's bugreport option since its behavior is dependent on
    both adb version and device OS version. To make it simpler, this directly
    runs the bugreport command on the device itself and dumps the stdout to a
    file.
    Args:
      path: Path on the host to drop the bug report.
      timeout: (optional) Timeout per try in seconds.
      retries: (optional) Number of retries to attempt.
    """
    with device_temp_file.DeviceTempFile(self.adb) as device_tmp_file:
      # Capture both stdout and stderr of the on-device bugreport into a
      # device temp file, then pull that file to the host.
      cmd = '( bugreport )>%s 2>&1' % device_tmp_file.name
      self.RunShellCommand(
          cmd, check_return=True, shell=True, timeout=timeout, retries=retries)
      self.PullFile(device_tmp_file.name, path)
  @decorators.WithTimeoutAndRetriesFromInstance()
  def WaitUntilFullyBooted(self, wifi=False, timeout=None, retries=None):
    """Wait for the device to fully boot.
    This means waiting for the device to boot, the package manager to be
    available, and the SD card to be ready. It can optionally mean waiting
    for wifi to come up, too.
    Args:
      wifi: A boolean indicating if we should wait for wifi to come up or not.
      timeout: timeout in seconds
      retries: number of retries
    Raises:
      CommandFailedError on failure.
      CommandTimeoutError if one of the component waits times out.
      DeviceUnreachableError if the device becomes unresponsive.
    """
    def sd_card_ready():
      try:
        self.RunShellCommand(['test', '-d', self.GetExternalStoragePath()],
                             check_return=True)
        return True
      except device_errors.AdbCommandFailedError:
        return False
    def pm_ready():
      try:
        # Any successful 'pm path android' response means the package
        # manager is answering queries.
        return self._GetApplicationPathsInternal('android', skip_cache=True)
      except device_errors.CommandFailedError:
        return False
    def boot_completed():
      try:
        return self.GetProp('sys.boot_completed', cache=False) == '1'
      except device_errors.CommandFailedError:
        return False
    def wifi_enabled():
      return 'Wi-Fi is enabled' in self.RunShellCommand(['dumpsys', 'wifi'],
                                                        check_return=False)
    self.adb.WaitForDevice()
    timeout_retry.WaitFor(sd_card_ready)
    timeout_retry.WaitFor(pm_ready)
    timeout_retry.WaitFor(boot_completed)
    if wifi:
      timeout_retry.WaitFor(wifi_enabled)
  REBOOT_DEFAULT_TIMEOUT = 10 * _DEFAULT_TIMEOUT
  @decorators.WithTimeoutAndRetriesFromInstance(
      min_default_timeout=REBOOT_DEFAULT_TIMEOUT)
  def Reboot(self, block=True, wifi=False, timeout=None, retries=None):
    """Reboot the device.
    Args:
      block: A boolean indicating if we should wait for the reboot to complete.
      wifi: A boolean indicating if we should wait for wifi to be enabled after
        the reboot. The option has no effect unless |block| is also True.
      timeout: timeout in seconds
      retries: number of retries
    Raises:
      CommandTimeoutError on timeout.
      DeviceUnreachableError on missing device.
    """
    def device_offline():
      return not self.IsOnline()
    self.adb.Reboot()
    # Cached device state (root status, paths, props) is stale after reboot.
    self.ClearCache()
    # Wait for the device to actually go down before (optionally) waiting
    # for it to come back up, so a fast reboot isn't mistaken for "done".
    timeout_retry.WaitFor(device_offline, wait_period=1)
    if block:
      self.WaitUntilFullyBooted(wifi=wifi)
  INSTALL_DEFAULT_TIMEOUT = 8 * _DEFAULT_TIMEOUT
  # On-device directory where fake-module apks are pushed (SplitCompat).
  MODULES_SRC_DIRECTORY_PATH = '/data/local/tmp/modules'
  @decorators.WithTimeoutAndRetriesFromInstance(
      min_default_timeout=INSTALL_DEFAULT_TIMEOUT)
  def Install(self, apk, allow_downgrade=False, reinstall=False,
              permissions=None, timeout=None, retries=None, modules=None,
              fake_modules=None):
    """Install an APK or app bundle.
    Noop if an identical APK is already installed. If installing a bundle, the
    bundletools helper script (bin/*_bundle) should be used rather than the .aab
    file.
    Args:
      apk: An ApkHelper instance or string containing the path to the APK or
        bundle.
      allow_downgrade: A boolean indicating if we should allow downgrades.
      reinstall: A boolean indicating if we should keep any existing app data.
        Ignored if |apk| is a bundle.
      permissions: Set of permissions to set. If not set, finds permissions with
        apk helper. To set no permissions, pass [].
      timeout: timeout in seconds
      retries: number of retries
      modules: An iterable containing specific bundle modules to install.
        Error if set and |apk| points to an APK instead of a bundle.
      fake_modules: An iterable containing specific bundle modules that
        should have their apks copied to |MODULES_SRC_DIRECTORY_PATH| rather
        than installed. Thus the app can emulate SplitCompat while running.
        This should not have any overlap with |modules|.
    Raises:
      CommandFailedError if the installation fails.
      CommandTimeoutError if the installation times out.
      DeviceUnreachableError on missing device.
    """
    apk = apk_helper.ToHelper(apk)
    modules_set = set(modules or [])
    fake_modules_set = set(fake_modules or [])
    assert modules_set.isdisjoint(fake_modules_set), (
        'These modules overlap: %s' % (modules_set & fake_modules_set))
    all_modules = modules_set | fake_modules_set
    with apk.GetApkPaths(self, modules=all_modules) as apk_paths:
      # Split the extracted apks into those to really install and those to
      # merely push for SplitCompat emulation.
      fake_apk_paths = self._GetFakeInstallPaths(apk_paths, fake_modules)
      apk_paths_to_install = [p for p in apk_paths if p not in fake_apk_paths]
      self._FakeInstall(fake_apk_paths, fake_modules)
      self._InstallInternal(
          apk,
          apk_paths_to_install,
          allow_downgrade=allow_downgrade,
          reinstall=reinstall,
          permissions=permissions)
@staticmethod
def _GetFakeInstallPaths(apk_paths, fake_modules):
def IsFakeModulePath(path):
filename = os.path.basename(path)
return any(filename.startswith(f + '-') for f in fake_modules)
if not fake_modules:
return set()
return set(p for p in apk_paths if IsFakeModulePath(p))
  def _FakeInstall(self, fake_apk_paths, fake_modules):
    """Pushes fake-module apks to the device instead of installing them.
    Renames each split to the layout expected under
    MODULES_SRC_DIRECTORY_PATH and pushes the whole staging directory.
    """
    with tempfile_ext.NamedTemporaryDirectory() as modules_dir:
      if not fake_modules:
        # Push empty module dir to clear device dir and update the cache.
        self.PushChangedFiles([(modules_dir, self.MODULES_SRC_DIRECTORY_PATH)],
                              delete_device_stale=True)
        return
      still_need_master = set(fake_modules)
      for path in fake_apk_paths:
        filename = os.path.basename(path)
        # Example names: base-en.apk, test_dummy-master.apk.
        module_name, suffix = filename.split('-', 1)
        if 'master' in suffix:
          assert module_name in still_need_master, (
              'Duplicate master apk file for %s' % module_name)
          still_need_master.remove(module_name)
          new_filename = '%s.apk' % module_name
        else:
          # |suffix| includes .apk extension.
          new_filename = '%s.config.%s' % (module_name, suffix)
        new_path = os.path.join(modules_dir, new_filename)
        os.rename(path, new_path)
      assert not still_need_master, (
          'Missing master apk file for %s' % still_need_master)
      self.PushChangedFiles([(modules_dir, self.MODULES_SRC_DIRECTORY_PATH)],
                            delete_device_stale=True)
  @decorators.WithTimeoutAndRetriesFromInstance(
      min_default_timeout=INSTALL_DEFAULT_TIMEOUT)
  def InstallSplitApk(self, base_apk, split_apks, allow_downgrade=False,
                      reinstall=False, allow_cached_props=False,
                      permissions=None, timeout=None, retries=None):
    """Install a split APK.
    Noop if all of the APK splits are already installed.
    Args:
      base_apk: An ApkHelper instance or string containing the path to the base
        APK.
      split_apks: A list of strings of paths of all of the APK splits.
      allow_downgrade: A boolean indicating if we should allow downgrades.
      reinstall: A boolean indicating if we should keep any existing app data.
      allow_cached_props: Whether to use cached values for device properties.
      permissions: Set of permissions to set. If not set, finds permissions with
        apk helper. To set no permissions, pass [].
      timeout: timeout in seconds
      retries: number of retries
    Raises:
      CommandFailedError if the installation fails.
      CommandTimeoutError if the installation times out.
      DeviceUnreachableError on missing device.
      DeviceVersionError if device SDK is less than Android L.
    """
    apk = apk_helper.ToSplitHelper(base_apk, split_apks)
    with apk.GetApkPaths(
        self, allow_cached_props=allow_cached_props) as apk_paths:
      self._InstallInternal(
          apk,
          apk_paths,
          reinstall=reinstall,
          permissions=permissions,
          allow_downgrade=allow_downgrade)
  def _InstallInternal(self,
                       apk,
                       apk_paths,
                       allow_downgrade=False,
                       reinstall=False,
                       permissions=None):
    """Shared implementation behind the install entry points.

    Decides which of |apk_paths| actually need to be (re)installed by
    comparing md5 checksums against what is on the device, performs the
    install (single or multiple apks), and grants permissions.

    Args:
      apk: Helper object for the app being installed (provides GetPackageName
          and GetPermissions).
      apk_paths: List of host paths of the apk (and any splits) to install.
      allow_downgrade: A boolean indicating if we should allow downgrades.
      reinstall: A boolean indicating if we should keep any existing app data.
      permissions: Set of permissions to set; None means auto-detect from apk
          on M+ devices.

    Raises:
      CommandFailedError if no apks were given or some do not exist on disk.
      DeviceVersionError if multiple apks are used on a pre-Lollipop device.
    """
    if not apk_paths:
      raise device_errors.CommandFailedError('Did not get any APKs to install')

    # Multiple apks (splits) require the split-apk support added in Android L.
    if len(apk_paths) > 1:
      self._CheckSdkLevel(version_codes.LOLLIPOP)

    missing_apks = [a for a in apk_paths if not os.path.exists(a)]
    if missing_apks:
      raise device_errors.CommandFailedError(
          'Attempted to install non-existent apks: %s'
          % pprint.pformat(missing_apks))

    package_name = apk.GetPackageName()
    device_apk_paths = self._GetApplicationPathsInternal(package_name)

    host_checksums = None
    if not device_apk_paths:
      # App not installed at all: push everything.
      apks_to_install = apk_paths
    elif len(device_apk_paths) > 1 and len(apk_paths) == 1:
      logger.warning(
          'Installing non-split APK when split APK was previously installed')
      apks_to_install = apk_paths
    elif len(device_apk_paths) == 1 and len(apk_paths) > 1:
      logger.warning(
          'Installing split APK when non-split APK was previously installed')
      apks_to_install = apk_paths
    else:
      try:
        # Only install apks whose checksums differ from those on the device.
        apks_to_install, host_checksums = (
            self._ComputeStaleApks(package_name, apk_paths))
      except EnvironmentError as e:
        logger.warning('Error calculating md5: %s', e)
        apks_to_install, host_checksums = apk_paths, None
      if apks_to_install and not reinstall:
        # A clean (non-reinstall) install replaces the whole app, so every
        # apk must be pushed, not just the stale ones.
        apks_to_install = apk_paths

    if device_apk_paths and apks_to_install and not reinstall:
      self.Uninstall(package_name)

    if apks_to_install:
      # Assume that we won't know the resulting device state.
      self._cache['package_apk_paths'].pop(package_name, 0)
      self._cache['package_apk_checksums'].pop(package_name, 0)
      partial = package_name if len(apks_to_install) < len(apk_paths) else None
      if len(apks_to_install) > 1 or partial:
        self.adb.InstallMultiple(
            apks_to_install, partial=partial, reinstall=reinstall,
            allow_downgrade=allow_downgrade)
      else:
        self.adb.Install(
            apks_to_install[0],
            reinstall=reinstall,
            allow_downgrade=allow_downgrade)
    else:
      # Running adb install terminates running instances of the app, so to be
      # consistent, we explicitly terminate it when skipping the install.
      self.ForceStop(package_name)

    if (permissions is None
        and self.build_version_sdk >= version_codes.MARSHMALLOW):
      # M+ uses runtime permissions; grant everything declared by the apk.
      permissions = apk.GetPermissions()
    self.GrantPermissions(package_name, permissions)
    # Upon success, we know the device checksums, but not their paths.
    if host_checksums is not None:
      self._cache['package_apk_checksums'][package_name] = host_checksums
@decorators.WithTimeoutAndRetriesFromInstance()
def Uninstall(self, package_name, keep_data=False, timeout=None,
retries=None):
"""Remove the app |package_name| from the device.
This is a no-op if the app is not already installed.
Args:
package_name: The package to uninstall.
keep_data: (optional) Whether to keep the data and cache directories.
timeout: Timeout in seconds.
retries: Number of retries.
Raises:
CommandFailedError if the uninstallation fails.
CommandTimeoutError if the uninstallation times out.
DeviceUnreachableError on missing device.
"""
installed = self._GetApplicationPathsInternal(package_name)
if not installed:
return
# cached package paths are indeterminate due to system apps taking over
# user apps after uninstall, so clear it
self._cache['package_apk_paths'].pop(package_name, 0)
self._cache['package_apk_checksums'].pop(package_name, 0)
self.adb.Uninstall(package_name, keep_data)
def _CheckSdkLevel(self, required_sdk_level):
"""Raises an exception if the device does not have the required SDK level.
"""
if self.build_version_sdk < required_sdk_level:
raise device_errors.DeviceVersionError(
('Requires SDK level %s, device is SDK level %s' %
(required_sdk_level, self.build_version_sdk)),
device_serial=self.serial)
  @decorators.WithTimeoutAndRetriesFromInstance()
  def RunShellCommand(self, cmd, shell=False, check_return=False, cwd=None,
                      env=None, run_as=None, as_root=False, single_line=False,
                      large_output=False, raw_output=False, timeout=None,
                      retries=None):
    """Run an ADB shell command.

    The command to run |cmd| should be a sequence of program arguments
    (preferred) or a single string with a shell script to run.

    When |cmd| is a sequence, it is assumed to contain the name of the command
    to run followed by its arguments. In this case, arguments are passed to the
    command exactly as given, preventing any further processing by the shell.
    This allows callers to easily pass arguments with spaces or special
    characters without having to worry about quoting rules. Whenever possible,
    it is recomended to pass |cmd| as a sequence.

    When |cmd| is passed as a single string, |shell| should be set to True.
    The command will be interpreted and run by the shell on the device,
    allowing the use of shell features such as pipes, wildcards, or variables.
    Failing to set shell=True will issue a warning, but this will be changed
    to a hard failure in the future (see: catapult:#3242).

    This behaviour is consistent with that of command runners in cmd_helper as
    well as Python's own subprocess.Popen.

    TODO(crbug.com/1029769) Change the default of |check_return| to True when
    callers have switched to the new behaviour.

    Args:
      cmd: A sequence containing the command to run and its arguments, or a
        string with a shell script to run (should also set shell=True).
      shell: A boolean indicating whether shell features may be used in |cmd|.
      check_return: A boolean indicating whether or not the return code should
        be checked.
      cwd: The device directory in which the command should be run.
      env: The environment variables with which the command should be run.
      run_as: A string containing the package as which the command should be
        run.
      as_root: A boolean indicating whether the shell command should be run
        with root privileges.
      single_line: A boolean indicating if only a single line of output is
        expected.
      large_output: Uses a work-around for large shell command output. Without
        this large output will be truncated.
      raw_output: Whether to only return the raw output
          (no splitting into lines).
      timeout: timeout in seconds
      retries: number of retries

    Returns:
      If single_line is False, the output of the command as a list of lines,
      otherwise, a string with the unique line of output emmited by the command
      (with the optional newline at the end stripped).

    Raises:
      AdbCommandFailedError if check_return is True and the exit code of
        the command run on the device is non-zero.
      CommandFailedError if single_line is True but the output contains two or
        more lines.
      CommandTimeoutError on timeout.
      DeviceUnreachableError on missing device.
    """

    # Renders one KEY=VALUE pair for prefixing onto the shell command line.
    def env_quote(key, value):
      if not DeviceUtils._VALID_SHELL_VARIABLE.match(key):
        raise KeyError('Invalid shell variable name %r' % key)
      # using double quotes here to allow interpolation of shell variables
      return '%s=%s' % (key, cmd_helper.DoubleQuote(value))

    def run(cmd):
      return self.adb.Shell(cmd)

    # Runs |cmd|; when check_return is False, command failures are swallowed
    # and the command's output is returned instead of raising.
    def handle_check_return(cmd):
      try:
        return run(cmd)
      except device_errors.AdbCommandFailedError as exc:
        if check_return:
          raise
        else:
          return exc.output

    # Commands longer than _MAX_ADB_COMMAND_LENGTH are pushed to the device
    # as a temporary script and run with `sh` to avoid adb length limits.
    def handle_large_command(cmd):
      if len(cmd) < self._MAX_ADB_COMMAND_LENGTH:
        return handle_check_return(cmd)
      else:
        with device_temp_file.DeviceTempFile(self.adb, suffix='.sh') as script:
          self._WriteFileWithPush(script.name, cmd)
          logger.info('Large shell command will be run from file: %s ...',
                      cmd[:self._MAX_ADB_COMMAND_LENGTH])
          return handle_check_return('sh %s' % script.name_quoted)

    # In large-output mode, redirect the command's stdout+stderr to a device
    # temp file and pull the file back, working around adb output truncation.
    def handle_large_output(cmd, large_output_mode):
      if large_output_mode:
        with device_temp_file.DeviceTempFile(self.adb) as large_output_file:
          large_output_cmd = '( %s )>%s 2>&1' % (cmd, large_output_file.name)
          logger.debug('Large output mode enabled. Will write output to '
                       'device and read results from file.')
          try:
            handle_large_command(large_output_cmd)
            return self.ReadFile(large_output_file.name, force_pull=True)
          except device_errors.AdbShellCommandFailedError as exc:
            # Re-raise with the output captured in the file so callers see
            # what the failing command printed.
            output = self.ReadFile(large_output_file.name, force_pull=True)
            raise device_errors.AdbShellCommandFailedError(
                cmd, output, exc.status, exc.device_serial)
      else:
        try:
          return handle_large_command(cmd)
        except device_errors.AdbCommandFailedError as exc:
          if exc.status is None:
            # A missing status suggests truncated/parse-failed output; retry
            # once in large-output mode.
            logger.error(_FormatPartialOutputError(exc.output))
            logger.warning('Attempting to run in large_output mode.')
            logger.warning('Use RunShellCommand(..., large_output=True) for '
                           'shell commands that expect a lot of output.')
            return handle_large_output(cmd, True)
          else:
            raise

    if isinstance(cmd, basestring):
      if not shell:
        # TODO(crbug.com/1029769): Make this an error instead.
        logger.warning(
            'The command to run should preferably be passed as a sequence of'
            ' args. If shell features are needed (pipes, wildcards, variables)'
            ' clients should explicitly set shell=True.')
    else:
      # Quote each arg so the device shell performs no further processing.
      cmd = ' '.join(cmd_helper.SingleQuote(s) for s in cmd)
    if env:
      env = ' '.join(env_quote(k, v) for k, v in env.iteritems())
      cmd = '%s %s' % (env, cmd)
    if cwd:
      cmd = 'cd %s && %s' % (cmd_helper.SingleQuote(cwd), cmd)
    if run_as:
      cmd = 'run-as %s sh -c %s' % (cmd_helper.SingleQuote(run_as),
                                    cmd_helper.SingleQuote(cmd))
    if (as_root is _FORCE_SU) or (as_root and self.NeedsSU()):
      # "su -c sh -c" allows using shell features in |cmd|
      cmd = self._Su('sh -c %s' % cmd_helper.SingleQuote(cmd))

    output = handle_large_output(cmd, large_output)

    if raw_output:
      return output

    output = output.splitlines()
    if single_line:
      if not output:
        return ''
      elif len(output) == 1:
        return output[0]
      else:
        msg = 'one line of output was expected, but got: %s'
        raise device_errors.CommandFailedError(msg % output, str(self))
    else:
      return output
  def _RunPipedShellCommand(self, script, **kwargs):
    """Runs a shell script containing pipes, checking every pipeline stage.

    A plain shell only reports the exit status of the last command in a
    pipeline, so the script is extended to echo the PIPESTATUS array and the
    output is post-processed to verify every stage exited with status 0.

    Args:
      script: A string with the shell script to run; may contain pipes.
      **kwargs: Forwarded to RunShellCommand (shell and check_return are
          forced to True).

    Returns:
      The script's output as a list of lines, with the trailing PIPESTATUS
      line removed.

    Raises:
      AdbShellCommandFailedError if the PIPESTATUS line is missing, or if any
      pipeline stage exited with a non-zero status.
    """
    PIPESTATUS_LEADER = 'PIPESTATUS: '

    script += '; echo "%s${PIPESTATUS[@]}"' % PIPESTATUS_LEADER
    kwargs.update(shell=True, check_return=True)
    output = self.RunShellCommand(script, **kwargs)
    pipestatus_line = output[-1]

    if not pipestatus_line.startswith(PIPESTATUS_LEADER):
      logger.error('Pipe exit statuses of shell script missing.')
      raise device_errors.AdbShellCommandFailedError(
          script, output, status=None,
          device_serial=self.serial)

    output = output[:-1]
    statuses = [
        int(s) for s in pipestatus_line[len(PIPESTATUS_LEADER):].split()]
    if any(statuses):
      raise device_errors.AdbShellCommandFailedError(
          script, output, status=statuses,
          device_serial=self.serial)
    return output
@decorators.WithTimeoutAndRetriesFromInstance()
def KillAll(self, process_name, exact=False, signum=device_signal.SIGKILL,
as_root=False, blocking=False, quiet=False,
timeout=None, retries=None):
"""Kill all processes with the given name on the device.
Args:
process_name: A string containing the name of the process to kill.
exact: A boolean indicating whether to kill all processes matching
the string |process_name| exactly, or all of those which contain
|process_name| as a substring. Defaults to False.
signum: An integer containing the signal number to send to kill. Defaults
to SIGKILL (9).
as_root: A boolean indicating whether the kill should be executed with
root privileges.
blocking: A boolean indicating whether we should wait until all processes
with the given |process_name| are dead.
quiet: A boolean indicating whether to ignore the fact that no processes
to kill were found.
timeout: timeout in seconds
retries: number of retries
Returns:
The number of processes attempted to kill.
Raises:
CommandFailedError if no process was killed and |quiet| is False.
CommandTimeoutError on timeout.
DeviceUnreachableError on missing device.
"""
processes = self.ListProcesses(process_name)
if exact:
processes = [p for p in processes if p.name == process_name]
if not processes:
if quiet:
return 0
else:
raise device_errors.CommandFailedError(
'No processes matching %r (exact=%r)' % (process_name, exact),
str(self))
logger.info(
'KillAll(%r, ...) attempting to kill the following:', process_name)
for p in processes:
logger.info(' %05d %s', p.pid, p.name)
pids = set(p.pid for p in processes)
cmd = ['kill', '-%d' % signum] + sorted(str(p) for p in pids)
self.RunShellCommand(cmd, as_root=as_root, check_return=True)
def all_pids_killed():
pids_left = (p.pid for p in self.ListProcesses(process_name))
return not pids.intersection(pids_left)
if blocking:
timeout_retry.WaitFor(all_pids_killed, wait_period=0.1)
return len(pids)
@decorators.WithTimeoutAndRetriesFromInstance()
def StartActivity(self, intent_obj, blocking=False, trace_file_name=None,
force_stop=False, timeout=None, retries=None):
"""Start package's activity on the device.
Args:
intent_obj: An Intent object to send.
blocking: A boolean indicating whether we should wait for the activity to
finish launching.
trace_file_name: If present, a string that both indicates that we want to
profile the activity and contains the path to which the
trace should be saved.
force_stop: A boolean indicating whether we should stop the activity
before starting it.
timeout: timeout in seconds
retries: number of retries
Raises:
CommandFailedError if the activity could not be started.
CommandTimeoutError on timeout.
DeviceUnreachableError on missing device.
"""
cmd = ['am', 'start']
if blocking:
cmd.append('-W')
if trace_file_name:
cmd.extend(['--start-profiler', trace_file_name])
if force_stop:
cmd.append('-S')
cmd.extend(intent_obj.am_args)
for line in self.RunShellCommand(cmd, check_return=True):
if line.startswith('Error:'):
raise device_errors.CommandFailedError(line, str(self))
@decorators.WithTimeoutAndRetriesFromInstance()
def StartService(self, intent_obj, user_id=None, timeout=None, retries=None):
"""Start a service on the device.
Args:
intent_obj: An Intent object to send describing the service to start.
user_id: A specific user to start the service as, defaults to current.
timeout: Timeout in seconds.
retries: Number of retries
Raises:
CommandFailedError if the service could not be started.
CommandTimeoutError on timeout.
DeviceUnreachableError on missing device.
"""
# For whatever reason, startservice was changed to start-service on O and
# above.
cmd = ['am', 'startservice']
if self.build_version_sdk >= version_codes.OREO:
cmd[1] = 'start-service'
if user_id:
cmd.extend(['--user', str(user_id)])
cmd.extend(intent_obj.am_args)
for line in self.RunShellCommand(cmd, check_return=True):
if line.startswith('Error:'):
raise device_errors.CommandFailedError(line, str(self))
  @decorators.WithTimeoutAndRetriesFromInstance()
  def StartInstrumentation(self, component, finish=True, raw=False,
                           extras=None, timeout=None, retries=None):
    """Starts an instrumentation on the device via `am instrument`.

    Args:
      component: A string of the form 'package/runner' identifying the
          instrumentation component to start.
      finish: Whether to wait for the instrumentation to finish (-w).
      raw: Whether to request raw output format (-r).
      extras: Optional dict of extra key/value pairs passed with -e.
      timeout: timeout in seconds
      retries: number of retries

    Returns:
      The command's output as a list of lines (run in large_output mode).
    """
    if extras is None:
      extras = {}

    cmd = ['am', 'instrument']
    if finish:
      cmd.append('-w')
    if raw:
      cmd.append('-r')
    for k, v in extras.iteritems():
      cmd.extend(['-e', str(k), str(v)])
    cmd.append(component)

    # Store the package name in a shell variable to help the command stay under
    # the _MAX_ADB_COMMAND_LENGTH limit.
    package = component.split('/')[0]
    shell_snippet = 'p=%s;%s' % (package,
                                 cmd_helper.ShrinkToSnippet(cmd, 'p', package))
    return self.RunShellCommand(shell_snippet, shell=True, check_return=True,
                                large_output=True)
  @decorators.WithTimeoutAndRetriesFromInstance()
  def BroadcastIntent(self, intent_obj, timeout=None, retries=None):
    """Send a broadcast intent.

    Args:
      intent_obj: An Intent to broadcast.
      timeout: timeout in seconds
      retries: number of retries

    Raises:
      CommandTimeoutError on timeout.
      DeviceUnreachableError on missing device.
    """
    cmd = ['am', 'broadcast'] + intent_obj.am_args
    self.RunShellCommand(cmd, check_return=True)
  @decorators.WithTimeoutAndRetriesFromInstance()
  def GoHome(self, timeout=None, retries=None):
    """Return to the home screen and obtain launcher focus.

    This command launches the home screen and attempts to obtain
    launcher focus until the timeout is reached.

    Args:
      timeout: timeout in seconds
      retries: number of retries

    Raises:
      CommandTimeoutError on timeout.
      DeviceUnreachableError on missing device.
    """
    def is_launcher_focused():
      # Scan the window manager dump for a line matching the focused-launcher
      # pattern.
      output = self.RunShellCommand(['dumpsys', 'window', 'windows'],
                                    check_return=True, large_output=True)
      return any(self._LAUNCHER_FOCUSED_RE.match(l) for l in output)

    def dismiss_popups():
      # There is a dialog present; attempt to get rid of it.
      # Not all dialogs can be dismissed with back.
      self.SendKeyEvent(keyevent.KEYCODE_ENTER)
      self.SendKeyEvent(keyevent.KEYCODE_BACK)
      return is_launcher_focused()

    # If Home is already focused, return early to avoid unnecessary work.
    if is_launcher_focused():
      return

    self.StartActivity(
        intent.Intent(action='android.intent.action.MAIN',
                      category='android.intent.category.HOME'),
        blocking=True)

    if not is_launcher_focused():
      # Launch succeeded but something (e.g. a dialog) holds focus; keep
      # trying to dismiss it until focus is obtained or the timeout fires.
      timeout_retry.WaitFor(dismiss_popups, wait_period=1)
@decorators.WithTimeoutAndRetriesFromInstance()
def ForceStop(self, package, timeout=None, retries=None):
"""Close the application.
Args:
package: A string containing the name of the package to stop.
timeout: timeout in seconds
retries: number of retries
Raises:
CommandTimeoutError on timeout.
DeviceUnreachableError on missing device.
"""
if self.GetApplicationPids(package):
self.RunShellCommand(['am', 'force-stop', package], check_return=True)
@decorators.WithTimeoutAndRetriesFromInstance()
def ClearApplicationState(
self, package, permissions=None, timeout=None, retries=None):
"""Clear all state for the given package.
Args:
package: A string containing the name of the package to stop.
permissions: List of permissions to set after clearing data.
timeout: timeout in seconds
retries: number of retries
Raises:
CommandTimeoutError on timeout.
DeviceUnreachableError on missing device.
"""
# Check that the package exists before clearing it for android builds below
# JB MR2. Necessary because calling pm clear on a package that doesn't exist
# may never return.
if ((self.build_version_sdk >= version_codes.JELLY_BEAN_MR2)
or self._GetApplicationPathsInternal(package)):
self.RunShellCommand(['pm', 'clear', package], check_return=True)
self.GrantPermissions(package, permissions)
@decorators.WithTimeoutAndRetriesFromInstance()
def SendKeyEvent(self, keycode, timeout=None, retries=None):
"""Sends a keycode to the device.
See the devil.android.sdk.keyevent module for suitable keycode values.
Args:
keycode: A integer keycode to send to the device.
timeout: timeout in seconds
retries: number of retries
Raises:
CommandTimeoutError on timeout.
DeviceUnreachableError on missing device.
"""
self.RunShellCommand(['input', 'keyevent', format(keycode, 'd')],
check_return=True)
PUSH_CHANGED_FILES_DEFAULT_TIMEOUT = 10 * _DEFAULT_TIMEOUT
  @decorators.WithTimeoutAndRetriesFromInstance(
      min_default_timeout=PUSH_CHANGED_FILES_DEFAULT_TIMEOUT)
  def PushChangedFiles(self, host_device_tuples, delete_device_stale=False,
                       timeout=None, retries=None):
    """Push files to the device, skipping files that don't need updating.

    When a directory is pushed, it is traversed recursively on the host and
    all files in it are pushed to the device as needed.
    Additionally, if delete_device_stale option is True,
    files that exist on the device but don't exist on the host are deleted.

    Args:
      host_device_tuples: A list of (host_path, device_path) tuples, where
        |host_path| is an absolute path of a file or directory on the host
        that should be minimially pushed to the device, and |device_path| is
        an absolute path of the destination on the device.
      delete_device_stale: option to delete stale files on device
      timeout: timeout in seconds
      retries: number of retries

    Raises:
      CommandFailedError on failure.
      CommandTimeoutError on timeout.
      DeviceUnreachableError on missing device.
    """
    # TODO(crbug.com/1005504): Experiment with this on physical devices after
    # upgrading devil's default adb beyond 1.0.39.
    # TODO(crbug.com/1020716): disabled as can result in extra directory.
    enable_push_sync = False

    if enable_push_sync:
      try:
        self._PushChangedFilesSync(host_device_tuples)
        return
      except device_errors.AdbVersionError as e:
        # If we don't meet the adb requirements, fall back to the previous
        # sync-unaware implementation.
        logging.warning(str(e))

    # Aggregate the per-tuple results so a single delete/push pass can be
    # made for everything.
    all_changed_files = []
    all_stale_files = []
    missing_dirs = set()
    cache_commit_funcs = []
    for h, d in host_device_tuples:
      assert os.path.isabs(h) and posixpath.isabs(d)
      h = os.path.realpath(h)
      changed_files, up_to_date_files, stale_files, cache_commit_func = (
          self._GetChangedAndStaleFiles(h, d, delete_device_stale))
      all_changed_files += changed_files
      all_stale_files += stale_files
      cache_commit_funcs.append(cache_commit_func)
      # If everything under a tuple changed and nothing was already present,
      # the destination directory probably does not exist yet on the device.
      if changed_files and not up_to_date_files and not stale_files:
        if os.path.isdir(h):
          missing_dirs.add(d)
        else:
          missing_dirs.add(posixpath.dirname(d))

    if delete_device_stale and all_stale_files:
      self.RemovePath(all_stale_files, force=True, recursive=True)

    if all_changed_files:
      if missing_dirs:
        try:
          self.RunShellCommand(['mkdir', '-p'] + list(missing_dirs),
                               check_return=True)
        except device_errors.AdbShellCommandFailedError as e:
          # TODO(crbug.com/739899): This is attempting to diagnose flaky EBUSY
          # errors that have been popping up in single-device scenarios.
          # Remove it once we've figured out what's causing them and how best
          # to handle them.
          m = _EBUSY_RE.search(e.output)
          if m:
            logging.error(
                'Hit EBUSY while attempting to make missing directories.')
            logging.error('lsof output:')
            # Don't check for return below since grep exits with a non-zero
            # when no match is found.
            for l in self.RunShellCommand(
                'lsof | grep %s' % cmd_helper.SingleQuote(m.group(1)),
                check_return=False):
              logging.error('  %s', l)
          raise
      self._PushFilesImpl(host_device_tuples, all_changed_files)
    # Commit the checksum caches only after the pushes succeeded.
    for func in cache_commit_funcs:
      func()
def _PushChangedFilesSync(self, host_device_tuples):
"""Push changed files via `adb sync`.
Args:
host_device_tuples: Same as PushChangedFiles.
"""
for h, d in host_device_tuples:
for ph, pd, _ in _IterPushableComponents(h, d):
self.adb.Push(ph, pd, sync=True)
  def _GetChangedAndStaleFiles(self, host_path, device_path, track_stale=False):
    """Get files to push and delete.

    Args:
      host_path: an absolute path of a file or directory on the host
      device_path: an absolute path of a file or directory on the device
      track_stale: whether to bother looking for stale files (slower)

    Returns:
      a four-element tuple
      1st element: a list of (host_files_path, device_files_path) tuples to push
      2nd element: a list of host_files_path that are up-to-date
      3rd element: a list of stale files under device_path, or [] when
        track_stale == False
      4th element: a cache commit function.
    """
    try:
      # Length calculations below assume no trailing /.
      host_path = host_path.rstrip('/')
      device_path = device_path.rstrip('/')

      specific_device_paths = [device_path]
      ignore_other_files = not track_stale and os.path.isdir(host_path)
      if ignore_other_files:
        # Not tracking stale files: only checksum the device-side
        # counterparts of the host files, not the whole device directory.
        specific_device_paths = []
        for root, _, filenames in os.walk(host_path):
          relative_dir = root[len(host_path) + 1:]
          specific_device_paths.extend(
              posixpath.join(device_path, relative_dir, f) for f in filenames)

      def calculate_host_checksums():
        return md5sum.CalculateHostMd5Sums([host_path])

      def calculate_device_checksums():
        # Use the cached device checksums when enabled and the cached entry
        # was computed with the same ignore_other_files mode.
        if self._enable_device_files_cache:
          cache_entry = self._cache['device_path_checksums'].get(device_path)
          if cache_entry and cache_entry[0] == ignore_other_files:
            return dict(cache_entry[1])

        sums = md5sum.CalculateDeviceMd5Sums(specific_device_paths, self)
        cache_entry = [ignore_other_files, sums]
        self._cache['device_path_checksums'][device_path] = cache_entry
        return dict(sums)

      # Compute host and device checksums concurrently.
      host_checksums, device_checksums = reraiser_thread.RunAsync((
          calculate_host_checksums,
          calculate_device_checksums))
    except EnvironmentError as e:
      logger.warning('Error calculating md5: %s', e)
      # Conservative fallback: push everything, track nothing, commit nothing.
      return ([(host_path, device_path)], [], [], lambda: 0)

    to_push = []
    up_to_date = []
    to_delete = []
    if os.path.isfile(host_path):
      host_checksum = host_checksums.get(host_path)
      device_checksum = device_checksums.get(device_path)
      if host_checksum == device_checksum:
        up_to_date.append(host_path)
      else:
        to_push.append((host_path, device_path))
    else:
      for host_abs_path, host_checksum in host_checksums.iteritems():
        device_abs_path = posixpath.join(
            device_path, os.path.relpath(host_abs_path, host_path))
        device_checksum = device_checksums.pop(device_abs_path, None)
        if device_checksum == host_checksum:
          up_to_date.append(host_abs_path)
        else:
          to_push.append((host_abs_path, device_abs_path))
      # Whatever remains in device_checksums has no host counterpart.
      to_delete = device_checksums.keys()

    # We can't rely solely on the checksum approach since it does not catch
    # stale directories, which can result in empty directories that cause issues
    # during copying in efficient_android_directory_copy.sh. So, find any stale
    # directories here so they can be removed in addition to stale files.
    if track_stale:
      to_delete.extend(self._GetStaleDirectories(host_path, device_path))

    def cache_commit_func():
      # When host_path is a not a directory, the path.join() call below would
      # have an '' as the second argument, causing an unwanted / to be appended.
      if os.path.isfile(host_path):
        assert len(host_checksums) == 1
        new_sums = {device_path: host_checksums[host_path]}
      else:
        new_sums = {posixpath.join(device_path, path[len(host_path) + 1:]): val
                    for path, val in host_checksums.iteritems()}
      cache_entry = [ignore_other_files, new_sums]
      self._cache['device_path_checksums'][device_path] = cache_entry

    return (to_push, up_to_date, to_delete, cache_commit_func)
def _GetStaleDirectories(self, host_path, device_path):
"""Gets a list of stale directories on the device.
Args:
host_path: an absolute path of a directory on the host
device_path: an absolute path of a directory on the device
Returns:
A list containing absolute paths to directories on the device that are
considered stale.
"""
def get_device_dirs(path):
directories = set()
command = _RECURSIVE_DIRECTORY_LIST_SCRIPT % cmd_helper.SingleQuote(path)
# We use shell=True to evaluate the command as a script through the shell,
# otherwise RunShellCommand tries to interpret it as the name of a (non
# existent) command to run.
for line in self.RunShellCommand(
command, shell=True, check_return=True):
directories.add(posixpath.relpath(posixpath.normpath(line), path))
return directories
def get_host_dirs(path):
directories = set()
if not os.path.isdir(path):
return directories
for root, _, _ in os.walk(path):
if root != path:
# Strip off the top level directory so we can compare the device and
# host.
directories.add(
os.path.relpath(root, path).replace(os.sep, posixpath.sep))
return directories
host_dirs = get_host_dirs(host_path)
device_dirs = get_device_dirs(device_path)
stale_dirs = device_dirs - host_dirs
return [posixpath.join(device_path, d) for d in stale_dirs]
def _ComputeDeviceChecksumsForApks(self, package_name):
ret = self._cache['package_apk_checksums'].get(package_name)
if ret is None:
device_paths = self._GetApplicationPathsInternal(package_name)
file_to_checksums = md5sum.CalculateDeviceMd5Sums(device_paths, self)
ret = set(file_to_checksums.values())
self._cache['package_apk_checksums'][package_name] = ret
return ret
def _ComputeStaleApks(self, package_name, host_apk_paths):
def calculate_host_checksums():
return md5sum.CalculateHostMd5Sums(host_apk_paths)
def calculate_device_checksums():
return self._ComputeDeviceChecksumsForApks(package_name)
host_checksums, device_checksums = reraiser_thread.RunAsync((
calculate_host_checksums, calculate_device_checksums))
stale_apks = [k for (k, v) in host_checksums.iteritems()
if v not in device_checksums]
return stale_apks, set(host_checksums.values())
  def _PushFilesImpl(self, host_device_tuples, files):
    """Pushes |files|, choosing between per-file and zipped push by cost.

    Args:
      host_device_tuples: The original (host, device) tuples, used to
          estimate the cost of pushing whole directories.
      files: The (host, device) tuples of just the files that changed.
    """
    if not files:
      return

    # Estimate the cost of each strategy from file counts and byte totals.
    size = sum(host_utils.GetRecursiveDiskUsage(h) for h, _ in files)
    file_count = len(files)
    dir_size = sum(host_utils.GetRecursiveDiskUsage(h)
                   for h, _ in host_device_tuples)
    dir_file_count = 0
    for h, _ in host_device_tuples:
      if os.path.isdir(h):
        dir_file_count += sum(len(f) for _r, _d, f in os.walk(h))
      else:
        dir_file_count += 1

    push_duration = self._ApproximateDuration(
        file_count, file_count, size, False)
    dir_push_duration = self._ApproximateDuration(
        len(host_device_tuples), dir_file_count, dir_size, False)
    zip_duration = self._ApproximateDuration(1, 1, size, True)

    # NOTE: the `and False` makes this branch deliberately unreachable; see
    # the TODO below before "fixing" it.
    if (dir_push_duration < push_duration and dir_push_duration < zip_duration
        # TODO(jbudorick): Resume directory pushing once clients have switched
        # to 1.0.36-compatible syntax.
        and False):
      self._PushChangedFilesIndividually(host_device_tuples)
    elif push_duration < zip_duration:
      self._PushChangedFilesIndividually(files)
    elif self._commands_installed is False:
      # Already tried and failed to install unzip command.
      self._PushChangedFilesIndividually(files)
    elif not self._PushChangedFilesZipped(
        files, [d for _, d in host_device_tuples]):
      # Zipped push failed; fall back to pushing each file separately.
      self._PushChangedFilesIndividually(files)
def _MaybeInstallCommands(self):
if self._commands_installed is None:
try:
if not install_commands.Installed(self):
install_commands.InstallCommands(self)
self._commands_installed = True
except device_errors.CommandFailedError as e:
logger.warning('unzip not available: %s', str(e))
self._commands_installed = False
return self._commands_installed
@staticmethod
def _ApproximateDuration(adb_calls, file_count, byte_count, is_zipping):
# We approximate the time to push a set of files to a device as:
# t = c1 * a + c2 * f + c3 + b / c4 + b / (c5 * c6), where
# t: total time (sec)
# c1: adb call time delay (sec)
# a: number of times adb is called (unitless)
# c2: push time delay (sec)
# f: number of files pushed via adb (unitless)
# c3: zip time delay (sec)
# c4: zip rate (bytes/sec)
# b: total number of bytes (bytes)
# c5: transfer rate (bytes/sec)
# c6: compression ratio (unitless)
# All of these are approximations.
ADB_CALL_PENALTY = 0.1 # seconds
ADB_PUSH_PENALTY = 0.01 # seconds
ZIP_PENALTY = 2.0 # seconds
ZIP_RATE = 10000000.0 # bytes / second
TRANSFER_RATE = 2000000.0 # bytes / second
COMPRESSION_RATIO = 2.0 # unitless
adb_call_time = ADB_CALL_PENALTY * adb_calls
adb_push_setup_time = ADB_PUSH_PENALTY * file_count
if is_zipping:
zip_time = ZIP_PENALTY + byte_count / ZIP_RATE
transfer_time = byte_count / (TRANSFER_RATE * COMPRESSION_RATIO)
else:
zip_time = 0
transfer_time = byte_count / TRANSFER_RATE
return adb_call_time + adb_push_setup_time + zip_time + transfer_time
def _PushChangedFilesIndividually(self, files):
for h, d in files:
self.adb.Push(h, d)
  def _PushChangedFilesZipped(self, files, dirs):
    """Pushes |files| as one zip archive and extracts it on the device.

    Args:
      files: A list of (host, device) path tuples to push.
      dirs: Device directories that are chmod'ed to 777 after extraction so
          the extracted files are usable regardless of uid.

    Returns:
      True on success; False if the device-side unzip helper could not be
      installed or the zip could not be created (callers then fall back to
      pushing files individually).
    """
    if not self._MaybeInstallCommands():
      return False

    with tempfile_ext.NamedTemporaryDirectory() as working_dir:
      zip_path = os.path.join(working_dir, 'tmp.zip')
      try:
        zip_utils.WriteZipFile(zip_path, files)
      except zip_utils.ZipFailedError:
        return False

      logger.info('Pushing %d files via .zip of size %d', len(files),
                  os.path.getsize(zip_path))
      # NOTE(review): presumably called here to resolve/cache su status before
      # running the as_root shell command below -- confirm.
      self.NeedsSU()
      with device_temp_file.DeviceTempFile(
          self.adb, suffix='.zip') as device_temp:
        self.adb.Push(zip_path, device_temp.name)

        quoted_dirs = ' '.join(cmd_helper.SingleQuote(d) for d in dirs)
        self.RunShellCommand(
            'unzip %s&&chmod -R 777 %s' % (device_temp.name, quoted_dirs),
            shell=True, as_root=True,
            env={'PATH': '%s:$PATH' % install_commands.BIN_DIR},
            check_return=True)

    return True
  # TODO(nednguyen): remove this and migrate the callsite to PathExists().
  @decorators.WithTimeoutAndRetriesFromInstance()
  def FileExists(self, device_path, timeout=None, retries=None):
    """Checks whether the given file exists on the device.

    Deprecated thin wrapper around PathExists; arguments are the same as
    PathExists.
    """
    return self.PathExists(device_path, timeout=timeout, retries=retries)
@decorators.WithTimeoutAndRetriesFromInstance()
def PathExists(self, device_paths, as_root=False, timeout=None, retries=None):
"""Checks whether the given path(s) exists on the device.
Args:
device_path: A string containing the absolute path to the file on the
device, or an iterable of paths to check.
as_root: Whether root permissions should be use to check for the existence
of the given path(s).
timeout: timeout in seconds
retries: number of retries
Returns:
True if the all given paths exist on the device, False otherwise.
Raises:
CommandTimeoutError on timeout.
DeviceUnreachableError on missing device.
"""
paths = device_paths
if isinstance(paths, basestring):
paths = (paths,)
if not paths:
return True
cmd = ['test', '-e', paths[0]]
for p in paths[1:]:
cmd.extend(['-a', '-e', p])
try:
self.RunShellCommand(cmd, as_root=as_root, check_return=True,
timeout=timeout, retries=retries)
return True
except device_errors.CommandFailedError:
return False
  @decorators.WithTimeoutAndRetriesFromInstance()
  def RemovePath(self, device_path, force=False, recursive=False,
                 as_root=False, rename=False, timeout=None, retries=None):
    """Removes the given path(s) from the device.

    Args:
      device_path: A string containing the absolute path to the file on the
        device, or an iterable of paths to check.
      force: Whether to remove the path(s) with force (-f).
      recursive: Whether to remove any directories in the path(s) recursively.
      as_root: Whether root permissions should be used to remove the given
        path(s).
      rename: Whether to rename the path(s) before removing to help avoid
        filesystem errors. See https://stackoverflow.com/questions/11539657
      timeout: timeout in seconds
      retries: number of retries
    """
    def _RenamePath(path):
      # Append a short random hex suffix so the renamed path is unlikely to
      # collide with an existing one.
      random_suffix = hex(random.randint(2 ** 12, 2 ** 16 - 1))[2:]
      dest = '%s-%s' % (path, random_suffix)
      try:
        self.RunShellCommand(
            ['mv', path, dest], as_root=as_root, check_return=True)
        return dest
      except device_errors.AdbShellCommandFailedError:
        # If it couldn't be moved, just try rm'ing the original path instead.
        return path
    args = ['rm']
    if force:
      args.append('-f')
    if recursive:
      args.append('-r')
    # device_path may be a single path or an iterable; each path is optionally
    # renamed first (see the `rename` arg above).
    if isinstance(device_path, basestring):
      args.append(device_path if not rename else _RenamePath(device_path))
    else:
      args.extend(
          device_path if not rename else [_RenamePath(p) for p in device_path])
    self.RunShellCommand(args, as_root=as_root, check_return=True)
@contextlib.contextmanager
def _CopyToReadableLocation(self, device_path):
"""Context manager to copy a file to a globally readable temp file.
This uses root permission to copy a file to a globally readable named
temporary file. The temp file is removed when this contextmanager is closed.
Args:
device_path: A string containing the absolute path of the file (on the
device) to copy.
Yields:
The globally readable file object.
"""
with device_temp_file.DeviceTempFile(self.adb) as device_temp:
cmd = 'SRC=%s DEST=%s;cp "$SRC" "$DEST" && chmod 666 "$DEST"' % (
cmd_helper.SingleQuote(device_path),
cmd_helper.SingleQuote(device_temp.name))
self.RunShellCommand(cmd, shell=True, as_root=True, check_return=True)
yield device_temp
@decorators.WithTimeoutAndRetriesFromInstance()
def PullFile(self, device_path, host_path, as_root=False, timeout=None,
retries=None):
"""Pull a file from the device.
Args:
device_path: A string containing the absolute path of the file to pull
from the device.
host_path: A string containing the absolute path of the destination on
the host.
as_root: Whether root permissions should be used to pull the file.
timeout: timeout in seconds
retries: number of retries
Raises:
CommandFailedError on failure.
CommandTimeoutError on timeout.
"""
# Create the base dir if it doesn't exist already
dirname = os.path.dirname(host_path)
if dirname and not os.path.exists(dirname):
os.makedirs(dirname)
if as_root and self.NeedsSU():
if not self.PathExists(device_path, as_root=True):
raise device_errors.CommandFailedError(
'%r: No such file or directory' % device_path, str(self))
with self._CopyToReadableLocation(device_path) as readable_temp_file:
self.adb.Pull(readable_temp_file.name, host_path)
else:
self.adb.Pull(device_path, host_path)
def _ReadFileWithPull(self, device_path):
try:
d = tempfile.mkdtemp()
host_temp_path = os.path.join(d, 'tmp_ReadFileWithPull')
self.adb.Pull(device_path, host_temp_path)
with open(host_temp_path, 'r') as host_temp:
return host_temp.read()
finally:
if os.path.exists(d):
shutil.rmtree(d)
  @decorators.WithTimeoutAndRetriesFromInstance()
  def ReadFile(self, device_path, as_root=False, force_pull=False,
               timeout=None, retries=None):
    """Reads the contents of a file from the device.

    Args:
      device_path: A string containing the absolute path of the file to read
        from the device.
      as_root: A boolean indicating whether the read should be executed with
        root privileges.
      force_pull: A boolean indicating whether to force the operation to be
        performed by pulling a file from the device. The default is, when the
        contents are short, to retrieve the contents using cat instead.
      timeout: timeout in seconds
      retries: number of retries

    Returns:
      The contents of |device_path| as a string. Contents are intepreted using
      universal newlines, so the caller will see them encoded as '\n'. Also,
      all lines will be terminated.

    Raises:
      AdbCommandFailedError if the file can't be read.
      CommandTimeoutError on timeout.
      DeviceUnreachableError on missing device.
    """
    def get_size(path):
      return self.FileSize(path, as_root=as_root)
    # Small files are read via `cat` to avoid the overhead of a pull; the
    # upper bound guards against truncated adb output for large files.
    if (not force_pull
        and 0 < get_size(device_path) <= self._MAX_ADB_OUTPUT_LENGTH):
      return _JoinLines(self.RunShellCommand(
          ['cat', device_path], as_root=as_root, check_return=True))
    elif as_root and self.NeedsSU():
      # Root-only files cannot be pulled directly; stage a world-readable
      # copy first.
      with self._CopyToReadableLocation(device_path) as readable_temp_file:
        return self._ReadFileWithPull(readable_temp_file.name)
    else:
      return self._ReadFileWithPull(device_path)
def _WriteFileWithPush(self, device_path, contents):
with tempfile.NamedTemporaryFile() as host_temp:
host_temp.write(contents)
host_temp.flush()
self.adb.Push(host_temp.name, device_path)
  @decorators.WithTimeoutAndRetriesFromInstance()
  def WriteFile(self, device_path, contents, as_root=False, force_push=False,
                timeout=None, retries=None):
    """Writes |contents| to a file on the device.

    Args:
      device_path: A string containing the absolute path to the file to write
        on the device.
      contents: A string containing the data to write to the device.
      as_root: A boolean indicating whether the write should be executed with
        root privileges (if available).
      force_push: A boolean indicating whether to force the operation to be
        performed by pushing a file to the device. The default is, when the
        contents are short, to pass the contents using a shell script instead.
      timeout: timeout in seconds
      retries: number of retries

    Raises:
      CommandFailedError if the file could not be written on the device.
      CommandTimeoutError on timeout.
      DeviceUnreachableError on missing device.
    """
    if not force_push and len(contents) < self._MAX_ADB_COMMAND_LENGTH:
      # If the contents are small, for efficiency we write the contents with
      # a shell command rather than pushing a file. SingleQuote protects the
      # contents and path from shell interpretation.
      cmd = 'echo -n %s > %s' % (cmd_helper.SingleQuote(contents),
                                 cmd_helper.SingleQuote(device_path))
      self.RunShellCommand(cmd, shell=True, as_root=as_root, check_return=True)
    elif as_root and self.NeedsSU():
      # Adb does not allow to "push with su", so we first push to a temp file
      # on a safe location, and then copy it to the desired location with su.
      with device_temp_file.DeviceTempFile(self.adb) as device_temp:
        self._WriteFileWithPush(device_temp.name, contents)
        # Here we need 'cp' rather than 'mv' because the temp and
        # destination files might be on different file systems (e.g.
        # on internal storage and an external sd card).
        self.RunShellCommand(['cp', device_temp.name, device_path],
                             as_root=True, check_return=True)
    else:
      # If root is not needed, we can push directly to the desired location.
      self._WriteFileWithPush(device_path, contents)
def _ParseLongLsOutput(self, device_path, as_root=False, **kwargs):
"""Run and scrape the output of 'ls -a -l' on a device directory."""
device_path = posixpath.join(device_path, '') # Force trailing '/'.
output = self.RunShellCommand(
['ls', '-a', '-l', device_path], as_root=as_root,
check_return=True, env={'TZ': 'utc'}, **kwargs)
if output and output[0].startswith('total '):
output.pop(0) # pylint: disable=maybe-no-member
entries = []
for line in output:
m = _LONG_LS_OUTPUT_RE.match(line)
if m:
if m.group('filename') not in ['.', '..']:
item = m.groupdict()
# A change in toybox is causing recent Android versions to escape
# spaces in file names. Here we just unquote those spaces. If we
# later find more essoteric characters in file names, a more careful
# unquoting mechanism may be needed. But hopefully not.
# See: https://goo.gl/JAebZj
item['filename'] = item['filename'].replace('\\ ', ' ')
entries.append(item)
else:
logger.info('Skipping: %s', line)
return entries
def ListDirectory(self, device_path, as_root=False, **kwargs):
"""List all files on a device directory.
Mirroring os.listdir (and most client expectations) the resulting list
does not include the special entries '.' and '..' even if they are present
in the directory.
Args:
device_path: A string containing the path of the directory on the device
to list.
as_root: A boolean indicating whether the to use root privileges to list
the directory contents.
timeout: timeout in seconds
retries: number of retries
Returns:
A list of filenames for all entries contained in the directory.
Raises:
AdbCommandFailedError if |device_path| does not specify a valid and
accessible directory in the device.
CommandTimeoutError on timeout.
DeviceUnreachableError on missing device.
"""
entries = self._ParseLongLsOutput(device_path, as_root=as_root, **kwargs)
return [d['filename'] for d in entries]
def StatDirectory(self, device_path, as_root=False, **kwargs):
"""List file and stat info for all entries on a device directory.
Implementation notes: this is currently implemented by parsing the output
of 'ls -a -l' on the device. Whether possible and convenient, we attempt to
make parsing strict and return values mirroring those of the standard |os|
and |stat| Python modules.
Mirroring os.listdir (and most client expectations) the resulting list
does not include the special entries '.' and '..' even if they are present
in the directory.
Args:
device_path: A string containing the path of the directory on the device
to list.
as_root: A boolean indicating whether the to use root privileges to list
the directory contents.
timeout: timeout in seconds
retries: number of retries
Returns:
A list of dictionaries, each containing the following keys:
filename: A string with the file name.
st_mode: File permissions, use the stat module to interpret these.
st_nlink: Number of hard links (may be missing).
st_owner: A string with the user name of the owner.
st_group: A string with the group name of the owner.
st_rdev_pair: Device type as (major, minior) (only if inode device).
st_size: Size of file, in bytes (may be missing for non-regular files).
st_mtime: Time of most recent modification, in seconds since epoch
(although resolution is in minutes).
symbolic_link_to: If entry is a symbolic link, path where it points to;
missing otherwise.
Raises:
AdbCommandFailedError if |device_path| does not specify a valid and
accessible directory in the device.
CommandTimeoutError on timeout.
DeviceUnreachableError on missing device.
"""
entries = self._ParseLongLsOutput(device_path, as_root=as_root, **kwargs)
for d in entries:
for key, value in d.items():
if value is None:
del d[key] # Remove missing fields.
d['st_mode'] = _ParseModeString(d['st_mode'])
d['st_mtime'] = calendar.timegm(
time.strptime(d['st_mtime'], _LS_DATE_FORMAT))
for key in ['st_nlink', 'st_size', 'st_rdev_major', 'st_rdev_minor']:
if key in d:
d[key] = int(d[key])
if 'st_rdev_major' in d and 'st_rdev_minor' in d:
d['st_rdev_pair'] = (d.pop('st_rdev_major'), d.pop('st_rdev_minor'))
return entries
def StatPath(self, device_path, as_root=False, **kwargs):
"""Get the stat attributes of a file or directory on the device.
Args:
device_path: A string containing the path of a file or directory from
which to get attributes.
as_root: A boolean indicating whether the to use root privileges to
access the file information.
timeout: timeout in seconds
retries: number of retries
Returns:
A dictionary with the stat info collected; see StatDirectory for details.
Raises:
CommandFailedError if device_path cannot be found on the device.
CommandTimeoutError on timeout.
DeviceUnreachableError on missing device.
"""
dirname, filename = posixpath.split(posixpath.normpath(device_path))
for entry in self.StatDirectory(dirname, as_root=as_root, **kwargs):
if entry['filename'] == filename:
return entry
raise device_errors.CommandFailedError(
'Cannot find file or directory: %r' % device_path, str(self))
def FileSize(self, device_path, as_root=False, **kwargs):
"""Get the size of a file on the device.
Note: This is implemented by parsing the output of the 'ls' command on
the device. On some Android versions, when passing a directory or special
file, the size is *not* reported and this function will throw an exception.
Args:
device_path: A string containing the path of a file on the device.
as_root: A boolean indicating whether the to use root privileges to
access the file information.
timeout: timeout in seconds
retries: number of retries
Returns:
The size of the file in bytes.
Raises:
CommandFailedError if device_path cannot be found on the device, or
its size cannot be determited for some reason.
CommandTimeoutError on timeout.
DeviceUnreachableError on missing device.
"""
entry = self.StatPath(device_path, as_root=as_root, **kwargs)
try:
return entry['st_size']
except KeyError:
raise device_errors.CommandFailedError(
'Could not determine the size of: %s' % device_path, str(self))
  @decorators.WithTimeoutAndRetriesFromInstance()
  def SetJavaAsserts(self, enabled, timeout=None, retries=None):
    """Enables or disables Java asserts.

    Updates both the persisted local properties file and the current runtime
    property; a restart is only required when the runtime value changed.

    Args:
      enabled: A boolean indicating whether Java asserts should be enabled
        or disabled.
      timeout: timeout in seconds
      retries: number of retries

    Returns:
      True if the device-side property changed and a restart is required as a
      result, False otherwise.

    Raises:
      CommandTimeoutError on timeout.
    """
    def find_property(lines, property_name):
      # Returns (index, value) of the first 'key=value' line whose key is
      # |property_name|; (None, '') when the property is absent.
      for index, line in enumerate(lines):
        if line.strip() == '':
          continue
        key_value = tuple(s.strip() for s in line.split('=', 1))
        if len(key_value) != 2:
          continue
        key, value = key_value
        if key == property_name:
          return index, value
      return None, ''
    new_value = 'all' if enabled else ''
    # First ensure the desired property is persisted.
    try:
      properties = self.ReadFile(self.LOCAL_PROPERTIES_PATH).splitlines()
    except device_errors.CommandFailedError:
      # Missing properties file: start from an empty property list.
      properties = []
    index, value = find_property(properties, self.JAVA_ASSERT_PROPERTY)
    if new_value != value:
      if new_value:
        new_line = '%s=%s' % (self.JAVA_ASSERT_PROPERTY, new_value)
        if index is None:
          properties.append(new_line)
        else:
          properties[index] = new_line
      else:
        assert index is not None  # since new_value == '' and new_value != value
        properties.pop(index)
      self.WriteFile(self.LOCAL_PROPERTIES_PATH, _JoinLines(properties))
    # Next, check the current runtime value is what we need, and
    # if not, set it and report that a reboot is required.
    value = self.GetProp(self.JAVA_ASSERT_PROPERTY)
    if new_value != value:
      self.SetProp(self.JAVA_ASSERT_PROPERTY, new_value)
      return True
    else:
      return False
def GetLocale(self, cache=False):
"""Returns the locale setting on the device.
Args:
cache: Whether to use cached properties when available.
Returns:
A pair (language, country).
"""
locale = self.GetProp('persist.sys.locale', cache=cache)
if locale:
if '-' not in locale:
logging.error('Unparsable locale: %s', locale)
return ('', '') # Behave as if persist.sys.locale is undefined.
return tuple(locale.split('-', 1))
return (self.GetProp('persist.sys.language', cache=cache),
self.GetProp('persist.sys.country', cache=cache))
def GetLanguage(self, cache=False):
"""Returns the language setting on the device.
DEPRECATED: Prefer GetLocale() instead.
Args:
cache: Whether to use cached properties when available.
"""
return self.GetLocale(cache=cache)[0]
def GetCountry(self, cache=False):
"""Returns the country setting on the device.
DEPRECATED: Prefer GetLocale() instead.
Args:
cache: Whether to use cached properties when available.
"""
return self.GetLocale(cache=cache)[1]
@property
def screen_density(self):
"""Returns the screen density of the device."""
DPI_TO_DENSITY = {
120: 'ldpi',
160: 'mdpi',
240: 'hdpi',
320: 'xhdpi',
480: 'xxhdpi',
640: 'xxxhdpi',
}
return DPI_TO_DENSITY.get(self.pixel_density, 'tvdpi')
  @property
  def pixel_density(self):
    """Returns the device's pixel density (ro.sf.lcd_density) as an int."""
    density = self.GetProp('ro.sf.lcd_density', cache=True)
    # Some emulators only report density via a qemu-specific property.
    if not density and self.adb.is_emulator:
      density = self.GetProp('qemu.sf.lcd_density', cache=True)
    return int(density)
  @property
  def build_description(self):
    """Returns the build description of the system.

    For example:
      nakasi-user 4.4.4 KTU84P 1227136 release-keys

    Read from the 'ro.build.description' property; the value is cached.
    """
    return self.GetProp('ro.build.description', cache=True)
  @property
  def build_fingerprint(self):
    """Returns the build fingerprint of the system.

    For example:
      google/nakasi/grouper:4.4.4/KTU84P/1227136:user/release-keys

    Read from the 'ro.build.fingerprint' property; the value is cached.
    """
    return self.GetProp('ro.build.fingerprint', cache=True)
  @property
  def build_id(self):
    """Returns the build ID of the system (e.g. 'KTU84P').

    Read from the 'ro.build.id' property; the value is cached.
    """
    return self.GetProp('ro.build.id', cache=True)
  @property
  def build_product(self):
    """Returns the build product of the system (e.g. 'grouper').

    Read from the 'ro.build.product' property; the value is cached.
    """
    return self.GetProp('ro.build.product', cache=True)
  @property
  def build_type(self):
    """Returns the build type of the system (e.g. 'user').

    Read from the 'ro.build.type' property; the value is cached.
    """
    return self.GetProp('ro.build.type', cache=True)
@property
def build_version_sdk(self):
"""Returns the build version sdk of the system as a number (e.g. 19).
For version code numbers see:
http://developer.android.com/reference/android/os/Build.VERSION_CODES.html
For named constants see devil.android.sdk.version_codes
Raises:
CommandFailedError if the build version sdk is not a number.
"""
value = self.GetProp('ro.build.version.sdk', cache=True)
try:
return int(value)
except ValueError:
raise device_errors.CommandFailedError(
'Invalid build version sdk: %r' % value)
  @property
  def product_cpu_abi(self):
    """Returns the product cpu abi of the device (e.g. 'armeabi-v7a').

    For supported ABIs, the return value will be one of the values defined in
    devil.android.ndk.abis.

    Read from the 'ro.product.cpu.abi' property; the value is cached.
    """
    return self.GetProp('ro.product.cpu.abi', cache=True)
  @property
  def product_cpu_abis(self):
    """Returns all product cpu abis of the device as a list of strings.

    Read from the comma-separated 'ro.product.cpu.abilist' property; the
    value is cached.
    """
    return self.GetProp('ro.product.cpu.abilist', cache=True).split(',')
  @property
  def product_model(self):
    """Returns the name of the product model (e.g. 'Nexus 7').

    Read from the 'ro.product.model' property; the value is cached.
    """
    return self.GetProp('ro.product.model', cache=True)
  @property
  def product_name(self):
    """Returns the product name of the device (e.g. 'nakasi').

    Read from the 'ro.product.name' property; the value is cached.
    """
    return self.GetProp('ro.product.name', cache=True)
  @property
  def product_board(self):
    """Returns the product board name of the device (e.g. 'shamu').

    Read from the 'ro.product.board' property; the value is cached.
    """
    return self.GetProp('ro.product.board', cache=True)
  def _EnsureCacheInitialized(self):
    """Populates cache token, runs getprop and fetches $EXTERNAL_STORAGE."""
    if self._cache['token']:
      return
    # Double-checked locking: re-test after acquiring the lock so only one
    # thread pays for the shell round-trip.
    with self._cache_lock:
      if self._cache['token']:
        return
      # Change the token every time to ensure that it will match only the
      # previously dumped cache.
      token = str(uuid.uuid1())
      cmd = (
          'c=/data/local/tmp/cache_token;'
          'echo $EXTERNAL_STORAGE;'
          'cat $c 2>/dev/null||echo;'
          'echo "%s">$c &&' % token +
          'getprop'
      )
      output = self.RunShellCommand(
          cmd, shell=True, check_return=True, large_output=True)
      # Error-checking for this existing is done in GetExternalStoragePath().
      # Output layout: line 0 is $EXTERNAL_STORAGE, line 1 the previously
      # stored token (empty if none), and the remainder is getprop output.
      self._cache['external_storage'] = output[0]
      self._cache['prev_token'] = output[1]
      output = output[2:]
      prop_cache = self._cache['getprop']
      prop_cache.clear()
      for key, value in _GETPROP_RE.findall(''.join(output)):
        prop_cache[key] = value
      self._cache['token'] = token
  @decorators.WithTimeoutAndRetriesFromInstance()
  def GetProp(self, property_name, cache=False, timeout=None, retries=None):
    """Gets a property from the device.

    Args:
      property_name: A string containing the name of the property to get from
        the device.
      cache: Whether to use cached properties when available.
      timeout: timeout in seconds
      retries: number of retries

    Returns:
      The value of the device's |property_name| property; an empty string if
      the property is not set.

    Raises:
      CommandTimeoutError on timeout.
    """
    assert isinstance(property_name, basestring), (
        "property_name is not a string: %r" % property_name)
    if cache:
      # It takes ~120ms to query a single property, and ~130ms to query all
      # properties. So, when caching we always query all properties.
      self._EnsureCacheInitialized()
    else:
      # timeout and retries are handled down at run shell, because we don't
      # want to apply them in the other branch when reading from the cache
      value = self.RunShellCommand(
          ['getprop', property_name], single_line=True, check_return=True,
          timeout=timeout, retries=retries)
      # Store the fresh value so later cache=True reads can reuse it.
      self._cache['getprop'][property_name] = value
    # Non-existent properties are treated as empty strings by getprop.
    return self._cache['getprop'].get(property_name, '')
  @decorators.WithTimeoutAndRetriesFromInstance()
  def SetProp(self, property_name, value, check=False, timeout=None,
              retries=None):
    """Sets a property on the device.

    Args:
      property_name: A string containing the name of the property to set on
        the device.
      value: A string containing the value to set to the property on the
        device.
      check: A boolean indicating whether to check that the property was
        successfully set on the device.
      timeout: timeout in seconds
      retries: number of retries

    Raises:
      CommandFailedError if check is true and the property was not correctly
        set on the device (e.g. because it is not rooted).
      CommandTimeoutError on timeout.
    """
    assert isinstance(property_name, basestring), (
        "property_name is not a string: %r" % property_name)
    assert isinstance(value, basestring), "value is not a string: %r" % value
    self.RunShellCommand(['setprop', property_name, value], check_return=True)
    # Drop any stale cached value so subsequent cached reads re-query.
    prop_cache = self._cache['getprop']
    if property_name in prop_cache:
      del prop_cache[property_name]
    # TODO(crbug.com/1029772) remove the option and make the check mandatory,
    # but using a single shell script to both set- and getprop.
    if check and value != self.GetProp(property_name, cache=False):
      raise device_errors.CommandFailedError(
          'Unable to set property %r on the device to %r'
          % (property_name, value), str(self))
  @decorators.WithTimeoutAndRetriesFromInstance()
  def GetABI(self, timeout=None, retries=None):
    """Gets the device main ABI.

    Reads the same cached 'ro.product.cpu.abi' property as |product_cpu_abi|.

    Args:
      timeout: timeout in seconds
      retries: number of retries

    Returns:
      The device's main ABI name. For supported ABIs, the return value will be
      one of the values defined in devil.android.ndk.abis.

    Raises:
      CommandTimeoutError on timeout.
    """
    return self.GetProp('ro.product.cpu.abi', cache=True)
@decorators.WithTimeoutAndRetriesFromInstance()
def GetFeatures(self, timeout=None, retries=None):
"""Returns the features supported on the device."""
lines = self.RunShellCommand(['pm', 'list', 'features'], check_return=True)
return [f[8:] for f in lines if f.startswith('feature:')]
  def _GetPsOutput(self, pattern):
    """Runs |ps| command on the device and returns its output.

    This private method abstracts away differences between Android versions
    for calling |ps|, and implements support for filtering the output by a
    given |pattern|, but does not do any output parsing.

    Args:
      pattern: A fixed string used to filter output lines (via `grep -F`);
        falsy values disable filtering.

    Returns:
      The raw output lines; an empty list when |pattern| matched nothing.
    """
    try:
      ps_cmd = 'ps'
      # ps behavior was changed in Android O and above, http://crbug.com/686716
      if self.build_version_sdk >= version_codes.OREO:
        ps_cmd = 'ps -e'
      if pattern:
        return self._RunPipedShellCommand(
            '%s | grep -F %s' % (ps_cmd, cmd_helper.SingleQuote(pattern)))
      else:
        return self.RunShellCommand(
            ps_cmd.split(), check_return=True, large_output=True)
    except device_errors.AdbShellCommandFailedError as e:
      # e.status carries per-command pipe statuses; status[0] is ps itself.
      if e.status and isinstance(e.status, list) and not e.status[0]:
        # If ps succeeded but grep failed, there were no processes with the
        # given name.
        return []
      else:
        raise
@decorators.WithTimeoutAndRetriesFromInstance()
def ListProcesses(self, process_name=None, timeout=None, retries=None):
"""Returns a list of tuples with info about processes on the device.
This essentially parses the output of the |ps| command into convenient
ProcessInfo tuples.
Args:
process_name: A string used to filter the returned processes. If given,
only processes whose name have this value as a substring
will be returned.
timeout: timeout in seconds
retries: number of retries
Returns:
A list of ProcessInfo tuples with |name|, |pid|, and |ppid| fields.
"""
process_name = process_name or ''
processes = []
for line in self._GetPsOutput(process_name):
row = line.split()
try:
row = {k: row[i] for k, i in _PS_COLUMNS.iteritems()}
if row['pid'] == 'PID' or process_name not in row['name']:
# Skip over header and non-matching processes.
continue
row['pid'] = int(row['pid'])
row['ppid'] = int(row['ppid'])
except StandardError: # e.g. IndexError, TypeError, ValueError.
logging.warning('failed to parse ps line: %r', line)
continue
processes.append(ProcessInfo(**row))
return processes
def _GetDumpsysOutput(self, extra_args, pattern=None):
"""Runs |dumpsys| command on the device and returns its output.
This private method implements support for filtering the output by a given
|pattern|, but does not do any output parsing.
"""
try:
cmd = ['dumpsys'] + extra_args
if pattern:
cmd = ' '.join(cmd_helper.SingleQuote(s) for s in cmd)
return self._RunPipedShellCommand(
'%s | grep -F %s' % (cmd, cmd_helper.SingleQuote(pattern)))
else:
cmd = ['dumpsys'] + extra_args
return self.RunShellCommand(cmd, check_return=True, large_output=True)
except device_errors.AdbShellCommandFailedError as e:
if e.status and isinstance(e.status, list) and not e.status[0]:
# If dumpsys succeeded but grep failed, there were no lines matching
# the given pattern.
return []
else:
raise
# TODO(#4103): Remove after migrating clients to ListProcesses.
@decorators.WithTimeoutAndRetriesFromInstance()
def GetPids(self, process_name=None, timeout=None, retries=None):
"""Returns the PIDs of processes containing the given name as substring.
DEPRECATED
Note that the |process_name| is often the package name.
Args:
process_name: A string containing the process name to get the PIDs for.
If missing returns PIDs for all processes.
timeout: timeout in seconds
retries: number of retries
Returns:
A dict mapping process name to a list of PIDs for each process that
contained the provided |process_name|.
Raises:
CommandTimeoutError on timeout.
DeviceUnreachableError on missing device.
"""
procs_pids = collections.defaultdict(list)
for p in self.ListProcesses(process_name):
procs_pids[p.name].append(str(p.pid))
return procs_pids
@decorators.WithTimeoutAndRetriesFromInstance()
def GetApplicationPids(self, process_name, at_most_one=False,
timeout=None, retries=None):
"""Returns the PID or PIDs of a given process name.
Note that the |process_name|, often the package name, must match exactly.
Args:
process_name: A string containing the process name to get the PIDs for.
at_most_one: A boolean indicating that at most one PID is expected to
be found.
timeout: timeout in seconds
retries: number of retries
Returns:
A list of the PIDs for the named process. If at_most_one=True returns
the single PID found or None otherwise.
Raises:
CommandFailedError if at_most_one=True and more than one PID is found
for the named process.
CommandTimeoutError on timeout.
DeviceUnreachableError on missing device.
"""
pids = [p.pid for p in self.ListProcesses(process_name)
if p.name == process_name]
if at_most_one:
if len(pids) > 1:
raise device_errors.CommandFailedError(
'Expected a single PID for %r but found: %r.' % (
process_name, pids),
device_serial=str(self))
return pids[0] if pids else None
else:
return pids
@decorators.WithTimeoutAndRetriesFromInstance()
def GetEnforce(self, timeout=None, retries=None):
"""Get the current mode of SELinux.
Args:
timeout: timeout in seconds
retries: number of retries
Returns:
True (enforcing), False (permissive), or None (disabled).
Raises:
CommandFailedError on failure.
CommandTimeoutError on timeout.
DeviceUnreachableError on missing device.
"""
output = self.RunShellCommand(
['getenforce'], check_return=True, single_line=True).lower()
if output not in _SELINUX_MODE:
raise device_errors.CommandFailedError(
'Unexpected getenforce output: %s' % output)
return _SELINUX_MODE[output]
@decorators.WithTimeoutAndRetriesFromInstance()
def SetEnforce(self, enabled, timeout=None, retries=None):
"""Modify the mode SELinux is running in.
Args:
enabled: a boolean indicating whether to put SELinux in encorcing mode
(if True), or permissive mode (otherwise).
timeout: timeout in seconds
retries: number of retries
Raises:
CommandFailedError on failure.
CommandTimeoutError on timeout.
DeviceUnreachableError on missing device.
"""
self.RunShellCommand(
['setenforce', '1' if int(enabled) else '0'], as_root=True,
check_return=True)
  @decorators.WithTimeoutAndRetriesFromInstance()
  def GetWebViewUpdateServiceDump(self, timeout=None, retries=None):
    """Get the WebView update command sysdump on the device.

    Returns:
      A dictionary with these possible entries:
        FallbackLogicEnabled: True|False
        CurrentWebViewPackage: "package name" or None
        MinimumWebViewVersionCode: int
        WebViewPackages: Dict of installed WebView providers, mapping "package
            name" to "reason it's valid/invalid."

      It may return an empty dictionary if device does not
      support the "dumpsys webviewupdate" command.

    Raises:
      CommandFailedError on failure.
      CommandTimeoutError on timeout.
      DeviceUnreachableError on missing device.
    """
    result = {}
    # Command was implemented starting in Oreo
    if self.build_version_sdk < version_codes.OREO:
      return result
    output = self.RunShellCommand(
        ['dumpsys', 'webviewupdate'], check_return=True)
    webview_packages = {}
    # Match every line against each known pattern; when several lines match
    # the same pattern, the last one wins.
    for line in output:
      match = re.search(_WEBVIEW_SYSUPDATE_CURRENT_PKG_RE, line)
      if match:
        result['CurrentWebViewPackage'] = match.group(1)
      match = re.search(_WEBVIEW_SYSUPDATE_NULL_PKG_RE, line)
      if match:
        result['CurrentWebViewPackage'] = None
      match = re.search(_WEBVIEW_SYSUPDATE_FALLBACK_LOGIC_RE, line)
      if match:
        result['FallbackLogicEnabled'] = \
            True if match.group(1) == 'true' else False
      match = re.search(_WEBVIEW_SYSUPDATE_PACKAGE_INSTALLED_RE, line)
      if match:
        package_name = match.group(1)
        reason = match.group(2)
        webview_packages[package_name] = reason
      match = re.search(_WEBVIEW_SYSUPDATE_PACKAGE_NOT_INSTALLED_RE, line)
      if match:
        package_name = match.group(1)
        reason = match.group(2)
        webview_packages[package_name] = reason
      match = re.search(_WEBVIEW_SYSUPDATE_MIN_VERSION_CODE, line)
      if match:
        result['MinimumWebViewVersionCode'] = int(match.group(1))
    if webview_packages:
      result['WebViewPackages'] = webview_packages
    # These two fields are expected in every well-formed dump; their absence
    # indicates the output format was not understood.
    missing_fields = set(['CurrentWebViewPackage', 'FallbackLogicEnabled']) - \
        set(result.keys())
    if len(missing_fields) > 0:
      raise device_errors.CommandFailedError(
          '%s not found in dumpsys webviewupdate' % str(list(missing_fields)))
    return result
  @decorators.WithTimeoutAndRetriesFromInstance()
  def SetWebViewImplementation(self, package_name, timeout=None, retries=None):
    """Select the WebView implementation to the specified package.

    Args:
      package_name: The package name of a WebView implementation. The package
        must be already installed on the device.
      timeout: timeout in seconds
      retries: number of retries

    Raises:
      CommandFailedError on failure.
      CommandTimeoutError on timeout.
      DeviceUnreachableError on missing device.
    """
    if not self.IsApplicationInstalled(package_name):
      raise device_errors.CommandFailedError(
          '%s is not installed' % package_name, str(self))
    output = self.RunShellCommand(
        ['cmd', 'webviewupdate', 'set-webview-implementation', package_name],
        single_line=True,
        check_return=False)
    if output == 'Success':
      logging.info('WebView provider set to: %s', package_name)
    else:
      # The cmd failed; consult the WebViewUpdateService dump to translate
      # the provider's "reason" string into a more actionable error.
      dumpsys_output = self.GetWebViewUpdateServiceDump()
      webview_packages = dumpsys_output.get('WebViewPackages')
      if webview_packages:
        reason = webview_packages.get(package_name)
        if not reason:
          all_provider_package_names = webview_packages.keys()
          raise device_errors.CommandFailedError(
              '%s is not in the system WebView provider list. Must choose one '
              'of %r.' % (package_name, all_provider_package_names), str(self))
        if re.search(r'is\s+NOT\s+installed/enabled for all users', reason):
          raise device_errors.CommandFailedError(
              '%s is disabled, make sure to disable WebView fallback logic' %
              package_name, str(self))
        if re.search(r'No WebView-library manifest flag', reason):
          raise device_errors.CommandFailedError(
              '%s does not declare a WebView native library, so it cannot '
              'be a WebView provider' % package_name, str(self))
        if re.search(r'SDK version too low', reason):
          raise device_errors.CommandFailedError(
              '%s needs a higher targetSdkVersion (must be >= %d)' %
              (package_name, self.build_version_sdk), str(self))
        if re.search(r'Version code too low', reason):
          raise device_errors.CommandFailedError(
              '%s needs a higher versionCode (must be >= %d)' %
              (package_name, dumpsys_output.get('MinimumWebViewVersionCode')),
              str(self))
        if re.search(r'Incorrect signature', reason):
          raise device_errors.CommandFailedError(
              '%s is not signed with release keys (but user builds require '
              'this for WebView providers)' % package_name, str(self))
      # No recognizable reason: fall back to reporting the raw cmd output.
      raise device_errors.CommandFailedError(
          'Error setting WebView provider: %s' % output, str(self))
  @decorators.WithTimeoutAndRetriesFromInstance()
  def SetWebViewFallbackLogic(self, enabled, timeout=None, retries=None):
    """Set whether WebViewUpdateService's "fallback logic" should be enabled.

    WebViewUpdateService has nonintuitive "fallback logic" for devices where
    Monochrome (Chrome Stable) is preinstalled as the WebView provider, with a
    "stub" (little-to-no code) implementation of standalone WebView.

    "Fallback logic" (enabled by default) is designed, in the case where the
    user has disabled Chrome, to fall back to the stub standalone WebView by
    enabling the package. The implementation plumbs through the Chrome APK
    until Play Store installs an update with the full implementation.

    A surprising side-effect of "fallback logic" is that, immediately after
    sideloading WebView, WebViewUpdateService re-disables the package and
    uninstalls the update. This can prevent successfully using standalone
    WebView for development, although "fallback logic" can be disabled on
    userdebug/eng devices.

    Because this is only relevant for devices with the standalone WebView
    stub, this command is only relevant on N-P (inclusive).

    You can determine if "fallback logic" is currently enabled by checking
    FallbackLogicEnabled in the dictionary returned by
    GetWebViewUpdateServiceDump.

    Args:
      enabled: bool - True for enabled, False for disabled
      timeout: timeout in seconds
      retries: number of retries

    Raises:
      CommandFailedError on failure.
      CommandTimeoutError on timeout.
      DeviceUnreachableError on missing device.
    """
    # Command is only available on devices which preinstall stub WebView.
    if not version_codes.NOUGAT <= self.build_version_sdk <= version_codes.PIE:
      return

    # redundant-packages is the opposite of fallback logic, hence the
    # inverted enable/disable mapping below.
    enable_string = 'disable' if enabled else 'enable'
    output = self.RunShellCommand(
        ['cmd', 'webviewupdate', '%s-redundant-packages' % enable_string],
        single_line=True, check_return=True)
    if output == 'Success':
      logging.info('WebView Fallback Logic is %s',
                   'enabled' if enabled else 'disabled')
    else:
      raise device_errors.CommandFailedError(
          'Error setting WebView Fallback Logic: %s' % output, str(self))
  @decorators.WithTimeoutAndRetriesFromInstance()
  def TakeScreenshot(self, host_path=None, timeout=None, retries=None):
    """Takes a screenshot of the device.

    Args:
      host_path: A string containing the path on the host to save the
                 screenshot to. If None, a file name in the current
                 directory will be generated.
      timeout: timeout in seconds
      retries: number of retries

    Returns:
      The name of the file on the host to which the screenshot was saved.

    Raises:
      CommandFailedError on failure.
      CommandTimeoutError on timeout.
      DeviceUnreachableError on missing device.
    """
    if not host_path:
      # Default name encodes the serial and a timestamp to stay unique.
      host_path = os.path.abspath('screenshot-%s-%s.png' % (
          self.serial, _GetTimeStamp()))
    # Capture into a device-side temp file, then pull it to the host.
    with device_temp_file.DeviceTempFile(self.adb, suffix='.png') as device_tmp:
      self.RunShellCommand(['/system/bin/screencap', '-p', device_tmp.name],
                           check_return=True)
      self.PullFile(device_tmp.name, host_path)
    return host_path
  @decorators.WithTimeoutAndRetriesFromInstance()
  def DismissCrashDialogIfNeeded(self, timeout=None, retries=None):
    """Dismiss the error/ANR dialog if present.

    Args:
      timeout: timeout in seconds
      retries: number of retries

    Returns:
      Name of the crashed package if a dialog is focused, None otherwise.
    """
    def _FindFocusedWindow():
      # Returns the crash-dialog regex match for the focused window, or None.
      match = None
      # TODO(jbudorick): Try to grep the output on the device instead of using
      # large_output if/when DeviceUtils exposes a public interface for piped
      # shell command handling.
      for line in self.RunShellCommand(['dumpsys', 'window', 'windows'],
                                       check_return=True, large_output=True):
        match = re.match(_CURRENT_FOCUS_CRASH_RE, line)
        if match:
          break
      return match

    match = _FindFocusedWindow()
    if not match:
      return None
    package = match.group(2)
    logger.warning('Trying to dismiss %s dialog for %s', *match.groups())
    # Presumably moves focus onto the dialog's rightmost button and activates
    # it (two DPAD_RIGHT presses, then ENTER) -- confirm against dialog layout.
    self.SendKeyEvent(keyevent.KEYCODE_DPAD_RIGHT)
    self.SendKeyEvent(keyevent.KEYCODE_DPAD_RIGHT)
    self.SendKeyEvent(keyevent.KEYCODE_ENTER)
    # Re-check: dismissal is best-effort, so log if the dialog survived.
    match = _FindFocusedWindow()
    if match:
      logger.error('Still showing a %s dialog for %s', *match.groups())
    return package
  def GetLogcatMonitor(self, *args, **kwargs):
    """Returns a new LogcatMonitor associated with this device.

    Parameters passed to this function are passed directly to
    |logcat_monitor.LogcatMonitor| (after this device's adb wrapper) and are
    documented there.
    """
    return logcat_monitor.LogcatMonitor(self.adb, *args, **kwargs)
def GetClientCache(self, client_name):
"""Returns client cache."""
if client_name not in self._client_caches:
self._client_caches[client_name] = {}
return self._client_caches[client_name]
  def ClearCache(self):
    """Clears all client caches and resets the device cache to empty."""
    for client in self._client_caches:
      self._client_caches[client].clear()
    self._cache = {
        # Map of packageId -> list of on-device .apk paths
        'package_apk_paths': {},
        # Set of packageId that were loaded from LoadCacheData and not yet
        # verified.
        'package_apk_paths_to_verify': set(),
        # Map of packageId -> set of on-device .apk checksums
        'package_apk_checksums': {},
        # Map of property_name -> value
        'getprop': {},
        # Map of device_path -> [ignore_other_files, map of path->checksum]
        'device_path_checksums': {},
        # Location of sdcard ($EXTERNAL_STORAGE).
        'external_storage': None,
        # Token used to detect when LoadCacheData is stale.
        'token': None,
        'prev_token': None,
    }
  @decorators.WithTimeoutAndRetriesFromInstance()
  def LoadCacheData(self, data, timeout=None, retries=None):
    """Initializes the cache from data created using DumpCacheData.

    The cache is used only if its token matches the one found on the device.
    This prevents a stale cache from being used (which can happen when sharing
    devices).

    NOTE: uses dict.iterkeys()/iteritems(), so this method is Python-2-only.

    Args:
      data: A previously serialized cache (string).
      timeout: timeout in seconds
      retries: number of retries

    Returns:
      Whether the cache was loaded.
    """
    obj = json.loads(data)
    self._EnsureCacheInitialized()
    given_token = obj.get('token')
    if not given_token or self._cache['prev_token'] != given_token:
      logger.warning('Stale cache detected. Not using it.')
      return False
    self._cache['package_apk_paths'] = obj.get('package_apk_paths', {})
    # When using a cache across script invocations, verify that apps have
    # not been uninstalled.
    self._cache['package_apk_paths_to_verify'] = set(
        self._cache['package_apk_paths'].iterkeys())
    # JSON stores the checksum sets as lists; convert them back to sets.
    package_apk_checksums = obj.get('package_apk_checksums', {})
    for k, v in package_apk_checksums.iteritems():
      package_apk_checksums[k] = set(v)
    self._cache['package_apk_checksums'] = package_apk_checksums
    device_path_checksums = obj.get('device_path_checksums', {})
    self._cache['device_path_checksums'] = device_path_checksums
    return True
@decorators.WithTimeoutAndRetriesFromInstance()
def DumpCacheData(self, timeout=None, retries=None):
"""Dumps the current cache state to a string.
Args:
timeout: timeout in seconds
retries: number of retries
Returns:
A serialized cache as a string.
"""
self._EnsureCacheInitialized()
obj = {}
obj['token'] = self._cache['token']
obj['package_apk_paths'] = self._cache['package_apk_paths']
obj['package_apk_checksums'] = self._cache['package_apk_checksums']
# JSON can't handle sets.
for k, v in obj['package_apk_checksums'].iteritems():
obj['package_apk_checksums'][k] = list(v)
obj['device_path_checksums'] = self._cache['device_path_checksums']
return json.dumps(obj, separators=(',', ':'))
  @classmethod
  def parallel(cls, devices, async=False):
    """Creates a Parallelizer to operate over the provided list of devices.

    NOTE: ``async`` became a reserved keyword in Python 3.7, so this
    signature (and hence this file) is Python-2-only.

    Args:
      devices: A list of either DeviceUtils instances or objects from
               from which DeviceUtils instances can be constructed. If None,
               all attached devices will be used.
      async: If true, returns a Parallelizer that runs operations
             asynchronously.

    Returns:
      A Parallelizer operating over |devices|.
    """
    # Wrap any non-DeviceUtils entries (e.g. raw serials) in DeviceUtils.
    devices = [d if isinstance(d, cls) else cls(d) for d in devices]
    if async:
      return parallelizer.Parallelizer(devices)
    else:
      return parallelizer.SyncParallelizer(devices)
  @classmethod
  def HealthyDevices(cls, blacklist=None, device_arg='default', retries=1,
                     enable_usb_resets=False, abis=None, **kwargs):
    """Returns a list of DeviceUtils instances.

    Returns a list of DeviceUtils instances that are attached, not
    blacklisted, and optionally filtered by --device flags or ANDROID_SERIAL
    environment variable.

    Args:
      blacklist: A DeviceBlacklist instance (optional). Device serials in this
        blacklist will never be returned, but a warning will be logged if they
        otherwise would have been.
      device_arg: The value of the --device flag. This can be:
        'default' -> Same as [], but returns an empty list rather than raise a
          NoDevicesError.
        [] -> Returns all devices, unless $ANDROID_SERIAL is set.
        None -> Use $ANDROID_SERIAL if set, otherwise looks for a single
          attached device. Raises an exception if multiple devices are
          attached.
        'serial' -> Returns an instance for the given serial, if not
          blacklisted.
        ['A', 'B', ...] -> Returns instances for the subset that is not
          blacklisted.
      retries: Number of times to restart adb server and query it again if no
        devices are found on the previous attempts, with exponential backoffs
        up to 60s between each retry.
      enable_usb_resets: If true, will attempt to trigger a USB reset prior to
        the last attempt if there are no available devices. It will only reset
        those that appear to be android devices.
      abis: A list of ABIs for which the device needs to support at least one
        of (optional). See devil.android.ndk.abis for valid values.

    Returns:
      A list of DeviceUtils instances.

    Raises:
      NoDevicesError: Raised when no non-blacklisted devices exist and
        device_arg is passed.
      MultipleDevicesError: Raise when multiple devices exist, but
        |device_arg| is None.
    """
    allow_no_devices = False
    if device_arg == 'default':
      allow_no_devices = True
      device_arg = ()
    # A scalar device_arg selects exactly one device; normalize to a tuple.
    select_multiple = True
    if not (isinstance(device_arg, tuple) or isinstance(device_arg, list)):
      select_multiple = False
      if device_arg:
        device_arg = (device_arg,)
    blacklisted_devices = blacklist.Read() if blacklist else []
    # adb looks for ANDROID_SERIAL, so support it as well.
    android_serial = os.environ.get('ANDROID_SERIAL')
    if not device_arg and android_serial:
      device_arg = (android_serial,)

    def blacklisted(serial):
      # Closure over blacklisted_devices; logs a warning for each skip.
      if serial in blacklisted_devices:
        logger.warning('Device %s is blacklisted.', serial)
        return True
      return False

    def supports_abi(abi, serial):
      # If no abis filter was given, every device passes.
      if abis and abi not in abis:
        logger.warning("Device %s doesn't support required ABIs.", serial)
        return False
      return True

    def _get_devices():
      # Builds the filtered device list; raises when the count violates the
      # allow_no_devices / select_multiple constraints computed above.
      if device_arg:
        devices = [cls(x, **kwargs) for x in device_arg if not blacklisted(x)]
      else:
        devices = []
        for adb in adb_wrapper.AdbWrapper.Devices():
          serial = adb.GetDeviceSerial()
          if not blacklisted(serial):
            device = cls(_CreateAdbWrapper(adb), **kwargs)
            if supports_abi(device.GetABI(), serial):
              devices.append(device)
      if len(devices) == 0 and not allow_no_devices:
        raise device_errors.NoDevicesError()
      if len(devices) > 1 and not select_multiple:
        raise device_errors.MultipleDevicesError(devices)
      return sorted(devices)

    def _reset_devices():
      # Best-effort USB reset; reset_usb may be unavailable on this platform.
      if not reset_usb:
        logging.error(
            'reset_usb.py not supported on this platform (%s). Skipping usb '
            'resets.', sys.platform)
        return
      if device_arg:
        for serial in device_arg:
          reset_usb.reset_android_usb(serial)
      else:
        reset_usb.reset_all_android_devices()

    # NOTE: xrange is Python-2-only, consistent with the rest of this file.
    for attempt in xrange(retries+1):
      try:
        return _get_devices()
      except device_errors.NoDevicesError:
        if attempt == retries:
          logging.error('No devices found after exhausting all retries.')
          raise
        elif attempt == retries - 1 and enable_usb_resets:
          logging.warning(
              'Attempting to reset relevant USB devices prior to the last '
              'attempt.')
          _reset_devices()
        # math.pow returns floats, so cast to int for easier testing
        sleep_s = min(int(math.pow(2, attempt + 1)), 60)
        logger.warning(
            'No devices found. Will try again after restarting adb server '
            'and a short nap of %d s.', sleep_s)
        time.sleep(sleep_s)
        RestartServer()
  @decorators.WithTimeoutAndRetriesFromInstance()
  def RestartAdbd(self, timeout=None, retries=None):
    """Restarts adbd on the device and waits for it to come back.

    Writes _RESTART_ADBD_SCRIPT to a device-side temp file, sources it as
    root, then blocks until adb reports the device is available again.

    Args:
      timeout: timeout in seconds
      retries: number of retries
    """
    logger.info('Restarting adbd on device.')
    with device_temp_file.DeviceTempFile(self.adb, suffix='.sh') as script:
      self.WriteFile(script.name, _RESTART_ADBD_SCRIPT)
      self.RunShellCommand(
          ['source', script.name], check_return=True, as_root=True)
      self.adb.WaitForDevice()
  @decorators.WithTimeoutAndRetriesFromInstance()
  def GrantPermissions(self, package, permissions, timeout=None, retries=None):
    """Grants runtime permissions to a package via ``pm grant``.

    Args:
      package: Name of the package to grant permissions to.
      permissions: Iterable of permission names. Entries matching
        _PERMISSIONS_BLACKLIST_RE are silently skipped.
      timeout: timeout in seconds
      retries: number of retries
    """
    if not permissions:
      return
    permissions = set(
        p for p in permissions if not _PERMISSIONS_BLACKLIST_RE.match(p))
    # Always grant READ_EXTERNAL_STORAGE alongside WRITE_EXTERNAL_STORAGE.
    if ('android.permission.WRITE_EXTERNAL_STORAGE' in permissions
        and 'android.permission.READ_EXTERNAL_STORAGE' not in permissions):
      permissions.add('android.permission.READ_EXTERNAL_STORAGE')
    # Grant everything in one shell invocation. After each grant the script
    # echoes "<sep><permission><sep><exit status><sep>" so the per-permission
    # results can be parsed from the raw output below.
    script = ';'.join([
        'p={package}',
        'for q in {permissions}',
        'do pm grant "$p" "$q"',
        'echo "{sep}$q{sep}$?{sep}"',
        'done'
    ]).format(
        package=cmd_helper.SingleQuote(package),
        permissions=' '.join(
            cmd_helper.SingleQuote(p) for p in sorted(permissions)),
        sep=_SHELL_OUTPUT_SEPARATOR)
    logger.info('Setting permissions for %s.', package)
    res = self.RunShellCommand(
        script, shell=True, raw_output=True, large_output=True,
        check_return=True)
    # Split into [output, permission, status, output, permission, status, ...]
    # and keep the entries whose exit status is non-zero.
    res = res.split(_SHELL_OUTPUT_SEPARATOR)
    failures = [
        (permission, output.strip())
        for permission, status, output in zip(res[1::3], res[2::3], res[0::3])
        if int(status)]
    if failures:
      logger.warning(
          'Failed to grant some permissions. Blacklist may need to be updated?')
      for permission, output in failures:
        # Try to grab the relevant error message from the output.
        m = _PERMISSIONS_EXCEPTION_RE.search(output)
        if m:
          error_msg = m.group(0)
        elif len(output) > 200:
          error_msg = repr(output[:200]) + ' (truncated)'
        else:
          error_msg = repr(output)
        logger.warning('- %s: %s', permission, error_msg)
  @decorators.WithTimeoutAndRetriesFromInstance()
  def IsScreenOn(self, timeout=None, retries=None):
    """Determines if screen is on.

    Dumpsys input_method exposes screen on/off state. Below is an explanation
    of the states.

    Pre-L:
      On: mScreenOn=true
      Off: mScreenOn=false

    L+:
      On: mInteractive=true
      Off: mInteractive=false

    Returns:
      True if screen is on, false if it is off.

    Raises:
      device_errors.CommandFailedError: If screen state cannot be found.
    """
    # The dumpsys key changed name in Lollipop; pick the right one.
    if self.build_version_sdk < version_codes.LOLLIPOP:
      input_check = 'mScreenOn'
      check_value = 'mScreenOn=true'
    else:
      input_check = 'mInteractive'
      check_value = 'mInteractive=true'
    dumpsys_out = self._RunPipedShellCommand(
        'dumpsys input_method | grep %s' % input_check)
    if not dumpsys_out:
      raise device_errors.CommandFailedError(
          'Unable to detect screen state', str(self))
    return check_value in dumpsys_out[0]
  @decorators.WithTimeoutAndRetriesFromInstance()
  def SetScreen(self, on, timeout=None, retries=None):
    """Turns screen on and off.

    Args:
      on: bool to decide state to switch to. True = on False = off.
      timeout: timeout in seconds
      retries: number of retries
    """
    def screen_test():
      return self.IsScreenOn() == on

    if screen_test():
      logger.info('Screen already in expected state.')
      return
    # Toggle via the power key, then poll until the state matches.
    self.SendKeyEvent(keyevent.KEYCODE_POWER)
    timeout_retry.WaitFor(screen_test, wait_period=1)
  @decorators.WithTimeoutAndRetriesFromInstance()
  def ChangeOwner(self, owner_group, paths, timeout=None, retries=None):
    """Changes file system ownership for permissions.

    Args:
      owner_group: New owner and group to assign. Note that this should be a
        string in the form user[.group] where the group is optional.
      paths: Paths to change ownership of.

      Note that the -R recursive option is not supported by all Android
      versions.
    """
    if not paths:
      return
    self.RunShellCommand(['chown', owner_group] + paths, check_return=True)
  @decorators.WithTimeoutAndRetriesFromInstance()
  def ChangeSecurityContext(self, security_context, paths, timeout=None,
                            retries=None):
    """Changes the SELinux security context for files.

    Args:
      security_context: The new security context as a string
      paths: Paths to change the security context of.

      Note that the -R recursive option is not supported by all Android
      versions.
    """
    if not paths:
      return
    command = ['chcon', security_context] + paths
    # Note, need to force su because chcon can fail with permission errors
    # even if the device is rooted.
    self.RunShellCommand(command, as_root=_FORCE_SU, check_return=True)
| [
"commit-bot@chromium.org"
] | commit-bot@chromium.org |
a13eef2c9f8d1c682afb5937f3c0ee35111df469 | 543286f4fdefe79bd149ff6e103a2ea5049f2cf4 | /Exercicios&cursos/Curso_em_video(exercicios)/ex069.py | 854d134399c74f92c2fa1147ab4f12df2330b19b | [] | no_license | antonioleitebr1968/Estudos-e-Projetos-Python | fdb0d332cc4f12634b75984bf019ecb314193cc6 | 9c9b20f1c6eabb086b60e3ba1b58132552a84ea6 | refs/heads/master | 2022-04-01T20:03:12.906373 | 2020-02-13T16:20:51 | 2020-02-13T16:20:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 864 | py | faixa = '-=' * 30
faixa2 = '-' * 60
titulo = 'ANALISE DE DADOS'
# Counters: women under 20, men, and people strictly over 18.
mulhermenor20 = homens = maior18 = 0
print(f'{faixa}')
print(f'{titulo:^60}')
print(f'{faixa}')
while True:
    idade = int(input('Idade: '))
    # "maiores de 18 anos": strictly greater than 18, so 18 itself is excluded.
    if idade > 18:
        maior18 += 1
    # Re-prompt until a valid sex letter is entered; 'X' just seeds the loop.
    # NOTE(review): [0] raises IndexError if the user presses Enter on an
    # empty line.
    sexo = 'X'
    while sexo not in 'FM':
        sexo = str(input('Sexo: [F/M] ')).strip().upper()[0]
    if sexo == 'M':
        homens += 1
    if sexo == 'F' and idade < 20:
        mulhermenor20 += 1
    # Same validation pattern for the continue/stop answer.
    resposta = 'X'
    while resposta not in 'SN':
        resposta = str(input('Quer continuar? [S/N] ')).strip().upper()[0]
    if resposta == 'N':
        break
print(f'{faixa2}')
print(faixa)
print('FIM DO PROGRAMA')
print(faixa)
# Final summary of the collected statistics.
print(f'No total foram {maior18} pessoas maiores de 18 anos,', end=' ')
print(f' {homens} homens registrados e {mulhermenor20} mulheres menores de 20 anos.')
| [
"progmatheusmorais@gmail.com"
] | progmatheusmorais@gmail.com |
0971ac262da6ca6e551f02234105af8c202c022b | fa0ae8d2e5ecf78df4547f0a106550724f59879a | /Numpy/day01/DS/code/day01/shape.py | 725f0c91dba7df05a2d0f848223a328dee0cdc5e | [] | no_license | Polaris-d/note | 71f8297bc88ceb44025e37eb63c25c5069b7d746 | 6f1a9d71e02fb35d50957f2cf6098f8aca656da9 | refs/heads/master | 2020-03-22T17:51:49.118575 | 2018-09-06T03:36:00 | 2018-09-06T03:36:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 836 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import numpy as np

# 1-D array: arange is half-open, so this is [1, 2].
a = np.arange(1, 3)
print(a, a.shape, sep='\n')
# 2-D array, shape (2, 3).
b = np.array([[1, 2, 3],
              [4, 5, 6]])
print(b, b.shape, sep='\n')
# 3-D array, shape (2, 3, 4), assembled from arange slices.
c = np.array([[np.arange(1, 5),
               np.arange(5, 9),
               np.arange(9, 13)],
              [np.arange(13, 17),
               np.arange(17, 21),
               np.arange(21, 25)]])
print(c, c.shape, type(c), sep='\n')
print(a.dtype)
# String arrays get a fixed-width dtype sized by the longest element.
d = np.array(['A', 'B', 'C', 'DEF'])
print(d.dtype)
print(d)
# reshape returns a new array object; d itself is unchanged (both prints show it).
e = d.reshape(2, 2)
print(d)
print(e)
# astype returns a converted copy; a keeps its original integer dtype.
f = a.astype(str)
print(a.dtype)
print(f.dtype)
print(f)
# Chained indexing c[i][j][k] is equivalent to tuple indexing c[i, j, k].
for i in range(c.shape[0]):
    for j in range(c.shape[1]):
        for k in range(c.shape[2]):
            print(c[i][j][k], c[i, j, k])
# Partial indexing yields sub-arrays of decreasing rank.
print(c[0])
print(c[0, 0])
print(c[0, 0, 0])
"610079251@qq.com"
] | 610079251@qq.com |
3aba794c0288ded329f17467797fdeea487a623c | 0a7c9373185d387b025acc4fe3b4efe1e58c183e | /src/experiment1_use_opencv/py/keras_training.py | 1731faedb6b2d62ab47dfda3e5b54b76e690e2cc | [
"MIT"
] | permissive | yingshaoxo/tensorflowjs-posenet-research | a1acc4f1d6876932ad8b6742b2428457bebb9d46 | 91627c52822adb6adfd76a8c4cca34384ec6f077 | refs/heads/master | 2020-03-27T21:19:26.980730 | 2018-09-03T01:31:20 | 2018-09-03T01:31:20 | 147,135,102 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,240 | py | import os
from auto_everything.base import IO
io = IO()
import json
from pprint import pprint
import numpy as np

# Every per-class JSON pose file in ../data; a file's index in this list
# becomes its class label below.
files = os.listdir("../data")
files = [file for file in files if '.json' in file]
# NOTE(review): ``global`` at module level is a no-op; x and y are plain
# module globals assigned inside the loop.
global x, y
for index, file in enumerate(files):
    # get x: load one file's samples into an ndarray.
    xx = json.loads(io.read("../data/{name}".format(name=file)))
    xx = np.array(xx)
    print(xx.shape)
    # get y: label every sample of this file with the file's index.
    #yy = np.zeros(xx.shape[0])
    yy = np.full(xx.shape[0], index)
    if index == 0:
        x = xx
        y = yy
    else:
        x = np.append(x, xx, axis=0)
        y = np.append(y, yy, axis=0)
# randomnize data: shuffle samples and labels with the same permutation.
index = np.arange(x.shape[0])
np.random.shuffle(index)
# 3D to 2D: flatten each sample into a 34-element feature vector
# (matching input_dim=34 below; presumably 17 keypoints x 2 coords -- confirm).
x = x[index]
x = x.reshape(len(x), -1)
y = y[index]
print(x.shape)
print(y.shape[0])
print(x)
print(y)
from keras.models import Sequential
from keras.layers import Dense
model = Sequential()
model.add(Dense(34, input_dim=34, activation='relu'))
model.add(Dense(21, activation='relu'))
# NOTE(review): a single linear output trained with the logcosh regression
# loss, while y holds integer class indices; for classification a softmax
# output with a crossentropy loss is conventional -- confirm intent.
model.add(Dense(1))
model.compile(loss='logcosh', optimizer='adam', metrics=['accuracy'])
model.fit(x, y, batch_size=10, epochs=500)
# NOTE(review): test_x/test_y are slices of the training data, so this is
# not a held-out evaluation.
test_x = x[5:50]
test_y = y[5:50]
predicted = model.predict(test_x)
print(test_y)
print(predicted)
model.save("../pose_detect_model.h5")
| [
"yingshaoxo@gmail.com"
] | yingshaoxo@gmail.com |
54ffa7b9ce50ace8890d47976a16236c01bc54da | d554b1aa8b70fddf81da8988b4aaa43788fede88 | /5 - Notebooks e Data/1 - Análises numéricas/Arquivos David/Atualizados/logDicas-master/data/2019-1/225/users/4491/codes/1573_2897.py | 53be2056601ce33622953ff40274a3c112279c47 | [] | no_license | JosephLevinthal/Research-projects | a3bc3ca3b09faad16f5cce5949a2279cf14742ba | 60d5fd6eb864a5181f4321e7a992812f3c2139f9 | refs/heads/master | 2022-07-31T06:43:02.686109 | 2020-05-23T00:24:26 | 2020-05-23T00:24:26 | 266,199,309 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 188 | py | # Use este codigo como ponto de partida
# Read the input value and convert it to an integer.
num = int(input("Digite o numero: "))
# Compute twice the number (printed on the next line).
f = num * 2
print(f) | [
"jvlo@icomp.ufam.edu.br"
] | jvlo@icomp.ufam.edu.br |
8e4240bce53538f22730417db80c199e0a496034 | 08428ba80f90f73bbce19e5bd0f423a1b4d025d7 | /src/registration_manager/panel.py | 5a00ffb7ad287770870a2bd2e23168d8114c7213 | [] | no_license | marcoverl/openstack-security-integrations | 0d3afe093b361c548b65be9e405e10318d51c7cd | 58c560885b007cf25444e552de17c0d6a5a0e716 | refs/heads/master | 2021-01-16T21:18:56.071490 | 2014-06-17T07:56:48 | 2014-06-17T07:56:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 279 | py |
from django.utils.translation import ugettext_lazy as _
import horizon
from openstack_dashboard.dashboards.admin import dashboard
class RegisterManager(horizon.Panel):
    """Horizon panel exposing the "Registrations" page."""
    name = _("Registrations")
    slug = 'registration_manager'
# Attach the panel to the Admin dashboard.
dashboard.Admin.register(RegisterManager)
| [
"paolo.andreetto@pd.infn.it"
] | paolo.andreetto@pd.infn.it |
74a42dd8995db8965a1cd052d8636c517784548c | ebbe2bf39e5146c0ab95d07e03c6cf65799651a3 | /readingtrackproject/urls.py | 2a131b7666cde880abb5b15e7040ef5a98edbe21 | [] | no_license | TareqMonwer/dpi-python-django-workshop | 43379b114807b236a79b06cce5c8c6356e926bfe | 67b2ea31f01f8e9781b9d2daf2b2bd9eacc15994 | refs/heads/master | 2023-07-30T19:37:16.594801 | 2021-06-16T18:39:43 | 2021-06-16T18:39:43 | 279,134,678 | 1 | 0 | null | 2021-09-22T19:28:15 | 2020-07-12T19:33:54 | JavaScript | UTF-8 | Python | false | false | 817 | py | """readingtrackproject URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
urlpatterns = [
path('admin/', admin.site.urls),
path('', include('readingtracker.urls')),
]
| [
"tareqmonwer137@gmail.com"
] | tareqmonwer137@gmail.com |
87484cd5541d4353e73c169808da3e10751ad6b6 | 139715a923c8c82b172803d5bdc1b1bca46fbdf3 | /leetcode/edit_distance.py | 93d846c3c6b853be90ff7d4600bfe1c8b780533a | [] | no_license | haoccheng/pegasus | ab32dcc4265ed901e73790d8952aa3d72bdf72e7 | 76cbac7ffbea738c917e96655e206f8ecb705167 | refs/heads/master | 2021-01-10T11:33:23.038288 | 2016-03-18T04:17:28 | 2016-03-18T04:17:28 | 46,103,391 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 730 | py | # edit distance.
import sys
def edit_distance(word1, word2):
    """Return the Levenshtein edit distance between word1 and word2.

    Counts the minimum number of single-character insertions, deletions and
    substitutions needed to turn word1 into word2, using the classic
    O(len(word1) * len(word2)) dynamic-programming table.

    Args:
        word1: first string.
        word2: second string.

    Returns:
        Non-negative int edit distance.
    """
    # If either string is empty the distance is the length of the other
    # (max() also covers both-empty -> 0).
    if len(word1) == 0 or len(word2) == 0:
        return max(len(word1), len(word2))

    # scores[i][j] = distance between word1[:i] and word2[:j].
    scores = [[0] * (len(word2) + 1) for _ in range(len(word1) + 1)]
    for i in range(len(word1) + 1):
        scores[i][0] = i  # delete all i characters of word1
    for j in range(len(word2) + 1):
        scores[0][j] = j  # insert all j characters of word2

    for i in range(1, len(word1) + 1):
        for j in range(1, len(word2) + 1):
            # Substitution is free when the characters already match.
            # (Removed the dead ``s0 = sys.maxint`` store, which was also
            # Python-2-only and immediately overwritten in both branches.)
            substitute = scores[i - 1][j - 1] + (
                0 if word1[i - 1] == word2[j - 1] else 1)
            delete = scores[i - 1][j] + 1
            insert = scores[i][j - 1] + 1
            scores[i][j] = min(substitute, delete, insert)
    return scores[-1][-1]
print edit_distance('abc', 'abe')
| [
"haoc.cheng@gmail.com"
] | haoc.cheng@gmail.com |
82396827ebf5c7d3fd9a5a7608b6a4bfabebaa49 | b76d2b037c2af38edfeb718deae22697444ee121 | /proto-chain/clientX.py | 088c20d16f02d7dd5c02678f617c8019405a3791 | [] | no_license | hyo07/proto-chain | e8aa1a5d549310da79e9b28c55683a9361c48fe5 | 4fd3e91d6f9d4912634759a908f5a2bf0c1f4d47 | refs/heads/master | 2020-06-06T07:36:27.302334 | 2019-07-01T07:07:46 | 2019-07-01T07:07:46 | 192,679,743 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,661 | py | import signal
from time import sleep
import json
import sys
from core.client_core import ClientCore
from p2p.message_manager import MSG_NEW_TRANSACTION
args = sys.argv
my_p2p_client = None
def signal_handler(sig, frame):
    """SIGINT handler: shut the global P2P client down cleanly.

    Args:
        sig: signal number (unused). Renamed from ``signal`` so the
            parameter no longer shadows the imported ``signal`` module.
        frame: current stack frame (unused).
    """
    shutdown_client()
def shutdown_client():
    """Stop the global ClientCore instance."""
    global my_p2p_client
    my_p2p_client.shutdown()
def make_transaction():
    """Interactively collect transactions until the user chooses to send.

    Prompts repeatedly via input_set(); after each entry the user picks
    whether to continue (1) or send (2). On "send", waits 5 seconds so the
    user can still abort with Ctrl+C.

    Returns:
        list: transaction dicts with 'sender', 'recipient' and 'value' keys.
    """
    transaction_list = []
    print("トランザクションを登録できます\n"
          "中断する場合は、'Ctlr + C'を入力してください\n")
    while True:
        transaction_list.append(input_set())
        # Fix: compare ints with ==, not ``is`` -- identity comparison on int
        # literals only works via CPython's small-int cache and emits a
        # SyntaxWarning on Python 3.8+.
        if check_continue_input() == 2:
            print("5秒後、トランザクションを送信します\n"
                  "中断する場合は、'Ctlr + C'を入力してください")
            sleep(5)
            break
    return transaction_list
def input_set():
    """Prompt for one transaction on stdin.

    Returns:
        dict with 'sender', 'recipient' and int 'value' keys.
        Note: int() raises ValueError if the value entered is not numeric.
    """
    print("-------------------------\n"
          "トランザクション登録")
    sender = input("sender: ")
    recipient = input("recipient: ")
    value = int(input("value: "))
    return {'sender': sender, 'recipient': recipient, 'value': value}
def check_continue_input():
    """Ask whether to keep registering transactions or send them.

    Loops until the user enters "1" (continue) or "2" (send).

    Returns:
        int: 1 to continue registering, 2 to send.
    """
    print("続く操作を選択してください")
    while True:
        select_num = input("""
        1: トランザクションの登録を続ける
        2: 登録したトランザクションを送信する
        >>>>> """)
        if (select_num == "1") or (select_num == "2"):
            break
        else:
            print("正しい値を入力してください(1 or 2)")
    return int(select_num)
def main():
    """Collect transactions, connect to the P2P network and broadcast them.

    Command-line usage: clientX.py [host [core_port]]
    """
    transaction_list = make_transaction()
    print("------------------------------------------------------------\n")
    signal.signal(signal.SIGINT, signal_handler)

    global my_p2p_client
    # Bug fix: the original probed ``args[2]`` first inside a try/except
    # IndexError, so running with a single "host" argument raised before the
    # ``elif args[1]`` branch was reached and fell through to the
    # no-argument default, silently ignoring the given host.
    if len(args) >= 3:
        my_p2p_client = ClientCore(50085, args[1], int(args[2]))
    elif len(args) == 2:
        my_p2p_client = ClientCore(50085, args[1])
    else:
        my_p2p_client = ClientCore(50085)
    my_p2p_client.start()
    sleep(10)

    # Broadcast each transaction with a small gap between sends.
    for transaction in transaction_list:
        my_p2p_client.send_message_to_my_core_node(
            MSG_NEW_TRANSACTION, json.dumps(transaction))
        sleep(2)
    sleep(10)
    shutdown_client()
if __name__ == '__main__':
main()
| [
"yutaka727kato@gmail.com"
] | yutaka727kato@gmail.com |
909cfda3e71c9d16ac86392b52fa53e59d878ebc | f4df3ec2649593deaa645d5a8698e6c5ef527d5e | /back/wsgi/__init__.py | a592f736613bd2df1273c470136f7b8a03ba97be | [] | no_license | Spencatro/spyfall | 8ebcfea87d11c0a764f738bb18ca84998206008d | 848e5494ecc4c9c85c591be04c2b06aa2121a183 | refs/heads/master | 2021-05-29T02:44:29.191589 | 2015-01-01T00:38:56 | 2015-01-01T00:38:56 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,815 | py | import json
import random
from flask import Flask, jsonify, request, redirect, url_for
from flask import render_template, abort
import urllib
from flask.ext.pymongo import PyMongo
class SpyfallApp(Flask):
    def __init__(self, arg):
        """Create the Flask app and register every URL route on it.

        Args:
            arg: Flask import name, forwarded to Flask.__init__.
        """
        super(SpyfallApp, self).__init__(arg)
        # Route table: URL rule -> bound handler method.
        self.route("/")(self.index)
        self.route("/confirm_player/<game_name>/<player_name>/")(self.confirm_player)
        self.route("/debug/<command>/")(self.debug)
        self.route("/delete_all_games/super_secret_password/")(self.delete_all_games)
        self.route("/dump_db/")(self.dump_db)
        self.route("/game_exists/<game_name>/")(self.game_exists)
        self.route("/game_state/<game_name>/")(self.get_game_state)
        self.route("/join_game/<game_name>/<player_name>/")(self.join_game)
        self.route("/list_games/")(self.list_games)
        self.route("/list_location_objects/")(self.list_location_objects)
        self.route("/list_players_in_game/<game_name>/")(self.list_players_in_game)
        self.route("/map/super_secret_password/")(self.map)
        self.route("/new_game/<game_name>/<player_name>/")(self.new_game)
        self.route("/player_role/<game_name>/<player_name>/")(self.get_player_role)
        self.route("/remove_player_from_game/<game_name>/<player_name>/")(self.remove_player_from_game)
        self.route("/remote_log/<game_name>/<player_name>/<timestamp>/<log_str>")(self.remote_log)
        self.route("/reset_game/<game_name>/<end_type>/")(self.reset_game)
        self.route("/show_logs/")(self.show_logs)
        # Must be injected via set_mongo() before any DB-backed route is used.
        self.mongo = None
def query_to_list(self, query, remove_id = True):
result = []
for q in query:
if remove_id:
q.pop("_id")
result.append(q)
return result
def get_map_list(self):
mongo_result = self.mongo.db.maps.find()
map_list = []
for map in mongo_result:
map_list.append(map['name'])
return map_list
    def list_location_objects(self):
        """Route: GET /list_location_objects/ - map name -> image URL or None."""
        map_dict = {}
        for map_obj in self.mongo.db.maps.find():
            # Default to None; overwrite when the map document has an image.
            map_dict[map_obj['name']] = None
            if 'img_url' in map_obj.keys():
                map_dict[map_obj['name']] = map_obj['img_url']
        return self.allow_cross(jsonify(map_dict))
    def set_mongo(self, mongo):
        """Attach the PyMongo instance used by all DB-backed routes."""
        self.mongo = mongo
def has_no_empty_params(self, rule):
defaults = rule.defaults if rule.defaults is not None else ()
arguments = rule.arguments if rule.arguments is not None else ()
return len(defaults) >= len(arguments)
def allow_cross(self, return_value, code=200):
return return_value, code, {'Access-Control-Allow-Origin': '*'}
    def map(self):
        """Debug endpoint: list every registered URL rule as formatted text.

        NOTE(review): ``urllib.unquote`` only exists on Python 2; on
        Python 3 this would need ``urllib.parse.unquote``.  Relies on the
        module-level ``app`` global rather than ``self``.
        """
        output = []
        for rule in app.url_map.iter_rules():
            options = {}
            for arg in rule.arguments:
                # Placeholder value so url_for can build the rule's URL.
                options[arg] = "[{0}]".format(arg)
            methods = ','.join(rule.methods)
            url = url_for(rule.endpoint, **options)
            line = urllib.unquote("{:50s} {:20s} {}".format(rule.endpoint, methods, url))
            output.append(line)
        return jsonify({'map':output})
    def index(self):
        """Root page: minimal HTML banner, served with CORS headers."""
        return self.allow_cross("<h1>Backend</h1>")
    def debug(self, command):
        """Evaluate *command* taken from the URL and return str(result).

        SECURITY: this eval()s untrusted request input, allowing arbitrary
        code execution; it must never be reachable outside a trusted
        development environment.
        """
        return str(eval(command))
def dump_db(self):
games_query = self.mongo.db.games.find()
maps_query = self.mongo.db.maps.find()
return jsonify({'maps':self.get_map_list(), 'games':self.query_to_list(games_query)})
def new_game_object(self, game_name, player_name):
d = {}
d['name'] = game_name
d['players'] = []
d['state'] = "adding"
d['map'] = None
d['round'] = 1
d['results'] = {}
return d
def new_log_object(self, game_name, player_name, timestamp, log_str):
d = {}
d['game_name'] = game_name
d['player_name'] = player_name
d['log'] = log_str
d['timestamp'] = timestamp
return d
def new_game(self, game_name, player_name):
result = {'game_created': game_name, 'already_existed':False, 'already_in_game':False}
if not self.game_exists(game_name, no_http=True):
self.mongo.db.games.insert(self.new_game_object(game_name, player_name))
else:
result['already_existed'] = True
join_result = self.join_game(game_name, player_name, no_http=True)
if 'already_in_game' in join_result.keys():
result['already_in_game'] = True
return self.allow_cross(jsonify(result))
    def delete_all_games(self):
        """Drop the entire games collection (password-protected route)."""
        self.mongo.db.games.drop()
        return "success"
    def player_is_in_game(self, game_name, player_name):
        """True when *player_name* appears in *game_name*'s players array.

        NOTE(review): ``cursor.count()`` was removed in modern PyMongo;
        this code targets a legacy driver version.
        """
        return self.mongo.db.games.find({"players.name":player_name, "name":game_name}).count() > 0
    def game_exists(self, game_name, no_http = False):
        """Whether a game named *game_name* is in the database.

        Returns a bare bool when no_http is True, otherwise a JSON
        response with CORS headers.
        """
        result = self.mongo.db.games.find({'name':game_name}).count() > 0
        if no_http:
            return result
        return self.allow_cross(jsonify({"result":result}))
    def join_game(self, game_name, player_name, no_http=False):
        """Add *player_name* to *game_name* unless already present.

        New players start unconfirmed with role "Player".  Returns a dict
        when no_http is True, otherwise a JSON response;
        'already_in_game' reports whether the join was a no-op.
        """
        result = {'success':True, 'already_in_game':False}
        if not self.player_is_in_game(game_name, player_name):
            self.mongo.db.games.update({'name':game_name},{"$push":{'players':{'name':player_name, 'confirmed':False, 'role':"Player"}}})
        else:
            result['already_in_game'] = True
        if no_http:
            return result
        return self.allow_cross(jsonify(result))
    def remove_player_from_game(self, game_name, player_name):
        """Remove *player_name* from *game_name*'s players array.

        'not_in_game' in the JSON result reports a no-op removal.
        """
        result = {'success':True, 'not_in_game':False}
        if not self.player_is_in_game(game_name, player_name):
            result['not_in_game'] = True
        else:
            self.mongo.db.games.update({'name':game_name, 'players.name':player_name}, {"$pull":{"players":{"name":player_name}}})
        return self.allow_cross(jsonify(result))
def remote_log(self, game_name, player_name, timestamp, log_str):
result = "ok"
self.mongo.db.logs.insert(self.new_log_object(game_name, player_name, timestamp, log_str));
return self.allow_cross(result)
    def show_logs(self):
        """Return every remote log entry as JSON (Mongo _id stripped)."""
        logs = self.mongo.db.logs.find()
        return jsonify({"logs":self.query_to_list(logs)})
def list_players_in_game(self, game_name, no_http = False):
player_obj = {}
player_object_list = self.mongo.db.games.find_one({"name":game_name})['players']
for p_obj in player_object_list:
player_obj[p_obj['name']] = p_obj['confirmed']
if no_http:
return [key for key in player_obj.keys()]
return self.allow_cross(jsonify({'players':player_obj}))
def list_games(self):
game_obj_list = self.query_to_list(self.mongo.db.games.find())
game_list = []
for obj in game_obj_list:
game_name = obj['name']
game_list.append(game_name)
return self.allow_cross(jsonify({'games':game_list}))
def get_game_state(self, game_name, no_http = False):
state = self.mongo.db.games.find_one({'name':game_name})['state']
if no_http:
return state
return self.allow_cross(jsonify({'state':state}))
    def get_player_role(self, game_name, player_name):
        """Return a player's role, the current round, and the location.

        The location (the game's map) is withheld — reported as
        "Unknown" — when the player's role is "Spy".
        """
        result = {}
        location = self.mongo.db.games.find_one({"name":game_name})['map']
        game = self.mongo.db.games.find_one({"players.name":player_name, "name":game_name})
        players = game['players']
        result['role'] = "Unknown"
        for player_obj in players:
            if player_name == player_obj['name']:
                result['role'] = player_obj['role']
        result['location'] = "Unknown"
        if result['role'] != "Spy":
            result['location'] = location
        result['round'] = game['round']
        return self.allow_cross(jsonify(result))
    def confirm_player(self, game_name, player_name):
        """Mark *player_name* confirmed; start the game once all confirm.

        When the last player confirms (and the game is not already in the
        "playing" state), a random map is chosen and one random player is
        promoted to "Spy" via a positional "players.<idx>.role" update.

        NOTE(review): the spy index is taken from list_players_in_game();
        if that list's ordering ever differs from the Mongo 'players'
        array order, the wrong array element would be updated — confirm.
        """
        self.mongo.db.games.update({'name':game_name,'players' : {"$elemMatch" : {"name":player_name}},},{"$set" :{"players.$.confirmed" : True}})
        players = self.mongo.db.games.find_one({"name":game_name})['players']
        all_confirmed = True
        for player_obj in players:
            if player_obj['confirmed'] == False:
                all_confirmed = False
        # Defaults reported back to the client when the game did not start.
        random_player_index = None
        len_players = None
        random_player_name = None
        if all_confirmed and self.get_game_state(game_name, no_http=True) != "playing": # Skip process if game already playing
            # Pick a random map
            maps = self.get_map_list()
            random_map_index = random.randint(0, len(maps)-1)
            game_map = maps[random_map_index]
            self.mongo.db.games.update({"name":game_name}, {"$set":{"map":game_map, "state":"playing"}})
            # Pick a random spy
            players = self.list_players_in_game(game_name,no_http=True)
            len_players = len(players)
            random_player_index = random.randint(0, len_players-1)
            random_player_name = players[random_player_index]
            self.mongo.db.games.update({"name":game_name},{"$set":{"players."+str(random_player_index)+".role":"Spy"}})
        return self.allow_cross(jsonify({'success':True, 'r_int':random_player_index, 'r_name':random_player_name, 'len_p':len_players}))
def reset_game(self, game_name, end_type):
# Pick a random map
maps = self.get_map_list()
random_map_index = random.randint(0, len(maps)-1)
game_map = maps[random_map_index]
# ------------------------------------------------------v set map ---v set state
self.mongo.db.games.update({"name":game_name}, {"$set":{"map":game_map, "state":"playing"}})
# update round
self.mongo.db.games.update({"name":game_name}, {"$inc":{"round":1}})
# Pick a random spy
players = self.list_players_in_game(game_name,no_http=True)
len_players = len(players)
# Reset all players to "Player"
for i in range(len_players):
self.mongo.db.games.update({"name":game_name},{"$set":{"players."+str(i)+".role":"Player"}})
random_player_index = random.randint(0, len_players-1)
random_player_name = players[random_player_index]
self.mongo.db.games.update({"name":game_name},{"$set":{"players."+str(random_player_index)+".role":"Spy"}})
updated_game_obj = mongo.db.games.find_one({"name":game_name})
return self.allow_cross(jsonify({"success":True, "round":updated_game_obj['round']}))
# Application bootstrap: build the app, point PyMongo at the "spyfall"
# database on port 27021, and hand the connection to the app.
app = SpyfallApp(__name__)
app.config['MONGO_PORT'] = 27021
app.config['MONGO_DBNAME'] = "spyfall"
mongo = PyMongo(app)
app.set_mongo(mongo)
if __name__ == "__main__":
    # NOTE(review): debug is the *string* "True" (truthy), not the bool True.
    app.run(debug = "True")
| [
"hawkins.spencer@gmail.com"
] | hawkins.spencer@gmail.com |
6cd1198a4f82961a4c7a78dfcd5be9ed8c45de47 | 2f98aa7e5bfc2fc5ef25e4d5cfa1d7802e3a7fae | /python/python_5366.py | f754dea2bb211c5338c9f0d91f52f31b943a1deb | [] | no_license | AK-1121/code_extraction | cc812b6832b112e3ffcc2bb7eb4237fd85c88c01 | 5297a4a3aab3bb37efa24a89636935da04a1f8b6 | refs/heads/master | 2020-05-23T08:04:11.789141 | 2015-10-22T19:19:40 | 2015-10-22T19:19:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 175 | py | # multiple d-bus session bus objects in python
bus1 = dbus.bus.BusConnection("tcp:host=192.168.0.1,port=1234")
bus2 = dbus.bus.BusConnection("tcp:host=192.168.0.2,port=1234")
| [
"ubuntu@ip-172-31-7-228.us-west-2.compute.internal"
] | ubuntu@ip-172-31-7-228.us-west-2.compute.internal |
ccad96c6a0e3a3644cb0519285c2ccddd9e3ad2b | 7701773efa258510951bc7d45325b4cca26b3a7d | /kivy_explore/hellow.py | cceaf36129a48ed0edcff3422d8396b5f66fa9b8 | [] | no_license | Archanciel/explore | c170b2c8b5eed0c1220d5e7c2ac326228f6b2485 | 0576369ded0e54ce7ff9596ec4df076e69067e0c | refs/heads/master | 2022-06-17T19:15:03.647074 | 2022-06-01T20:07:04 | 2022-06-01T20:07:04 | 105,314,051 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 309 | py | import kivy
kivy.require('1.0.7')
from kivy.app import App
from kivy.uix.boxlayout import BoxLayout
class MyHelloWGUI(BoxLayout):
    """Root widget exposing two button callbacks.

    NOTE(review): presumably these are wired to buttons in a matching
    .kv layout file — confirm against the kv source.
    """
    def sayHello(self):
        print('Hello !')
    def sayBye(self):
        print('Bye !')
class HelloWApp(App):
    """Application shell; no explicit build(), so Kivy falls back to its
    defaults (including looking up a .kv file by the app's name)."""
    pass
if __name__ == '__main__':
    HelloWApp().run()  # start the Kivy event loop
"jp.schnyder@gmail.com"
] | jp.schnyder@gmail.com |
55ce960f1c7ccea191a699610a7f6539b4617d3e | 2a54e8d6ed124c64abb9e075cc5524bb859ba0fa | /.history/3-OO-Python/13-dunder-methods_20200417032242.py | d9f6c382c9a09c6d06baeaffae52e92e3eecca08 | [] | no_license | CaptainStorm21/Python-Foundation | 01b5fbaf7a913506518cf22e0339dd948e65cea1 | a385adeda74f43dd7fb2d99d326b0be23db25024 | refs/heads/master | 2021-05-23T01:29:18.885239 | 2020-04-23T19:18:06 | 2020-04-23T19:18:06 | 253,171,611 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 218 | py | class Toy():
def __init__ (self, color, age):
self.color = color
self.age = age
def __str__(self):
action_figure = Toy ('red', 10)
print(action_figure.__str__())
# print(str(action_figure))
| [
"tikana4@yahoo.com"
] | tikana4@yahoo.com |
c37f9cb8b1c274e45069ac3ff3d0dcde11eef52c | f337bc5f179b25969ba73e7680ffb0a0616e3b97 | /python/BOJ/2XXX/2121.py | c87cd111128ccd88a8374937493e42bf22929655 | [] | no_license | raiders032/PS | 31771c5496a70f4730402698f743bbdc501e49a3 | 08e1384655975b868e80521167ec876b96fa01c8 | refs/heads/master | 2023-06-08T10:21:00.230154 | 2023-06-04T01:38:08 | 2023-06-04T01:38:08 | 349,925,005 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 571 | py | """
https://www.acmicpc.net/problem/2121
2121.넷이 놀기
실버3
풀이1.1792ms
"""
import sys
input = sys.stdin.readline
N = int(input())
A, B = map(int, input().split())
points = set()
sheep_count = 0
for _ in range(N):
x, y = map(int, input().split())
points.add((x, y))
dx = [A, A, 0]
dy = [0, B, B]
for x, y in points:
is_valid = True
for i in range(3):
nx = x + dx[i]
ny = y + dy[i]
if (nx, ny) not in points:
is_valid = False
break
if is_valid:
sheep_count += 1
print(sheep_count) | [
"nameks@naver.com"
] | nameks@naver.com |
90f8bc3f0964f54d0e651907e690a7bf85c005a1 | 916480ae24345193efa95df013f637e0a115653b | /web/transiq/sme/migrations/0026_smesummary.py | cdaad0519ec3f2a3ad05aedf28f7880fe64b8bc6 | [
"Apache-2.0"
] | permissive | manibhushan05/tms | 50e289c670e1615a067c61a051c498cdc54958df | 763fafb271ce07d13ac8ce575f2fee653cf39343 | refs/heads/master | 2022-12-11T07:59:30.297259 | 2021-09-08T03:24:59 | 2021-09-08T03:24:59 | 210,017,184 | 0 | 0 | Apache-2.0 | 2022-12-08T02:35:01 | 2019-09-21T16:23:57 | Python | UTF-8 | Python | false | false | 1,208 | py | # Generated by Django 2.0.5 on 2018-11-18 19:28
import django.contrib.postgres.fields.jsonb
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated (Django 2.0.5): add the SmeSummary model.

    SmeSummary stores three JSONB accounting summaries, timestamps, and
    soft-delete bookkeeping, linked one-to-one to sme.Sme.  Applied
    migrations should not be hand-edited.
    """

    dependencies = [
        ('sme', '0025_sme_material'),
    ]

    operations = [
        migrations.CreateModel(
            name='SmeSummary',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('billed_accounting_summary', django.contrib.postgres.fields.jsonb.JSONField()),
                ('placed_order_accounting_summary', django.contrib.postgres.fields.jsonb.JSONField()),
                ('accounting_summary', django.contrib.postgres.fields.jsonb.JSONField()),
                ('created_on', models.DateTimeField(auto_now_add=True)),
                ('updated_on', models.DateTimeField(auto_now=True)),
                ('deleted', models.BooleanField(default=False)),
                ('deleted_on', models.DateTimeField(blank=True, null=True)),
                ('sme', models.OneToOneField(on_delete=django.db.models.deletion.DO_NOTHING, to='sme.Sme')),
            ],
        ),
    ]
| [
"mani@myhost.local"
] | mani@myhost.local |
8cbbcd0822d14f3434b201030b47195413492bdd | 85a9ffeccb64f6159adbd164ff98edf4ac315e33 | /pysnmp-with-texts/NETLINK-SPECIFIC-MIB.py | b7216c9e8be65f794d2fd590978ec324be96be8e | [
"LicenseRef-scancode-warranty-disclaimer",
"LicenseRef-scancode-proprietary-license",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] | permissive | agustinhenze/mibs.snmplabs.com | 5d7d5d4da84424c5f5a1ed2752f5043ae00019fb | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | refs/heads/master | 2020-12-26T12:41:41.132395 | 2019-08-16T15:51:41 | 2019-08-16T15:53:57 | 237,512,469 | 0 | 0 | Apache-2.0 | 2020-01-31T20:41:36 | 2020-01-31T20:41:35 | null | UTF-8 | Python | false | false | 403,019 | py | #
# PySNMP MIB module NETLINK-SPECIFIC-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/NETLINK-SPECIFIC-MIB
# Produced by pysmi-0.3.4 at Wed May 1 14:19:55 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
OctetString, Integer, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "OctetString", "Integer", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueRangeConstraint, ConstraintsUnion, SingleValueConstraint, ConstraintsIntersection, ValueSizeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueRangeConstraint", "ConstraintsUnion", "SingleValueConstraint", "ConstraintsIntersection", "ValueSizeConstraint")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
mib_2, MibScalar, MibTable, MibTableRow, MibTableColumn, mgmt, Gauge32, IpAddress, Counter32, iso, TimeTicks, enterprises, MibIdentifier, Counter64, ObjectIdentity, Unsigned32, Integer32, ModuleIdentity, NotificationType, Bits = mibBuilder.importSymbols("SNMPv2-SMI", "mib-2", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "mgmt", "Gauge32", "IpAddress", "Counter32", "iso", "TimeTicks", "enterprises", "MibIdentifier", "Counter64", "ObjectIdentity", "Unsigned32", "Integer32", "ModuleIdentity", "NotificationType", "Bits")
PhysAddress, TextualConvention, RowStatus, DisplayString, MacAddress, TimeInterval = mibBuilder.importSymbols("SNMPv2-TC", "PhysAddress", "TextualConvention", "RowStatus", "DisplayString", "MacAddress", "TimeInterval")
class NlSubscriberAddress(OctetString):
    """pysmi-generated textual convention: an octet string restricted to
    a size of 1..15 bytes (a subscriber address)."""
    subtypeSpec = OctetString.subtypeSpec + ValueSizeConstraint(1, 15)
snaDLC = MibIdentifier((1, 3, 6, 1, 2, 1, 41))
sdlc = MibIdentifier((1, 3, 6, 1, 2, 1, 41, 1))
sdlcLSGroup = MibIdentifier((1, 3, 6, 1, 2, 1, 41, 1, 2))
sdlcLSAdminTable = MibIdentifier((1, 3, 6, 1, 2, 1, 41, 1, 2, 1))
sdlcLSAdminEntry = MibIdentifier((1, 3, 6, 1, 2, 1, 41, 1, 2, 1, 1))
sdlcLSAddress = MibScalar((1, 3, 6, 1, 2, 1, 41, 1, 2, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: sdlcLSAddress.setStatus('mandatory')
if mibBuilder.loadTexts: sdlcLSAddress.setDescription('This value is the poll address of the secondary link station for this SDLC link. It uniquely identifies the SDLC link station within a single SDLC port.')
netlink = MibIdentifier((1, 3, 6, 1, 4, 1, 173))
network = MibIdentifier((1, 3, 6, 1, 4, 1, 173, 6))
netstat = MibIdentifier((1, 3, 6, 1, 4, 1, 173, 6, 1))
nsMaxNeigh = MibScalar((1, 3, 6, 1, 4, 1, 173, 6, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nsMaxNeigh.setStatus('mandatory')
if mibBuilder.loadTexts: nsMaxNeigh.setDescription('Maximum number of neighbor nodes. This value is currently always 64 for FRX8000 and 20 for an N7400 or 7500. A neighbor is the closest node in a path from the local node.')
nsThisNode = MibScalar((1, 3, 6, 1, 4, 1, 173, 6, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nsThisNode.setStatus('mandatory')
if mibBuilder.loadTexts: nsThisNode.setDescription('This nodes number. (1-250) ')
nsNodTable = MibTable((1, 3, 6, 1, 4, 1, 173, 6, 1, 3), )
if mibBuilder.loadTexts: nsNodTable.setStatus('mandatory')
if mibBuilder.loadTexts: nsNodTable.setDescription('A table showing all nodes known to this node along with status and neighbor information.')
nsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 173, 6, 1, 3, 1), ).setIndexNames((0, "NETLINK-SPECIFIC-MIB", "nsNodNum"))
if mibBuilder.loadTexts: nsEntry.setStatus('mandatory')
if mibBuilder.loadTexts: nsEntry.setDescription('Each entry of the node table is indexed by node number.')
nsNodNum = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 6, 1, 3, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nsNodNum.setStatus('mandatory')
if mibBuilder.loadTexts: nsNodNum.setDescription('The node number of the desired node. This is the index into the node table.')
nsStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 6, 1, 3, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nsStatus.setStatus('mandatory')
if mibBuilder.loadTexts: nsStatus.setDescription('The operational status of the node. The status is given as: 1 = Node is defined in the database; 9 = Node is operational;')
nsNumNeigh = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 6, 1, 3, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nsNumNeigh.setStatus('mandatory')
if mibBuilder.loadTexts: nsNumNeigh.setDescription('Current number of neighbor nodes to this node.')
nsNeighTable = MibTable((1, 3, 6, 1, 4, 1, 173, 6, 1, 4), )
if mibBuilder.loadTexts: nsNeighTable.setStatus('mandatory')
if mibBuilder.loadTexts: nsNeighTable.setDescription('The neighbor table lists the status of a nodes neighbors.')
nsNeighEntry = MibTableRow((1, 3, 6, 1, 4, 1, 173, 6, 1, 4, 1), ).setIndexNames((0, "NETLINK-SPECIFIC-MIB", "nsNTNode"), (0, "NETLINK-SPECIFIC-MIB", "nsNTNeigh"))
if mibBuilder.loadTexts: nsNeighEntry.setStatus('mandatory')
if mibBuilder.loadTexts: nsNeighEntry.setDescription('The table entries are indexed by the node number and the neighbors node number.')
nsNTNode = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 6, 1, 4, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nsNTNode.setStatus('mandatory')
if mibBuilder.loadTexts: nsNTNode.setDescription("The node number of the node whose neighbor's status is being sought.")
nsNTNeigh = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 6, 1, 4, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nsNTNeigh.setStatus('mandatory')
if mibBuilder.loadTexts: nsNTNeigh.setDescription('The node number of the neighbor whose status is being sought.')
nsNTNeighStat = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 6, 1, 4, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("notConnected", 1), ("connected", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: nsNTNeighStat.setStatus('mandatory')
if mibBuilder.loadTexts: nsNTNeighStat.setDescription('The status of the neighbor node given as: 1 = previously, but not currently, connected; 2 = Currently connected; ')
local = MibIdentifier((1, 3, 6, 1, 4, 1, 173, 7))
node = MibIdentifier((1, 3, 6, 1, 4, 1, 173, 7, 1))
nodeCfgTable = MibIdentifier((1, 3, 6, 1, 4, 1, 173, 7, 1, 1))
nodeAlmTable = MibIdentifier((1, 3, 6, 1, 4, 1, 173, 7, 1, 2))
nodeSNMPGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 173, 7, 1, 3))
nodeModel = MibScalar((1, 3, 6, 1, 4, 1, 173, 7, 1, 3, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("model1", 1), ("model2", 2), ("rackmount", 3), ("highavail", 4), ("netfrad", 5), ("frx4000", 6), ("ss1800", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: nodeModel.setStatus('mandatory')
if mibBuilder.loadTexts: nodeModel.setDescription('Describes the unit model')
nodeTrapText = MibScalar((1, 3, 6, 1, 4, 1, 173, 7, 1, 3, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nodeTrapText.setStatus('mandatory')
if mibBuilder.loadTexts: nodeTrapText.setDescription('Text of the last alarm generated')
nodeTrapAdrTable = MibTable((1, 3, 6, 1, 4, 1, 173, 7, 1, 3, 3), )
if mibBuilder.loadTexts: nodeTrapAdrTable.setStatus('mandatory')
if mibBuilder.loadTexts: nodeTrapAdrTable.setDescription('A table used to define the IP address of end-nodes to receive alarms generated by this node. Up to 16 addresses can be specified.')
tpAdrEntry = MibTableRow((1, 3, 6, 1, 4, 1, 173, 7, 1, 3, 3, 1), ).setIndexNames((0, "NETLINK-SPECIFIC-MIB", "tpAdrIdx"))
if mibBuilder.loadTexts: tpAdrEntry.setStatus('mandatory')
if mibBuilder.loadTexts: tpAdrEntry.setDescription('A Trap entry containing objects relating to SNMP traps.')
tpAdrIdx = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 1, 3, 3, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tpAdrIdx.setStatus('mandatory')
if mibBuilder.loadTexts: tpAdrIdx.setDescription('The index into the trap configuration table')
tpAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 1, 3, 3, 1, 2), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: tpAddress.setStatus('mandatory')
if mibBuilder.loadTexts: tpAddress.setDescription('The IP Address of the end-station to send alarms')
tpAdrFlag = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 1, 3, 3, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("disableTraps", 1), ("enableTraps", 2), ("delete", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: tpAdrFlag.setStatus('mandatory')
if mibBuilder.loadTexts: tpAdrFlag.setDescription('Defines the state of this entry as: 0 = Do not send traps to the Address; 1 = Send traps to the Address; 2 = This Address entry is deleted from the table; ')
tpAdrSLev = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 1, 3, 3, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("critical", 1), ("major", 2), ("minor", 3), ("informational", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: tpAdrSLev.setStatus('mandatory')
if mibBuilder.loadTexts: tpAdrSLev.setDescription('Lowest severity level traps that will be sent to this Address; 1 is the highest, 4 is the lowest.')
nodeBagTable = MibIdentifier((1, 3, 6, 1, 4, 1, 173, 7, 1, 4))
hwcard = MibIdentifier((1, 3, 6, 1, 4, 1, 173, 7, 2))
rlpMaxProtos = MibScalar((1, 3, 6, 1, 4, 1, 173, 7, 2, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlpMaxProtos.setStatus('mandatory')
if mibBuilder.loadTexts: rlpMaxProtos.setDescription('Maximum number of protocols allowed on each RLP. This value is currently 11 for FRX4000s, 9 for FRX6000s, and 5 for FRX7000s and FRX8000s.')
rlpConfigTable = MibTable((1, 3, 6, 1, 4, 1, 173, 7, 2, 2), )
if mibBuilder.loadTexts: rlpConfigTable.setStatus('mandatory')
if mibBuilder.loadTexts: rlpConfigTable.setDescription('A Table to describe each RLP on the node')
rlpEntry = MibTableRow((1, 3, 6, 1, 4, 1, 173, 7, 2, 2, 1), ).setIndexNames((0, "NETLINK-SPECIFIC-MIB", "rlpIndex"))
if mibBuilder.loadTexts: rlpEntry.setStatus('mandatory')
if mibBuilder.loadTexts: rlpEntry.setDescription('An RLP entry containing objects relating to RLPs.')
rlpIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 2, 2, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlpIndex.setStatus('mandatory')
if mibBuilder.loadTexts: rlpIndex.setDescription('The RLP number on the node')
rlpStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 2, 2, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10))).clone(namedValues=NamedValues(("installed", 1), ("configured", 2), ("load-failed", 3), ("loading", 4), ("ipl-failed", 5), ("ipl-in-progress", 6), ("failed", 7), ("operational", 8), ("power-off", 9), ("power-on", 10)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlpStatus.setStatus('mandatory')
if mibBuilder.loadTexts: rlpStatus.setDescription('The current state of this RLP')
rlpMemorySize = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 2, 2, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlpMemorySize.setStatus('mandatory')
if mibBuilder.loadTexts: rlpMemorySize.setDescription('The amount of memory installed on this RLP')
rlpLIC1Type = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 2, 2, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 13))).clone(namedValues=NamedValues(("none", 1), ("rs232", 2), ("rs422", 3), ("v35", 4), ("hs-rs232", 5), ("x21", 6), ("rs449", 7), ("universal", 8), ("t1", 10), ("e1", 11), ("voice", 13)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlpLIC1Type.setStatus('mandatory')
if mibBuilder.loadTexts: rlpLIC1Type.setDescription('The Type of Line Interface card in the first position')
rlpLIC2Type = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 2, 2, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 13))).clone(namedValues=NamedValues(("none", 1), ("rs232", 2), ("rs422", 3), ("v35", 4), ("hs-rs232", 5), ("x21", 6), ("rs449", 7), ("universal", 8), ("t1", 10), ("e1", 11), ("voice", 13)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlpLIC2Type.setStatus('mandatory')
if mibBuilder.loadTexts: rlpLIC2Type.setDescription('The Type of Line Interface card in the second position')
rlpProtocol = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 2, 2, 1, 6), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rlpProtocol.setStatus('mandatory')
if mibBuilder.loadTexts: rlpProtocol.setDescription('The protocols configured on this RLP. This is an OCTET STRING where each octet represents a protocol type. The size of this is the maximum number of protocols allowed on an RLP (rlpMaxProtos). The protocol types are defined as: 00 = none 01 = X.25 02 = Frame Relay 03 = Async 04 = SDLC 05 = BSC Interactive 07 = IP 08 = SNMP 09 = RIP 0A = LLC2 0B = Trunk 0C = IPX 0D = Config 0E = LLC2-R 0F = IP-R 10 = NVSP 11 = Bridge ')
rlpGroupNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 2, 2, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 4))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlpGroupNumber.setStatus('mandatory')
if mibBuilder.loadTexts: rlpGroupNumber.setDescription('The group to which this RLP belongs- always 1 if not FRX7000.')
rlpGroupResponsibility = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 2, 2, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("primary", 1), ("secondary", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlpGroupResponsibility.setStatus('mandatory')
if mibBuilder.loadTexts: rlpGroupResponsibility.setDescription('The responsibility of this RLP within its group, always primary if not FRX7000.')
port = MibIdentifier((1, 3, 6, 1, 4, 1, 173, 7, 3))
portX25Group = MibIdentifier((1, 3, 6, 1, 4, 1, 173, 7, 3, 1))
portPhyX25AdminTable = MibTable((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 1), )
if mibBuilder.loadTexts: portPhyX25AdminTable.setStatus('mandatory')
if mibBuilder.loadTexts: portPhyX25AdminTable.setDescription('This table contains Netlink Enterprise specific objects to manage an X25 port. Changing one of these parameters may take effect in the operating port immediately or may wait until the interface is restarted depending on the details of the implementation. Most of the objects in this read-write table have corresponding read-only objects in the portX25OperTable that return the current operating value. The operating values may be different from these configured values if a configured parameter was configured after the interface was started.')
portPhyX25AdminEntry = MibTableRow((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 1, 1), ).setIndexNames((0, "NETLINK-SPECIFIC-MIB", "nlIfRlp"), (0, "NETLINK-SPECIFIC-MIB", "nlIfPort"))
if mibBuilder.loadTexts: portPhyX25AdminEntry.setStatus('mandatory')
if mibBuilder.loadTexts: portPhyX25AdminEntry.setDescription('A list of configured values for an X25 port.')
portPhyX25AdminConnector = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(3, 5, 6, 7, 8, 10, 11))).clone(namedValues=NamedValues(("rs232", 3), ("v35", 5), ("rs449", 6), ("rs530", 7), ("x21", 8), ("t1", 10), ("e1", 11)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portPhyX25AdminConnector.setStatus('mandatory')
if mibBuilder.loadTexts: portPhyX25AdminConnector.setDescription('Physical port interface connector type.')
portPhyX25AdminSpeed = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 1, 1, 2), Integer32().clone(64000)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portPhyX25AdminSpeed.setStatus('mandatory')
if mibBuilder.loadTexts: portPhyX25AdminSpeed.setDescription('This object defines the speed of the X25 port. The speed may only be set to one of a series of reasonable values, and if an attempt is made to set the speed to a value which is within the valid range but not equal to one of these values, the speed will be rounded up. If the connector type of the port is RS232, the port could be a standard port or a high speed port. If the port is a high speed RS232 port, the maximum valid speed is 256000. If the port is a standard RS232 port, the maximum valid speed is 64000 on the FRX4000/SS1840 and 19200 on all other products. It may be possible to set the speed of a standard RS232 port to a speed which is valid for a high speed RS232 port but invalid for a standard RS232 port. In this case, the port may not end up having the invalid speed. The default speed for a standard RS232 port on the FRX6000 is 19200.')
portPhyX25AdminGenerateClock = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('no')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portPhyX25AdminGenerateClock.setStatus('mandatory')
if mibBuilder.loadTexts: portPhyX25AdminGenerateClock.setDescription('Specifies whether the port will generate the clock necessary to synchronize traffic over the link.')
portPhyX25AdminRcvClockFromDTE = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('no')).setMaxAccess("readwrite")
# ---------------------------------------------------------------------------
# portPhyX25Admin table (OID ...173.7.3.1.1) — remaining columns.
# This file appears to be machine-generated (pysmi-compiled MIB); the OID
# tuples, constraints, defaults (.clone(...)) and DESCRIPTION strings are
# data copied from NETLINK-SPECIFIC-MIB and must not be hand-edited.
# The `if mibBuilder.loadTexts:` guards attach STATUS/DESCRIPTION text only
# when the MIB builder was asked to load texts.
# ---------------------------------------------------------------------------
if mibBuilder.loadTexts: portPhyX25AdminRcvClockFromDTE.setStatus('mandatory')
if mibBuilder.loadTexts: portPhyX25AdminRcvClockFromDTE.setDescription('This object defines whether the receive clock will be used from the DTE.')
# Column 5: dial behavior — none(1)/dialIn(2)/dialOut(3), default 'none'.
portPhyX25AdminDialOut = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("none", 1), ("dialIn", 2), ("dialOut", 3))).clone('none')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portPhyX25AdminDialOut.setStatus('mandatory')
# NOTE: "whethter" typo below is in the source MIB text; descriptions are
# runtime data and are kept verbatim.
if mibBuilder.loadTexts: portPhyX25AdminDialOut.setDescription('This flag indicates whether the port is connected to a dial modem, and whethter connections will be initiated through dial-in or dial-out calls.')
# Column 6: inactivity timer in minutes, 1..30, default 5 (dial ports only).
portPhyX25AdminInactivityTimer = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 1, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 30)).clone(5)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portPhyX25AdminInactivityTimer.setStatus('mandatory')
if mibBuilder.loadTexts: portPhyX25AdminInactivityTimer.setDescription('This timer defines in minutes, the period of inactivity allowed between calls. Once the timer expires, the port is disabled untill the next call is placed, if a Dial out port, or the Disconnect Timer expires, if a Dial in port. This variable is only meaningful if the port is a Dial port.')
# Column 7: disconnect timer in seconds, 1..255, default 5.
portPhyX25AdminDisconnectTimer = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 1, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255)).clone(5)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portPhyX25AdminDisconnectTimer.setStatus('mandatory')
if mibBuilder.loadTexts: portPhyX25AdminDisconnectTimer.setDescription('This timer defines, in seconds, the length of time a dial-in port will remain disabled after expiration of the InActivity Timer.')
# Column 8: link-setup response timer in seconds, 1..255, default 5.
portPhyX25AdminSetupTimer = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 1, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255)).clone(5)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portPhyX25AdminSetupTimer.setStatus('mandatory')
if mibBuilder.loadTexts: portPhyX25AdminSetupTimer.setDescription('This timer determines the length of time, in seconds, that a response must be received by the port, after entering the Linkup state. If a response is not received, the port enters a Failed state.')
# Column 9: trunk-group membership flag, no(1)/yes(2), default 'no'.
portPhyX25AdminTrunkFlag = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('no')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portPhyX25AdminTrunkFlag.setStatus('mandatory')
if mibBuilder.loadTexts: portPhyX25AdminTrunkFlag.setDescription('This flag, when set, indicates the port is associated with a network trunk group. It will be disabled/enabled if the network trunk is not operational.')
# Column 10: trunk-group bitmap — 8 octets, one bit per port per RLP.
portPhyX25AdminTrunkGroup = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 1, 1, 10), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portPhyX25AdminTrunkGroup.setStatus('mandatory')
if mibBuilder.loadTexts: portPhyX25AdminTrunkGroup.setDescription('This variable contains a string of 8 bytes, with each byte indicating 8 ports on an RLP that may be a part of a trunk group. Since a TurboFrad has only 1 RLP, only the first byte is valid if the node is an FRX4000. Each port is represented by a single bit within the RLP byte. The bit position represents the port number...for example, if port 2 on RLP 0 was in a trunk group, the first byte of the string would contain x04 and all other bytes would be 0.')
# Column 11: SNMPv2-style RowStatus for row creation/deletion.
portPhyX25AdminRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 1, 1, 11), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portPhyX25AdminRowStatus.setStatus('mandatory')
if mibBuilder.loadTexts: portPhyX25AdminRowStatus.setDescription("This variable is used to manage the creation and deletion of conceptual rows in the portPhyX25AdminTable and follows the SNMPv2 RowStatus conventions by supporting the following values: - `active', which indicates that the conceptual row is available for use by the managed device, and which is supplied by a management station wishing to exercise an on-line update of the existing conceptual row. For a management protocol set operation, a genErr response is returned when the row does not exist. - `createAndGo', which is supplied by a management station wishing to create a new instance of a conceptual row and to have its status automatically set to active, making it available for use by the managed device. For a management protocol set operation, a genErr response is returned when the row already exists. - `destroy', which is supplied by a management station wishing to delete all of the instances associated with an existing conceptual row. Note that all of the above values may be specified in a management protocol set operation, and only the 'active' value will be returned in response to a management protocol retrieval operation. For a management protocol set operation, if other variable bindings are included in the same PDU, then a genErr response is returned.")
# ---------------------------------------------------------------------------
# portPhyX25Oper table (OID ...173.7.3.1.2): read-only operational mirror of
# the portPhyX25Admin table above — same columns, same sub-OIDs, but
# maxAccess "readonly" and no .clone(...) defaults, since these report the
# values currently in effect (which may lag the configured Admin values).
# Indexed by (nlIfRlp, nlIfPort).
# ---------------------------------------------------------------------------
portPhyX25OperTable = MibTable((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 2), )
if mibBuilder.loadTexts: portPhyX25OperTable.setStatus('mandatory')
if mibBuilder.loadTexts: portPhyX25OperTable.setDescription('This table contains Netlink Enterprise specific objects to manage an X25 port. Changing one of these parameters may take effect in the operating port immediately or may wait until the interface is restarted depending on the details of the implementation. All of the objects in this table are read-only. The operating values may be different from these configured values if a configured parameter was configured after the interface was started.')
portPhyX25OperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 2, 1), ).setIndexNames((0, "NETLINK-SPECIFIC-MIB", "nlIfRlp"), (0, "NETLINK-SPECIFIC-MIB", "nlIfPort"))
if mibBuilder.loadTexts: portPhyX25OperEntry.setStatus('mandatory')
if mibBuilder.loadTexts: portPhyX25OperEntry.setDescription('A list of configured values for an X25 port.')
# Column 1: physical connector type in effect.
portPhyX25OperConnector = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 2, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(3, 5, 6, 7, 8, 10, 11))).clone(namedValues=NamedValues(("rs232", 3), ("v35", 5), ("rs449", 6), ("rs530", 7), ("x21", 8), ("t1", 10), ("e1", 11)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portPhyX25OperConnector.setStatus('mandatory')
if mibBuilder.loadTexts: portPhyX25OperConnector.setDescription('Physical port interface connector type.')
# Column 2: operating line speed (bps; valid values depend on connector).
portPhyX25OperSpeed = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 2, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: portPhyX25OperSpeed.setStatus('mandatory')
if mibBuilder.loadTexts: portPhyX25OperSpeed.setDescription('This object defines the speed of the X25 port. The speed may only be set to one of a series of reasonable values, and if an attempt is made to set the speed to a value which is within the valid range but not equal to one of these values, the speed will be rounded up. If the connector type of the port is RS232, the port could be a standard port or a high speed port. If the port is a high speed RS232 port, the maximum valid speed is 256000. If the port is a standard RS232 port, the maximum valid speed is 64000 on the FRX4000/SS1840 and 19200 on all other products. It may be possible to set the speed of a standard RS232 port to a speed which is valid for a high speed RS232 port but invalid for a standard RS232 port. In this case, the port may not end up having the invalid speed. The default speed for a standard RS232 port on the FRX6000 is 19200.')
# Column 3: whether the port generates link clock.
portPhyX25OperGenerateClock = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 2, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portPhyX25OperGenerateClock.setStatus('mandatory')
if mibBuilder.loadTexts: portPhyX25OperGenerateClock.setDescription('Specifies whether the port will generate the clock necessary to synchronize traffic over the link.')
# Column 4: whether receive clock is taken from the DTE.
portPhyX25OperRcvClockFromDTE = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 2, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portPhyX25OperRcvClockFromDTE.setStatus('mandatory')
if mibBuilder.loadTexts: portPhyX25OperRcvClockFromDTE.setDescription('This object defines whether the receive clock will be used from the DTE.')
# Column 5: dial behavior currently in effect.
portPhyX25OperDialOut = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 2, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("none", 1), ("dialIn", 2), ("dialOut", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portPhyX25OperDialOut.setStatus('mandatory')
if mibBuilder.loadTexts: portPhyX25OperDialOut.setDescription('This flag indicates whether the port is connected to a dial modem, and whethter connections will be initiated through dial-in or dial-out calls.')
# Columns 6-8: inactivity / disconnect / setup timers in effect.
portPhyX25OperInactivityTimer = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 2, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 30))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portPhyX25OperInactivityTimer.setStatus('mandatory')
if mibBuilder.loadTexts: portPhyX25OperInactivityTimer.setDescription('This timer defines in minutes, the period of inactivity allowed between calls. Once the timer expires, the port is disabled untill the next call is placed, if a Dial out port, or the Disconnect Timer expires, if a Dial in port. This variable is only meaningful if the port is a Dial port.')
portPhyX25OperDisconnectTimer = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 2, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portPhyX25OperDisconnectTimer.setStatus('mandatory')
if mibBuilder.loadTexts: portPhyX25OperDisconnectTimer.setDescription('This timer defines, in seconds, the length of time a dial-in port will remain disabled after expiration of the InActivity Timer.')
portPhyX25OperSetupTimer = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 2, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portPhyX25OperSetupTimer.setStatus('mandatory')
if mibBuilder.loadTexts: portPhyX25OperSetupTimer.setDescription('This timer determines the length of time, in seconds, that a response must be received by the port, after entering the Linkup state. If a response is not received, the port enters a Failed state.')
# Columns 9-10: trunk flag and trunk-group bitmap in effect.
portPhyX25OperTrunkFlag = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 2, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portPhyX25OperTrunkFlag.setStatus('mandatory')
if mibBuilder.loadTexts: portPhyX25OperTrunkFlag.setDescription('This flag, when set, indicates the port is associated with a network trunk group. It will be disabled/enabled if the network trunk is not operational.')
portPhyX25OperTrunkGroup = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 2, 1, 10), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: portPhyX25OperTrunkGroup.setStatus('mandatory')
if mibBuilder.loadTexts: portPhyX25OperTrunkGroup.setDescription('This variable contains a string of 8 bytes, with each byte indicating a port on this RLP that may be a part of a trunk group.')
# ---------------------------------------------------------------------------
# portLogicalX25Admin table (OID ...173.7.3.1.3): configured (read-write)
# parameters for an X.25 logical port riding a frame-relay DLCI.
# Indexed by (nlIfRlp, nlIfPhyPort, nlIfPort) — note the extra physical-port
# index compared with the physical-port tables above.
# ---------------------------------------------------------------------------
portLogicalX25AdminTable = MibTable((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 3), )
if mibBuilder.loadTexts: portLogicalX25AdminTable.setStatus('mandatory')
if mibBuilder.loadTexts: portLogicalX25AdminTable.setDescription('This table contains Netlink Enterprise specific objects to manage an X25 Logical port. Changing one of these parameters may take effect in the operating port immediately or may wait until the interface is restarted depending on the details of the implementation. The operating values may be different from these configured values if a configured parameter was configured after the interface was started.')
portLogicalX25AdminEntry = MibTableRow((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 3, 1), ).setIndexNames((0, "NETLINK-SPECIFIC-MIB", "nlIfRlp"), (0, "NETLINK-SPECIFIC-MIB", "nlIfPhyPort"), (0, "NETLINK-SPECIFIC-MIB", "nlIfPort"))
if mibBuilder.loadTexts: portLogicalX25AdminEntry.setStatus('mandatory')
if mibBuilder.loadTexts: portLogicalX25AdminEntry.setDescription('A list of configured values for an X25 logical port.')
# Column 1: frame-relay DLCI, 16..991, default 16.
portLogicalX25AdminFrDlci = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 3, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(16, 991)).clone(16)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portLogicalX25AdminFrDlci.setStatus('mandatory')
if mibBuilder.loadTexts: portLogicalX25AdminFrDlci.setDescription(' The DLCI number used to identify the entry in the table. The range is 16-991. ')
# Column 2: connection priority on the physical port, 0 (lowest)..9 (highest).
portLogicalX25AdminCxnPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 3, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 9))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portLogicalX25AdminCxnPriority.setStatus('mandatory')
if mibBuilder.loadTexts: portLogicalX25AdminCxnPriority.setDescription(' This field sets the priority of the connection among others on the physical port. The range is 0 for lowest priority to 9 for the highest priority. ')
# Column 3: encapsulation — annexG(1) or rfc1490(2), default rfc1490.
portLogicalX25AdminRfc1490 = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 3, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("annexG", 1), ("rfc1490", 2))).clone('rfc1490')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portLogicalX25AdminRfc1490.setStatus('mandatory')
if mibBuilder.loadTexts: portLogicalX25AdminRfc1490.setDescription(' This field indicates the encapsulation method used')
# Column 4: bandwidth-allocation group, 0 (no BAG)..16.
portLogicalX25AdminBAG = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 3, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 16))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portLogicalX25AdminBAG.setStatus('mandatory')
if mibBuilder.loadTexts: portLogicalX25AdminBAG.setDescription('Assigns this DLCI to one of sixteen groups whose parameters regulate bandwidth usage. A 0 value indicates the DLCI does not use BAGs')
# Column 5: SNMPv2-style RowStatus for row creation/deletion.
portLogicalX25AdminRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 3, 1, 5), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portLogicalX25AdminRowStatus.setStatus('mandatory')
if mibBuilder.loadTexts: portLogicalX25AdminRowStatus.setDescription("This variable is used to manage the creation and deletion of conceptual rows in the portLogicalX25AdminTable and follows the SNMPv2 RowStatus conventions by supporting the following values: - `active', which indicates that the conceptual row is available for use by the managed device, and which is supplied by a management station wishing to exercise an on-line update of the existing conceptual row. For a management protocol set operation, a genErr response is returned when the row does not exist. - `createAndGo', which is supplied by a management station wishing to create a new instance of a conceptual row and to have its status automatically set to active, making it available for use by the managed device. For a management protocol set operation, a genErr response is returned when the row already exists. - `destroy', which is supplied by a management station wishing to delete all of the instances associated with an existing conceptual row. Note that all of the above values may be specified in a management protocol set operation, and only the 'active' value will be returned in response to a management protocol retrieval operation. For a management protocol set operation, if other variable bindings are included in the same PDU, then a genErr response is returned.")
# ---------------------------------------------------------------------------
# portLogicalX25Oper table (OID ...173.7.3.1.4): read-only operational mirror
# of portLogicalX25Admin. Same index tuple (nlIfRlp, nlIfPhyPort, nlIfPort).
# ---------------------------------------------------------------------------
portLogicalX25OperTable = MibTable((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 4), )
if mibBuilder.loadTexts: portLogicalX25OperTable.setStatus('mandatory')
if mibBuilder.loadTexts: portLogicalX25OperTable.setDescription('This table contains Netlink Enterprise specific objects to manage an X25 Logical port. Changing one of these parameters may take effect in the operating port immediately or may wait until the interface is restarted depending on the details of the implementation. The operating values may be different from these configured values if a configured parameter was configured after the interface was started.')
portLogicalX25OperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 4, 1), ).setIndexNames((0, "NETLINK-SPECIFIC-MIB", "nlIfRlp"), (0, "NETLINK-SPECIFIC-MIB", "nlIfPhyPort"), (0, "NETLINK-SPECIFIC-MIB", "nlIfPort"))
if mibBuilder.loadTexts: portLogicalX25OperEntry.setStatus('mandatory')
if mibBuilder.loadTexts: portLogicalX25OperEntry.setDescription('A list of configured values for an X25 port.')
# NOTE(review): the constraint below is (1, 255) but both the DESCRIPTION
# ("range is 16-991") and the Admin twin portLogicalX25AdminFrDlci use
# 16..991 — looks like a defect in the source MIB or the generator; confirm
# against the NETLINK-SPECIFIC-MIB source before relying on it. Left verbatim
# because this file mirrors its MIB source.
portLogicalX25OperFrDlci = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 4, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portLogicalX25OperFrDlci.setStatus('mandatory')
if mibBuilder.loadTexts: portLogicalX25OperFrDlci.setDescription(' The DLCI number used to identify the entry in the table. The range is 16-991. ')
# Column 2: connection priority in effect, 0..9.
portLogicalX25OperCxnPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 4, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 9))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portLogicalX25OperCxnPriority.setStatus('mandatory')
if mibBuilder.loadTexts: portLogicalX25OperCxnPriority.setDescription(' This field sets the priority of the connection among others on the physical port. The range is 0 for lowest priority to 9 for the highest priority. ')
# Column 3: encapsulation in effect — annexG(1) or rfc1490(2).
portLogicalX25OperRfc1490 = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 4, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("annexG", 1), ("rfc1490", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portLogicalX25OperRfc1490.setStatus('mandatory')
if mibBuilder.loadTexts: portLogicalX25OperRfc1490.setDescription(' This field indicates the encapsulation method used')
# Column 4: bandwidth-allocation group in effect (1..16 here, unlike the
# Admin column which also allows 0 = "no BAG").
portLogicalX25OperBAG = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 4, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 16))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portLogicalX25OperBAG.setStatus('mandatory')
if mibBuilder.loadTexts: portLogicalX25OperBAG.setDescription('Assigns this DLCI to one of sixteen groups whose parameters regulate bandwidth usage.')
# ---------------------------------------------------------------------------
# portX25Admin table (OID ...173.7.3.1.5): configured (read-write) X.25
# layer-3 parameters — facilities flags, packet/window sizes, throughput
# class, and Closed User Group (CUG) settings.
# Indexed by (nlIfRlp, nlIfPort).
# ---------------------------------------------------------------------------
portX25AdminTable = MibTable((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 5), )
if mibBuilder.loadTexts: portX25AdminTable.setStatus('mandatory')
if mibBuilder.loadTexts: portX25AdminTable.setDescription('This table contains Netlink Enterprise specific objects to manage an X25 port. Changing one of these parameters may take effect in the operating port immediately or may wait until the interface is restarted depending on the details of the implementation. Most of the objects in this read-write table have corresponding read-only objects in the portX25OperTable that return the current operating value. The operating values may be different from these configured values if a configured parameter was configured after the interface was started.')
portX25AdminEntry = MibTableRow((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 5, 1), ).setIndexNames((0, "NETLINK-SPECIFIC-MIB", "nlIfRlp"), (0, "NETLINK-SPECIFIC-MIB", "nlIfPort"))
if mibBuilder.loadTexts: portX25AdminEntry.setStatus('mandatory')
if mibBuilder.loadTexts: portX25AdminEntry.setDescription('A list of configured values for an X25 port.')
# Columns 1-8: boolean no(1)/yes(2) facility flags, all defaulting to 'no'.
portX25AdminBlockedFlag = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 5, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('no')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portX25AdminBlockedFlag.setStatus('mandatory')
if mibBuilder.loadTexts: portX25AdminBlockedFlag.setDescription(' A flag which when set, means the port should not be enabled after a boot-up of the node. ')
portX25AdminFlowCtrlNeg = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 5, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('no')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portX25AdminFlowCtrlNeg.setStatus('mandatory')
if mibBuilder.loadTexts: portX25AdminFlowCtrlNeg.setDescription('A flag which if set, permits negotiation of the flow control parameters on a per call basis. If N is selected, the default packet and window sizes will be used. If Y is selected, the packet or window size in a call packet (up to the configured Max Packet Size or Max Window Size) is used.')
portX25AdminThruptClassNeg = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 5, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('no')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portX25AdminThruptClassNeg.setStatus('mandatory')
if mibBuilder.loadTexts: portX25AdminThruptClassNeg.setDescription('A flag which ,if set, permists negotiation of the throughput class for either direction of data transmission on a per call basis. If N is selected, the configured Max Throughput Class value is used. If Y, any throughput class in a call packet (up to the Max Thruput Class) is used.')
portX25AdminLocChgPrev = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 5, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('no')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portX25AdminLocChgPrev.setStatus('mandatory')
if mibBuilder.loadTexts: portX25AdminLocChgPrev.setDescription('If Y is selected, no calls can be charged to port. Incoming calls from the network with reverse charge specified will be rejected. Outgoing calls will insert reverse charge in the call packet if not already included.')
portX25AdminRevChgAccpt = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 5, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('no')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portX25AdminRevChgAccpt.setStatus('mandatory')
if mibBuilder.loadTexts: portX25AdminRevChgAccpt.setDescription('A flag, if set, that authorizes transmission of incoming calls that request the reverse charge facility. If N is selected, and a call requests it, it will not be transmitted.')
portX25AdminFastSelAccpt = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 5, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('no')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portX25AdminFastSelAccpt.setStatus('mandatory')
if mibBuilder.loadTexts: portX25AdminFastSelAccpt.setDescription('A flag, if set, that authorizes transmission of incoming calls that request the Fast Select facility.')
portX25AdminInCallBar = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 5, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('no')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portX25AdminInCallBar.setStatus('mandatory')
if mibBuilder.loadTexts: portX25AdminInCallBar.setDescription('An X25 facility that prevents transmission of incoming calls to the local DTE.')
# NOTE(review): OutCallBar's description below repeats the InCallBar text
# ("incoming calls") even though this object bars OUTGOING calls — the text
# appears copied as-is from the source MIB; kept verbatim.
portX25AdminOutCallBar = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 5, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('no')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portX25AdminOutCallBar.setStatus('mandatory')
if mibBuilder.loadTexts: portX25AdminOutCallBar.setDescription('An X25 facility that prevents transmission of incoming calls to the local DTE.')
# Columns 9-12: packet/window sizes (max and default), with defaults.
portX25AdminMaxPktSize = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 5, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(128, 4096)).clone(1024)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portX25AdminMaxPktSize.setStatus('mandatory')
if mibBuilder.loadTexts: portX25AdminMaxPktSize.setDescription(' The maximum data packet size that will be allowed to pass through this port.')
portX25AdminDefPktSize = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 5, 1, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(16, 4096)).clone(128)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portX25AdminDefPktSize.setStatus('mandatory')
if mibBuilder.loadTexts: portX25AdminDefPktSize.setDescription('This is the size that will be assigned to an incoming call setup packet if the packet does not request a packet size.')
portX25AdminMaxWinSize = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 5, 1, 11), Integer32().subtype(subtypeSpec=ValueRangeConstraint(2, 7)).clone(7)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portX25AdminMaxWinSize.setStatus('mandatory')
if mibBuilder.loadTexts: portX25AdminMaxWinSize.setDescription('This is the maximum number of unacknowledged packets per logical channel that can pass through this port.')
portX25AdminDefWinSize = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 5, 1, 12), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 7)).clone(2)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portX25AdminDefWinSize.setStatus('mandatory')
if mibBuilder.loadTexts: portX25AdminDefWinSize.setDescription("This size will be assigned to an incoming call setup packet if the packet doesn't request a window size.")
# Column 13: max throughput class, 3..13 (no default clone here).
portX25AdminMaxThruptClass = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 5, 1, 13), Integer32().subtype(subtypeSpec=ValueRangeConstraint(3, 13))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portX25AdminMaxThruptClass.setStatus('mandatory')
if mibBuilder.loadTexts: portX25AdminMaxThruptClass.setDescription('This specifies the default throughput class that will be inserted into a Call Request packet if Thruput Class negotiation is not enabled or if a thruput class is not requested in the call request.')
# Columns 14-17: Closed User Group membership and access controls.
portX25AdminCUGPref = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 5, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('no')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portX25AdminCUGPref.setStatus('mandatory')
if mibBuilder.loadTexts: portX25AdminCUGPref.setDescription('This flag, if set, indicates the port belongs to at least one CUG.')
portX25AdminCUGIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 5, 1, 15), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 100)).clone(1)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portX25AdminCUGIndex.setStatus('mandatory')
if mibBuilder.loadTexts: portX25AdminCUGIndex.setDescription('This number is an index into a Closed User Group table which identifies the default Closed User Group for the port.')
portX25AdminCUGIncAccess = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 5, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('no')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portX25AdminCUGIncAccess.setStatus('mandatory')
if mibBuilder.loadTexts: portX25AdminCUGIncAccess.setDescription(' This flag, when set, indicates whether this port will be allowed to receive calls from outside its CUGs.')
portX25AdminCUGOutAccess = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 5, 1, 17), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('no')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portX25AdminCUGOutAccess.setStatus('mandatory')
if mibBuilder.loadTexts: portX25AdminCUGOutAccess.setDescription(' This flag, when set, indicates whether this port will be allowed to make calls outside its CUGs.')
portX25OperTable = MibTable((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 6), )
if mibBuilder.loadTexts: portX25OperTable.setStatus('mandatory')
if mibBuilder.loadTexts: portX25OperTable.setDescription('This table contains Netlink Enterprise specific objects to manage an X25 port. Changing one of these parameters may take effect in the operating port immediately or may wait until the interface is restarted depending on the details of the implementation. The objects in this read-only table corresponding read-only objects in the portX25OperTable that return the current operating value. The operating values may be different from these configured values if a configured parameter was configured after the interface was started.')
portX25OperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 6, 1), ).setIndexNames((0, "NETLINK-SPECIFIC-MIB", "nlIfRlp"), (0, "NETLINK-SPECIFIC-MIB", "nlIfPort"))
if mibBuilder.loadTexts: portX25OperEntry.setStatus('mandatory')
if mibBuilder.loadTexts: portX25OperEntry.setDescription('A list of configured values for an X25 port.')
portX25OperBlockedFlag = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 6, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portX25OperBlockedFlag.setStatus('mandatory')
if mibBuilder.loadTexts: portX25OperBlockedFlag.setDescription(' A flag which when set, means the port should not be enabled after a boot-up of the node. ')
portX25OperFlowCtrlNeg = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 6, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portX25OperFlowCtrlNeg.setStatus('mandatory')
if mibBuilder.loadTexts: portX25OperFlowCtrlNeg.setDescription('A flag which if set, permits negotiation of the flow control parameters on a per call basis. If N is selected, the default packet and window sizes will be used. If Y is selected, the packet or window size in a call packet (up to the configured Max Packet Size or Max Window Size) is used.')
portX25OperThruptClassNeg = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 6, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portX25OperThruptClassNeg.setStatus('mandatory')
if mibBuilder.loadTexts: portX25OperThruptClassNeg.setDescription('A flag which ,if set, permists negotiation of the throughput class for either direction of data transmission on a per call basis. If N is selected, the configured Max Throughput Class value is used. If Y, any throughput class in a call packet (up to the Max Thruput Class) is used.')
portX25OperLocChgPrev = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 6, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portX25OperLocChgPrev.setStatus('mandatory')
if mibBuilder.loadTexts: portX25OperLocChgPrev.setDescription('If Y is selected, no calls can be charged to port. Incoming calls from the network with reverse charge specified will be rejected. Outgoing calls will insert reverse charge in the call packet if not already included.')
portX25OperRevChgAccpt = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 6, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portX25OperRevChgAccpt.setStatus('mandatory')
if mibBuilder.loadTexts: portX25OperRevChgAccpt.setDescription('A flag, if set, that authorizes transmission of incoming calls that request the reverse charge facility. If N is selected, and a call requests it, it will not be transmitted.')
portX25OperFastSelAccpt = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 6, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portX25OperFastSelAccpt.setStatus('mandatory')
if mibBuilder.loadTexts: portX25OperFastSelAccpt.setDescription('A flag, if set, that authorizes transmission of incoming calls that request the Fast Select facility.')
portX25OperInCallBar = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 6, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portX25OperInCallBar.setStatus('mandatory')
if mibBuilder.loadTexts: portX25OperInCallBar.setDescription('An X25 facility that prevents transmission of incoming calls to the local DTE.')
portX25OperOutCallBar = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 6, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portX25OperOutCallBar.setStatus('mandatory')
if mibBuilder.loadTexts: portX25OperOutCallBar.setDescription('An X25 facility that prevents transmission of incoming calls to the local DTE.')
# --- X.25 port operational table, columns 9-17: packet/window sizing,
# --- throughput class, and Closed User Group (CUG) settings. All read-only.
# Column 9: maximum data packet size allowed through the port (128..4096).
portX25OperMaxPktSize = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 6, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(128, 4096))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portX25OperMaxPktSize.setStatus('mandatory')
if mibBuilder.loadTexts: portX25OperMaxPktSize.setDescription(' The maximum data packet size that will be allowed to pass through this port.')
# Column 10: packet size assigned when an incoming call requests none (16..4096).
portX25OperDefPktSize = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 6, 1, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(16, 4096))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portX25OperDefPktSize.setStatus('mandatory')
if mibBuilder.loadTexts: portX25OperDefPktSize.setDescription('This is the size that will be assigned to an incoming call setup packet if the packet does not request a packet size.')
# Column 11: max unacknowledged packets per logical channel (2..7).
portX25OperMaxWinSize = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 6, 1, 11), Integer32().subtype(subtypeSpec=ValueRangeConstraint(2, 7))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portX25OperMaxWinSize.setStatus('mandatory')
if mibBuilder.loadTexts: portX25OperMaxWinSize.setDescription('This is the maximum number of unacknowledged packets per logical channel that can pass through this port.')
# Column 12: window size assigned when an incoming call requests none (1..7).
portX25OperDefWinSize = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 6, 1, 12), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 7))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portX25OperDefWinSize.setStatus('mandatory')
if mibBuilder.loadTexts: portX25OperDefWinSize.setDescription("This size will be assigned to an incoming call setup packet if the packet doesn't request a window size.")
# Column 13: default throughput class inserted into Call Request packets (3..13).
portX25OperMaxThruptClass = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 6, 1, 13), Integer32().subtype(subtypeSpec=ValueRangeConstraint(3, 13))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portX25OperMaxThruptClass.setStatus('mandatory')
if mibBuilder.loadTexts: portX25OperMaxThruptClass.setDescription('This specifies the default throughput class that will be inserted into a Call Request packet if Thruput Class negotiation is not enabled or if a thruput class is not requested in the call request.')
# Column 14: flag — port belongs to at least one Closed User Group.
portX25OperCUGPref = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 6, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portX25OperCUGPref.setStatus('mandatory')
if mibBuilder.loadTexts: portX25OperCUGPref.setDescription('This flag, if set, indicates the port belongs to at least one CUG.')
# Column 15: index (1..100) into the CUG table identifying the default CUG.
portX25OperCUGIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 6, 1, 15), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 100))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portX25OperCUGIndex.setStatus('mandatory')
if mibBuilder.loadTexts: portX25OperCUGIndex.setDescription('This number is an index into a Closed User Group table which identifies the default Closed User Group for the port.')
# Column 16: flag — port may receive calls from outside its CUGs.
portX25OperCUGIncAccess = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 6, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portX25OperCUGIncAccess.setStatus('mandatory')
if mibBuilder.loadTexts: portX25OperCUGIncAccess.setDescription(' This flag, when set, indicates whether this port will be able to receive calls from outside its CUGs.')
# Column 17: flag — port may place calls outside its CUGs.
portX25OperCUGOutAccess = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 1, 6, 1, 17), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portX25OperCUGOutAccess.setStatus('mandatory')
if mibBuilder.loadTexts: portX25OperCUGOutAccess.setDescription(' This flag, when set, indicates whether this port will be able to make calls outside its CUGs.')
# --- Frame Relay port group: enterprise subtree 1.3.6.1.4.1.173.7.3.2.
portFrGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 173, 7, 3, 2))
# portFrConfigTable (.1): per-port Frame Relay configuration, one row per FR
# port on the node, indexed by (RLP number, port number).
portFrConfigTable = MibTable((1, 3, 6, 1, 4, 1, 173, 7, 3, 2, 1), )
if mibBuilder.loadTexts: portFrConfigTable.setStatus('mandatory')
if mibBuilder.loadTexts: portFrConfigTable.setDescription('A list of Frame Relay ports . The number of entries will be the number of Frame Relay ports on the node.')
portFrEntry = MibTableRow((1, 3, 6, 1, 4, 1, 173, 7, 3, 2, 1, 1), ).setIndexNames((0, "NETLINK-SPECIFIC-MIB", "portFrRlpIndex"), (0, "NETLINK-SPECIFIC-MIB", "portFrPortIndex"))
if mibBuilder.loadTexts: portFrEntry.setStatus('mandatory')
if mibBuilder.loadTexts: portFrEntry.setDescription('An Frame Relay Port entry containing objects relating to the port that are configurable by the user.')
# Index columns 1-2: RLP (1-8) and port number (1-8 physical, 9-64 logical).
portFrRlpIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 2, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: portFrRlpIndex.setStatus('mandatory')
if mibBuilder.loadTexts: portFrRlpIndex.setDescription(' The RLP number of the Frame Relay port. It will be in the range 1-8.')
portFrPortIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 2, 1, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: portFrPortIndex.setStatus('mandatory')
if mibBuilder.loadTexts: portFrPortIndex.setDescription(' The Port number of the Frame Relay port. It will be in the range 1-8 for a physical port. It will be in the range 9-64 for a Frame Relay logical port.')
# Column 3: keep the port disabled after node boot-up (default 'no').
portFrBlockedFlag = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 2, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('no')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portFrBlockedFlag.setStatus('mandatory')
if mibBuilder.loadTexts: portFrBlockedFlag.setDescription(' A flag which when set, means the port should not be enabled after a boot-up of the node. ')
# Column 4: maximum I-frame size in bytes (16..4096, default 1600).
portFrMaxBytesPerFrame = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 2, 1, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(16, 4096)).clone(1600)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portFrMaxBytesPerFrame.setStatus('mandatory')
if mibBuilder.loadTexts: portFrMaxBytesPerFrame.setDescription(' The maximum number of bytes allowed in an I Frame for this Frame Relay port.')
# Column 5: T392 polling-verification timer, seconds (5..30, default 15).
portFrT392Timer = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 2, 1, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(5, 30)).clone(15)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portFrT392Timer.setStatus('mandatory')
if mibBuilder.loadTexts: portFrT392Timer.setDescription('This timer indicates how long the network will wait between Status Enquiry messages before recording an error. It should be greater than or equal to the frDlcmiPollingInterval variable in the RFC1315 Mib.')
# Columns 6-7: rate-control / bandwidth-allocation enforcement flags.
portFrOutgoingRateControl = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 2, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('no')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portFrOutgoingRateControl.setStatus('mandatory')
if mibBuilder.loadTexts: portFrOutgoingRateControl.setDescription(' This is a flag which, when set, enables the enforcement of the Outgoing Rate Control parameters.')
portFrBandwidthAllocation = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 2, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('no')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portFrBandwidthAllocation.setStatus('mandatory')
if mibBuilder.loadTexts: portFrBandwidthAllocation.setDescription(' This is a flag which, when set, enables whether the bandwidth allocation will be enforced. ')
# Column 8: physical connector type (enumerated; values mirror ifType codes).
portFrConnector = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 2, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(3, 5, 6, 7, 8, 10, 11))).clone(namedValues=NamedValues(("rs232", 3), ("v35", 5), ("rs449", 6), ("rs530", 7), ("x21", 8), ("t1", 10), ("e1", 11)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portFrConnector.setStatus('mandatory')
if mibBuilder.loadTexts: portFrConnector.setDescription('This defines the connector type of the Frame Relay port.')
# Columns 9-11: logical DCE/DTE role and clocking options.
portFrLogicalDCE = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 2, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('no')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portFrLogicalDCE.setStatus('mandatory')
if mibBuilder.loadTexts: portFrLogicalDCE.setDescription('This defines the port as logical DCE or DTE.')
portFrGenClock = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 2, 1, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('no')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portFrGenClock.setStatus('mandatory')
if mibBuilder.loadTexts: portFrGenClock.setDescription('This specifies whether the port will generate the clock necessary to synchronize traffic over the link.')
portFrRcvClkFrmDTE = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 2, 1, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('no')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portFrRcvClkFrmDTE.setStatus('mandatory')
if mibBuilder.loadTexts: portFrRcvClkFrmDTE.setDescription('This allows the clock to be looped back from the DTE using the TT (Terminal Timing) signal, which can be helpful on high-speed lines.')
# Column 12: link-layer management protocol (none/LMI/Annex D, default annexd).
portFrLLM = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 2, 1, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("none", 1), ("lmi", 2), ("annexd", 3))).clone('annexd')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portFrLLM.setStatus('mandatory')
if mibBuilder.loadTexts: portFrLLM.setDescription('This determines whether, and what type of, configurable network management (status enquiries) will be allowed.')
# Column 13: SNMPv2-style RowStatus for row creation/deletion.
portFrRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 2, 1, 1, 13), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portFrRowStatus.setStatus('mandatory')
if mibBuilder.loadTexts: portFrRowStatus.setDescription("This variable is used to manage the creation and deletion of conceptual rows in the portFrConfigTable and follows the SNMPv2 RowStatus conventions by supporting the following values: - `active', which indicates that the conceptual row is available for use by the managed device, and which is supplied by a management station wishing to exercise an on-line update of the existing conceptual row. For a management protocol set operation, a genErr response is returned when the row does not exist. - `createAndGo', which is supplied by a management station wishing to create a new instance of a conceptual row and to have its status automatically set to active, making it available for use by the managed device. For a management protocol set operation, a genErr response is returned when the row already exists. - `destroy', which is supplied by a management station wishing to delete all of the instances associated with an existing conceptual row. Note that all of the above values may be specified in a management protocol set operation, and only the 'active' value will be returned in response to a management protocol retrieval operation. For a management protocol set operation, if other variable bindings are included in the same PDU, then a genErr response is returned.")
# Column 14: port speed, bits per second (75..2048000, default 64000); the
# agent rounds requested values up to the nearest supported speed.
portFrSpeed = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 2, 1, 1, 14), Integer32().subtype(subtypeSpec=ValueRangeConstraint(75, 2048000)).clone(64000)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portFrSpeed.setStatus('mandatory')
if mibBuilder.loadTexts: portFrSpeed.setDescription('This object defines the speed of the Frame Relay port. The speed may only be set to one of a series of reasonable values, and if an attempt is made to set the speed to a value which is within the valid range but not equal to one of these values, the speed will be rounded up. If the connector type of the port is RS232, the port could be a standard port or a high speed port. If the port is a high speed RS232 port, the maximum valid speed is 256000. If the port is a standard RS232 port, the maximum valid speed is 64000 on the FRX4000/SS1840 and 19200 on all other products. It may be possible to set the speed of a standard RS232 port to a speed which is valid for a high speed RS232 port but invalid for a standard RS232 port. In this case, the port may not end up having the invalid speed. The default speed for a standard RS232 port on the FRX6000 is 19200.')
# Column 15: reserve all DLCIs on this port exclusively for backup use.
portFrBackupUseOnly = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 2, 1, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('no')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portFrBackupUseOnly.setStatus('mandatory')
if mibBuilder.loadTexts: portFrBackupUseOnly.setDescription('This object determines whether all DLCIs on the port will be reserved exclusively as backups for other DLCIs in the same node. If yes is specified for this object, the port will remain disabled until needed for backup.')
# --- portDLCIConfigTable (.2): per-DLCI rate-control configuration, one row
# --- per DLCI on every Frame Relay port, indexed by (RLP, port, DLCI).
portDLCIConfigTable = MibTable((1, 3, 6, 1, 4, 1, 173, 7, 3, 2, 2), )
if mibBuilder.loadTexts: portDLCIConfigTable.setStatus('mandatory')
if mibBuilder.loadTexts: portDLCIConfigTable.setDescription("A list of DLCI's on Frame Relay ports . The number of entries will be the number of DLCIs on all the Frame Relay ports on a node.")
portDLCIEntry = MibTableRow((1, 3, 6, 1, 4, 1, 173, 7, 3, 2, 2, 1), ).setIndexNames((0, "NETLINK-SPECIFIC-MIB", "portDLCIRlpIndex"), (0, "NETLINK-SPECIFIC-MIB", "portDLCIPortIndex"), (0, "NETLINK-SPECIFIC-MIB", "portDLCIIndex"))
if mibBuilder.loadTexts: portDLCIEntry.setStatus('mandatory')
if mibBuilder.loadTexts: portDLCIEntry.setDescription('An Frame Relay Port DLCI entry relating to the Rate Control Information that is configurable by the user.')
# Index columns 1-3: RLP (1-8), port (1-8 physical / 9-64 logical), DLCI (16-991).
portDLCIRlpIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 2, 2, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: portDLCIRlpIndex.setStatus('mandatory')
if mibBuilder.loadTexts: portDLCIRlpIndex.setDescription(' The RLP number on which the DLCI is located. The range for this is 1-8. ')
portDLCIPortIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 2, 2, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: portDLCIPortIndex.setStatus('mandatory')
if mibBuilder.loadTexts: portDLCIPortIndex.setDescription(' The port number on which the DLCI is located. The range for this is 1-8 for a physical FR port and 9-64 for a logical FR port. ')
portDLCIIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 2, 2, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: portDLCIIndex.setStatus('mandatory')
if mibBuilder.loadTexts: portDLCIIndex.setDescription(' The DLCI number used to identify the entry in the table. The range is 16-991. ')
# Columns 4-5: committed information rate (CIR), incoming/outgoing.
portDLCIIncomingCIR = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 2, 2, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portDLCIIncomingCIR.setStatus('mandatory')
if mibBuilder.loadTexts: portDLCIIncomingCIR.setDescription(' The committed information rate that is supported on the DLCI for incoming data. ')
portDLCIOutgoingCIR = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 2, 2, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portDLCIOutgoingCIR.setStatus('mandatory')
if mibBuilder.loadTexts: portDLCIOutgoingCIR.setDescription(' The committed information rate that is supported on the DLCI for outgoing data. ')
# Columns 6-7: committed burst size (Bc), incoming/outgoing.
portDLCIIncomingBc = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 2, 2, 1, 6), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portDLCIIncomingBc.setStatus('mandatory')
if mibBuilder.loadTexts: portDLCIIncomingBc.setDescription(' The committed burst size is the maximum amount of data to be transmitted under normal conditions within the time period defined by Bc/Cir that is supported on the DLCI for incoming data. ')
portDLCIOutgoingBc = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 2, 2, 1, 7), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portDLCIOutgoingBc.setStatus('mandatory')
if mibBuilder.loadTexts: portDLCIOutgoingBc.setDescription(' The committed burst size is the maximum amount of data to be transmitted under normal conditions within the time period defined by Bc/Cir that is supported on the DLCI for Outgoing data. ')
# Column 8: excess burst size (Be) for incoming data.
portDLCIIncomingBe = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 2, 2, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 32767))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portDLCIIncomingBe.setStatus('mandatory')
if mibBuilder.loadTexts: portDLCIIncomingBe.setDescription(' The excess burst size is the maximum amount of incoming data in excess of the committed burst size that the network will try to transfer during the time interval determined by Bc/Cir on this DLCI. ')
# Column 9: excess burst size (Be) for OUTGOING data (0..32767).
# FIX: the DESCRIPTION was copy-pasted from portDLCIIncomingBe and said
# "incoming data" on the outgoing object; corrected to "outgoing data" to
# match the object's name and its sibling CIR/Bc outgoing columns.
portDLCIOutgoingBe = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 2, 2, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 32767))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portDLCIOutgoingBe.setStatus('mandatory')
if mibBuilder.loadTexts: portDLCIOutgoingBe.setDescription(' The excess burst size is the maximum amount of outgoing data in excess of the committed burst size that the network will try to transfer during the time interval determined by Bc/Cir on this DLCI. ')
# --- portDLCI entry, columns 10-14: congestion recovery, priority, row
# --- management, and Frame Relay backup linkage.
# Column 10: packets received without BECN before restoring 1/8 of Be.
portDLCIBecnRecoveryCnt = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 2, 2, 1, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portDLCIBecnRecoveryCnt.setStatus('mandatory')
if mibBuilder.loadTexts: portDLCIBecnRecoveryCnt.setDescription(" The BECN recovery Count is a method of controlling the rate of return to max traffic flow after it has been reduced due to congestion. The value determines the number of packets received sequentially without BECN set, before increasing the Excess Burst Size by 1/8 of it's configured value. ")
# Column 11: DLCI priority on the physical port (0 lowest .. 4 highest).
portDLCIPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 2, 2, 1, 11), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portDLCIPriority.setStatus('mandatory')
if mibBuilder.loadTexts: portDLCIPriority.setDescription(' This field sets the priority of the DLCI among others on the physical port. The range is 0 for lowest priority to 4 for the highest priority. ')
# Column 12: SNMPv2-style RowStatus; note deletions persist only in the
# database until reboot (see description).
portDLCIRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 2, 2, 1, 12), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portDLCIRowStatus.setStatus('mandatory')
if mibBuilder.loadTexts: portDLCIRowStatus.setDescription("This variable is used to manage the creation and deletion of conceptual rows in the portDLCIConfigTable and follows the SNMPv2 RowStatus conventions by supporting the following values: - `active', which indicates that the conceptual row is available for use by the managed device. For a management protocol set operation, a genErr response is returned when the row does not exist. - `createAndGo', which is supplied by a management station wishing to create a new instance of a conceptual row and to have its status automatically set to active, making it available for use by the managed device. For a management protocol set operation, a genErr response is returned when the row already exists. - `destroy', which is supplied by a management station wishing to delete all of the instances associated with an existing conceptual row. Note that all of the above values may be specified in a management protocol set operation, and only the 'active' value will be returned in response to a management protocol retrieval operation. For a management protocol set operation, if other variable bindings are included in the same PDU, then a genErr response is returned. Also note that deleting a DLCI entry will only remove it from the database file, and it's existence will still be known by the protocol until the node is rebooted.")
# Column 13: backup group id (0 = not a primary DLCI) linking to the
# portFrBackupGroupTable defined below in this subtree.
portDLCIBackupGroup = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 2, 2, 1, 13), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portDLCIBackupGroup.setStatus('mandatory')
if mibBuilder.loadTexts: portDLCIBackupGroup.setDescription('This object specifies (if configured as any number other than zero) that this is a primary DLCI, and identifies the backup DLCI(s) (in a Frame Relay backup group) that will take over if this DLCI fails. This is applicable only on an initiating node, which is the node that will initiate the switchover to a backup DLCI. At switchover, the initiating node notifies the remote node of the change.')
# Column 14: flag — DLCI acts as a non-initiating backup endpoint.
portDLCIBackupProtEnb = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 2, 2, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('no')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portDLCIBackupProtEnb.setStatus('mandatory')
if mibBuilder.loadTexts: portDLCIBackupProtEnb.setDescription('This object specifies whether the DLCI will be used as a non-initiating backup DLCI. The DLCI will wait for a backup protocol message from the initiating end, telling the backup where to send the rest of the messages.')
# --- portFrBackupGroupTable (.3): Frame Relay backup-DLCI configuration,
# --- indexed by (RLP, port, DLCI, backup group).
portFrBackupGroupTable = MibTable((1, 3, 6, 1, 4, 1, 173, 7, 3, 2, 3), )
if mibBuilder.loadTexts: portFrBackupGroupTable.setStatus('mandatory')
if mibBuilder.loadTexts: portFrBackupGroupTable.setDescription(' A Table describes the Frame Relay Backup MIB . ')
portFrBackupEntry = MibTableRow((1, 3, 6, 1, 4, 1, 173, 7, 3, 2, 3, 1), ).setIndexNames((0, "NETLINK-SPECIFIC-MIB", "portFrBackupRLP"), (0, "NETLINK-SPECIFIC-MIB", "portFrBackupPort"), (0, "NETLINK-SPECIFIC-MIB", "portFrBackupDLCI"), (0, "NETLINK-SPECIFIC-MIB", "portFrBackupGroup"))
if mibBuilder.loadTexts: portFrBackupEntry.setStatus('mandatory')
if mibBuilder.loadTexts: portFrBackupEntry.setDescription(' This Entry contains the SNMP objects that are used for configurating the Frame Relay Backup. ')
# Index columns 1-4: RLP (1-8), physical port (1-8), backup DLCI (16-991),
# backup group number (1-255).
portFrBackupRLP = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 2, 3, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 8))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portFrBackupRLP.setStatus('mandatory')
if mibBuilder.loadTexts: portFrBackupRLP.setDescription(' This RLP number identifies the RLP containing the primary and backup DLCIs. ')
portFrBackupPort = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 2, 3, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 8))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portFrBackupPort.setStatus('mandatory')
if mibBuilder.loadTexts: portFrBackupPort.setDescription(' This Port number is the physical port on which the backup DLCI is being configured. ')
portFrBackupDLCI = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 2, 3, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(16, 991))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portFrBackupDLCI.setStatus('mandatory')
if mibBuilder.loadTexts: portFrBackupDLCI.setDescription(' This DLCI number is the backup being configured. ')
portFrBackupGroup = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 2, 3, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portFrBackupGroup.setStatus('mandatory')
if mibBuilder.loadTexts: portFrBackupGroup.setDescription(' This Group number is the backup being configured. ')
# Column 5: seconds to wait for this backup to come up before trying the
# next one in the group (0..65535, default 120).
portFrBackupWaitTimer = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 2, 3, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535)).clone(120)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portFrBackupWaitTimer.setStatus('mandatory')
if mibBuilder.loadTexts: portFrBackupWaitTimer.setDescription(' This is the time after a primary DLCI failure that the software will wait for this DLCI to become active before checking the next backup DLCI. If the backup does not become active before the timer expires, the software will search the backup group for the next available backup.')
# Column 6: enable the backup protocol (only when the remote is an FRX4000/
# FRX6000 with the protocol enabled on the matching DLCI).
portFrBackupProtEnab = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 2, 3, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('no')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portFrBackupProtEnab.setStatus('mandatory')
if mibBuilder.loadTexts: portFrBackupProtEnab.setDescription(' This enables or disables the backup protocol on the specified DLCI. It should be set to yes if and only if the remote device is an FRX4000 or FRX6000 with the backup protocol enabled on the remote DLCI connected to this backup group entry. ')
# Column 7: SNMPv2-style RowStatus for row creation/deletion.
portFrBackupRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 2, 3, 1, 7), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portFrBackupRowStatus.setStatus('mandatory')
if mibBuilder.loadTexts: portFrBackupRowStatus.setDescription("This variable is used to manage the creation and deletion of conceptual rows in the portFrBackupGroupTable and follows the SNMPv2 RowStatus conventions by supporting the following values: - `active', which indicates that the conceptual row is available for use by the managed device. For a management protocol set operation, a genErr response is returned when the row does not exist. - `createAndGo', which is supplied by a management station wishing to create a new instance of a conceptual row and to have its status automatically set to active, making it available for use by the managed device. For a management protocol set operation, a genErr response is returned when the row already exists. - `destroy', which is supplied by a management station wishing to delete all of the instances associated with an existing conceptual row. Note that all of the above values may be specified in a management protocol set operation, and only the 'active' value will be returned in response to a management protocol retrieval operation. For a management protocol set operation, if other variable bindings are included in the same PDU, then a genErr response is returned.")
# --- BSC Interactive port group: enterprise subtree 1.3.6.1.4.1.173.7.3.4.
portBsciGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 173, 7, 3, 4))
# portBsciAdminTable (.1): per-port BSC Interactive configuration, one row per
# BSCI port, indexed by the shared nlIfRlp/nlIfPort objects (defined earlier
# in this MIB module). The entry's column list continues past this chunk.
portBsciAdminTable = MibTable((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 1), )
if mibBuilder.loadTexts: portBsciAdminTable.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciAdminTable.setDescription('A list of BSC Interactive ports . The number of entries will be the number of BSC Interactive ports on the node.')
portBsciAdminEntry = MibTableRow((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 1, 1), ).setIndexNames((0, "NETLINK-SPECIFIC-MIB", "nlIfRlp"), (0, "NETLINK-SPECIFIC-MIB", "nlIfPort"))
if mibBuilder.loadTexts: portBsciAdminEntry.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciAdminEntry.setDescription('A BSCI Interactive Port entry containing objects relating to the port that are configurable by the user.')
# Column 1: enable/disable the port at node IPL (default disabled).
portBsciAdminBlockedFlag = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("disabled", 1), ("enabled", 2))).clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portBsciAdminBlockedFlag.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciAdminBlockedFlag.setDescription('Causes the port to be enabled or disabled at node IPL.')
# Column 2: physical connector type (no e1 here, unlike the FR table).
portBsciAdminConnector = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(3, 5, 6, 7, 8, 10))).clone(namedValues=NamedValues(("rs232", 3), ("v35", 5), ("rs449", 6), ("rs530", 7), ("x21", 8), ("t1", 10)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portBsciAdminConnector.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciAdminConnector.setDescription('Physical port interface connector type.')
# Column 3: line speed in bits per second (75..19200, default 9600).
portBsciAdminSpeed = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 1, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(75, 19200)).clone(9600)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portBsciAdminSpeed.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciAdminSpeed.setDescription('Data transmission rate in bits per second.')
# Columns 4-5: I-frame retransmission interval and retry limit.
portBsciAdminRetransmitInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 1, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 9999)).clone(2000)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portBsciAdminRetransmitInterval.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciAdminRetransmitInterval.setDescription('Length of time before the node will transmit an I-frame if the previous transmission is not acknowledged.')
portBsciAdminMAXRetransmits = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 1, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 99)).clone(5)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portBsciAdminMAXRetransmits.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciAdminMAXRetransmits.setDescription('Maximum number of times the node will attempt to send an I-frame after a retransmission period expiration.')
# Column 6: maximum transmitted frame size (25..4105, default 4105).
portBsciAdminMaxBytesPerFrame = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 1, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(25, 4105)).clone(4105)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portBsciAdminMaxBytesPerFrame.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciAdminMaxBytesPerFrame.setDescription('Maximum frame size that will be transmitted on the port.')
# Columns 7-8: clock generation / loopback-from-DTE options.
portBsciAdminGenerateClock = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('yes')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portBsciAdminGenerateClock.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciAdminGenerateClock.setDescription('Specifies whether the port will generate the clock necessary to synchronize traffic over the link.')
portBsciAdminRcvClockFromDTE = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('no')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portBsciAdminRcvClockFromDTE.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciAdminRcvClockFromDTE.setDescription('Allows the clock to be looped back from the DTE using the TT (Terminal Timing) signal.')
# Column 9: PAD role — terminal PAD (tpad, default) or host PAD (hpad).
portBsciAdminPadType = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("tpad", 1), ("hpad", 2))).clone('tpad')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portBsciAdminPadType.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciAdminPadType.setDescription('BSCI Pad Type')
# Columns 10-15: BSC protocol behavior flags (all no/yes enumerations).
portBsciAdminUseEBCDIC = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 1, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('yes')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portBsciAdminUseEBCDIC.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciAdminUseEBCDIC.setDescription('Specifies whether all devices on a line use the same character set for successive session polls.')
portBsciAdminCallInfoInRequestPacket = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 1, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('yes')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portBsciAdminCallInfoInRequestPacket.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciAdminCallInfoInRequestPacket.setDescription('Specifies whether the user will have the option of including call information in a call request packet.')
portBsciAdminClearVCOnLastDeviceDown = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 1, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('no')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portBsciAdminClearVCOnLastDeviceDown.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciAdminClearVCOnLastDeviceDown.setDescription('Causes the virtual circuit to be cleared when no terminals are using it.')
portBsciAdminTransTextSupported = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 1, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('no')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portBsciAdminTransTextSupported.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciAdminTransTextSupported.setDescription('Causes all characters transmitted to be treated as data.')
portBsciAdminEndToEndAck = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 1, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('no')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portBsciAdminEndToEndAck.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciAdminEndToEndAck.setDescription('Allows management of acknowledgments end to end across the network rather than locally at each end.')
portBsciAdminFullDuplex = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 1, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('yes')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portBsciAdminFullDuplex.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciAdminFullDuplex.setDescription('Specifies full-duplex transmission.')
portBsciAdminMultidrop = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 1, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('no')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portBsciAdminMultidrop.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciAdminMultidrop.setDescription('Specifies whether transmission will be multidrop.')
portBsciAdminSlowPollRetryCount = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 1, 1, 17), Integer32().subtype(subtypeSpec=ValueRangeConstraint(10, 150)).clone(20)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portBsciAdminSlowPollRetryCount.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciAdminSlowPollRetryCount.setDescription('Specifies how many times the control unit will be polled before it is put on the slow poll list.')
portBsciAdminSlowPollRetryFreq = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 1, 1, 18), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 200)).clone(20)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portBsciAdminSlowPollRetryFreq.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciAdminSlowPollRetryFreq.setDescription('Specifies the number of times active control units will be polled between pollings on the slow poll list.')
portBsciAdminStartSynchChars = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 1, 1, 19), Integer32().subtype(subtypeSpec=ValueRangeConstraint(2, 10)).clone(2)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portBsciAdminStartSynchChars.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciAdminStartSynchChars.setDescription('Specifies the number of synchronization characters that will be added to the beginning of each frame.')
portBsciAdminEndPadChars = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 1, 1, 20), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 10)).clone(1)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portBsciAdminEndPadChars.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciAdminEndPadChars.setDescription('Specifies the number of padding characters that will be added to the end of each frame.')
portBsciAdminPollInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 1, 1, 21), Integer32().subtype(subtypeSpec=ValueRangeConstraint(100, 1000)).clone(500)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portBsciAdminPollInterval.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciAdminPollInterval.setDescription('Specifies the time between passes through the polling list.')
portBsciAdminNoResponseTimer = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 1, 1, 22), Integer32().subtype(subtypeSpec=ValueRangeConstraint(2, 10)).clone(2)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portBsciAdminNoResponseTimer.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciAdminNoResponseTimer.setDescription('Activated after transmission of a general poll or a data frame.')
portBsciAdminNoResponseRetryCount = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 1, 1, 23), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255)).clone(5)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portBsciAdminNoResponseRetryCount.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciAdminNoResponseRetryCount.setDescription('Specifies how many times the user device will be polled before control is passed on the next cluster.')
portBsciAdminErrorRetransmitCount = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 1, 1, 24), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255)).clone(5)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portBsciAdminErrorRetransmitCount.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciAdminErrorRetransmitCount.setDescription('Specifies the number of times the Netlink device will resend a block of data after the receiving device has detected an error in that block.')
portBsciAdminNAKRetryCount = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 1, 1, 25), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255)).clone(5)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portBsciAdminNAKRetryCount.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciAdminNAKRetryCount.setDescription('Specifies the number of times the Netlink device will send a frame when the receiving device is unable to acknowledge.')
portBsciAdminBlockCheck = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 1, 1, 26), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("crc16", 1), ("even-lrc", 2), ("odd-lrc", 3))).clone('even-lrc')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portBsciAdminBlockCheck.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciAdminBlockCheck.setDescription('Sets the redundancy check parameter.')
portBsciAdminDataMode = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 1, 1, 27), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("odd-7bit", 1), ("even-7bit", 2), ("none-8bit", 3))).clone('none-8bit')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portBsciAdminDataMode.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciAdminDataMode.setDescription('Sets the parity parameter.')
portBsciAdminRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 1, 1, 28), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portBsciAdminRowStatus.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciAdminRowStatus.setDescription("This variable is used to manage the creation and deletion of conceptual rows in the portBsciAdminTable and follows the SNMPv2 RowStatus conventions by supporting the following values: - `active', which indicates that the conceptual row is available for use by the managed device, and which is supplied by a management station wishing to exercise an on-line update of the existing conceptual row. For a management protocol set operation, a genErr response is returned when the row does not exist. - `createAndGo', which is supplied by a management station wishing to create a new instance of a conceptual row and to have its status automatically set to active, making it available for use by the managed device. For a management protocol set operation, a genErr response is returned when the row already exists. - `destroy', which is supplied by a management station wishing to delete all of the instances associated with an existing conceptual row. Note that all of the above values may be specified in a management protocol set operation, and only the 'active' value will be returned in response to a management protocol retrieval operation. For a management protocol set operation, if other variable bindings are included in the same PDU, then a genErr response is returned.")
portBsciAdminAnswerNonConfigured = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 1, 1, 29), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('yes')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portBsciAdminAnswerNonConfigured.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciAdminAnswerNonConfigured.setDescription('Only valid if pad type is HPAD. If yes, the HPAD will respond to all devices on the line. If no, the HPAD will respond only to those devices that are configured on the node.')
# TPAD-only toggle: bring the connection up without first polling the device.
# Read-write no(1)/yes(2) enumeration, default 'no'.
# Fix: description typo "will active" -> "will activate".
portBsciAdminActivateConnectionWithoutPoll = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 1, 1, 30), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('no')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portBsciAdminActivateConnectionWithoutPoll.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciAdminActivateConnectionWithoutPoll.setDescription('Only valid if pad type is TPAD. If yes, will activate the TPAD connection without a poll of the connected device.')
# --- portBsciOperTable: operational (read-only) view of the BSC Interactive
# --- port settings.  Columns mirror the portBsciAdmin* columns above but are
# --- setMaxAccess("readonly") and carry no configured defaults (no .clone()
# --- of a default value).  Rows are indexed by (nlIfRlp, nlIfPort).
portBsciOperTable = MibTable((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 2), )
if mibBuilder.loadTexts: portBsciOperTable.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciOperTable.setDescription('A list of BSC Interactive ports . The number of entries will be the number of BSC Interactive ports on the node.')
portBsciOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 2, 1), ).setIndexNames((0, "NETLINK-SPECIFIC-MIB", "nlIfRlp"), (0, "NETLINK-SPECIFIC-MIB", "nlIfPort"))
if mibBuilder.loadTexts: portBsciOperEntry.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciOperEntry.setDescription('A BSCI Interactive Port entry containing objects relating to the port that are configurable by the user.')
# Port state and physical-layer attributes.
portBsciOperBlockedFlag = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 2, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("disabled", 1), ("enabled", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portBsciOperBlockedFlag.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciOperBlockedFlag.setDescription('Causes the port to be enabled or disabled at node IPL.')
portBsciOperConnector = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 2, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(3, 5, 6, 7, 8, 10))).clone(namedValues=NamedValues(("rs232", 3), ("v35", 5), ("rs449", 6), ("rs530", 7), ("x21", 8), ("t1", 10)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portBsciOperConnector.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciOperConnector.setDescription('Physical port interface connector type.')
portBsciOperSpeed = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 2, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: portBsciOperSpeed.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciOperSpeed.setDescription('Data transmission rate in bits per second.')
# Retransmission and framing limits.
portBsciOperRetransmitInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 2, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 9999))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portBsciOperRetransmitInterval.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciOperRetransmitInterval.setDescription('Length of time before the node will transmit an I-frame if the previous transmission is not acknowledged.')
portBsciOperMAXRetransmits = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 2, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 99))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portBsciOperMAXRetransmits.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciOperMAXRetransmits.setDescription('Maximum number of times the node will attempt to send an I-frame after a retransmission period expiration.')
portBsciOperMaxBytesPerFrame = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 2, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(25, 4105))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portBsciOperMaxBytesPerFrame.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciOperMaxBytesPerFrame.setDescription('Maximum frame size that will be transmitted on the port.')
# Clocking options.
portBsciOperGenerateClock = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 2, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portBsciOperGenerateClock.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciOperGenerateClock.setDescription('Specifies whether the port will generate the clock necessary to synchronize traffic over the link.')
portBsciOperRcvClockFromDTE = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 2, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portBsciOperRcvClockFromDTE.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciOperRcvClockFromDTE.setDescription('Allows the clock to be looped back from the DTE using the TT (Terminal Timing) signal.')
# PAD role and character set.
portBsciOperPadType = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 2, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("tpad", 1), ("hpad", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portBsciOperPadType.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciOperPadType.setDescription('BSCI Pad Type')
portBsciOperUseEBCDIC = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 2, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portBsciOperUseEBCDIC.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciOperUseEBCDIC.setDescription('Specifies whether all devices on a line use the same character set for successive session polls.')
# Call / virtual-circuit handling.
portBsciOperCallInfoInRequestPacket = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 2, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portBsciOperCallInfoInRequestPacket.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciOperCallInfoInRequestPacket.setDescription('Specifies whether the user will have the option of including call information in a call request packet.')
portBsciOperClearVCOnLastDeviceDown = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 2, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portBsciOperClearVCOnLastDeviceDown.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciOperClearVCOnLastDeviceDown.setDescription('Causes the virtual circuit to be cleared when no terminals are using it.')
portBsciOperTransTextSupported = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 2, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portBsciOperTransTextSupported.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciOperTransTextSupported.setDescription('Causes all characters transmitted to be treated as data.')
portBsciOperEndToEndAck = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 2, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portBsciOperEndToEndAck.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciOperEndToEndAck.setDescription('Allows management of acknowledgments end to end across the network rather than locally at each end.')
# Link mode.
portBsciOperFullDuplex = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 2, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portBsciOperFullDuplex.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciOperFullDuplex.setDescription('Specifies full-duplex transmission.')
portBsciOperMultidrop = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 2, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portBsciOperMultidrop.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciOperMultidrop.setDescription('Specifies whether transmission will be multidrop.')
# Polling, retry and framing parameters.
portBsciOperSlowPollRetryCount = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 2, 1, 17), Integer32().subtype(subtypeSpec=ValueRangeConstraint(10, 150))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portBsciOperSlowPollRetryCount.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciOperSlowPollRetryCount.setDescription('Specifies how many times the control unit will be polled before it is put on the slow poll list.')
portBsciOperSlowPollRetryFreq = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 2, 1, 18), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 200))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portBsciOperSlowPollRetryFreq.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciOperSlowPollRetryFreq.setDescription('Specifies the number of times active control units will be polled between pollings on the slow poll list.')
portBsciOperStartSynchChars = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 2, 1, 19), Integer32().subtype(subtypeSpec=ValueRangeConstraint(2, 10))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portBsciOperStartSynchChars.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciOperStartSynchChars.setDescription('Specifies the number of synchronization characters that will be added to the beginning of each frame.')
portBsciOperEndPadChars = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 2, 1, 20), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 10))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portBsciOperEndPadChars.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciOperEndPadChars.setDescription('Specifies the number of padding characters that will be added to the end of each frame.')
portBsciOperPollInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 2, 1, 21), Integer32().subtype(subtypeSpec=ValueRangeConstraint(100, 1000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portBsciOperPollInterval.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciOperPollInterval.setDescription('Specifies the time between passes through the polling list.')
portBsciOperNoResponseTimer = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 2, 1, 22), Integer32().subtype(subtypeSpec=ValueRangeConstraint(2, 10))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portBsciOperNoResponseTimer.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciOperNoResponseTimer.setDescription('Activated after transmission of a general poll or a data frame.')
portBsciOperNoResponseRetryCount = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 2, 1, 23), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portBsciOperNoResponseRetryCount.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciOperNoResponseRetryCount.setDescription('Specifies how many times the user device will be polled before control is passed on the next cluster.')
portBsciOperErrorRetransmitCount = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 2, 1, 24), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portBsciOperErrorRetransmitCount.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciOperErrorRetransmitCount.setDescription('Specifies the number of times the Netlink device will resend a block of data after the receiving device has detected an error in that block.')
portBsciOperNAKRetryCount = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 2, 1, 25), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portBsciOperNAKRetryCount.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciOperNAKRetryCount.setDescription('Specifies the number of times the Netlink device will send a frame when the receiving device is unable to acknowledge.')
# Error-check / parity enumerations.
portBsciOperBlockCheck = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 2, 1, 26), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("crc16", 1), ("even-lrc", 2), ("odd-lrc", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portBsciOperBlockCheck.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciOperBlockCheck.setDescription('Sets the redundancy check parameter.')
portBsciOperDataMode = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 2, 1, 27), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("odd-7bit", 1), ("even-7bit", 2), ("none-8bit", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portBsciOperDataMode.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciOperDataMode.setDescription('Sets the parity parameter.')
# HPAD-only behavior toggle.
portBsciOperAnswerNonConfigured = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 2, 1, 28), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portBsciOperAnswerNonConfigured.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciOperAnswerNonConfigured.setDescription('Only valid if pad type is HPAD. If yes, the HPAD will respond to all devices on the line. If no, the HPAD will respond only to those devices that are configured on the node.')
# TPAD-only toggle (operational, read-only counterpart of the Admin column).
# Fix: description typo "will active" -> "will activate".
portBsciOperActivateConnectionWithoutPoll = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 2, 1, 29), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portBsciOperActivateConnectionWithoutPoll.setStatus('mandatory')
if mibBuilder.loadTexts: portBsciOperActivateConnectionWithoutPoll.setDescription('Only valid if pad type is TPAD. If yes, will activate the TPAD connection without a poll of the connected device.')
# --- bsciSubscrAdminTable: read-write subscriber configuration per BSCI port.
# --- Indexed by (nlIfRlp, nlIfPort, bsciSubscrAdminSequence); up to 16
# --- subscribers per port (sequence range 1..16).
bsciSubscrAdminTable = MibTable((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 3), )
if mibBuilder.loadTexts: bsciSubscrAdminTable.setStatus('mandatory')
if mibBuilder.loadTexts: bsciSubscrAdminTable.setDescription('A list of BSC Interactive port subscribers.')
bsciSubscrAdminEntry = MibTableRow((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 3, 1), ).setIndexNames((0, "NETLINK-SPECIFIC-MIB", "nlIfRlp"), (0, "NETLINK-SPECIFIC-MIB", "nlIfPort"), (0, "NETLINK-SPECIFIC-MIB", "bsciSubscrAdminSequence"))
if mibBuilder.loadTexts: bsciSubscrAdminEntry.setStatus('mandatory')
if mibBuilder.loadTexts: bsciSubscrAdminEntry.setDescription('A BSCI Interactive Port Subscriber entry containing objects relating to the port that are configurable by the user.')
# Index column (read-only, 1..16).
bsciSubscrAdminSequence = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 3, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 16))).setMaxAccess("readonly")
if mibBuilder.loadTexts: bsciSubscrAdminSequence.setStatus('mandatory')
if mibBuilder.loadTexts: bsciSubscrAdminSequence.setDescription('Subscriber index for a specific BSCI port subscriber.')
# Local/remote subscriber addresses (NlSubscriberAddress is a textual
# convention defined elsewhere in this MIB module).
bsciSubscrAdminLocalID = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 3, 1, 2), NlSubscriberAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: bsciSubscrAdminLocalID.setStatus('mandatory')
if mibBuilder.loadTexts: bsciSubscrAdminLocalID.setDescription('Subscriber address of the local end of a BSCI connection.')
bsciSubscrAdminRemoteID = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 3, 1, 3), NlSubscriberAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: bsciSubscrAdminRemoteID.setStatus('mandatory')
if mibBuilder.loadTexts: bsciSubscrAdminRemoteID.setDescription('Subscriber address of the remote end of a BSCI connection.')
# Autocall behavior: enable flag (default 'no'), retry timer (default 60),
# and maximum retries.
bsciSubscrAdminAutocall = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 3, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('no')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: bsciSubscrAdminAutocall.setStatus('mandatory')
if mibBuilder.loadTexts: bsciSubscrAdminAutocall.setDescription('Causes a BSCI TPAD to automatically call its HPAD when the controller becomes active.')
bsciSubscrAdminAutocallRtyTimer = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 3, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(15, 255)).clone(60)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: bsciSubscrAdminAutocallRtyTimer.setStatus('mandatory')
if mibBuilder.loadTexts: bsciSubscrAdminAutocallRtyTimer.setDescription('Time between autocall retries.')
bsciSubscrAdminAutocallMaxRtry = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 3, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: bsciSubscrAdminAutocallMaxRtry.setStatus('mandatory')
if mibBuilder.loadTexts: bsciSubscrAdminAutocallMaxRtry.setDescription('Maximum number of times an autocall will be sent.')
# Link to a device row in the BSCI Devices table.
bsciSubscrAdminConnectionID = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 3, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: bsciSubscrAdminConnectionID.setStatus('mandatory')
if mibBuilder.loadTexts: bsciSubscrAdminConnectionID.setDescription('Identifier that will link the BSCI port with a device configured in the BSCI Devices Table.')
# SNMPv2-style RowStatus column for row creation/deletion in this table.
# Fix: the description previously named "bsciDevAdminTable", but this column
# belongs to bsciSubscrAdminTable (copy-paste error; compare the
# portBsciAdminRowStatus description, which correctly names its own table).
bsciSubscrAdminRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 3, 1, 8), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: bsciSubscrAdminRowStatus.setStatus('mandatory')
if mibBuilder.loadTexts: bsciSubscrAdminRowStatus.setDescription("This variable is used to manage the creation and deletion of conceptual rows in the bsciSubscrAdminTable and follows the SNMPv2 RowStatus conventions by supporting the following values: - `active', which indicates that the conceptual row is available for use by the managed device. For a management protocol set operation, a genErr response is returned when the row does not exist. - `createAndGo', which is supplied by a management station wishing to create a new instance of a conceptual row and to have its status automatically set to active, making it available for use by the managed device. For a management protocol set operation, a genErr response is returned when the row already exists. - `destroy', which is supplied by a management station wishing to delete all of the instances associated with an existing conceptual row. Note that all of the above values may be specified in a management protocol set operation, and only the 'active' value will be returned in response to a management protocol retrieval operation. For a management protocol set operation, if other variable bindings are included in the same PDU, then a genErr response is returned.")
# --- bsciSubscrOperTable: operational (read-only) view of the BSCI port
# --- subscribers.  Columns mirror bsciSubscrAdmin* but without defaults and
# --- with setMaxAccess("readonly").  Indexed by (nlIfRlp, nlIfPort,
# --- bsciSubscrOperSequence).
bsciSubscrOperTable = MibTable((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 4), )
if mibBuilder.loadTexts: bsciSubscrOperTable.setStatus('mandatory')
if mibBuilder.loadTexts: bsciSubscrOperTable.setDescription('A list of BSC Interactive port subscribers.')
bsciSubscrOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 4, 1), ).setIndexNames((0, "NETLINK-SPECIFIC-MIB", "nlIfRlp"), (0, "NETLINK-SPECIFIC-MIB", "nlIfPort"), (0, "NETLINK-SPECIFIC-MIB", "bsciSubscrOperSequence"))
if mibBuilder.loadTexts: bsciSubscrOperEntry.setStatus('mandatory')
if mibBuilder.loadTexts: bsciSubscrOperEntry.setDescription('A BSCI Interactive Port Subscriber entry containing objects relating to the port that are configurable by the user.')
# Index column (1..16).
bsciSubscrOperSequence = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 4, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 16))).setMaxAccess("readonly")
if mibBuilder.loadTexts: bsciSubscrOperSequence.setStatus('mandatory')
if mibBuilder.loadTexts: bsciSubscrOperSequence.setDescription('Subscriber index for a specific BSCI port subscriber.')
# Local/remote subscriber addresses.
bsciSubscrOperLocalID = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 4, 1, 2), NlSubscriberAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bsciSubscrOperLocalID.setStatus('mandatory')
if mibBuilder.loadTexts: bsciSubscrOperLocalID.setDescription('Subscriber address of the local end of a BSCI connection.')
bsciSubscrOperRemoteID = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 4, 1, 3), NlSubscriberAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bsciSubscrOperRemoteID.setStatus('mandatory')
if mibBuilder.loadTexts: bsciSubscrOperRemoteID.setDescription('Subscriber address of the remote end of a BSCI connection.')
# Autocall behavior.
bsciSubscrOperAutocall = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 4, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: bsciSubscrOperAutocall.setStatus('mandatory')
if mibBuilder.loadTexts: bsciSubscrOperAutocall.setDescription('Causes a BSCI TPAD to automatically call its HPAD when the controller becomes active.')
bsciSubscrOperAutocallRtyTimer = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 4, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(15, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: bsciSubscrOperAutocallRtyTimer.setStatus('mandatory')
if mibBuilder.loadTexts: bsciSubscrOperAutocallRtyTimer.setDescription('Time between autocall retries.')
bsciSubscrOperAutocallMaxRtry = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 4, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: bsciSubscrOperAutocallMaxRtry.setStatus('mandatory')
if mibBuilder.loadTexts: bsciSubscrOperAutocallMaxRtry.setDescription('Maximum number of times an autocall will be sent.')
# Link to a device row in the BSCI Devices table.
bsciSubscrOperConnectionID = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 4, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: bsciSubscrOperConnectionID.setStatus('mandatory')
if mibBuilder.loadTexts: bsciSubscrOperConnectionID.setDescription('Identifier that will link the BSCI port with a device configured in the BSCI Devices Table.')
# --- bsciDevAdminTable: configurable (read-write) BSC Interactive device table,
# --- OID subtree 1.3.6.1.4.1.173.7.3.4.5.
# Rows are indexed by RLP, port, control-unit ID and device-unit ID; row
# creation/deletion is managed through bsciDevAdminRowStatus (RowStatus).
bsciDevAdminTable = MibTable((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 5), )
if mibBuilder.loadTexts: bsciDevAdminTable.setStatus('mandatory')
if mibBuilder.loadTexts: bsciDevAdminTable.setDescription('A list of BSC Interactive devices.')
bsciDevAdminEntry = MibTableRow((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 5, 1), ).setIndexNames((0, "NETLINK-SPECIFIC-MIB", "nlIfRlp"), (0, "NETLINK-SPECIFIC-MIB", "nlIfPort"), (0, "NETLINK-SPECIFIC-MIB", "bsciDevAdminControlUnitID"), (0, "NETLINK-SPECIFIC-MIB", "bsciDevAdminDeviceUnitID"))
if mibBuilder.loadTexts: bsciDevAdminEntry.setStatus('mandatory')
if mibBuilder.loadTexts: bsciDevAdminEntry.setDescription('A BSCI Interactive Device entry containing objects relating to the device that are configurable by the user.')
# Index columns (read-only, range 0..31).
bsciDevAdminControlUnitID = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 5, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 31))).setMaxAccess("readonly")
if mibBuilder.loadTexts: bsciDevAdminControlUnitID.setStatus('mandatory')
if mibBuilder.loadTexts: bsciDevAdminControlUnitID.setDescription('Control Unit identifier.')
bsciDevAdminDeviceUnitID = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 5, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 31))).setMaxAccess("readonly")
if mibBuilder.loadTexts: bsciDevAdminDeviceUnitID.setStatus('mandatory')
if mibBuilder.loadTexts: bsciDevAdminDeviceUnitID.setDescription('Device Unit identifier.')
# Configurable columns (read-write); enumerated columns carry DEFVAL clones
# (e.g. .clone('no')) supplied by the source MIB.
bsciDevAdminConnectionID = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 5, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: bsciDevAdminConnectionID.setStatus('mandatory')
if mibBuilder.loadTexts: bsciDevAdminConnectionID.setDescription('The BSCI port connection ID for the device.')
bsciDevAdminSingleUserVC = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 5, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('no')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: bsciDevAdminSingleUserVC.setStatus('mandatory')
if mibBuilder.loadTexts: bsciDevAdminSingleUserVC.setDescription('Specifies whether each end of the connection is within a same Control Unit/Device Type pair.')
bsciDevAdminTransTextSupported = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 5, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('no')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: bsciDevAdminTransTextSupported.setStatus('mandatory')
if mibBuilder.loadTexts: bsciDevAdminTransTextSupported.setDescription('Causes all characters transmitted to be treated as data.')
bsciDevAdminPrinterAttached = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 5, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('no')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: bsciDevAdminPrinterAttached.setStatus('mandatory')
if mibBuilder.loadTexts: bsciDevAdminPrinterAttached.setDescription('Specifies whether the device has a printer attached.')
# RowStatus column: supports active / createAndGo / destroy per the
# description text below (a subset of the full SNMPv2 RowStatus convention).
bsciDevAdminRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 5, 1, 7), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: bsciDevAdminRowStatus.setStatus('mandatory')
if mibBuilder.loadTexts: bsciDevAdminRowStatus.setDescription("This variable is used to manage the creation and deletion of conceptual rows in the bsciDevAdminTable and follows the SNMPv2 RowStatus conventions by supporting the following values: - `active', which indicates that the conceptual row is available for use by the managed device. For a management protocol set operation, a genErr response is returned when the row does not exist. - `createAndGo', which is supplied by a management station wishing to create a new instance of a conceptual row and to have its status automatically set to active, making it available for use by the managed device. For a management protocol set operation, a genErr response is returned when the row already exists. - `destroy', which is supplied by a management station wishing to delete all of the instances associated with an existing conceptual row. Note that all of the above values may be specified in a management protocol set operation, and only the 'active' value will be returned in response to a management protocol retrieval operation. For a management protocol set operation, if other variable bindings are included in the same PDU, then a genErr response is returned.")
bsciDevAdminDisableStatusRequest = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 5, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("normal", 1), ("none", 2), ("always-active", 3))).clone('normal')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: bsciDevAdminDisableStatusRequest.setStatus('mandatory')
if mibBuilder.loadTexts: bsciDevAdminDisableStatusRequest.setDescription('Determines how the BSCI port will report the status of the attached device. Normal status- the port will report what the device reports. No status- the port will report status only if the device is active. Always active status- the port will always report the device as active.')
# --- bsciDevOperTable: operational (read-only) counterpart of
# --- bsciDevAdminTable, OID subtree 1.3.6.1.4.1.173.7.3.4.6.
# Same columns as the admin table minus RowStatus, all read-only and
# without DEFVAL clones.
bsciDevOperTable = MibTable((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 6), )
if mibBuilder.loadTexts: bsciDevOperTable.setStatus('mandatory')
if mibBuilder.loadTexts: bsciDevOperTable.setDescription('A list of BSC Interactive devices.')
bsciDevOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 6, 1), ).setIndexNames((0, "NETLINK-SPECIFIC-MIB", "nlIfRlp"), (0, "NETLINK-SPECIFIC-MIB", "nlIfPort"), (0, "NETLINK-SPECIFIC-MIB", "bsciDevOperControlUnitID"), (0, "NETLINK-SPECIFIC-MIB", "bsciDevOperDeviceUnitID"))
if mibBuilder.loadTexts: bsciDevOperEntry.setStatus('mandatory')
if mibBuilder.loadTexts: bsciDevOperEntry.setDescription('A BSCI Interactive Device entry containing objects relating to the device that are configurable by the user.')
bsciDevOperControlUnitID = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 6, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 31))).setMaxAccess("readonly")
if mibBuilder.loadTexts: bsciDevOperControlUnitID.setStatus('mandatory')
if mibBuilder.loadTexts: bsciDevOperControlUnitID.setDescription('Control Unit identifier.')
bsciDevOperDeviceUnitID = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 6, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 31))).setMaxAccess("readonly")
if mibBuilder.loadTexts: bsciDevOperDeviceUnitID.setStatus('mandatory')
if mibBuilder.loadTexts: bsciDevOperDeviceUnitID.setDescription('Device Unit identifier.')
bsciDevOperConnectionID = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 6, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: bsciDevOperConnectionID.setStatus('mandatory')
if mibBuilder.loadTexts: bsciDevOperConnectionID.setDescription('The BSCI port connection ID for the device.')
bsciDevOperSingleUserVC = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 6, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: bsciDevOperSingleUserVC.setStatus('mandatory')
if mibBuilder.loadTexts: bsciDevOperSingleUserVC.setDescription('Specifies whether each end of the connection is within a same Control Unit/Device Type pair.')
bsciDevOperTransTextSupported = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 6, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: bsciDevOperTransTextSupported.setStatus('mandatory')
if mibBuilder.loadTexts: bsciDevOperTransTextSupported.setDescription('Causes all characters transmitted to be treated as data.')
bsciDevOperPrinterAttached = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 6, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: bsciDevOperPrinterAttached.setStatus('mandatory')
if mibBuilder.loadTexts: bsciDevOperPrinterAttached.setDescription('Specifies whether the device has a printer attached.')
bsciDevOperDisableStatusRequest = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 4, 6, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("normal", 1), ("none", 2), ("always-active", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: bsciDevOperDisableStatusRequest.setStatus('mandatory')
if mibBuilder.loadTexts: bsciDevOperDisableStatusRequest.setDescription('Determines how the BSCI port will report the status of the attached device. Normal status- the port will report what the device reports. No status- the port will report status only if the device is active. Always active status- the port will always report the device as active.')
# --- portSdlcGroup (OID 1.3.6.1.4.1.173.7.3.5) and its configurable
# --- (read-write) SDLC port table, portSdlcAdminTable (…7.3.5.1).
# Rows indexed by RLP + port; changes may take effect immediately or on
# interface restart (per the table description). Most columns have a
# read-only twin in portSdlcOperTable below.
portSdlcGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 173, 7, 3, 5))
portSdlcAdminTable = MibTable((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 1), )
if mibBuilder.loadTexts: portSdlcAdminTable.setStatus('mandatory')
if mibBuilder.loadTexts: portSdlcAdminTable.setDescription('This table contains Netlink Enterprise specific objects to manage SDLC port. Changing on of the these parameters may take effect in the operating port immediately or may wait until the interface is restarted depending on the details of the implementation. Most of the objects in this read-write table have corresponding read-only objects in the portSdlcOperTable that return the current operating value. The operating values may be different from these configured values if a configured parameter was configured afterthe interface was started.')
portSdlcAdminEntry = MibTableRow((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 1, 1), ).setIndexNames((0, "NETLINK-SPECIFIC-MIB", "nlIfRlp"), (0, "NETLINK-SPECIFIC-MIB", "nlIfPort"))
if mibBuilder.loadTexts: portSdlcAdminEntry.setStatus('mandatory')
if mibBuilder.loadTexts: portSdlcAdminEntry.setDescription('A list of configured values for an SDLC port.')
# Column 1 is marked obsolete in the source MIB but kept for compatibility.
portSdlcAdminCommit = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 1, 1, 1), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portSdlcAdminCommit.setStatus('obsolete')
if mibBuilder.loadTexts: portSdlcAdminCommit.setDescription('Writing a value to this object commits the the SDLC port related modified configuration values to the database.')
portSdlcAdminMAXRetries = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 1, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 99)).clone(5)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portSdlcAdminMAXRetries.setStatus('mandatory')
if mibBuilder.loadTexts: portSdlcAdminMAXRetries.setDescription('This object defines the max. number of retries to a non-responding Link station, before putting the Link station to slow-poll list if it is not Normal response Mode OR disconnecting the link station if it is data txfr state.')
portSdlcAdminMAXOut = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 1, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 7)).clone(7)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portSdlcAdminMAXOut.setStatus('mandatory')
if mibBuilder.loadTexts: portSdlcAdminMAXOut.setDescription('This object defines the number of outstanding frames for triggering window full condition.')
portSdlcAdminPadType = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("xpad", 1), ("tpad", 2), ("hpad", 3), ("npad", 4))).clone('tpad')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portSdlcAdminPadType.setStatus('mandatory')
if mibBuilder.loadTexts: portSdlcAdminPadType.setDescription('This object defines the SNA PAD types.')
portSdlcAdminGenerateClock = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('yes')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portSdlcAdminGenerateClock.setStatus('mandatory')
if mibBuilder.loadTexts: portSdlcAdminGenerateClock.setDescription('This object defines if the port would generate clock.')
portSdlcAdminRcvClockFromDTE = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('no')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portSdlcAdminRcvClockFromDTE.setStatus('mandatory')
if mibBuilder.loadTexts: portSdlcAdminRcvClockFromDTE.setDescription('This object defines if the rcv clock will be used from DTE.')
portSdlcAdminNrz = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('yes')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portSdlcAdminNrz.setStatus('mandatory')
if mibBuilder.loadTexts: portSdlcAdminNrz.setDescription('This object defines the data encoding at the physical layer')
portSdlcAdminPacketSize = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 1, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(16, 4096)).clone(1024)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portSdlcAdminPacketSize.setStatus('mandatory')
if mibBuilder.loadTexts: portSdlcAdminPacketSize.setDescription('This object defines the packet size of the X25 layer used by SNA PAD')
portSdlcAdminDisableRequestDisconnect = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('no')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portSdlcAdminDisableRequestDisconnect.setStatus('mandatory')
if mibBuilder.loadTexts: portSdlcAdminDisableRequestDisconnect.setDescription('This object defines if a Request To Disconnect(RD) will be sent when remote PAD clear the connection')
portSdlcAdminLPDASupport = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 1, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("none", 1), ("lpda-1", 2), ("lpda-2", 3))).clone('none')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portSdlcAdminLPDASupport.setStatus('mandatory')
if mibBuilder.loadTexts: portSdlcAdminLPDASupport.setDescription('This object defines the type LPDA support on SDLC port.')
portSdlcAdminConnector = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 1, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(3, 5, 6, 7, 8, 10))).clone(namedValues=NamedValues(("rs232", 3), ("v35", 5), ("rs449", 6), ("rs530", 7), ("x21", 8), ("t1", 10)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portSdlcAdminConnector.setStatus('mandatory')
if mibBuilder.loadTexts: portSdlcAdminConnector.setDescription('This object defines the connector type of the SDLC port.')
portSdlcAdminSpeed = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 1, 1, 12), Integer32().subtype(subtypeSpec=ValueRangeConstraint(75, 2048000)).clone(64000)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portSdlcAdminSpeed.setStatus('mandatory')
if mibBuilder.loadTexts: portSdlcAdminSpeed.setDescription('This object defines the speed of the SDLC port. The speed may only be set to one of a series of reasonable values, and if an attempt is made to set the speed to a value which is within the valid range but not equal to one of these values, the speed will be rounded up. If the connector type of the port is RS232, the port could be a standard port or a high speed port. If the port is a high speed RS232 port, the maximum valid speed is 256000. If the port is a standard RS232 port, the maximum valid speed is 64000 on the FRX4000/SS1840 and 19200 on all other products. It may be possible to set the speed of a standard RS232 port to a speed which is valid for a high speed RS232 port but invalid for a standard RS232 port. In this case, the port may not end up having the invalid speed. The default speed for a standard RS232 port on the FRX6000 is 19200.')
# Row creation/deletion control (active / createAndGo / destroy).
portSdlcAdminRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 1, 1, 13), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portSdlcAdminRowStatus.setStatus('mandatory')
if mibBuilder.loadTexts: portSdlcAdminRowStatus.setDescription("This variable is used to manage the creation and deletion of conceptual rows in the portSdlcAdminTable and follows the SNMPv2 RowStatus conventions by supporting the following values: - `active', which indicates that the conceptual row is available for use by the managed device, and which is supplied by a management station wishing to exercise an on-line update of the existing conceptual row. For a management protocol set operation, a genErr response is returned when the row does not exist. - `createAndGo', which is supplied by a management station wishing to create a new instance of a conceptual row and to have its status automatically set to active, making it available for use by the managed device. For a management protocol set operation, a genErr response is returned when the row already exists. - `destroy', which is supplied by a management station wishing to delete all of the instances associated with an existing conceptual row. Note that all of the above values may be specified in a management protocol set operation, and only the 'active' value will be returned in response to a management protocol retrieval operation. For a management protocol set operation, if other variable bindings are included in the same PDU, then a genErr response is returned.")
portSdlcAdminIdleFillChar = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 1, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("hex-ff", 1), ("hex-7e", 2))).clone('hex-ff')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portSdlcAdminIdleFillChar.setStatus('mandatory')
if mibBuilder.loadTexts: portSdlcAdminIdleFillChar.setDescription('This object specifies a character that wll be inserted into the stream when the SNA link is idle.')
portSdlcAdminInactivityTimer = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 1, 1, 15), Integer32().subtype(subtypeSpec=ValueRangeConstraint(15, 250)).clone(50)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portSdlcAdminInactivityTimer.setStatus('mandatory')
if mibBuilder.loadTexts: portSdlcAdminInactivityTimer.setDescription('This object determines how long the node will wait with no activity on the port before it will declare the attached device down.')
portSdlcAdminL1Duplex = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 1, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("full-duplex", 1), ("half-duplex", 2))).clone('full-duplex')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portSdlcAdminL1Duplex.setStatus('mandatory')
if mibBuilder.loadTexts: portSdlcAdminL1Duplex.setDescription('This object determines whether level-1 (physical layer) transmission can be in one (half duplex) or both (full duplex) directions at once.')
# --- portSdlcOperTable: operational (read-only) counterpart of
# --- portSdlcAdminTable, OID subtree 1.3.6.1.4.1.173.7.3.5.2.
# Same column layout minus RowStatus; no DEFVAL clones on read-only objects.
portSdlcOperTable = MibTable((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 2), )
if mibBuilder.loadTexts: portSdlcOperTable.setStatus('mandatory')
if mibBuilder.loadTexts: portSdlcOperTable.setDescription('This table contains current Netlink enterprise specific port parameters')
portSdlcOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 2, 1), ).setIndexNames((0, "NETLINK-SPECIFIC-MIB", "nlIfRlp"), (0, "NETLINK-SPECIFIC-MIB", "nlIfPort"))
if mibBuilder.loadTexts: portSdlcOperEntry.setStatus('mandatory')
if mibBuilder.loadTexts: portSdlcOperEntry.setDescription('A list of operational values for an SDLC port.')
# Obsolete placeholder mirroring portSdlcAdminCommit.
portSdlcOperCommit = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 2, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: portSdlcOperCommit.setStatus('obsolete')
if mibBuilder.loadTexts: portSdlcOperCommit.setDescription('This object is not used.')
portSdlcOperMAXRetries = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 2, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 99))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portSdlcOperMAXRetries.setStatus('mandatory')
if mibBuilder.loadTexts: portSdlcOperMAXRetries.setDescription('This object defines the max. number of retries to a non-responding Link station, before putting the Link station to slow-poll list if it is not Normal response Mode OR disconnecting the link station if it is data txfr state.')
portSdlcOperMAXOut = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 2, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 7))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portSdlcOperMAXOut.setStatus('mandatory')
if mibBuilder.loadTexts: portSdlcOperMAXOut.setDescription('This object defines the number of outstanding frames for triggering window full condition.')
portSdlcOperPadType = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 2, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("xpad", 1), ("tpad", 2), ("hpad", 3), ("npad", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portSdlcOperPadType.setStatus('mandatory')
if mibBuilder.loadTexts: portSdlcOperPadType.setDescription('This object defines the SNA PAD type')
portSdlcOperGenerateClock = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 2, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portSdlcOperGenerateClock.setStatus('mandatory')
if mibBuilder.loadTexts: portSdlcOperGenerateClock.setDescription('This object defines if the port would generate clock.')
portSdlcOperRcvClockFromDTE = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 2, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portSdlcOperRcvClockFromDTE.setStatus('mandatory')
if mibBuilder.loadTexts: portSdlcOperRcvClockFromDTE.setDescription('This object defines if the rcv clock will be used from DTE.')
portSdlcOperNrz = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 2, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portSdlcOperNrz.setStatus('mandatory')
if mibBuilder.loadTexts: portSdlcOperNrz.setDescription('This object defines the data encoding at the physical layer')
portSdlcOperPacketSize = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 2, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(16, 4096))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portSdlcOperPacketSize.setStatus('mandatory')
if mibBuilder.loadTexts: portSdlcOperPacketSize.setDescription('This object defines the packet size of the X25 layer used by SNA PAD')
portSdlcOperDisableRequestDisconnect = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 2, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portSdlcOperDisableRequestDisconnect.setStatus('mandatory')
if mibBuilder.loadTexts: portSdlcOperDisableRequestDisconnect.setDescription('This object defines if a Request To Disconnect(RD) will be sent when remote PAD clear the connection')
portSdlcOperLPDASupport = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 2, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("none", 1), ("lpda-1", 2), ("lpda-2", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portSdlcOperLPDASupport.setStatus('mandatory')
if mibBuilder.loadTexts: portSdlcOperLPDASupport.setDescription('This object defines the type LPDA support on SDLC port.')
portSdlcOperConnector = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 2, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(3, 5, 6, 7, 8, 10))).clone(namedValues=NamedValues(("rs232", 3), ("v35", 5), ("rs449", 6), ("rs530", 7), ("x21", 8), ("t1", 10)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portSdlcOperConnector.setStatus('mandatory')
if mibBuilder.loadTexts: portSdlcOperConnector.setDescription('This object defines the operational connector type of the SDLC port.')
# Unconstrained here, unlike the admin twin's 75..2048000 range.
portSdlcOperSpeed = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 2, 1, 12), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: portSdlcOperSpeed.setStatus('mandatory')
if mibBuilder.loadTexts: portSdlcOperSpeed.setDescription('This object defines the operational speed of the SDLC port.')
portSdlcOperIdleFillChar = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 2, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("hex-ff", 1), ("hex-7e", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portSdlcOperIdleFillChar.setStatus('mandatory')
if mibBuilder.loadTexts: portSdlcOperIdleFillChar.setDescription('This object specifies a character that wll be inserted into the stream when the SNA link is idle.')
portSdlcOperInactivityTimer = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 2, 1, 14), Integer32().subtype(subtypeSpec=ValueRangeConstraint(15, 250))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portSdlcOperInactivityTimer.setStatus('mandatory')
if mibBuilder.loadTexts: portSdlcOperInactivityTimer.setDescription('This object determines how long the node will wait with no activity on the port before it will declare the attached device down.')
portSdlcOperL1Duplex = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 2, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("full-duplex", 1), ("half-duplex", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portSdlcOperL1Duplex.setStatus('mandatory')
if mibBuilder.loadTexts: portSdlcOperL1Duplex.setDescription('This object determines whether level-1 (physical layer) transmission can be in one (half duplex) or both (full duplex) directions at once.')
# --- lSSdlcAdminTable: configurable (read-write) SDLC link-station table,
# --- OID subtree 1.3.6.1.4.1.173.7.3.5.3.
# Rows indexed by RLP, port and sdlcLSAddress; creation/deletion via
# lSSdlcAdminRowStatus.
lSSdlcAdminTable = MibTable((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 3), )
if mibBuilder.loadTexts: lSSdlcAdminTable.setStatus('mandatory')
if mibBuilder.loadTexts: lSSdlcAdminTable.setDescription('This table contains Enterprise specific Link station configurable parameters to manage the link stations.')
lSSdlcAdminEntry = MibTableRow((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 3, 1), ).setIndexNames((0, "NETLINK-SPECIFIC-MIB", "nlIfRlp"), (0, "NETLINK-SPECIFIC-MIB", "nlIfPort"), (0, "NETLINK-SPECIFIC-MIB", "sdlcLSAddress"))
if mibBuilder.loadTexts: lSSdlcAdminEntry.setStatus('mandatory')
if mibBuilder.loadTexts: lSSdlcAdminEntry.setDescription('A list of configured values for an SDLC port.')
lSSdlcAdminLocalSub = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 3, 1, 1), NlSubscriberAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: lSSdlcAdminLocalSub.setStatus('mandatory')
if mibBuilder.loadTexts: lSSdlcAdminLocalSub.setDescription('This object defines the local subscriber ID of the Link Station')
lSSdlcAdminRemoteSub = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 3, 1, 2), NlSubscriberAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: lSSdlcAdminRemoteSub.setStatus('mandatory')
if mibBuilder.loadTexts: lSSdlcAdminRemoteSub.setDescription('This object defines the remote subscriber ID used to connect to remote Link Station')
lSSdlcAdminAutoCall = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 3, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('no')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: lSSdlcAdminAutoCall.setStatus('mandatory')
if mibBuilder.loadTexts: lSSdlcAdminAutoCall.setDescription('This object defines if auto call is enabled for the link station. If enabled, the link station will initiate connection establishment, when appropriate. Else, it will passively listen to the connection request for establish a connection with the remote PAD')
# Retry timer/count below are meaningful only when lSSdlcAdminAutoCall is
# 'yes' (per their description texts).
lSSdlcAdminRetryTime = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 3, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(15, 225)).clone(60)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: lSSdlcAdminRetryTime.setStatus('mandatory')
if mibBuilder.loadTexts: lSSdlcAdminRetryTime.setDescription('This object is used, only when lSSdlcAdminAutoCall object is yes. This defines the time interval measured in seconds in which the connection request is retried')
lSSdlcAdminRetryCount = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 3, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: lSSdlcAdminRetryCount.setStatus('mandatory')
if mibBuilder.loadTexts: lSSdlcAdminRetryCount.setDescription('This object is used, only when lSSdlcAdminAutoCall object is yes. This defines the number of times the connection initiation is retried before given up. The value of Zero(0) means to try indefinitely')
lSSdlcAdminLlc2Conversion = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 3, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('no')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: lSSdlcAdminLlc2Conversion.setStatus('mandatory')
if mibBuilder.loadTexts: lSSdlcAdminLlc2Conversion.setDescription('This object defines if this link station connects to a LLC2 device/host.')
lSSdlcAdminLPDAResourceID = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 3, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: lSSdlcAdminLPDAResourceID.setStatus('mandatory')
if mibBuilder.loadTexts: lSSdlcAdminLPDAResourceID.setDescription('This objects the resource ID used during LPDA command support')
lSSdlcAdminRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 3, 1, 8), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: lSSdlcAdminRowStatus.setStatus('mandatory')
if mibBuilder.loadTexts: lSSdlcAdminRowStatus.setDescription("This variable is used to manage the creation and deletion of conceptual rows in the lSSdlcAdminTable and follows the SNMPv2 RowStatus conventions by supporting the following values: - `active', which indicates that the conceptual row is available for use by the managed device. For a management protocol set operation, a genErr response is returned when the row does not exist. - `createAndGo', which is supplied by a management station wishing to create a new instance of a conceptual row and to have its status automatically set to active, making it available for use by the managed device. For a management protocol set operation, a genErr response is returned when the row already exists. - `destroy', which is supplied by a management station wishing to delete all of the instances associated with an existing conceptual row. Note that all of the above values may be specified in a management protocol set operation, and only the 'active' value will be returned in response to a management protocol retrieval operation. For a management protocol set operation, if other variable bindings are included in the same PDU, then a genErr response is returned. Also note that deleting a PU entry will only remove it from the database file, and it's existence will still be known by the protocol until the node is rebooted or until an online update of the port is performed.")
lSSdlcAdminL2DatMode = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 3, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("two-way-alternate", 1), ("two-way-simultaneous", 2))).clone('two-way-alternate')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: lSSdlcAdminL2DatMode.setStatus('mandatory')
if mibBuilder.loadTexts: lSSdlcAdminL2DatMode.setDescription('This object determines whether transmission can be in one (alternate) or two (simultaneous) directions at the same time.')
# --- lSSdlcOperTable: read-only operational mirror of the lSSdlcAdmin
# columns for an SDLC link station. Indexed by (nlIfRlp, nlIfPort,
# sdlcLSAddress), same as the admin table. Auto-generated pysnmp code.
lSSdlcOperTable = MibTable((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 4), )
if mibBuilder.loadTexts: lSSdlcOperTable.setStatus('mandatory')
if mibBuilder.loadTexts: lSSdlcOperTable.setDescription('This table contains the operational parameters of the SDLC port')
lSSdlcOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 4, 1), ).setIndexNames((0, "NETLINK-SPECIFIC-MIB", "nlIfRlp"), (0, "NETLINK-SPECIFIC-MIB", "nlIfPort"), (0, "NETLINK-SPECIFIC-MIB", "sdlcLSAddress"))
if mibBuilder.loadTexts: lSSdlcOperEntry.setStatus('mandatory')
if mibBuilder.loadTexts: lSSdlcOperEntry.setDescription('A list of configured values for an SDLC port.')
# .1/.2: local and remote subscriber IDs (NlSubscriberAddress textual convention).
lSSdlcOperLocalSub = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 4, 1, 1), NlSubscriberAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: lSSdlcOperLocalSub.setStatus('mandatory')
if mibBuilder.loadTexts: lSSdlcOperLocalSub.setDescription('This object defines the local subscriber ID of the Link Station')
lSSdlcOperRemoteSub = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 4, 1, 2), NlSubscriberAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: lSSdlcOperRemoteSub.setStatus('mandatory')
if mibBuilder.loadTexts: lSSdlcOperRemoteSub.setDescription('This object defines the remote subscriber ID used to connect to remote Link Station')
# .3: auto-call flag, enum no(1)/yes(2).
lSSdlcOperAutoCall = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 4, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: lSSdlcOperAutoCall.setStatus('mandatory')
if mibBuilder.loadTexts: lSSdlcOperAutoCall.setDescription('This object defines if auto call is enabled for the link station. If enabled, the link station will initiate connection establishment, when appropriate. Else, it will passively listen to the connection request for establish a connection with the remote PAD')
# .4/.5: connection retry interval (15..225 s) and retry count (0 = retry forever).
lSSdlcOperRetryTime = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 4, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(15, 225))).setMaxAccess("readonly")
if mibBuilder.loadTexts: lSSdlcOperRetryTime.setStatus('mandatory')
if mibBuilder.loadTexts: lSSdlcOperRetryTime.setDescription('This object is valid, only when lSSdlcAdminAutoCall object is yes. This defines the time interval measured in seconds in which the connection request is retried')
lSSdlcOperRetryCount = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 4, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: lSSdlcOperRetryCount.setStatus('mandatory')
if mibBuilder.loadTexts: lSSdlcOperRetryCount.setDescription('This object is used, only when lSSdlcAdminAutoCall object is yes. This defines the number of times the connection initiation is retried before given up. The value of Zero(0) means to try indefinitely')
# .6: whether this link station converses with an LLC2 device/host.
lSSdlcOperLlc2Conversion = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 4, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: lSSdlcOperLlc2Conversion.setStatus('mandatory')
if mibBuilder.loadTexts: lSSdlcOperLlc2Conversion.setDescription('This object defines if this link station connects to a LLC2 device/host.')
# .7: LPDA resource ID (0..255). NOTE(review): "This objects the resource ID"
# is a typo carried over from the source MIB text.
lSSdlcOperLPDAResourceID = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 4, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: lSSdlcOperLPDAResourceID.setStatus('mandatory')
if mibBuilder.loadTexts: lSSdlcOperLPDAResourceID.setDescription('This objects the resource ID used during LPDA command support')
# .8: layer-2 data mode, enum two-way-alternate(1)/two-way-simultaneous(2).
lSSdlcOperL2DatMode = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 4, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("two-way-alternate", 1), ("two-way-simultaneous", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: lSSdlcOperL2DatMode.setStatus('mandatory')
if mibBuilder.loadTexts: lSSdlcOperL2DatMode.setDescription('This object determines whether transmission can be in one (alternate) or two (simultaneous) directions at the same time.')
# --- lSSdlcLlc2AdminTable: read-write LLC2 session parameters used when the
# SDLC station is bridged to an LLC2 device at the remote end. Indexed by
# (nlIfRlp, nlIfPort, sdlcLSAddress). Defaults are supplied via clone().
# Auto-generated pysnmp code.
lSSdlcLlc2AdminTable = MibTable((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 5), )
if mibBuilder.loadTexts: lSSdlcLlc2AdminTable.setStatus('mandatory')
if mibBuilder.loadTexts: lSSdlcLlc2AdminTable.setDescription('This table contains the LLC2 related parameters used to configure LLC2 session, when the SDLC station is connected to a LLC2 device at the remote end')
lSSdlcLlc2AdminEntry = MibTableRow((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 5, 1), ).setIndexNames((0, "NETLINK-SPECIFIC-MIB", "nlIfRlp"), (0, "NETLINK-SPECIFIC-MIB", "nlIfPort"), (0, "NETLINK-SPECIFIC-MIB", "sdlcLSAddress"))
if mibBuilder.loadTexts: lSSdlcLlc2AdminEntry.setStatus('mandatory')
if mibBuilder.loadTexts: lSSdlcLlc2AdminEntry.setDescription('A list of configured values for an SDLC port.')
# .1: local SAP (0..252, multiples of 4 per description), default 4.
lSSdlcLlc2AdminLocalSap = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 5, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 252)).clone(4)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: lSSdlcLlc2AdminLocalSap.setStatus('mandatory')
if mibBuilder.loadTexts: lSSdlcLlc2AdminLocalSap.setDescription('This object defines Local SAP address (multiple of 4)')
# .2: local MAC, default all-zero physical address.
lSSdlcLlc2AdminLocalMac = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 5, 1, 2), PhysAddress().clone(hexValue="000000000000")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: lSSdlcLlc2AdminLocalMac.setStatus('mandatory')
if mibBuilder.loadTexts: lSSdlcLlc2AdminLocalMac.setDescription('This object defines the local MAC address')
# .3/.4: XID identification block (12 bits) and number (20 bits).
lSSdlcLlc2AdminIdblk = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 5, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4095))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: lSSdlcLlc2AdminIdblk.setStatus('mandatory')
if mibBuilder.loadTexts: lSSdlcLlc2AdminIdblk.setDescription('This object defines idblk used in XID')
lSSdlcLlc2AdminIdnum = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 5, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 1048575))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: lSSdlcLlc2AdminIdnum.setStatus('mandatory')
if mibBuilder.loadTexts: lSSdlcLlc2AdminIdnum.setDescription('This object defines idnum used in XID')
# .5-.7: LLC2 timers -- Ti inactivity (s, default 30), T1 reply (100 ms units,
# default 10), T2 receiver-ack (ms, default 100).
lSSdlcLlc2AdminLanTi = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 5, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 50)).clone(30)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: lSSdlcLlc2AdminLanTi.setStatus('mandatory')
if mibBuilder.loadTexts: lSSdlcLlc2AdminLanTi.setDescription('This object defines LLC2 inactivity timer measured in units of seconds')
lSSdlcLlc2AdminLanT1 = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 5, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 250)).clone(10)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: lSSdlcLlc2AdminLanT1.setStatus('mandatory')
if mibBuilder.loadTexts: lSSdlcLlc2AdminLanT1.setDescription('This object defines LLC2 reply timer measured in units of 100 milliseconds')
lSSdlcLlc2AdminLanT2 = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 5, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(100, 5000)).clone(100)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: lSSdlcLlc2AdminLanT2.setStatus('mandatory')
if mibBuilder.loadTexts: lSSdlcLlc2AdminLanT2.setDescription('This object defines Receiver Ack Timer measured in units of milliseconds')
# .8-.10: LLC2 counters -- N2 max retransmissions (default 8), N3 I-LPDUs
# before ack (default 3), Tw outstanding-frames window (default 7).
lSSdlcLlc2AdminLanN2 = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 5, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255)).clone(8)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: lSSdlcLlc2AdminLanN2.setStatus('mandatory')
if mibBuilder.loadTexts: lSSdlcLlc2AdminLanN2.setDescription('This object defines Maximum Retransmissions')
lSSdlcLlc2AdminLanN3 = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 5, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 127)).clone(3)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: lSSdlcLlc2AdminLanN3.setStatus('mandatory')
if mibBuilder.loadTexts: lSSdlcLlc2AdminLanN3.setDescription("This object defines count of I-format LPDU's before sending Ack")
lSSdlcLlc2AdminLanTw = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 5, 1, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 127)).clone(7)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: lSSdlcLlc2AdminLanTw.setStatus('mandatory')
if mibBuilder.loadTexts: lSSdlcLlc2AdminLanTw.setDescription('This object defines Outstanding Frames')
# .11/.12: bandwidth allocation group (0..16) and intra-node priority (0..9).
lSSdlcLlc2AdminBAG = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 5, 1, 11), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 16))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: lSSdlcLlc2AdminBAG.setStatus('mandatory')
if mibBuilder.loadTexts: lSSdlcLlc2AdminBAG.setDescription('This object defines bandwidth allocation group')
lSSdlcLlc2AdminPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 5, 1, 12), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 9))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: lSSdlcLlc2AdminPriority.setStatus('mandatory')
if mibBuilder.loadTexts: lSSdlcLlc2AdminPriority.setDescription('This object defines priority for traffic within the node')
# .13: SNMPv2-style RowStatus for row creation/deletion.
lSSdlcLlc2AdminRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 5, 1, 13), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: lSSdlcLlc2AdminRowStatus.setStatus('mandatory')
if mibBuilder.loadTexts: lSSdlcLlc2AdminRowStatus.setDescription("This variable is used to manage the creation and deletion of conceptual rows in the lSSdlcLlc2AdminTable and follows the SNMPv2 RowStatus conventions by supporting the following values: - `active', which indicates that the conceptual row is available for use by the managed device. For a management protocol set operation, a genErr response is returned when the row does not exist. - `createAndGo', which is supplied by a management station wishing to create a new instance of a conceptual row and to have its status automatically set to active, making it available for use by the managed device. For a management protocol set operation, a genErr response is returned when the row already exists. - `destroy', which is supplied by a management station wishing to delete all of the instances associated with an existing conceptual row. Note that all of the above values may be specified in a management protocol set operation, and only the 'active' value will be returned in response to a management protocol retrieval operation. For a management protocol set operation, if other variable bindings are included in the same PDU, then a genErr response is returned. Also note that deleting a PU entry will only remove it from the database file, and it's existence will still be known by the protocol until the node is rebooted or until an online update of the port is performed.")
# .14: suppress XID response to a null XID, enum no(1)/yes(2), default 'no'.
lSSdlcLlc2AdminSuppressXID = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 5, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('no')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: lSSdlcLlc2AdminSuppressXID.setStatus('mandatory')
if mibBuilder.loadTexts: lSSdlcLlc2AdminSuppressXID.setDescription('This object prevents (if yes) the returning of an XID in response to a null XID. This might be desirable if the remote TPAD is not running PU 2.1. It should be noted that even if an XID is sent, an SNRM (Set Normal Response Mode) immediately follows, so the connection will be established.')
# --- lSSdlcLlc2OperTable: read-only operational mirror of the LLC2 admin
# columns above (same column layout minus RowStatus; no defaults).
# Auto-generated pysnmp code.
lSSdlcLlc2OperTable = MibTable((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 6), )
if mibBuilder.loadTexts: lSSdlcLlc2OperTable.setStatus('mandatory')
# NOTE(review): "operartional" typo below is inherited from the source MIB
# text; fix upstream and regenerate rather than editing this string.
if mibBuilder.loadTexts: lSSdlcLlc2OperTable.setDescription('This table contains operartional parameters related to LLC2 session of this link station.')
lSSdlcLlc2OperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 6, 1), ).setIndexNames((0, "NETLINK-SPECIFIC-MIB", "nlIfRlp"), (0, "NETLINK-SPECIFIC-MIB", "nlIfPort"), (0, "NETLINK-SPECIFIC-MIB", "sdlcLSAddress"))
if mibBuilder.loadTexts: lSSdlcLlc2OperEntry.setStatus('mandatory')
if mibBuilder.loadTexts: lSSdlcLlc2OperEntry.setDescription('A list of configured values for an SDLC port.')
# .1/.2: local SAP and local MAC currently in effect.
lSSdlcLlc2OperLocalSap = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 6, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 252))).setMaxAccess("readonly")
if mibBuilder.loadTexts: lSSdlcLlc2OperLocalSap.setStatus('mandatory')
if mibBuilder.loadTexts: lSSdlcLlc2OperLocalSap.setDescription('This object defines Local SAP address (multiple of 4)')
lSSdlcLlc2OperLocalMac = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 6, 1, 2), PhysAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: lSSdlcLlc2OperLocalMac.setStatus('mandatory')
if mibBuilder.loadTexts: lSSdlcLlc2OperLocalMac.setDescription('This object defines the local MAC address')
# .3/.4: XID idblk/idnum in effect.
lSSdlcLlc2OperIdblk = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 6, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4095))).setMaxAccess("readonly")
if mibBuilder.loadTexts: lSSdlcLlc2OperIdblk.setStatus('mandatory')
if mibBuilder.loadTexts: lSSdlcLlc2OperIdblk.setDescription('This object defines idblk used in XID')
lSSdlcLlc2OperIdnum = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 6, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 1048575))).setMaxAccess("readonly")
if mibBuilder.loadTexts: lSSdlcLlc2OperIdnum.setStatus('mandatory')
if mibBuilder.loadTexts: lSSdlcLlc2OperIdnum.setDescription('This object defines idnum used in XID')
# .5-.7: LLC2 timers Ti/T1/T2 in effect (units per description strings).
lSSdlcLlc2OperLanTi = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 6, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 50))).setMaxAccess("readonly")
if mibBuilder.loadTexts: lSSdlcLlc2OperLanTi.setStatus('mandatory')
if mibBuilder.loadTexts: lSSdlcLlc2OperLanTi.setDescription('This object defines LLC2 inactivity timer measured in units of seconds')
lSSdlcLlc2OperLanT1 = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 6, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 250))).setMaxAccess("readonly")
if mibBuilder.loadTexts: lSSdlcLlc2OperLanT1.setStatus('mandatory')
if mibBuilder.loadTexts: lSSdlcLlc2OperLanT1.setDescription('This object defines LLC2 reply timer measured in units of 100 milliseconds')
lSSdlcLlc2OperLanT2 = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 6, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(100, 5000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: lSSdlcLlc2OperLanT2.setStatus('mandatory')
if mibBuilder.loadTexts: lSSdlcLlc2OperLanT2.setDescription('This object defines Receiver Ack Timer measured in units of milliseconds')
# .8-.10: LLC2 counters N2/N3/Tw in effect.
lSSdlcLlc2OperLanN2 = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 6, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: lSSdlcLlc2OperLanN2.setStatus('mandatory')
if mibBuilder.loadTexts: lSSdlcLlc2OperLanN2.setDescription('This object defines Maximum Retransmissions')
lSSdlcLlc2OperLanN3 = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 6, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 127))).setMaxAccess("readonly")
if mibBuilder.loadTexts: lSSdlcLlc2OperLanN3.setStatus('mandatory')
if mibBuilder.loadTexts: lSSdlcLlc2OperLanN3.setDescription("This object defines count of I-format LPDU's before sending Ack")
lSSdlcLlc2OperLanTw = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 6, 1, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 127))).setMaxAccess("readonly")
if mibBuilder.loadTexts: lSSdlcLlc2OperLanTw.setStatus('mandatory')
if mibBuilder.loadTexts: lSSdlcLlc2OperLanTw.setDescription('This object defines Outstanding Frames')
# .11/.12: bandwidth allocation group and priority in effect.
lSSdlcLlc2OperBAG = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 6, 1, 11), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 16))).setMaxAccess("readonly")
if mibBuilder.loadTexts: lSSdlcLlc2OperBAG.setStatus('mandatory')
if mibBuilder.loadTexts: lSSdlcLlc2OperBAG.setDescription('This object defines bandwidth allocation group')
lSSdlcLlc2OperPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 6, 1, 12), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 9))).setMaxAccess("readonly")
if mibBuilder.loadTexts: lSSdlcLlc2OperPriority.setStatus('mandatory')
if mibBuilder.loadTexts: lSSdlcLlc2OperPriority.setDescription('This object defines priority for traffic within the node')
# .13: whether XID suppression is in effect (no(1)/yes(2)).
lSSdlcLlc2OperSuppressXID = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 5, 6, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: lSSdlcLlc2OperSuppressXID.setStatus('mandatory')
if mibBuilder.loadTexts: lSSdlcLlc2OperSuppressXID.setDescription('This object prevents (if yes) the returning of an XID in response to a null XID. This might be desirable if the remote TPAD is not running PU 2.1. It should be noted that even if an XID is sent, an SNRM (Set Normal Response Mode) immediately follows, so the connection will be established.')
# --- portT1Group / portT1AdminTable: read-write configuration for T1 ports
# (full T1 or DS0A 24-channelized). Indexed by (nlIfRlp, nlIfPort).
# Many setDescription('') calls below are empty because the source MIB
# provides no DESCRIPTION text for those objects -- inherited, not an
# omission in this file. Auto-generated pysnmp code.
portT1Group = MibIdentifier((1, 3, 6, 1, 4, 1, 173, 7, 3, 7))
portT1AdminTable = MibTable((1, 3, 6, 1, 4, 1, 173, 7, 3, 7, 1), )
if mibBuilder.loadTexts: portT1AdminTable.setStatus('mandatory')
if mibBuilder.loadTexts: portT1AdminTable.setDescription('The configuration information pertaining to a T1 port. T1 ports can operate at full T1 bandwidth or DS0A channelized which provides 24 separate channels.')
portT1AdminEntry = MibTableRow((1, 3, 6, 1, 4, 1, 173, 7, 3, 7, 1, 1), ).setIndexNames((0, "NETLINK-SPECIFIC-MIB", "nlIfRlp"), (0, "NETLINK-SPECIFIC-MIB", "nlIfPort"))
if mibBuilder.loadTexts: portT1AdminEntry.setStatus('mandatory')
if mibBuilder.loadTexts: portT1AdminEntry.setDescription('A T1 port entry containing objects relating to T1 ports.')
# .1: port blocked flag, no(1)/yes(2).
portT1AdminBlockedPortFlag = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 7, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portT1AdminBlockedPortFlag.setStatus('mandatory')
if mibBuilder.loadTexts: portT1AdminBlockedPortFlag.setDescription('')
# .2: whether this end generates the line clock, no(1)/yes(2).
portT1AdminGenerateClock = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 7, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portT1AdminGenerateClock.setStatus('mandatory')
if mibBuilder.loadTexts: portT1AdminGenerateClock.setDescription('')
# .3: framing mode, fullT1(1)/ds0aT1(2).
portT1AdminFramingMode = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 7, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("fullT1", 1), ("ds0aT1", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portT1AdminFramingMode.setStatus('mandatory')
if mibBuilder.loadTexts: portT1AdminFramingMode.setDescription('Specifies how this T1 port is to function. ds0AT1 specifies a 24 channelized T1.')
# .4: frame model, d4(1)/esf(2).
portT1AdminFrameModelSelect = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 7, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("d4", 1), ("esf", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portT1AdminFrameModelSelect.setStatus('mandatory')
if mibBuilder.loadTexts: portT1AdminFrameModelSelect.setDescription('')
# .5: line encoding, b8zs(1)/ami(2).
portT1AdminLineEncoding = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 7, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("b8zs", 1), ("ami", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portT1AdminLineEncoding.setStatus('mandatory')
if mibBuilder.loadTexts: portT1AdminLineEncoding.setDescription('')
# .6: line build-out, enum of cable lengths / attenuation levels (1..8).
portT1AdminLineBuildOut = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 7, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8))).clone(namedValues=NamedValues(("zerodb", 1), ("x133-266ft", 2), ("x266-399ft", 3), ("x399-533ft", 4), ("x533-655ft", 5), ("minus7p5db", 6), ("minus15db", 7), ("minus22p5db", 8)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portT1AdminLineBuildOut.setStatus('mandatory')
if mibBuilder.loadTexts: portT1AdminLineBuildOut.setDescription('')
# .7: RowStatus for row creation/deletion.
portT1AdminRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 7, 1, 1, 7), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portT1AdminRowStatus.setStatus('mandatory')
if mibBuilder.loadTexts: portT1AdminRowStatus.setDescription('')
# .8: protocol framing, sync(1)/bisync(2).
portT1AdminProtocolFraming = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 7, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("sync", 1), ("bisync", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portT1AdminProtocolFraming.setStatus('mandatory')
if mibBuilder.loadTexts: portT1AdminProtocolFraming.setDescription('')
# .9: NRZI line coding enable, no(1)/yes(2).
portT1AdminNRZI = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 7, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portT1AdminNRZI.setStatus('mandatory')
if mibBuilder.loadTexts: portT1AdminNRZI.setDescription('')
# --- portT1OperTable: read-only operational mirror of portT1AdminTable
# (same columns minus RowStatus, so oper column numbers .7/.8 shift down by
# one relative to the admin table). Empty setDescription('') strings are
# inherited from the source MIB. Auto-generated pysnmp code.
portT1OperTable = MibTable((1, 3, 6, 1, 4, 1, 173, 7, 3, 7, 2), )
if mibBuilder.loadTexts: portT1OperTable.setStatus('mandatory')
if mibBuilder.loadTexts: portT1OperTable.setDescription('The configuration information pertaining to a T1 port. T1 ports can operate at full T1 bandwidth or DS0A channelized which provides 24 separate channels.')
portT1OperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 173, 7, 3, 7, 2, 1), ).setIndexNames((0, "NETLINK-SPECIFIC-MIB", "nlIfRlp"), (0, "NETLINK-SPECIFIC-MIB", "nlIfPort"))
if mibBuilder.loadTexts: portT1OperEntry.setStatus('mandatory')
if mibBuilder.loadTexts: portT1OperEntry.setDescription('A T1 port entry containing objects relating to T1 ports.')
portT1OperBlockedPortFlag = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 7, 2, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portT1OperBlockedPortFlag.setStatus('mandatory')
if mibBuilder.loadTexts: portT1OperBlockedPortFlag.setDescription('')
portT1OperGenerateClock = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 7, 2, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portT1OperGenerateClock.setStatus('mandatory')
if mibBuilder.loadTexts: portT1OperGenerateClock.setDescription('')
portT1OperFramingMode = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 7, 2, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("fullT1", 1), ("ds0aT1", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portT1OperFramingMode.setStatus('mandatory')
if mibBuilder.loadTexts: portT1OperFramingMode.setDescription('Specifies how this T1 port is to function. ds0AT1 specifies a 24 channelized T1.')
portT1OperFrameModelSelect = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 7, 2, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("d4", 1), ("esf", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portT1OperFrameModelSelect.setStatus('mandatory')
if mibBuilder.loadTexts: portT1OperFrameModelSelect.setDescription('')
portT1OperLineEncoding = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 7, 2, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("b8zs", 1), ("ami", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portT1OperLineEncoding.setStatus('mandatory')
if mibBuilder.loadTexts: portT1OperLineEncoding.setDescription('')
portT1OperLineBuildOut = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 7, 2, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8))).clone(namedValues=NamedValues(("zerodb", 1), ("x133-266ft", 2), ("x266-399ft", 3), ("x399-533ft", 4), ("x533-655ft", 5), ("minus7p5db", 6), ("minus15db", 7), ("minus22p5db", 8)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portT1OperLineBuildOut.setStatus('mandatory')
if mibBuilder.loadTexts: portT1OperLineBuildOut.setDescription('')
portT1OperProtocolFraming = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 7, 2, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("sync", 1), ("bisync", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portT1OperProtocolFraming.setStatus('mandatory')
if mibBuilder.loadTexts: portT1OperProtocolFraming.setDescription('')
portT1OperNRZI = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 7, 2, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portT1OperNRZI.setStatus('mandatory')
if mibBuilder.loadTexts: portT1OperNRZI.setDescription('')
# --- portVoiceGroup / portVoiceAdminTable: read-write configuration for
# Voice ports, indexed by its own (portVoiceAdminRlpIndex,
# portVoiceAdminPortIndex) columns rather than the nlIf* indexes used above.
# Table continues beyond this section of the file. Auto-generated pysnmp code.
portVoiceGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 173, 7, 3, 8))
portVoiceAdminTable = MibTable((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 1), )
if mibBuilder.loadTexts: portVoiceAdminTable.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceAdminTable.setDescription('A list of Voice ports. The number of entries will be the number of Voice ports on the node.')
portVoiceAdminEntry = MibTableRow((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 1, 1), ).setIndexNames((0, "NETLINK-SPECIFIC-MIB", "portVoiceAdminRlpIndex"), (0, "NETLINK-SPECIFIC-MIB", "portVoiceAdminPortIndex"))
if mibBuilder.loadTexts: portVoiceAdminEntry.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceAdminEntry.setDescription('A Voice Port entry containing objects relating to the port that are configurable by the user.')
# .1/.2: index columns (RLP and port number), read-only as SMIv1 indexes.
portVoiceAdminRlpIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: portVoiceAdminRlpIndex.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceAdminRlpIndex.setDescription('The RLP number of the Voice port.')
portVoiceAdminPortIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 1, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: portVoiceAdminPortIndex.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceAdminPortIndex.setDescription('The Port number of the Voice port.')
# .3: enable/disable at node IPL, default 'disabled'.
portVoiceAdminBlockedFlag = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("disabled", 1), ("enabled", 2))).clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portVoiceAdminBlockedFlag.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceAdminBlockedFlag.setDescription('The flag which causes the port to be enabled or disabled at node IPL. The port will remain in that state until this flag is changed and the node is rebooted, or until an on- line enable or disable is performed.')
# .4: voice speed enum (4800/8000/32000/64000 bps), default 'bps-8000'.
portVoiceAdminSpeed = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("bps-4800", 1), ("bps-8000", 2), ("bps-32000", 3), ("bps-64000", 4))).clone('bps-8000')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portVoiceAdminSpeed.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceAdminSpeed.setDescription('The speed of the port which is transmitted as part of a Call Setup frame. A Voice call will be established if there is enough bandwidth at each end to provide the minimum configured rate.')
# .5: DTMF regeneration control (0 pass-through, 1 regenerate, 2-255 seconds),
# default 1.
portVoiceAdminDTMF = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 1, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255)).clone(1)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portVoiceAdminDTMF.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceAdminDTMF.setDescription('The flag to enable, disable, or specify a time span over which DTMF (Dual Tone Multi Frequency) tones will be regenerated. 0 = pass tones transparently to the remote device. 1 = detect incoming tones and regenerate as received. 2-255 = regenerate tones for the specified seconds.')
# .6: telephony interface type enum (values 5 and 7-9 intentionally absent),
# default 'loop-start'; a reboot is required per the description.
portVoiceAdminInterface = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 6, 10, 11))).clone(namedValues=NamedValues(("em-4w", 1), ("em-2w", 2), ("loop-start", 3), ("ac15-a", 4), ("ac15-b", 6), ("em-4w-te", 10), ("em-2w-te", 11))).clone('loop-start')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portVoiceAdminInterface.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceAdminInterface.setDescription('The type of interface (in conjunction with the Telephony Interface Type) that will be used by the Voice channel. This requires rebooting for the change to take effect. Telephony Type Interface Type -------------- -------------- OPX (FXO) Loop Start SLT (FXS) Loop Start E&M 4W E&M, 4W E&M TE, 2W E&M, 2W E&M TE AC15 AC15A, AC15C')
# .7: E&M 'E'-follows-'M' delay (0..255), default 3.
portVoiceAdminTETimer = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 1, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255)).clone(3)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portVoiceAdminTETimer.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceAdminTETimer.setDescription("The delay at which the E&M 'E' lead follows the 'M' lead.")
# .8/.9: input/output voice levels in dB (-22..7); output defaults to -4,
# input has no declared default in the MIB.
portVoiceAdminLevelIn = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 1, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-22, 7))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portVoiceAdminLevelIn.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceAdminLevelIn.setDescription('The local voice level into the voice port. This adjusts the sensitivity of the local voice channel to the signal from the attached device. The purpose of this is to match the voice channel dB level to the input signal. A more negative setting produces a higher input gain. If the level is reduced, the voice channel will be more sensitive to the input. This will also cause the voice output at the remote device to sound louder.')
portVoiceAdminLevelOut = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 1, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-22, 7)).clone(-4)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portVoiceAdminLevelOut.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceAdminLevelOut.setDescription('The local voice level out of the voice port. A more positive setting produces a higher volume.')
# .10: call connect/clear response timeout (1..30), default 5.
portVoiceAdminCallTimer = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 1, 1, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 30)).clone(5)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portVoiceAdminCallTimer.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceAdminCallTimer.setDescription('The amount of time the node will wait for a response to a Call Connect or Call Clear request. When the node issues a request, it will set a timer to the value configured here. If that timer expires with no response from the destination, the node will generate a busy tone.')
# .11: hunt group membership, none(1)/a(2)/b(3), default 'none'.
portVoiceAdminHuntGroup = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 1, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("none", 1), ("a", 2), ("b", 3))).clone('none')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portVoiceAdminHuntGroup.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceAdminHuntGroup.setDescription('The hunt group that includes the voice channel.')
# .12: single-octet long-dial prefix character, default 0x2A ('*').
portVoiceAdminLongDialPrefix = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 1, 1, 12), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue="2A")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portVoiceAdminLongDialPrefix.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceAdminLongDialPrefix.setDescription('The character that will be required preceeding calls using the long call format. When the user at an attached device dials a long form number, it must be preceeded by the prefix character specified here.')
# .13: SLT fax-tone wait timeout (1..99), default 30.
portVoiceAdminSLTTimeout = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 1, 1, 13), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 99)).clone(30)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portVoiceAdminSLTTimeout.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceAdminSLTTimeout.setDescription('The time the local port will wait before an actual fax transmission begins. If there is no fax tone when this timer expires, the connection will be terminated.')
# .14: auto busy-out of channels while the composite link is down,
# disabled(1)/enabled(2), default 'enabled'.
portVoiceAdminLinkDownBusy = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 1, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("disabled", 1), ("enabled", 2))).clone('enabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portVoiceAdminLinkDownBusy.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceAdminLinkDownBusy.setDescription('This flag enables or disables automatic busy-out of channels when the link is down. When enabled, the system will automatically busy-out channels when the composite link is down. When disabled, channels remain on-hook when the link is down. If your application is connected to a PBX that can route calls to another alternate source based on an all busy condition, enable this so the PBX will recognize this condition and reroute the calls while the composite link is down.')
portVoiceAdminFaxSupported = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 1, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('yes')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portVoiceAdminFaxSupported.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceAdminFaxSupported.setDescription('The flag to indicate whether or not faxes will be transmitted on the port. If no, all calls will be treated as voice. If yes, both voice and fax will be transmitted.')
portVoiceAdminTelephonyType = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 1, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("opx", 1), ("slt", 2), ("em", 3), ("ac15", 4))).clone('opx')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portVoiceAdminTelephonyType.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceAdminTelephonyType.setDescription('The type of interface (in conjunction with the Interface Type) that will be used by the Voice channel. This requires rebooting for the change to take effect. Telephony Type Interface Type -------------- -------------- OPX (FXO) Loop Start SLT (FXS) Loop Start E&M 4W E&M, 4W E&M TE, 2W E&M, 2W E&M TE AC15 AC15A, AC15C')
portVoiceAdminJitter = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 1, 1, 17), Integer32().subtype(subtypeSpec=ValueRangeConstraint(10, 255)).clone(80)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portVoiceAdminJitter.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceAdminJitter.setDescription('The amount of jitter delay.')
portVoiceAdminSampleDelay = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 1, 1, 18), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255)).clone(10)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portVoiceAdminSampleDelay.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceAdminSampleDelay.setDescription('The amount of sample delay.')
portVoiceAdminDialTimer = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 1, 1, 19), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 10)).clone(5)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portVoiceAdminDialTimer.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceAdminDialTimer.setDescription('The delay the node will use to determine when variable length dialing is complete. (Variable length dialing allows the user to place a call without entering the configured number of digits for the speed dial number.)')
portVoiceAdminAutoDial = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 1, 1, 20), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("disabled", 1), ("enabled", 2))).clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portVoiceAdminAutoDial.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceAdminAutoDial.setDescription('The flag to enable or disable the auto dial feature. Note that enabling this feature will disable the variable length dialing feature for this channel.')
portVoiceAdminSuppression = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 1, 1, 21), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("very-low", 1), ("low", 2), ("medium", 3), ("high", 4), ("very-high", 5))).clone('low')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portVoiceAdminSuppression.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceAdminSuppression.setDescription('The silence suppression level. A larger silence suppression level allows more data channels to operate simultaneously with the voice channels.')
portVoiceAdminAutoDialNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 1, 1, 22), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 4))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portVoiceAdminAutoDialNumber.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceAdminAutoDialNumber.setDescription('The number to which the node will attempt to connect when the handset attached to this port is taken off-hook. This is applicable only when the Auto Dial feature is enabled.')
portVoiceAdminAutoPoll = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 1, 1, 23), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("disabled", 1), ("enabled", 2))).clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portVoiceAdminAutoPoll.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceAdminAutoPoll.setDescription('The flag to enable or disable auto polling. This is applicable only when the Auto Dial feature is enabled. When enabled, the node periodically polls the destination. If the poll is not acknowledged, the node will busy-out the channel. This can be useful in situations where the destination is an emergency number that must always be available. If the destination does not answer the automatic poll, the busy-out condition will notify an attached PBX that the emergency call should be routed to another call routing device.')
portVoiceAdminAutoPollTimer = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 1, 1, 24), Integer32().subtype(subtypeSpec=ValueRangeConstraint(5, 30)).clone(10)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portVoiceAdminAutoPollTimer.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceAdminAutoPollTimer.setDescription('The frequency with which an Auto Poll will be sent. This is applicable only when auto poll is enabled.')
portVoiceAdminExtDigitsSource = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 1, 1, 25), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("map", 1), ("user", 2))).clone('map')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portVoiceAdminExtDigitsSource.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceAdminExtDigitsSource.setDescription('The source of the extended digits when the user of attached equipment dials a speed dial number. This is applicable only when the number of extended dial digits is greater than zero. When map is specified, extended digits are taken from the extended digits field in the map table. When user is specified, extended digits are taken from the user of attached equipment.')
portVoiceAdminNumDigitsDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 1, 1, 26), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portVoiceAdminNumDigitsDelete.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceAdminNumDigitsDelete.setDescription('The number of leading dial digits that will be deleted before a dial string is forwarded. This is primarily used to delete leading dial digits that may be inserted by an attached PBX.')
portVoiceAdminForwardDelay = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 1, 1, 27), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 40)).clone(2)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portVoiceAdminForwardDelay.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceAdminForwardDelay.setDescription('The length of a pause that can be inserted in an extended dial string that is being forwarded. This is applicable only when forwarded output digits is all or extended. When a comma is encountered in an extended digits string, the node will pause for the length of time specified here before additional extended digits are forwarded.')
portVoiceAdminForwardedType = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 1, 1, 28), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("dtmf", 1), ("pulse", 2))).clone('dtmf')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portVoiceAdminForwardedType.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceAdminForwardedType.setDescription('The method how the dial digits will be forwarded. This is applicable only when forwarded output digits is all or extended.')
portVoiceAdminForwardedDigits = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 1, 1, 29), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("none", 1), ("all", 2), ("extended", 3))).clone('none')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portVoiceAdminForwardedDigits.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceAdminForwardedDigits.setDescription('The flag indicating which dial digits, if any, to forward. If none, dial digits are not forwarded to the destination device when a call is initiated on this channel. If all, the dialed speed dial number and associated extended digits are forwarded to the destination device. If extended, only the extended digit string is forwarded.')
portVoiceAdminMakeRatio = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 1, 1, 30), Integer32().subtype(subtypeSpec=ValueRangeConstraint(20, 80)).clone(34)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portVoiceAdminMakeRatio.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceAdminMakeRatio.setDescription('The make ratio of each digit pulse that is forwarded. This is applicable only when forwarded output digits is all or extended and forwarded digit type is pulse.')
portVoiceAdminBreakRatio = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 1, 1, 31), Integer32().subtype(subtypeSpec=ValueRangeConstraint(20, 80)).clone(66)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portVoiceAdminBreakRatio.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceAdminBreakRatio.setDescription('The break ratio of each digit pulse that is forwarded. This is applicable only when forwarded output digits is all or extended and forwarded digit type is pulse.')
portVoiceAdminDTMFOnDuration = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 1, 1, 32), Integer32().subtype(subtypeSpec=ValueRangeConstraint(30, 1000)).clone(100)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portVoiceAdminDTMFOnDuration.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceAdminDTMFOnDuration.setDescription('The length of a tone that will be used to produce a single DTMF digit. This is applicable only when forwarded output digits is all or extended and forwarded digit type is DTMF.')
portVoiceAdminDTMFOffDuration = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 1, 1, 33), Integer32().subtype(subtypeSpec=ValueRangeConstraint(30, 1000)).clone(100)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portVoiceAdminDTMFOffDuration.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceAdminDTMFOffDuration.setDescription('The length of silence between DTMF tones in a dial digit string. This is applicable only when forwarded output digits is all or extended and forwarded digit type is DTMF.')
portVoiceAdminToneType = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 1, 1, 34), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("dtmf", 1), ("mf", 2))).clone('dtmf')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portVoiceAdminToneType.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceAdminToneType.setDescription('The tone type to detect/regenerate for forwarding DTMF/MF.')
portVoiceAdminRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 1, 1, 35), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portVoiceAdminRowStatus.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceAdminRowStatus.setDescription("This variable is used to manage the creation and deletion of conceptual rows in the portVoiceAdminTable and follows the SNMPv2 RowStatus conventions by supporting the following values: - `active', which indicates that the conceptual row is available for use by the managed device, and which is supplied by a management station wishing to exercise an on-line update of the existing conceptual row. For a management protocol set operation, a genErr response is returned when the row does not exist. - `createAndGo', which is supplied by a management station wishing to create a new instance of a conceptual row and to have its status automatically set to active, making it available for use by the managed device. For a management protocol set operation, a genErr response is returned when the row already exists. - `destroy', which is supplied by a management station wishing to delete all of the instances associated with an existing conceptual row. Note that all of the above values may be specified in a management protocol set operation, and only the 'active' value will be returned in response to a management protocol retrieval operation. For a management protocol set operation, if other variable bindings are included in the same PDU, then a genErr response is returned.")
# --- portVoiceOperTable --------------------------------------------------
# Table of voice ports (OID 1.3.6.1.4.1.173.7.3.8.2); one entry per voice
# port on the node.  Rows are indexed by the (RLP, port) pair declared via
# setIndexNames below.
portVoiceOperTable = MibTable((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 2), )
if mibBuilder.loadTexts: portVoiceOperTable.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceOperTable.setDescription('A list of Voice ports. The number of entries will be the number of Voice ports on the node.')
# Conceptual row definition; INDEX = (portVoiceOperRlpIndex, portVoiceOperPortIndex).
portVoiceOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 2, 1), ).setIndexNames((0, "NETLINK-SPECIFIC-MIB", "portVoiceOperRlpIndex"), (0, "NETLINK-SPECIFIC-MIB", "portVoiceOperPortIndex"))
if mibBuilder.loadTexts: portVoiceOperEntry.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceOperEntry.setDescription('A Voice Port entry containing objects relating to the port that are configurable by the user.')
portVoiceOperRlpIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 2, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: portVoiceOperRlpIndex.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceOperRlpIndex.setDescription('The RLP number of the Voice port.')
portVoiceOperPortIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 2, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: portVoiceOperPortIndex.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceOperPortIndex.setDescription('The Port number of the Voice port.')
portVoiceOperBlockedFlag = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 2, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("disabled", 1), ("enabled", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portVoiceOperBlockedFlag.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceOperBlockedFlag.setDescription('The flag which causes the port to be enabled or disabled at node IPL. The port will remain in that state until this flag is changed and the node is rebooted, or until an on- line enable or disable is performed.')
portVoiceOperSpeed = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 2, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("bps-4800", 1), ("bps-8000", 2), ("bps-32000", 3), ("bps-64000", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portVoiceOperSpeed.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceOperSpeed.setDescription('The speed of the port which is transmitted as part of a Call Setup frame. A Voice call will be established if there is enough bandwidth at each end to provide the minimum configured rate.')
portVoiceOperDTMF = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 2, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portVoiceOperDTMF.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceOperDTMF.setDescription('The flag to enable, disable, or specify a time span over which DTMF (Dual Tone Multi Frequency) tones will be regenerated. 0 = pass tones transparently to the remote device. 1 = detect incoming tones and regenerate as received. 2-255 = regenerate tones for the specified seconds.')
portVoiceOperInterface = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 2, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 6, 10, 11))).clone(namedValues=NamedValues(("em-4w", 1), ("em-2w", 2), ("loop-start", 3), ("ac15-a", 4), ("ac15-b", 6), ("em-4w-te", 10), ("em-2w-te", 11)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portVoiceOperInterface.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceOperInterface.setDescription('The type of interface (in conjunction with the Telephony Interface Type) that will be used by the Voice channel. This requires rebooting for the change to take effect. Telephony Type Interface Type -------------- -------------- OPX (FXO) Loop Start SLT (FXS) Loop Start E&M 4W E&M, 4W E&M TE, 2W E&M, 2W E&M TE AC15 AC15A, AC15C')
portVoiceOperTETimer = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 2, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portVoiceOperTETimer.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceOperTETimer.setDescription("The delay at which the E&M 'E' lead follows the 'M' lead.")
portVoiceOperLevelIn = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 2, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-22, 7))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portVoiceOperLevelIn.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceOperLevelIn.setDescription('The local voice level into the voice port. This adjusts the sensitivity of the local voice channel to the signal from the attached device. The purpose of this is to match the voice channel dB level to the input signal. A more negative setting produces a higher input gain. If the level is reduced, the voice channel will be more sensitive to the input. This will also cause the voice output at the remote device to sound louder.')
portVoiceOperLevelOut = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 2, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-22, 7))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portVoiceOperLevelOut.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceOperLevelOut.setDescription('The local voice level out of the voice port. A more positive setting produces a higher volume.')
portVoiceOperCallTimer = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 2, 1, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 30))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portVoiceOperCallTimer.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceOperCallTimer.setDescription('The amount of time the node will wait for a response to a Call Connect or Call Clear request. When the node issues a request, it will set a timer to the value configured here. If that timer expires with no response from the destination, the node will generate a busy tone.')
portVoiceOperHuntGroup = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 2, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("none", 1), ("a", 2), ("b", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portVoiceOperHuntGroup.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceOperHuntGroup.setDescription('The hunt group that includes the voice channel.')
portVoiceOperLongDialPrefix = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 2, 1, 12), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: portVoiceOperLongDialPrefix.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceOperLongDialPrefix.setDescription('The character that will be required preceeding calls using the long call format. When the user at an attached device dials a long form number, it must be preceeded by the prefix character specified here.')
portVoiceOperSLTTimeout = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 2, 1, 13), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 99))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portVoiceOperSLTTimeout.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceOperSLTTimeout.setDescription('The time the local port will wait before an actual fax transmission begins. If there is no fax tone when this timer expires, the connection will be terminated.')
portVoiceOperLinkDownBusy = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 2, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("disabled", 1), ("enabled", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portVoiceOperLinkDownBusy.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceOperLinkDownBusy.setDescription('This flag enables or disables automatic busy-out of channels when the link is down. When enabled, the system will automatically busy-out channels when the composite link is down. When disabled, channels remain on-hook when the link is down. If your application is connected to a PBX that can route calls to another alternate source based on an all busy condition, enable this so the PBX will recognize this condition and reroute the calls while the composite link is down.')
portVoiceOperFaxSupported = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 2, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portVoiceOperFaxSupported.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceOperFaxSupported.setDescription('The flag to indicate whether or not faxes will be transmitted on the port. If no, all calls will be treated as voice. If yes, both voice and fax will be transmitted.')
portVoiceOperTelephonyType = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 2, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("opx", 1), ("slt", 2), ("em", 3), ("ac15", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portVoiceOperTelephonyType.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceOperTelephonyType.setDescription('The type of interface (in conjunction with the Interface Type) that will be used by the Voice channel. This requires rebooting for the change to take effect. Telephony Type Interface Type -------------- -------------- OPX (FXO) Loop Start SLT (FXS) Loop Start E&M 4W E&M, 4W E&M TE, 2W E&M, 2W E&M TE AC15 AC15A, AC15C')
portVoiceOperJitter = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 2, 1, 17), Integer32().subtype(subtypeSpec=ValueRangeConstraint(10, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portVoiceOperJitter.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceOperJitter.setDescription('The amount of jitter delay.')
portVoiceOperSampleDelay = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 2, 1, 18), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portVoiceOperSampleDelay.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceOperSampleDelay.setDescription('The amount of sample delay.')
portVoiceOperDialTimer = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 2, 1, 19), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 10))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portVoiceOperDialTimer.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceOperDialTimer.setDescription('The delay the node will use to determine when variable length dialing is complete. (Variable length dialing allows the user to place a call without entering the configured number of digits for the speed dial number.)')
portVoiceOperAutoDial = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 2, 1, 20), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("disabled", 1), ("enabled", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portVoiceOperAutoDial.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceOperAutoDial.setDescription('The flag to enable or disable the auto dial feature. Note that enabling this feature will disable the variable length dialing feature for this channel.')
portVoiceOperSuppression = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 2, 1, 21), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("very-low", 1), ("low", 2), ("medium", 3), ("high", 4), ("very-high", 5)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portVoiceOperSuppression.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceOperSuppression.setDescription('The silence suppression level. A larger silence suppression level allows more data channels to operate simultaneously with the voice channels.')
portVoiceOperAutoDialNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 2, 1, 22), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 4))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portVoiceOperAutoDialNumber.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceOperAutoDialNumber.setDescription('The number to which the node will attempt to connect when the handset attached to this port is taken off-hook. This is applicable only when the Auto Dial feature is enabled.')
portVoiceOperAutoPoll = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 2, 1, 23), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("disabled", 1), ("enabled", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portVoiceOperAutoPoll.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceOperAutoPoll.setDescription('The flag to enable or disable auto polling. This is applicable only when the Auto Dial feature is enabled. When enabled, the node periodically polls the destination. If the poll is not acknowledged, the node will busy-out the channel. This can be useful in situations where the destination is an emergency number that must always be available. If the destination does not answer the automatic poll, the busy-out condition will notify an attached PBX that the emergency call should be routed to another call routing device.')
portVoiceOperAutoPollTimer = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 2, 1, 24), Integer32().subtype(subtypeSpec=ValueRangeConstraint(5, 30))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portVoiceOperAutoPollTimer.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceOperAutoPollTimer.setDescription('The frequency with which an Auto Poll will be sent. This is applicable only when auto poll is enabled.')
portVoiceOperExtDigitsSource = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 2, 1, 25), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("map", 1), ("user", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portVoiceOperExtDigitsSource.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceOperExtDigitsSource.setDescription('The source of the extended digits when the user of attached equipment dials a speed dial number. This is applicable only when the number of extended dial digits is greater than zero. When map is specified, extended digits are taken from the extended digits field in the map table. When user is specified, extended digits are taken from the user of attached equipment.')
portVoiceOperNumDigitsDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 2, 1, 26), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portVoiceOperNumDigitsDelete.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceOperNumDigitsDelete.setDescription('The number of leading dial digits that will be deleted before a dial string is forwarded. This is primarily used to delete leading dial digits that may be inserted by an attached PBX.')
# --- portVoiceOperTable columns (continued): operational (read-only) voice-port
# dial-digit forwarding settings. pysmi-generated from NETLINK-SPECIFIC-MIB;
# DESCRIPTION strings mirror the source MIB verbatim (including original typos)
# and must not be edited here.
portVoiceOperForwardDelay = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 2, 1, 27), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 40))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portVoiceOperForwardDelay.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceOperForwardDelay.setDescription('The length of a pause that can be inserted in an extended dial string that is being forwarded. This is applicable only when forwarded output digits is all or extended. When a comma is encountered in an extended digits string, the node will pause for the length of time specified here before additional extended digits are forwarded.')
# How digits are regenerated toward the destination: DTMF tones or pulse dialing.
portVoiceOperForwardedType = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 2, 1, 28), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("dtmf", 1), ("pulse", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portVoiceOperForwardedType.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceOperForwardedType.setDescription('The method how the dial digits will be forwarded. This is applicable only when forwarded output digits is all or extended.')
portVoiceOperForwardedDigits = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 2, 1, 29), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("none", 1), ("all", 2), ("extended", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portVoiceOperForwardedDigits.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceOperForwardedDigits.setDescription('The flag indicating which dial digits, if any, to forward. If none, dial digits are not forwarded to the destination device when a call is initiated on this channel. If all, the dialed speed dial number and associated extended digits are forwarded to the destination device. If extended, only the extended digit string is forwarded.')
# Pulse-dialing timing (percent make/break ratios, 20-80); used only when
# forwarded digit type is pulse.
portVoiceOperMakeRatio = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 2, 1, 30), Integer32().subtype(subtypeSpec=ValueRangeConstraint(20, 80))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portVoiceOperMakeRatio.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceOperMakeRatio.setDescription('The make ratio of each digit pulse that is forwarded. This is applicable only when forwarded output digits is all or extended and forwarded digit type is pulse.')
portVoiceOperBreakRatio = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 2, 1, 31), Integer32().subtype(subtypeSpec=ValueRangeConstraint(20, 80))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portVoiceOperBreakRatio.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceOperBreakRatio.setDescription('The break ratio of each digit pulse that is forwarded. This is applicable only when forwarded output digits is all or extended and forwarded digit type is pulse.')
# DTMF timing (tone on/off durations, 30-1000); used only when forwarded digit
# type is DTMF.
portVoiceOperDTMFOnDuration = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 2, 1, 32), Integer32().subtype(subtypeSpec=ValueRangeConstraint(30, 1000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portVoiceOperDTMFOnDuration.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceOperDTMFOnDuration.setDescription('The length of a tone that will be used to produce a single DTMF digit. This is applicable only when forwarded output digits is all or extended and forwarded digit type is DTMF.')
portVoiceOperDTMFOffDuration = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 2, 1, 33), Integer32().subtype(subtypeSpec=ValueRangeConstraint(30, 1000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portVoiceOperDTMFOffDuration.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceOperDTMFOffDuration.setDescription('The length of silence between DTMF tones in a dial digit string. This is applicable only when forwarded output digits is all or extended and forwarded digit type is DTMF.')
portVoiceOperToneType = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 3, 8, 2, 1, 34), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("dtmf", 1), ("mf", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portVoiceOperToneType.setStatus('mandatory')
if mibBuilder.loadTexts: portVoiceOperToneType.setDescription('The tone type to detect/regenerate for forwarding DTMF/MF.')
# --- nlInterfaces subtree (enterprise 173.7.4): nlIfTable maps each RLP/port
# pair to its MIB-II ifIndex plus Netlink-specific port details. All columns
# read-only; table is indexed by (nlIfRlp, nlIfPort). pysmi-generated code --
# do not edit OIDs or DESCRIPTION strings.
nlInterfaces = MibIdentifier((1, 3, 6, 1, 4, 1, 173, 7, 4))
nlIfTable = MibTable((1, 3, 6, 1, 4, 1, 173, 7, 4, 1), )
if mibBuilder.loadTexts: nlIfTable.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfTable.setDescription('A Table to describe each interface on a node. This table is indexed by RLP and Port number and can be used to identify the corresponding MIB-II ifIndex of a Port as well as additional port information maintained by Netlink.')
nlIfEntry = MibTableRow((1, 3, 6, 1, 4, 1, 173, 7, 4, 1, 1), ).setIndexNames((0, "NETLINK-SPECIFIC-MIB", "nlIfRlp"), (0, "NETLINK-SPECIFIC-MIB", "nlIfPort"))
if mibBuilder.loadTexts: nlIfEntry.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfEntry.setDescription('An Interface entry containing objects relating to interfaces.')
# Index columns: RLP 1-8 are physical cards; 250/251/252 denote IP/IPX/LLC2.
nlIfRlp = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nlIfRlp.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfRlp.setDescription('The RLP number on the node. RLP numbers 1-8 are Physical RLP cards, and RLP number 250, 251 and 252 define IP, IPX and LLC2 interfaces respectively.')
nlIfPort = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 1, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nlIfPort.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfPort.setDescription('The Port number on the RLP. Port numbers 1-8 are known to be physical ports, and 9-64 are logical ports. If the RLP number identifies an IP, IPX or LLC2 interface, then the port number can be 1-257 (IP), 1-64 (IPX), or 1-96 (LLC2).')
# nlIfType: IANA ifType values 1-54 plus Netlink-private extensions 200-208.
nlIfType = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 200, 201, 202, 203, 204, 205, 206, 207, 208))).clone(namedValues=NamedValues(("other", 1), ("regular1822", 2), ("hdh1822", 3), ("ddnX25", 4), ("rfc877x25", 5), ("ethernetCsmacd", 6), ("iso88023Csmacd", 7), ("iso88024TokenBus", 8), ("iso88025TokenRing", 9), ("iso88026Man", 10), ("starLan", 11), ("proteon10Mbit", 12), ("proteon80Mbit", 13), ("hyperchannel", 14), ("fddi", 15), ("lapb", 16), ("sdlc", 17), ("ds1", 18), ("e1", 19), ("basicISDN", 20), ("primaryISDN", 21), ("propPointToPointSerial", 22), ("ppp", 23), ("softwareLoopback", 24), ("eon", 25), ("ethernet3Mbit", 26), ("nsip", 27), ("slip", 28), ("ultra", 29), ("ds3", 30), ("sip", 31), ("frameRelay", 32), ("rs232", 33), ("para", 34), ("arcnet", 35), ("arcnetPlus", 36), ("atm", 37), ("miox25", 38), ("sonet", 39), ("x25ple", 40), ("iso88022llc", 41), ("localTalk", 42), ("smdsDxi", 43), ("frameRelayService", 44), ("v35", 45), ("hssi", 46), ("hippi", 47), ("modem", 48), ("aal5", 49), ("sonetPath", 50), ("sonetVT", 51), ("smdsIcip", 52), ("propVirtual", 53), ("propMultiplexor", 54), ("trunk", 200), ("async", 201), ("bsci", 202), ("logicalPort", 203), ("t1", 204), ("ip", 205), ("ipx", 206), ("llc2", 207), ("voice", 208)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: nlIfType.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfType.setDescription('The type of interface')
# Cross-references into MIB-II ifTable and the per-type enterprise table
# (nlIfTableOid points at the table; nlIfTableIndex is the row index there).
nlIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 1, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nlIfIndex.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfIndex.setDescription('The associated index into the MIB-II ifTable for this port')
nlIfTableIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 1, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nlIfTableIndex.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfTableIndex.setDescription('The associated index into an enterprise table used to describe additional information for this port. See nlIfTableOid as the pointer to the actual table.')
nlIfTableOid = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 1, 1, 6), ObjectIdentifier()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nlIfTableOid.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfTableOid.setDescription('The OID of the table Entry that contains additional information about this port. The OID will point to a specific table depending on the type of port.')
nlIfConnectorType = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 13))).clone(namedValues=NamedValues(("none", 2), ("rs232", 3), ("v25bis-dial", 4), ("v35", 5), ("rs449", 6), ("rs530", 7), ("x21", 8), ("csudsu", 9), ("t1", 10), ("e1", 11), ("voice", 13)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: nlIfConnectorType.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfConnectorType.setDescription('The physical connector type used for this port. Valid for ports 1-8 only.')
nlIfPortStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22))).clone(namedValues=NamedValues(("linkUp", 1), ("restarting", 2), ("operational", 3), ("disabled", 4), ("disconnect", 5), ("configured", 6), ("dialReady", 7), ("quiesced", 8), ("failed", 9), ("hardwareFault", 10), ("other", 11), ("ipl", 12), ("na", 13), ("remoteLoopback", 14), ("blueAlarm", 15), ("yellowAlarm", 16), ("redAlarm", 17), ("onHook", 18), ("offHook", 19), ("dialing", 20), ("activeVoiceCall", 21), ("onHookPending", 22)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: nlIfPortStatus.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfPortStatus.setDescription('The status of this port state. Valid for ports 1-64 only.')
nlIfPhyPort = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 1, 1, 9), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nlIfPhyPort.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfPhyPort.setDescription('The Underlying Physical Port number on the RLP of this logical port. Port numbers 1-8 are valid, but this field is only applicable if the nlIfPort field is 9-64.')
# --- nlIfLlc2Interfaces subtree: nlIfLlc2LANTable defines LLC2 interfaces
# carried over a LAN card (ethernet or token ring). Row creation/deletion is
# managed through nlIfLlc2LANRowStatus (SNMPv2 RowStatus conventions).
# pysmi-generated code -- do not edit OIDs or DESCRIPTION strings.
nlIfLlc2Interfaces = MibIdentifier((1, 3, 6, 1, 4, 1, 173, 7, 4, 2))
nlIfLlc2LANTable = MibTable((1, 3, 6, 1, 4, 1, 173, 7, 4, 2, 1), )
if mibBuilder.loadTexts: nlIfLlc2LANTable.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfLlc2LANTable.setDescription('A Table to define LLC2 interfaces over LAN.')
nlIfLlc2LANEntry = MibTableRow((1, 3, 6, 1, 4, 1, 173, 7, 4, 2, 1, 1), ).setIndexNames((0, "NETLINK-SPECIFIC-MIB", "nlIfLlc2LANRlp"), (0, "NETLINK-SPECIFIC-MIB", "nlIfLlc2LANPort"))
if mibBuilder.loadTexts: nlIfLlc2LANEntry.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfLlc2LANEntry.setDescription('An LLC2 LAN Interface entry containing objects relating to LLC2 LAN interfaces.')
# Index columns: for LLC2-over-LAN, RLP must be 252 and port is the LAN card.
nlIfLlc2LANRlp = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 2, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nlIfLlc2LANRlp.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfLlc2LANRlp.setDescription('The RLP number on the node. For LLC2 LAN interfaces, this must be 252.')
nlIfLlc2LANPort = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 2, 1, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nlIfLlc2LANPort.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfLlc2LANPort.setDescription('The Port number on the RLP. For LLC2 LAN interfaces, this corresponds to the LAN card number.')
nlIfLlc2LANType = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 2, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("ethernet", 1), ("token-ring", 2))).clone('ethernet')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlIfLlc2LANType.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfLlc2LANType.setDescription('The type of the LLC2 interface- either ethernet or token ring.')
nlIfLlc2LANCard = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 2, 1, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2))).setMaxAccess("readonly")
if mibBuilder.loadTexts: nlIfLlc2LANCard.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfLlc2LANCard.setDescription('The LAN card that the LLC2 interface is running on.')
# LAN ID (token ring only); defaults to 4095.
nlIfLlc2LANID = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 2, 1, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4095)).clone(4095)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlIfLlc2LANID.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfLlc2LANID.setDescription('Valid only if the type of the LLC2 LAN interface is token ring. Identifies the token ring adapter to the network.')
nlIfLlc2LANInterface = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 2, 1, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2))).setMaxAccess("readonly")
if mibBuilder.loadTexts: nlIfLlc2LANInterface.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfLlc2LANInterface.setDescription('The LLC2 interface number.')
# RowStatus column supporting active / createAndGo / destroy only (see its
# DESCRIPTION for the exact set-operation semantics this agent implements).
nlIfLlc2LANRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 2, 1, 1, 7), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlIfLlc2LANRowStatus.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfLlc2LANRowStatus.setDescription("This variable is used to manage the creation and deletion of conceptual rows in the nlIfLlc2LANTable and follows the SNMPv2 RowStatus conventions by supporting the following values: - `active', which indicates that the conceptual row is available for use by the managed device. For a management protocol set operation, a genErr response is returned when the row does not exist. - `createAndGo', which is supplied by a management station wishing to create a new instance of a conceptual row and to have its status automatically set to active, making it available for use by the managed device. For a management protocol set operation, a genErr response is returned when the row already exists. - `destroy', which is supplied by a management station wishing to delete all of the instances associated with an existing conceptual row. Note that all of the above values may be specified in a management protocol set operation, and only the 'active' value will be returned in response to a management protocol retrieval operation. For a management protocol set operation, if other variable bindings are included in the same PDU, then a genErr response is returned.")
nlIfLlc2LANPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 2, 1, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 9))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlIfLlc2LANPriority.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfLlc2LANPriority.setDescription('Interface priority- allows setting a priority for traffic within the node. The higher the number, the higher the priority relative to other intra-nodal traffic.')
nlIfLlc2LANBlockedPortFlag = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 2, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('no')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlIfLlc2LANBlockedPortFlag.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfLlc2LANBlockedPortFlag.setDescription('The blocked port flag.')
# --- nlIfLlc2FrTable: LLC2 interfaces over Frame Relay PVCs, indexed by
# (RLP, port, DLCI, RFC1490 encapsulation format). Several columns apply only
# to a specific format (see each DESCRIPTION). pysmi-generated code -- do not
# edit OIDs or DESCRIPTION strings.
nlIfLlc2FrTable = MibTable((1, 3, 6, 1, 4, 1, 173, 7, 4, 2, 2), )
if mibBuilder.loadTexts: nlIfLlc2FrTable.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfLlc2FrTable.setDescription('A Table to define LLC2 interfaces over Frame Relay.')
nlIfLlc2FrEntry = MibTableRow((1, 3, 6, 1, 4, 1, 173, 7, 4, 2, 2, 1), ).setIndexNames((0, "NETLINK-SPECIFIC-MIB", "nlIfLlc2FrRlp"), (0, "NETLINK-SPECIFIC-MIB", "nlIfLlc2FrPort"), (0, "NETLINK-SPECIFIC-MIB", "nlIfLlc2FrDLCI"), (0, "NETLINK-SPECIFIC-MIB", "nlIfLlc2FrFormat"))
if mibBuilder.loadTexts: nlIfLlc2FrEntry.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfLlc2FrEntry.setDescription('An LLC2 FR Interface entry containing objects relating to LLC2 FR interfaces.')
# Index columns: physical RLP 1-8, port/channel, DLCI 16-991, encapsulation.
nlIfLlc2FrRlp = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 2, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 8))).setMaxAccess("readonly")
if mibBuilder.loadTexts: nlIfLlc2FrRlp.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfLlc2FrRlp.setDescription('The RLP number on the node.')
nlIfLlc2FrPort = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 2, 2, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: nlIfLlc2FrPort.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfLlc2FrPort.setDescription('The Port number on the RLP. Port numbers 1-8 are known to be physical ports, and channels are 193-241.')
nlIfLlc2FrDLCI = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 2, 2, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(16, 991))).setMaxAccess("readonly")
if mibBuilder.loadTexts: nlIfLlc2FrDLCI.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfLlc2FrDLCI.setDescription('The DLCI number used to identify the entry in the table. The range is 16-991. ')
nlIfLlc2FrFormat = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 2, 2, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("native-llc2", 1), ("tb-8023", 2), ("srb-8025", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: nlIfLlc2FrFormat.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfLlc2FrFormat.setDescription('The RFC1490 encapsulation method used for LLC2 traffic.')
nlIfLlc2FrPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 2, 2, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 9))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlIfLlc2FrPriority.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfLlc2FrPriority.setDescription('Interface priority- allows setting a priority for traffic within the node. The higher the number, the higher the priority relative to other intra-nodal traffic.')
nlIfLlc2FrBAG = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 2, 2, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 16)).clone(1)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlIfLlc2FrBAG.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfLlc2FrBAG.setDescription('Assigns the interface to one of sixteen groups whose parameters regulate bandwidth usage.')
# Columns valid only for format native-llc2 (host MAC + session direction).
nlIfLlc2FrHostMACAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 2, 2, 1, 7), MacAddress().clone(hexValue="400000000001")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlIfLlc2FrHostMACAddress.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfLlc2FrHostMACAddress.setDescription('The MAC address of the host or workstation to which the remote device needs to connect. Valid only if the format is native-llc2.')
nlIfLlc2FrSessionType = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 2, 2, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("terminated", 1), ("originated", 2), ("not-applicable", 3))).clone('originated')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlIfLlc2FrSessionType.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfLlc2FrSessionType.setDescription('If originated, means that LLC2 connects from the configured Host MAC address can use this interface. If terminated, connections to the configured address can use this interface. Valid only if the format is native-llc2.')
# Column valid only for format srb-8025 (remote bridge ring number).
nlIfLlc2FrLANID = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 2, 2, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4095)).clone(4095)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlIfLlc2FrLANID.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfLlc2FrLANID.setDescription('Identifies the ring number used by a frame relay-compliant source-route bridge at the other end of this PVC. Valid only if the format is srb-8025.')
nlIfLlc2FrInterface = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 2, 2, 1, 10), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nlIfLlc2FrInterface.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfLlc2FrInterface.setDescription('LLC2 interface number.')
# RowStatus column supporting active / createAndGo / destroy only.
nlIfLlc2FrRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 2, 2, 1, 11), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlIfLlc2FrRowStatus.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfLlc2FrRowStatus.setDescription("This variable is used to manage the creation and deletion of conceptual rows in the nlIfLlc2FrTable and follows the SNMPv2 RowStatus conventions by supporting the following values: - `active', which indicates that the conceptual row is available for use by the managed device. For a management protocol set operation, a genErr response is returned when the row does not exist. - `createAndGo', which is supplied by a management station wishing to create a new instance of a conceptual row and to have its status automatically set to active, making it available for use by the managed device. For a management protocol set operation, a genErr response is returned when the row already exists. - `destroy', which is supplied by a management station wishing to delete all of the instances associated with an existing conceptual row. Note that all of the above values may be specified in a management protocol set operation, and only the 'active' value will be returned in response to a management protocol retrieval operation. For a management protocol set operation, if other variable bindings are included in the same PDU, then a genErr response is returned.")
nlIfLlc2FrBlockedPortFlag = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 2, 2, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('no')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlIfLlc2FrBlockedPortFlag.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfLlc2FrBlockedPortFlag.setDescription('The blocked port flag.')
# --- ipxConfig / ipxConfigRouting subtree: static IPX routes and statically
# configured SAP services. Both tables use RowStatus columns for row
# creation/deletion. pysmi-generated code -- do not edit OIDs or DESCRIPTION
# strings (typos such as 'Sercice'/'thar'/'t reach' come from the source MIB).
ipxConfig = MibIdentifier((1, 3, 6, 1, 4, 1, 173, 7, 4, 3))
ipxConfigRouting = MibIdentifier((1, 3, 6, 1, 4, 1, 173, 7, 4, 3, 1))
# ipxStaticRouteConfigTable: keyed by (circuit index, destination net number).
ipxStaticRouteConfigTable = MibTable((1, 3, 6, 1, 4, 1, 173, 7, 4, 3, 1, 1), )
if mibBuilder.loadTexts: ipxStaticRouteConfigTable.setStatus('mandatory')
if mibBuilder.loadTexts: ipxStaticRouteConfigTable.setDescription('The IPX Static Route Configuration Table')
ipxStaticRouteConfigEntry = MibTableRow((1, 3, 6, 1, 4, 1, 173, 7, 4, 3, 1, 1, 1), ).setIndexNames((0, "NETLINK-SPECIFIC-MIB", "ipxStaticRouteConfigCircIndex"), (0, "NETLINK-SPECIFIC-MIB", "ipxStaticRouteConfigNetNum"))
if mibBuilder.loadTexts: ipxStaticRouteConfigEntry.setStatus('mandatory')
if mibBuilder.loadTexts: ipxStaticRouteConfigEntry.setDescription('Each entry corresponds to one instance of IPX Static Routing on the system')
ipxStaticRouteConfigCircIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 3, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ipxStaticRouteConfigCircIndex.setStatus('mandatory')
if mibBuilder.loadTexts: ipxStaticRouteConfigCircIndex.setDescription('The unique identifier of the circuit used to reach the first hop in the static route')
# Fixed-length octet strings: 4-byte IPX network number, 6-byte node address.
ipxStaticRouteConfigNetNum = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 3, 1, 1, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(4, 4)).setFixedLength(4)).setMaxAccess("readonly")
if mibBuilder.loadTexts: ipxStaticRouteConfigNetNum.setStatus('mandatory')
if mibBuilder.loadTexts: ipxStaticRouteConfigNetNum.setDescription("The IPX network number of the route's destination")
ipxStaticRouteConfigRouter = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 3, 1, 1, 1, 3), OctetString().subtype(subtypeSpec=ValueSizeConstraint(6, 6)).setFixedLength(6)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipxStaticRouteConfigRouter.setStatus('mandatory')
if mibBuilder.loadTexts: ipxStaticRouteConfigRouter.setDescription("The hex value of the node address of the router that will forward a packet when this route is used. If Novell's IPXWAN is used on the interface, this field will be ignored ")
ipxStaticRouteConfigRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 3, 1, 1, 1, 4), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipxStaticRouteConfigRowStatus.setStatus('mandatory')
if mibBuilder.loadTexts: ipxStaticRouteConfigRowStatus.setDescription("This variable is used to manage the creation and deletion of conceptual rows in the ipxStaticRouteTable and follows the SNMPv2 RowStatus conventions by supporting the following values: - `active', which indicates that the conceptual row is available for use by the managed device. For a management protocol set operation, a genErr response is returned when the row does not exist. - `createAndGo', which is supplied by a management station wishing to create a new instance of a conceptual row and to have its status automatically set to active, making it available for use by the managed device. For a management protocol set operation, a genErr response is returned when the row already exists. - `destroy', which is supplied by a management station wishing to delete all of the instances associated with an existing conceptual row. Note that all of the above values may be specified in a management protocol set operation, and only the 'active' value will be returned in response to a management protocol retrieval operation. For a management protocol set operation, if other variable bindings are included in the same PDU, then a genErr response is returned.")
# ipxServConfigTable: static SAP service entries, keyed by (service type,
# service name).
ipxServConfigTable = MibTable((1, 3, 6, 1, 4, 1, 173, 7, 4, 3, 1, 2), )
if mibBuilder.loadTexts: ipxServConfigTable.setStatus('mandatory')
if mibBuilder.loadTexts: ipxServConfigTable.setDescription('The IPX Static Route Configuration Table')
ipxServConfigEntry = MibTableRow((1, 3, 6, 1, 4, 1, 173, 7, 4, 3, 1, 2, 1), ).setIndexNames((0, "NETLINK-SPECIFIC-MIB", "ipxServConfigServiceType"), (0, "NETLINK-SPECIFIC-MIB", "ipxServConfigServName"))
if mibBuilder.loadTexts: ipxServConfigEntry.setStatus('mandatory')
if mibBuilder.loadTexts: ipxServConfigEntry.setDescription('Each entry corresponds to one instance of IPX Static Routing on the system')
ipxServConfigServiceType = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 3, 1, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ipxServConfigServiceType.setStatus('mandatory')
if mibBuilder.loadTexts: ipxServConfigServiceType.setDescription('The Service Type, a SAP service code ')
ipxServConfigServName = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 3, 1, 2, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 48))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ipxServConfigServName.setStatus('mandatory')
if mibBuilder.loadTexts: ipxServConfigServName.setDescription('The Sercice Name, identifies a specific server in the IPX network. ')
# Server address components (network / 6-byte node / socket).
ipxServConfigServNetworkAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 3, 1, 2, 1, 3), Integer32().clone(1)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipxServConfigServNetworkAddress.setStatus('mandatory')
if mibBuilder.loadTexts: ipxServConfigServNetworkAddress.setDescription('The IPX network number portion of the IPX address of the server identified by ServName ')
ipxServConfigServNodeAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 3, 1, 2, 1, 4), OctetString().subtype(subtypeSpec=ValueSizeConstraint(6, 6)).setFixedLength(6)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipxServConfigServNodeAddress.setStatus('mandatory')
if mibBuilder.loadTexts: ipxServConfigServNodeAddress.setDescription('The node portion of the IPX address of the service')
ipxServConfigServSocketNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 3, 1, 2, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipxServConfigServSocketNumber.setStatus('mandatory')
if mibBuilder.loadTexts: ipxServConfigServSocketNumber.setDescription('The socket portion of the IPX address of the service')
ipxServConfigInterveningNetworks = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 3, 1, 2, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535)).clone(1)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipxServConfigInterveningNetworks.setStatus('mandatory')
if mibBuilder.loadTexts: ipxServConfigInterveningNetworks.setDescription('The number of routers that a packet must pass through t reach the server')
ipxServConfigGatewayAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 3, 1, 2, 1, 7), OctetString().subtype(subtypeSpec=ValueSizeConstraint(6, 6)).setFixedLength(6)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipxServConfigGatewayAddress.setStatus('mandatory')
if mibBuilder.loadTexts: ipxServConfigGatewayAddress.setDescription('The node address of the router thar will forward a packet')
ipxServConfigInterface = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 3, 1, 2, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipxServConfigInterface.setStatus('mandatory')
if mibBuilder.loadTexts: ipxServConfigInterface.setDescription('The IPX network number assigned to the interface used to send packets that are destined for this server')
# RowStatus column supporting active / createAndGo / destroy only.
ipxServConfigRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 3, 1, 2, 1, 9), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipxServConfigRowStatus.setStatus('mandatory')
if mibBuilder.loadTexts: ipxServConfigRowStatus.setDescription("This variable is used to manage the creation and deletion of conceptual rows in the ipxServTable and follows the SNMPv2 RowStatus conventions by supporting the following values: - `active', which indicates that the conceptual row is available for use by the managed device. For a management protocol set operation, a genErr response is returned when the row does not exist. - `createAndGo', which is supplied by a management station wishing to create a new instance of a conceptual row and to have its status automatically set to active, making it available for use by the managed device. For a management protocol set operation, a genErr response is returned when the row already exists. - `destroy', which is supplied by a management station wishing to delete all of the instances associated with an existing conceptual row. Note that all of the above values may be specified in a management protocol set operation, and only the 'active' value will be returned in response to a management protocol retrieval operation. For a management protocol set operation, if other variable bindings are included in the same PDU, then a genErr response is returned.")
# --- ipxConfigInterface subtree: ipxInterfaceTable holds per-IPX-interface
# configuration (type, frame type, MTU, network address, bandwidth group,
# NetBIOS handling), indexed by ipxInterfaceNumber (1-64). pysmi-generated
# code -- do not edit OIDs or DESCRIPTION strings.
ipxConfigInterface = MibIdentifier((1, 3, 6, 1, 4, 1, 173, 7, 4, 3, 2))
ipxInterfaceTable = MibTable((1, 3, 6, 1, 4, 1, 173, 7, 4, 3, 2, 6), )
if mibBuilder.loadTexts: ipxInterfaceTable.setStatus('mandatory')
if mibBuilder.loadTexts: ipxInterfaceTable.setDescription('The IPX Interface Table')
ipxInterfaceEntry = MibTableRow((1, 3, 6, 1, 4, 1, 173, 7, 4, 3, 2, 6, 1), ).setIndexNames((0, "NETLINK-SPECIFIC-MIB", "ipxInterfaceNumber"))
if mibBuilder.loadTexts: ipxInterfaceEntry.setStatus('mandatory')
if mibBuilder.loadTexts: ipxInterfaceEntry.setDescription('')
ipxInterfaceNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 3, 2, 6, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 64))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ipxInterfaceNumber.setStatus('mandatory')
if mibBuilder.loadTexts: ipxInterfaceNumber.setDescription('The Interface Number, a sequential number to identify the interface')
ipxInterfaceBlockedPortFlag = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 3, 2, 6, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipxInterfaceBlockedPortFlag.setStatus('mandatory')
if mibBuilder.loadTexts: ipxInterfaceBlockedPortFlag.setDescription('This flag causes the IPX interface to be enabled (N) or disabled (Y) at node IPL. The port will remain in that state until this parameter is changed')
# Link type and frame encapsulation; valid frame types depend on the link
# type (see the ipxInterfaceFrameType DESCRIPTION for the combinations).
ipxInterfaceType = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 3, 2, 6, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("ethernet", 1), ("eight025", 2), ("x25", 3), ("frl", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipxInterfaceType.setStatus('mandatory')
if mibBuilder.loadTexts: ipxInterfaceType.setDescription('The interface type. 802.5 is not valid on FRX4000')
ipxInterfaceFrameType = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 3, 2, 6, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("typeII", 1), ("raw", 2), ("llc", 3), ("snap", 4))).clone('typeII')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipxInterfaceFrameType.setStatus('mandatory')
if mibBuilder.loadTexts: ipxInterfaceFrameType.setDescription('The frame type - depends on the interfece type. Valid combinations are: Ethernet, 802.5 - typeII, raw, llc, snap Frame relay, X25 - typeII')
ipxInterfaceMaxTransUnit = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 3, 2, 6, 1, 5), Integer32().clone(4096)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipxInterfaceMaxTransUnit.setStatus('mandatory')
if mibBuilder.loadTexts: ipxInterfaceMaxTransUnit.setDescription('Maximum frame size that can be transmitted or received over the IPX interface.: Valid ranges are: Ethernet - (45-1518) 802.5 - (48-4096) Frame relay, X25 - (31-4096) ')
ipxInterfaceNetworkAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 3, 2, 6, 1, 6), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipxInterfaceNetworkAddress.setStatus('mandatory')
if mibBuilder.loadTexts: ipxInterfaceNetworkAddress.setDescription('Identifies the local interface to the Novell Network')
ipxInterfaceBandwidthAllocGroup = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 3, 2, 6, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 16)).clone(1)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipxInterfaceBandwidthAllocGroup.setStatus('mandatory')
if mibBuilder.loadTexts: ipxInterfaceBandwidthAllocGroup.setDescription('Assigns the IPX interface to one of sixteen groups whose parameters regulate bandwidth usage')
ipxInterfacePortDiagEnabled = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 3, 2, 6, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('no')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipxInterfacePortDiagEnabled.setStatus('mandatory')
if mibBuilder.loadTexts: ipxInterfacePortDiagEnabled.setDescription("Enables/Disables the IPX interface's ability to handle Novell diagonistics packets")
ipxInterfaceNetBIOSEnabled = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 3, 2, 6, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('no')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipxInterfaceNetBIOSEnabled.setStatus('mandatory')
if mibBuilder.loadTexts: ipxInterfaceNetBIOSEnabled.setDescription('Determines whether Novell-encapsulated NetBIOS frames received on the interface will be processed')
ipxInterfaceNetBIOSHops = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 3, 2, 6, 1, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255)).clone(8)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipxInterfaceNetBIOSHops.setStatus('mandatory')
if mibBuilder.loadTexts: ipxInterfaceNetBIOSHops.setDescription('Maximum number of hops allowed for routing a Novell-encapsulated NetBIOS frame')
ipxInterfacePeriodicRIPEnabled = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 3, 2, 6, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('no')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipxInterfacePeriodicRIPEnabled.setStatus('mandatory')
if mibBuilder.loadTexts: ipxInterfacePeriodicRIPEnabled.setDescription('Determines whether the node will originate periodic IPX Routing Information Protocol messages from this interface to inform the network that the route through this interface is viable')
ipxInterfacePeriodicRIPTimer = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 3, 2, 6, 1, 12), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535)).clone(60)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipxInterfacePeriodicRIPTimer.setStatus('mandatory')
if mibBuilder.loadTexts: ipxInterfacePeriodicRIPTimer.setDescription('This is the interval between the periodic IPX RIP broadcasts from this interface')
ipxInterfacePeriodicSAPEnabled = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 3, 2, 6, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('no')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipxInterfacePeriodicSAPEnabled.setStatus('mandatory')
if mibBuilder.loadTexts: ipxInterfacePeriodicSAPEnabled.setDescription('Determines whether the node will originate periodic IPX SAP messages from this interface to inform the network which SAP entries in the routing table are viable')
ipxInterfacePeriodicSAPTimer = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 3, 2, 6, 1, 14), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535)).clone(60)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipxInterfacePeriodicSAPTimer.setStatus('mandatory')
if mibBuilder.loadTexts: ipxInterfacePeriodicSAPTimer.setDescription('The interval between the periodic SAP broadcasts from this interface')
ipxInterfaceRIPEnabled = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 3, 2, 6, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('no')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipxInterfaceRIPEnabled.setStatus('mandatory')
if mibBuilder.loadTexts: ipxInterfaceRIPEnabled.setDescription('Datermines whether this interface will support RIP proceswsing ')
ipxInterfaceRIPAgeTimer = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 3, 2, 6, 1, 16), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535)).clone(180)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipxInterfaceRIPAgeTimer.setStatus('mandatory')
if mibBuilder.loadTexts: ipxInterfaceRIPAgeTimer.setDescription('This is used to delete route entries in the routing information table. The timer starts/restarts each time an entry is created/updated. If the timer expires, the router will assume that the entry is no longer valid and the entry will be deleteda from the table ')
ipxInterfaceRIPMaxSize = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 3, 2, 6, 1, 17), Integer32().subtype(subtypeSpec=ValueRangeConstraint(54, 446)).clone(446)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipxInterfaceRIPMaxSize.setStatus('mandatory')
if mibBuilder.loadTexts: ipxInterfaceRIPMaxSize.setDescription('Maximum possible RIP packet size ')
ipxInterfaceSAPEnabled = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 3, 2, 6, 1, 18), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('no')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipxInterfaceSAPEnabled.setStatus('mandatory')
if mibBuilder.loadTexts: ipxInterfaceSAPEnabled.setDescription('Determines whether the interface will support SAP or not ')
ipxInterfaceSAPAgeTimer = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 3, 2, 6, 1, 19), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535)).clone(180)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipxInterfaceSAPAgeTimer.setStatus('mandatory')
if mibBuilder.loadTexts: ipxInterfaceSAPAgeTimer.setDescription('This is used to delete route entries from the router information table. The timer starts/restars each time an entry is created/updated. If the timer expires, the router will assume that the entry is no longer valid and the entry will be deleted from the table ')
ipxInterfaceTransportTime = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 3, 2, 6, 1, 20), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255)).clone(1)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipxInterfaceTransportTime.setStatus('mandatory')
if mibBuilder.loadTexts: ipxInterfaceTransportTime.setDescription('is the time (in 55 milliseconds unit) that will normally take to forward frames out of interface. Recommmended values are 1 for LAN interfaces and 30 for WAN interfaces. A value of 0 indicates that the back to back frame forwarding is acceptable. This time is used to calculate the least cost path during routing. If IPXWAM\x7fN is (Y), Transport time will be overridden by the value calculated by IPXWAN ')
ipxInterfaceSerializationEnabled = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 3, 2, 6, 1, 21), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('no')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipxInterfaceSerializationEnabled.setStatus('mandatory')
if mibBuilder.loadTexts: ipxInterfaceSerializationEnabled.setDescription('Determines whether NetWare file server serialization security frames will be forwarded on the interface. These broadcasts frames are used by Novell to hold informations regarding the licensing of the file server executables ')
ipxInterfaceWatchdogSpoofingEnabled = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 3, 2, 6, 1, 22), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('no')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipxInterfaceWatchdogSpoofingEnabled.setStatus('mandatory')
if mibBuilder.loadTexts: ipxInterfaceWatchdogSpoofingEnabled.setDescription('Determines whether this interface will be used by the local node to respond to (keep alive) messages on behalf of the client workstations. Novell servers periodically issue these messages to all clients to determine whether each client is still alive ')
ipxInterfaceLanCardNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 3, 2, 6, 1, 23), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 1))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipxInterfaceLanCardNumber.setStatus('mandatory')
if mibBuilder.loadTexts: ipxInterfaceLanCardNumber.setDescription('The card number installed in an FRX6000. The first or only LAN card installed is always 0, and the second is card 1')
ipxInterfaceWanEnabled = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 3, 2, 6, 1, 24), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("no", 1), ("yes", 2), ("not-applicable", 3))).clone('no')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipxInterfaceWanEnabled.setStatus('mandatory')
if mibBuilder.loadTexts: ipxInterfaceWanEnabled.setDescription("Determines whether Novell's IPXWAN protocol will be used on the interface. IPXWAN specifies a method for IPX routers to communicate across WAN links. FRX6000 and FRX4000 implement IPXWAN running over RFC 1490-complaint frame relay and RFC1356-complaint X.25")
ipxInterfaceSourceSubscriber = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 3, 2, 6, 1, 25), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 15))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipxInterfaceSourceSubscriber.setStatus('mandatory')
if mibBuilder.loadTexts: ipxInterfaceSourceSubscriber.setDescription('Subscruber id that defines an IPX interface in the node, by associating itself with port 66 (a number reserved for the IPXinterface) on the RLP that has an IPX installed on it. .. Valid only for type X.25 ')
ipxInterfaceDestinationSubscriber = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 3, 2, 6, 1, 26), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 15))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipxInterfaceDestinationSubscriber.setStatus('mandatory')
if mibBuilder.loadTexts: ipxInterfaceDestinationSubscriber.setDescription('Subscriber ID associated with the physical X.25 port that will be used for the IPX interface . .. Valid only for type X.25 ')
ipxInterfaceSVCRetryTimer = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 3, 2, 6, 1, 27), Integer32().subtype(subtypeSpec=ValueRangeConstraint(5, 60)).clone(20)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipxInterfaceSVCRetryTimer.setStatus('mandatory')
if mibBuilder.loadTexts: ipxInterfaceSVCRetryTimer.setDescription('The frequency with which calls will be placed to try to establish a connection .. Valid only for type X.25 ')
ipxInterfaceSVCIdleTimer = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 3, 2, 6, 1, 28), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 5)).clone(1)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipxInterfaceSVCIdleTimer.setStatus('mandatory')
if mibBuilder.loadTexts: ipxInterfaceSVCIdleTimer.setDescription('is a period that an IPX connection can remain active with no traffic before the connection is cleared. .. Valid only for type X.25 ')
ipxInterfaceMaxVC = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 3, 2, 6, 1, 29), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 8)).clone(1)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipxInterfaceMaxVC.setStatus('mandatory')
if mibBuilder.loadTexts: ipxInterfaceMaxVC.setDescription('This specifies a number of virtual circuits over which IPX traffic will be transmitted in parallel. .. Valid only for type X.25 ')
ipxInterfacePVCConnection = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 3, 2, 6, 1, 30), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("no", 1), ("yes", 2), ("not-applicable", 3))).clone('no')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipxInterfacePVCConnection.setStatus('mandatory')
if mibBuilder.loadTexts: ipxInterfacePVCConnection.setDescription('Identifies this interface as one of a PVC. .. Valid only for type X.25 ')
ipxInterfaceSourceCard = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 3, 2, 6, 1, 31), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 7))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipxInterfaceSourceCard.setStatus('mandatory')
if mibBuilder.loadTexts: ipxInterfaceSourceCard.setDescription('Identifies the location of this end of the frame relay IPX interface. .. Valid only for frame relay ')
ipxInterfaceSourcePort = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 3, 2, 6, 1, 32), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 7))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipxInterfaceSourcePort.setStatus('mandatory')
if mibBuilder.loadTexts: ipxInterfaceSourcePort.setDescription('Identifies the location of this end of the frame relay IPX interface. .. Valid only for frame relay ')
ipxInterfaceSourceDLCI = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 3, 2, 6, 1, 33), Integer32().subtype(subtypeSpec=ValueRangeConstraint(16, 991)).clone(16)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipxInterfaceSourceDLCI.setStatus('mandatory')
if mibBuilder.loadTexts: ipxInterfaceSourceDLCI.setDescription('Identifies the location of this end of the frame relay IPX interface. .. Valid only for frame relay ')
ipxInterfaceRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 3, 2, 6, 1, 34), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipxInterfaceRowStatus.setStatus('mandatory')
if mibBuilder.loadTexts: ipxInterfaceRowStatus.setDescription("This variable is used to manage the creation and deletion of conceptual rows in the ipxInterfaceTable and follows the SNMPv2 RowStatus conventions by supporting the following values: - `active', which indicates that the conceptual row is available for use by the managed device. For a management protocol set operation, a genErr response is returned when the row does not exist. - `createAndGo', which is supplied by a management station wishing to create a new instance of a conceptual row and to have its status automatically set to active, making it available for use by the managed device. For a management protocol set operation, a genErr response is returned when the row already exists. - `destroy', which is supplied by a management station wishing to delete all of the instances associated with an existing conceptual row. Note that all of the above values may be specified in a management protocol set operation, and only the 'active' value will be returned in response to a management protocol retrieval operation. For a management protocol set operation, if other variable bindings are included in the same PDU, then a genErr response is returned.")
# --- ipxConfigNodeDefault scalars (OID prefix 1.3.6.1.4.1.173.7.4.3.3) ---
# Node-wide IPX defaults: network address, RIP/SAP transmission gap, router name.
# Generated pysmi/pysnmp MIB code; description text is verbatim from the MIB.
ipxConfigNodeDefault = MibIdentifier((1, 3, 6, 1, 4, 1, 173, 7, 4, 3, 3))
ipxNodeDefaultConfigNetworkAddress = MibScalar((1, 3, 6, 1, 4, 1, 173, 7, 4, 3, 3, 1), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipxNodeDefaultConfigNetworkAddress.setStatus('mandatory')
if mibBuilder.loadTexts: ipxNodeDefaultConfigNetworkAddress.setDescription('Identifies the node to the IPX network')
ipxNodeDefaultConfigRIPSAPGap = MibScalar((1, 3, 6, 1, 4, 1, 173, 7, 4, 3, 3, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 100))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipxNodeDefaultConfigRIPSAPGap.setStatus('mandatory')
if mibBuilder.loadTexts: ipxNodeDefaultConfigRIPSAPGap.setDescription('The minimum delay between consecutive RIP and SAP transmissions')
ipxNodeDefaultConfigRouterName = MibScalar((1, 3, 6, 1, 4, 1, 173, 7, 4, 3, 3, 3), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 48))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipxNodeDefaultConfigRouterName.setStatus('mandatory')
if mibBuilder.loadTexts: ipxNodeDefaultConfigRouterName.setDescription('An unique identifier within the IPX network')
# --- nlIfIpTable: IP interfaces over frame relay, X.25, or LAN ---
# (OID prefix 1.3.6.1.4.1.173.7.4.4.1; indexed by nlIfIpInterface.)
# Generated pysmi/pysnmp MIB code; description strings are verbatim MIB text.
nlIfIpInterfaces = MibIdentifier((1, 3, 6, 1, 4, 1, 173, 7, 4, 4))
nlIfIpTable = MibTable((1, 3, 6, 1, 4, 1, 173, 7, 4, 4, 1), )
if mibBuilder.loadTexts: nlIfIpTable.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfIpTable.setDescription('A Table to define IP interfaces over frame relay, X.25, or LAN (ethernet, 802.3, 802.5).')
nlIfIpEntry = MibTableRow((1, 3, 6, 1, 4, 1, 173, 7, 4, 4, 1, 1), ).setIndexNames((0, "NETLINK-SPECIFIC-MIB", "nlIfIpInterface"))
if mibBuilder.loadTexts: nlIfIpEntry.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfIpEntry.setDescription('An IP Interface entry containing objects relating to IP interfaces.')
# Index column — note: deliberately distinct from ifIndex (per its description).
nlIfIpInterface = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 4, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 257))).setMaxAccess("readonly")
if mibBuilder.loadTexts: nlIfIpInterface.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfIpInterface.setDescription('The interface number which uniquely identifies the interface to which this entry is applicable. The interface identified by a particular value of this number is NOT the same interface as defined by the same value of ifIndex.')
# General IP settings: MTU, mask, metric, ICMP route learning.
nlIfIpMtu = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 4, 1, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 4096)).clone(4096)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlIfIpMtu.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfIpMtu.setDescription('The maximum size (in bytes) of the data field that can be used to encapsulate an IP datagram for transmission over the interface. If the interface type is LAN based, then the maximum size value cannot exceed 1500.')
nlIfIpNetworkMask = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 4, 1, 1, 3), IpAddress().clone(hexValue="FF000000")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlIfIpNetworkMask.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfIpNetworkMask.setDescription('The mask used during routing to determine the portion of the address associated with the network.')
nlIfIpRouteMetric = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 4, 1, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlIfIpRouteMetric.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfIpRouteMetric.setDescription('The number of network hops (network gateways in the overall path over the internet) associated with this interface.')
nlIfIpICMPAddRoutes = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 4, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('yes')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlIfIpICMPAddRoutes.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfIpICMPAddRoutes.setDescription('The flag which indicates whether IP may add routes obtained by ICMP, whose error and control function is used to send re-direction options back to the source if problems are discovered.')
# RIP update frequencies (delta vs. full table broadcasts).
nlIfIpRIPDeltaUpdates = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 4, 1, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(10, 300)).clone(30)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlIfIpRIPDeltaUpdates.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfIpRIPDeltaUpdates.setDescription('The frequency with which RIP messages will be sent to every neighbor node, containing any changes to the routing table that occurred since the last full update.')
nlIfIpRIPFullUpdates = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 4, 1, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(10, 300)).clone(60)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlIfIpRIPFullUpdates.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfIpRIPFullUpdates.setDescription('The frequency with which updates of the entire routing table will be sent to every neighbor node, via one or more RIP messages.')
# Intra-nodal priority, bandwidth group, interface type, addressing.
nlIfIpPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 4, 1, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 9)).clone(5)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlIfIpPriority.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfIpPriority.setDescription('The priority setting for traffic within the node. The higher the number, the higher the priority relative to other intra-nodal traffic.')
nlIfIpBAG = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 4, 1, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 16)).clone(1)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlIfIpBAG.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfIpBAG.setDescription('The bandwidth allocation group to which this interface is assigned to. This object is only valid when the interface type is frame relay or proprietary virtual.')
nlIfIpType = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 4, 1, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 5, 6, 7, 9, 32, 53))).clone(namedValues=NamedValues(("other", 1), ("rfc877x25", 5), ("ethernetCsmacd", 6), ("iso88023Csmacd", 7), ("iso88025TokenRing", 9), ("frameRelay", 32), ("propVirtual", 53))).clone('rfc877x25')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlIfIpType.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfIpType.setDescription('The type of IP interface.')
nlIfIpSourceAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 4, 1, 1, 11), IpAddress().clone(hexValue="00000000")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlIfIpSourceAddress.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfIpSourceAddress.setDescription('The IP address of the end-to-end sender.')
nlIfIpDestAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 4, 1, 1, 12), IpAddress().clone(hexValue="00000000")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlIfIpDestAddress.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfIpDestAddress.setDescription('The IP address of the end-to-end receiver. This object is only valid when the interface type is X.25 or proprietary virtual, or when the interface type is frame relay and the inverse ARP flag indicates disabled.')
# LAN-specific columns (broadcast address, LAN card).
nlIfIpBroadcastAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 4, 1, 1, 13), IpAddress().clone(hexValue="00000000")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlIfIpBroadcastAddress.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfIpBroadcastAddress.setDescription('The IP address that is used to send to all LAN hosts on the network. This object is only valid when the interface type is LAN based.')
nlIfIpLANCard = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 4, 1, 1, 14), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 1))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlIfIpLANCard.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfIpLANCard.setDescription('The LAN card that contains the interface. This object is only valid when the interface type is LAN based.')
# X.25-specific columns (subscribers, SVC timers, max SVCs, PVC flag).
nlIfIpSourceSub = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 4, 1, 1, 15), NlSubscriberAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlIfIpSourceSub.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfIpSourceSub.setDescription('The subscriber ID associated with the source IP interface port. This object is only valid when the interface type is X.25.')
nlIfIpDestSub = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 4, 1, 1, 16), NlSubscriberAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlIfIpDestSub.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfIpDestSub.setDescription('The subscriber ID associated with the destination IP interface port. This object is only valid when the interface type is X.25.')
nlIfIpSVCRetryTimer = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 4, 1, 1, 17), Integer32().subtype(subtypeSpec=ValueRangeConstraint(5, 60)).clone(20)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlIfIpSVCRetryTimer.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfIpSVCRetryTimer.setDescription('The frequency with which calls will be placed to try to establish a connection. This object is only valid when the interface type is X.25.')
nlIfIpSVCIdleTimer = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 4, 1, 1, 18), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 5)).clone(1)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlIfIpSVCIdleTimer.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfIpSVCIdleTimer.setDescription('The period that an IP connection remains active with no traffic before the connection is cleared. This object is only valid when the interface type is X.25.')
nlIfIpMaxSVC = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 4, 1, 1, 19), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 8)).clone(1)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlIfIpMaxSVC.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfIpMaxSVC.setDescription('The number of virtual circuits over which IP traffic will be transmitted in parallel. This object is only valid when the interface type is X.25.')
nlIfIpPVCConnection = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 4, 1, 1, 20), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('no')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlIfIpPVCConnection.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfIpPVCConnection.setDescription('The flag which indicates whether this interface is a PVC. This object is only valid when the interface type is X.25.')
# Frame-relay-specific columns (RLP/port/DLCI of the local end).
nlIfIpSourceRlp = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 4, 1, 1, 21), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 8)).clone(1)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlIfIpSourceRlp.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfIpSourceRlp.setDescription('The RLP of the logical interface of this end of the frame relay link. This object is only valid when the interface type is frame relay.')
nlIfIpSourcePort = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 4, 1, 1, 22), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 8)).clone(1)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlIfIpSourcePort.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfIpSourcePort.setDescription('The port of the logical interface of this end of the frame relay link. This object is only valid when the interface type is frame relay.')
nlIfIpSourceDLCI = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 4, 1, 1, 23), Integer32().subtype(subtypeSpec=ValueRangeConstraint(16, 991)).clone(16)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlIfIpSourceDLCI.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfIpSourceDLCI.setDescription('The DLCI of the logical interface of this end of the frame relay link. This object is only valid when the interface type is frame relay.')
# RIP/ARP feature flags and unnumbered-interface flag.
nlIfIpRIPSupport = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 4, 1, 1, 24), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("disabled", 1), ("enabled", 2), ("receive-only", 3))).clone('enabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlIfIpRIPSupport.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfIpRIPSupport.setDescription('The RIP support that is being run on the interface.')
nlIfIpInverseARP = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 4, 1, 1, 25), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("disabled", 1), ("enabled", 2))).clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlIfIpInverseARP.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfIpInverseARP.setDescription('The flag which indicates whether inverse ARP is enabled or disabled. This object is only valid when the interface type is frame relay.')
nlIfIpProxyARP = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 4, 1, 1, 26), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("disabled", 1), ("enabled", 2))).clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlIfIpProxyARP.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfIpProxyARP.setDescription('The flag which indicates whether proxy ARP is enabled or disabled. This object is only valid when the interface type is LAN based.')
nlIfIpUnnumberedIf = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 4, 1, 1, 27), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('yes')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlIfIpUnnumberedIf.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfIpUnnumberedIf.setDescription('The flag which indicates whether this interface is an unnumbered interface (i.e., does not require a specific source IP address). This object is only valid when the interface type is frame relay or X.25.')
# SNMPv2-style RowStatus for row creation/deletion in nlIfIpTable.
nlIfIpRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 4, 1, 1, 28), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlIfIpRowStatus.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfIpRowStatus.setDescription("This variable is used to manage the creation and deletion of conceptual rows in the nlIfIpTable and follows the SNMPv2 RowStatus conventions by supporting the following values: - `active', which indicates that the conceptual row is available for use by the managed device. For a management protocol set operation, a genErr response is returned when the row does not exist. - `createAndGo', which is supplied by a management station wishing to create a new instance of a conceptual row and to have its status automatically set to active, making it available for use by the managed device. For a management protocol set operation, a genErr response is returned when the row already exists. - `destroy', which is supplied by a management station wishing to delete all of the instances associated with an existing conceptual row. Note that all of the above values may be specified in a management protocol set operation, and only the 'active' value will be returned in response to a management protocol retrieval operation. For a management protocol set operation, if other variable bindings are included in the same PDU, then a genErr response is returned.")
# --- nlIfIpSecondaryAddrTable (1.3.6.1.4.1.173.7.4.4.2) -------------------
# Configures secondary IP addresses on LAN (ethernet/802.3/802.5) interfaces.
# Each object below follows the generated pattern: definition line, then
# status/description lines guarded by `mibBuilder.loadTexts` so text is only
# attached when the builder was asked to load texts.
nlIfIpSecondaryAddrTable = MibTable((1, 3, 6, 1, 4, 1, 173, 7, 4, 4, 2), )
if mibBuilder.loadTexts: nlIfIpSecondaryAddrTable.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfIpSecondaryAddrTable.setDescription('A Table to define secondary IP interfaces on a LAN (ethernet, 802.3, 802.5) interface.')
# Row indexed by (nlIfIpInterface, nlIfIpSecondaryAddrSequence); the first
# index object is defined elsewhere in this MIB module.
nlIfIpSecondaryAddrEntry = MibTableRow((1, 3, 6, 1, 4, 1, 173, 7, 4, 4, 2, 1), ).setIndexNames((0, "NETLINK-SPECIFIC-MIB", "nlIfIpInterface"), (0, "NETLINK-SPECIFIC-MIB", "nlIfIpSecondaryAddrSequence"))
if mibBuilder.loadTexts: nlIfIpSecondaryAddrEntry.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfIpSecondaryAddrEntry.setDescription('An IP Secondary Address entry containing objects relating to IP secondary addresses.')
# Column 1: sequence number index, constrained to 1..15.
nlIfIpSecondaryAddrSequence = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 4, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 15))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlIfIpSecondaryAddrSequence.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfIpSecondaryAddrSequence.setDescription('The maximum number of secondary IP addresses that may be configured on an interface.')
# Column 2: network mask, default 0.0.0.0 (hex value encoded by the compiler).
nlIfIpSecondaryAddrNetworkMask = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 4, 2, 1, 2), IpAddress().clone(hexValue="00000000")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlIfIpSecondaryAddrNetworkMask.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfIpSecondaryAddrNetworkMask.setDescription('The mask used during routing to determine the portion of the address associated with the network.')
# Column 3: route metric (hop count), 0..255.
nlIfIpSecondaryAddrRouteMetric = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 4, 2, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlIfIpSecondaryAddrRouteMetric.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfIpSecondaryAddrRouteMetric.setDescription('The number of network hops (network gateways in the overall path over the internet) associated with this interface.')
# Column 4: end-to-end sender address, default 0.0.0.0.
nlIfIpSecondaryAddrSourceAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 4, 2, 1, 4), IpAddress().clone(hexValue="00000000")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlIfIpSecondaryAddrSourceAddress.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfIpSecondaryAddrSourceAddress.setDescription('The IP address of the end-to-end sender.')
# Column 5: broadcast address, default 0.0.0.0.
nlIfIpSecondaryAddrBroadcastAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 4, 2, 1, 5), IpAddress().clone(hexValue="00000000")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlIfIpSecondaryAddrBroadcastAddress.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfIpSecondaryAddrBroadcastAddress.setDescription('The IP address that is used to send to all LAN hosts on the network.')
# Column 6: RIP support enum (disabled/enabled/receive-only), default 'enabled'.
nlIfIpSecondaryAddrRIPSupport = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 4, 2, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("disabled", 1), ("enabled", 2), ("receive-only", 3))).clone('enabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlIfIpSecondaryAddrRIPSupport.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfIpSecondaryAddrRIPSupport.setDescription('The RIP support that is being run on the interface.')
# Column 7: SNMPv2-style RowStatus for row creation/deletion.
nlIfIpSecondaryAddrRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 4, 2, 1, 7), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlIfIpSecondaryAddrRowStatus.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfIpSecondaryAddrRowStatus.setDescription("This variable is used to manage the creation and deletion of conceptual rows in the nlIfIpSecondaryAddrTable and follows the SNMPv2 RowStatus conventions by supporting the following values: - `active', which indicates that the conceptual row is available for use by the managed device. For a management protocol set operation, a genErr response is returned when the row does not exist. - `createAndGo', which is supplied by a management station wishing to create a new instance of a conceptual row and to have its status automatically set to active, making it available for use by the managed device. For a management protocol set operation, a genErr response is returned when the row already exists. - `destroy', which is supplied by a management station wishing to delete all of the instances associated with an existing conceptual row. Note that all of the above values may be specified in a management protocol set operation, and only the 'active' value will be returned in response to a management protocol retrieval operation. For a management protocol set operation, if other variable bindings are included in the same PDU, then a genErr response is returned.")
# --- nlIfVoiceInterfaces subtree (1.3.6.1.4.1.173.7.4.5) -------------------
# Voice-over-frame-relay interface configuration.
nlIfVoiceInterfaces = MibIdentifier((1, 3, 6, 1, 4, 1, 173, 7, 4, 5))
nlIfVoiceTable = MibTable((1, 3, 6, 1, 4, 1, 173, 7, 4, 5, 1), )
if mibBuilder.loadTexts: nlIfVoiceTable.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfVoiceTable.setDescription('A Table to define Voice interfaces over frame relay.')
# Row indexed solely by the voice interface number.
nlIfVoiceEntry = MibTableRow((1, 3, 6, 1, 4, 1, 173, 7, 4, 5, 1, 1), ).setIndexNames((0, "NETLINK-SPECIFIC-MIB", "nlIfVoiceInterface"))
if mibBuilder.loadTexts: nlIfVoiceEntry.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfVoiceEntry.setDescription('A Voice Interface entry containing objects relating to the interface that are configurable by the user.')
# Column 1: interface index (1..129); read-only, distinct from ifIndex.
nlIfVoiceInterface = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 5, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 129))).setMaxAccess("readonly")
if mibBuilder.loadTexts: nlIfVoiceInterface.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfVoiceInterface.setDescription('The interface number which uniquely identifies the interface to which this entry is applicable. The interface identified by a particular value of this number is NOT the same interface as defined by the same value of ifIndex.')
# Column 2: remote node type enum (netlink/act), default 'act'.
nlIfVoicePeerNodeType = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 5, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("netlink", 1), ("act", 2))).clone('act')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlIfVoicePeerNodeType.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfVoicePeerNodeType.setDescription('The node type for the node at the remote end of the frame relay connection.')
# Columns 3-6: peer/local node numbers (1..61) and voice ports (1..17).
nlIfVoicePeerNodeNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 5, 1, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 61)).clone(1)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlIfVoicePeerNodeNumber.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfVoicePeerNodeNumber.setDescription('The node number of the remote device.')
nlIfVoicePeerNodePort = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 5, 1, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 17)).clone(1)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlIfVoicePeerNodePort.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfVoicePeerNodePort.setDescription('The voice port on the remote device.')
nlIfVoiceLocalNodeNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 5, 1, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 61)).clone(1)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlIfVoiceLocalNodeNumber.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfVoiceLocalNodeNumber.setDescription('The node number of the local device.')
nlIfVoiceLocalNodePort = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 5, 1, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 17)).clone(1)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlIfVoiceLocalNodePort.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfVoiceLocalNodePort.setDescription('The voice port on the local device.')
# Columns 7-9: frame relay addressing of this end of the link (RLP, port, DLCI).
nlIfVoiceFrameRelayRlp = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 5, 1, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 8)).clone(1)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlIfVoiceFrameRelayRlp.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfVoiceFrameRelayRlp.setDescription('The RLP of the logical interface of this end of the frame relay link.')
nlIfVoiceFrameRelayPort = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 5, 1, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 8)).clone(1)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlIfVoiceFrameRelayPort.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfVoiceFrameRelayPort.setDescription('The port of the logical interface of this end of the frame relay link.')
nlIfVoiceFrameRelayDLCI = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 5, 1, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(16, 991)).clone(16)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlIfVoiceFrameRelayDLCI.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfVoiceFrameRelayDLCI.setDescription('The DLCI of the logical interface of this end of the frame relay link.')
# Column 10: fragmentation enable flag (no/yes), default 'no'.
# NOTE(review): the source MIB supplies an empty DESCRIPTION here.
nlIfVoiceEnableFragment = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 5, 1, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('no')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlIfVoiceEnableFragment.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfVoiceEnableFragment.setDescription('')
# Column 11: SNMPv2-style RowStatus for row creation/deletion.
nlIfVoiceRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 4, 5, 1, 1, 11), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlIfVoiceRowStatus.setStatus('mandatory')
if mibBuilder.loadTexts: nlIfVoiceRowStatus.setDescription("This variable is used to manage the creation and deletion of conceptual rows in the noIfVoiceTable and follows the SNMPv2 RowStatus conventions by supporting the following values: - `active', which indicates that the conceptual row is available for use by the managed device. For a management protocol set operation, a genErr response is returned when the row does not exist. - `createAndGo', which is supplied by a management station wishing to create a new instance of a conceptual row and to have its status automatically set to active, making it available for use by the managed device. For a management protocol set operation, a genErr response is returned when the row already exists. - `destroy', which is supplied by a management station wishing to delete all of the instances associated with an existing conceptual row. Note that all of the above values may be specified in a management protocol set operation, and only the 'active' value will be returned in response to a management protocol retrieval operation. For a management protocol set operation, if other variable bindings are included in the same PDU, then a genErr response is returned.")
# --- subscriber subtree (1.3.6.1.4.1.173.7.5) ------------------------------
# Local subscriber configuration: subscriber identities, routing paths and
# redirection addresses, each in its own table.
subscriber = MibIdentifier((1, 3, 6, 1, 4, 1, 173, 7, 5))
# Table 1: one entry per local subscriber, indexed by subscriber ID.
nlLocalSubscriberTable = MibTable((1, 3, 6, 1, 4, 1, 173, 7, 5, 1), )
if mibBuilder.loadTexts: nlLocalSubscriberTable.setStatus('mandatory')
if mibBuilder.loadTexts: nlLocalSubscriberTable.setDescription('A Table to define each local subscriber.')
nlLocalSubscriberEntry = MibTableRow((1, 3, 6, 1, 4, 1, 173, 7, 5, 1, 1), ).setIndexNames((0, "NETLINK-SPECIFIC-MIB", "nlLocalSubscriberId"))
if mibBuilder.loadTexts: nlLocalSubscriberEntry.setStatus('mandatory')
if mibBuilder.loadTexts: nlLocalSubscriberEntry.setDescription('A Local Subscriber entry containing objects relating to local subscribers.')
# Subscriber ID: digit string, may contain '*' and '?' wildcards.
nlLocalSubscriberId = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 5, 1, 1, 1), NlSubscriberAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nlLocalSubscriberId.setStatus('mandatory')
if mibBuilder.loadTexts: nlLocalSubscriberId.setDescription("The Subscriber ID. A valid subscriber ID is a string of numeric digits represented by their ASCII equivalents. The ASCII equivalents of the wildcard characters '*' and '?' are also allowed. A '*' is a wildcard that matches any number of digits, including zero. A '?' is a wildcard that matches any single digit.")
# Subscriber name: not exposed over SNMP; agent always returns a blank string.
nlLocalSubscriberName = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 5, 1, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 31))).setMaxAccess("readonly")
if mibBuilder.loadTexts: nlLocalSubscriberName.setStatus('mandatory')
if mibBuilder.loadTexts: nlLocalSubscriberName.setDescription('The Subscriber name. The subscriber name is not accessible from SNMP, and a blank string is always returned.')
# Routing algorithm enum; default 'round-robin'.
nlLocalSubscriberAlgorithm = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 5, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("round-robin", 1), ("line-failed", 2), ("line-busy", 3), ("least-lcn", 4))).clone('round-robin')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlLocalSubscriberAlgorithm.setStatus('mandatory')
if mibBuilder.loadTexts: nlLocalSubscriberAlgorithm.setDescription("The Subscriber routing algorithm. The following 4 types of routing algorithms are supported. 1 - round robin selects ports specified cyclicly in numerical order according to the list of Redirection addresses. If the port is unreachable or congested, the call request is sent to the next port. 2. Alternate Route on Line Failure - selects the first port not in a failed state. If a connection is not made, the call will be cleared even if other ports are available. 3. Alternate Route on Line Busy - selects the first port that has LCN's available (not busy). If a port has failed, the next available port is selected. 4. Least LCN with throughput selects the line with the minumum loading, calculated by weight and number of LCN's in use. If use of a routing algorithm is not desired, accept the default and specify only one routing path.")
# Redirection flags (no/yes, default 'no') for systematic/busy/out-of-order.
nlLocalSubscriberSystematicRedirect = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 5, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('no')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlLocalSubscriberSystematicRedirect.setStatus('mandatory')
if mibBuilder.loadTexts: nlLocalSubscriberSystematicRedirect.setDescription('Systematic Redirection.')
nlLocalSubscriberRedirectBusy = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 5, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('no')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlLocalSubscriberRedirectBusy.setStatus('mandatory')
if mibBuilder.loadTexts: nlLocalSubscriberRedirectBusy.setDescription('Redirect calls according to Redirection Addresses if the primary subscriber device is busy.')
nlLocalSubscriberRedirectOO = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 5, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('no')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlLocalSubscriberRedirectOO.setStatus('mandatory')
if mibBuilder.loadTexts: nlLocalSubscriberRedirectOO.setDescription('Redirect calls according to Redirection Addresses if the primary subscriber device is out of order.')
# In-node traffic priority (0..9).
nlLocalSubscriberPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 5, 1, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 9))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlLocalSubscriberPriority.setStatus('mandatory')
if mibBuilder.loadTexts: nlLocalSubscriberPriority.setDescription('Allows setting a priority for traffic within the node.')
nlLocalSubscriberRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 5, 1, 1, 8), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlLocalSubscriberRowStatus.setStatus('mandatory')
if mibBuilder.loadTexts: nlLocalSubscriberRowStatus.setDescription("This variable is used to manage the creation and deletion of conceptual rows in the nlLocalSubscriberTable and follows the SNMPv2 RowStatus conventions by supporting the following values: - `active', which indicates that the conceptual row is available for use by the managed device. For a management protocol set operation, a genErr response is returned when the row does not exist. - `createAndGo', which is supplied by a management station wishing to create a new instance of a conceptual row and to have its status automatically set to active, making it available for use by the managed device. For a management protocol set operation, a genErr response is returned when the row already exists. - `destroy', which is supplied by a management station wishing to delete all of the instances associated with an existing conceptual row. Note that all of the above values may be specified in a management protocol set operation, and only the 'active' value will be returned in response to a management protocol retrieval operation. For a management protocol set operation, if other variable bindings are included in the same PDU, then a genErr response is returned.")
# Table 2: up to 8 LP/port routing paths per subscriber,
# indexed by (subscriber ID, route index).
nlLocalSubscriberRouteTable = MibTable((1, 3, 6, 1, 4, 1, 173, 7, 5, 2), )
if mibBuilder.loadTexts: nlLocalSubscriberRouteTable.setStatus('mandatory')
if mibBuilder.loadTexts: nlLocalSubscriberRouteTable.setDescription('A Table to define local subscriber route paths.')
nlLocalSubscriberRouteEntry = MibTableRow((1, 3, 6, 1, 4, 1, 173, 7, 5, 2, 1), ).setIndexNames((0, "NETLINK-SPECIFIC-MIB", "nlLocalSubscriberId"), (0, "NETLINK-SPECIFIC-MIB", "nlLocalSubscriberRouteIndex"))
if mibBuilder.loadTexts: nlLocalSubscriberRouteEntry.setStatus('mandatory')
if mibBuilder.loadTexts: nlLocalSubscriberRouteEntry.setDescription('A Local Subscriber Route entry containing objects relating to local subscriber routes.')
nlLocalSubscriberRouteIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 5, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 8))).setMaxAccess("readonly")
if mibBuilder.loadTexts: nlLocalSubscriberRouteIndex.setStatus('mandatory')
if mibBuilder.loadTexts: nlLocalSubscriberRouteIndex.setDescription('Route path index for a specific subscriber route path.')
# Read-only configured flag: always 'yes' for an existing row.
nlLocalSubscriberRouteConf = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 5, 2, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: nlLocalSubscriberRouteConf.setStatus('mandatory')
if mibBuilder.loadTexts: nlLocalSubscriberRouteConf.setDescription('Flag to indicate if the LP/port routing path to the subscriber device is configured or not. This object is read- only. The route is always configured for an entry in the table.')
nlLocalSubscriberRouteLP = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 5, 2, 1, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlLocalSubscriberRouteLP.setStatus('mandatory')
if mibBuilder.loadTexts: nlLocalSubscriberRouteLP.setDescription('Instance of the LP that makes up the routing path')
nlLocalSubscriberRoutePort = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 5, 2, 1, 4), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlLocalSubscriberRoutePort.setStatus('mandatory')
if mibBuilder.loadTexts: nlLocalSubscriberRoutePort.setDescription('Instance of the port that makes up the routing path')
nlLocalSubscriberRouteRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 5, 2, 1, 5), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlLocalSubscriberRouteRowStatus.setStatus('mandatory')
if mibBuilder.loadTexts: nlLocalSubscriberRouteRowStatus.setDescription("This variable is used to manage the creation and deletion of conceptual rows in the nlLocalSubscriberRouteTable and follows the SNMPv2 RowStatus conventions by supporting the following values: - `active', which indicates that the conceptual row is available for use by the managed device. For a management protocol set operation, a genErr response is returned when the row does not exist. - `createAndGo', which is supplied by a management station wishing to create a new instance of a conceptual row and to have its status automatically set to active, making it available for use by the managed device. For a management protocol set operation, a genErr response is returned when the row already exists. - `destroy', which is supplied by a management station wishing to delete all of the instances associated with an existing conceptual row. Note that all of the above values may be specified in a management protocol set operation, and only the 'active' value will be returned in response to a management protocol retrieval operation. For a management protocol set operation, if other variable bindings are included in the same PDU, then a genErr response is returned.")
# Table 3: up to 5 redirection addresses per subscriber,
# indexed by (subscriber ID, redirection index).
nlLocalSubscriberRedirTable = MibTable((1, 3, 6, 1, 4, 1, 173, 7, 5, 3), )
if mibBuilder.loadTexts: nlLocalSubscriberRedirTable.setStatus('mandatory')
if mibBuilder.loadTexts: nlLocalSubscriberRedirTable.setDescription('A Table to list local subscriber redirection addresses.')
nlLocalSubscriberRedirEntry = MibTableRow((1, 3, 6, 1, 4, 1, 173, 7, 5, 3, 1), ).setIndexNames((0, "NETLINK-SPECIFIC-MIB", "nlLocalSubscriberId"), (0, "NETLINK-SPECIFIC-MIB", "nlLocalSubscriberRedirIndex"))
if mibBuilder.loadTexts: nlLocalSubscriberRedirEntry.setStatus('mandatory')
if mibBuilder.loadTexts: nlLocalSubscriberRedirEntry.setDescription('A Local Subscriber Redirection Address entry containing objects relating to local subscriber redirection addresses.')
nlLocalSubscriberRedirIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 5, 3, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 5))).setMaxAccess("readonly")
if mibBuilder.loadTexts: nlLocalSubscriberRedirIndex.setStatus('mandatory')
if mibBuilder.loadTexts: nlLocalSubscriberRedirIndex.setDescription('Redirection entry index for a local subscriber.')
nlLocalSubscriberRedirAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 5, 3, 1, 2), NlSubscriberAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlLocalSubscriberRedirAddr.setStatus('mandatory')
if mibBuilder.loadTexts: nlLocalSubscriberRedirAddr.setDescription('Subscriber redirection address.')
nlLocalSubscriberRedirRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 5, 3, 1, 3), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlLocalSubscriberRedirRowStatus.setStatus('mandatory')
if mibBuilder.loadTexts: nlLocalSubscriberRedirRowStatus.setDescription("This variable is used to manage the creation and deletion of conceptual rows in the nlLocalSubscriberRedirTable and follows the SNMPv2 RowStatus conventions by supporting the following values: - `active', which indicates that the conceptual row is available for use by the managed device. For a management protocol set operation, a genErr response is returned when the row does not exist. - `createAndGo', which is supplied by a management station wishing to create a new instance of a conceptual row and to have its status automatically set to active, making it available for use by the managed device. For a management protocol set operation, a genErr response is returned when the row already exists. - `destroy', which is supplied by a management station wishing to delete all of the instances associated with an existing conceptual row. Note that all of the above values may be specified in a management protocol set operation, and only the 'active' value will be returned in response to a management protocol retrieval operation. For a management protocol set operation, if other variable bindings are included in the same PDU, then a genErr response is returned.")
llc2 = MibIdentifier((1, 3, 6, 1, 4, 1, 173, 7, 6))
nlLlc2HostTable = MibTable((1, 3, 6, 1, 4, 1, 173, 7, 6, 1), )
if mibBuilder.loadTexts: nlLlc2HostTable.setStatus('mandatory')
if mibBuilder.loadTexts: nlLlc2HostTable.setDescription('A Table to define LLC2 host entries.')
nlLlc2HostEntry = MibTableRow((1, 3, 6, 1, 4, 1, 173, 7, 6, 1, 1), ).setIndexNames((0, "NETLINK-SPECIFIC-MIB", "nlLlc2HostGroup"), (0, "NETLINK-SPECIFIC-MIB", "nlLlc2HostIndex"))
if mibBuilder.loadTexts: nlLlc2HostEntry.setStatus('mandatory')
if mibBuilder.loadTexts: nlLlc2HostEntry.setDescription('An LLC2 Host entry containing objects relating to LLC2 hosts.')
nlLlc2HostIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 6, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 251))).setMaxAccess("readonly")
if mibBuilder.loadTexts: nlLlc2HostIndex.setStatus('mandatory')
if mibBuilder.loadTexts: nlLlc2HostIndex.setDescription('LLC2 Host Index')
nlLlc2HostMACAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 6, 1, 1, 2), MacAddress().clone(hexValue="400000000001")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlLlc2HostMACAddress.setStatus('mandatory')
if mibBuilder.loadTexts: nlLlc2HostMACAddress.setDescription('The LLC2 Host MAC Address.')
nlLlc2HostSessionType = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 6, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("originated", 1), ("terminated", 2))).clone('terminated')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlLlc2HostSessionType.setStatus('mandatory')
if mibBuilder.loadTexts: nlLlc2HostSessionType.setDescription('The mode in which the Host is operating')
nlLlc2HostT1ReplyTimer = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 6, 1, 1, 4), TimeInterval().subtype(subtypeSpec=ValueRangeConstraint(1, 250)).clone(10)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlLlc2HostT1ReplyTimer.setStatus('mandatory')
if mibBuilder.loadTexts: nlLlc2HostT1ReplyTimer.setDescription('Reply Timer used to detect a failure of the Remote node.')
nlLlc2HostT2RecvAckTimer = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 6, 1, 1, 5), TimeInterval().subtype(subtypeSpec=ValueRangeConstraint(100, 5000)).clone(100)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlLlc2HostT2RecvAckTimer.setStatus('mandatory')
if mibBuilder.loadTexts: nlLlc2HostT2RecvAckTimer.setDescription('Receive Ack Timer used to delay sending an acknowledgement of a received information LPDU.')
nlLlc2HostTiInactivityTimer = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 6, 1, 1, 6), TimeInterval().subtype(subtypeSpec=ValueRangeConstraint(1, 50)).clone(30)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlLlc2HostTiInactivityTimer.setStatus('mandatory')
if mibBuilder.loadTexts: nlLlc2HostTiInactivityTimer.setDescription('Timer used to detect inoperative condition of the remote link station or transmission medium.')
nlLlc2HostN3NumberLPDUs = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 6, 1, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 127)).clone(3)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlLlc2HostN3NumberLPDUs.setStatus('mandatory')
if mibBuilder.loadTexts: nlLlc2HostN3NumberLPDUs.setDescription('The number of Information LPDUs that need to be received before an acknowledgement is sent.')
nlLlc2HostTwNumberOutstanding = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 6, 1, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 127)).clone(7)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlLlc2HostTwNumberOutstanding.setStatus('mandatory')
if mibBuilder.loadTexts: nlLlc2HostTwNumberOutstanding.setDescription('Maximum number of sequentially numbered information LPDUs that can be outstanding at any time.')
nlLlc2HostN2ExpiredT1LPDUCount = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 6, 1, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255)).clone(8)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlLlc2HostN2ExpiredT1LPDUCount.setStatus('mandatory')
if mibBuilder.loadTexts: nlLlc2HostN2ExpiredT1LPDUCount.setDescription('Maximum number of LPDU retransmissions occur following the expiration of Timer T1.')
nlLlc2HostPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 6, 1, 1, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 9))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlLlc2HostPriority.setStatus('mandatory')
if mibBuilder.loadTexts: nlLlc2HostPriority.setDescription('Traffic priorities within the node.')
nlLlc2HostBAG = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 6, 1, 1, 11), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 16)).clone(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: nlLlc2HostBAG.setStatus('mandatory')
if mibBuilder.loadTexts: nlLlc2HostBAG.setDescription('Bandwidth Allocation group to which the LLC2 Host belongs. This object is read-only. Bandwidth allocation groups do not apply to LLC2 hosts.')
nlLlc2HostRoutingSubscriberId = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 6, 1, 1, 12), NlSubscriberAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlLlc2HostRoutingSubscriberId.setStatus('mandatory')
if mibBuilder.loadTexts: nlLlc2HostRoutingSubscriberId.setDescription('LLC2 hosts Routing Subscriber Id')
nlLlc2HostSrcMACAddressMask = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 6, 1, 1, 13), MacAddress().clone(hexValue="000000000000")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlLlc2HostSrcMACAddressMask.setStatus('mandatory')
if mibBuilder.loadTexts: nlLlc2HostSrcMACAddressMask.setDescription("LLC2 Host's Source MAC Address Mask")
nlLlc2HostAccess = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 6, 1, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("lan", 1), ("frameRelay", 2), ("tokenRing", 3), ("ethernet", 4), ("all", 5), ("not-applicable", 6))).clone('not-applicable')).setMaxAccess("readonly")
if mibBuilder.loadTexts: nlLlc2HostAccess.setStatus('mandatory')
if mibBuilder.loadTexts: nlLlc2HostAccess.setDescription('Specifies different type(s) of interface(s) that can be used for connections to a terminated host session.')
nlLlc2HostRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 6, 1, 1, 15), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlLlc2HostRowStatus.setStatus('mandatory')
if mibBuilder.loadTexts: nlLlc2HostRowStatus.setDescription('Row status convention for LLC2 Host entry.')
nlLlc2HostInterface = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 6, 1, 1, 16), Integer32().clone(300)).setMaxAccess("readonly")
if mibBuilder.loadTexts: nlLlc2HostInterface.setStatus('mandatory')
if mibBuilder.loadTexts: nlLlc2HostInterface.setDescription('LLC2 interface number associated with this host entry, always 300 for non-FRX7000.')
nlLlc2HostGroup = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 6, 1, 1, 17), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 4))).setMaxAccess("readonly")
if mibBuilder.loadTexts: nlLlc2HostGroup.setStatus('mandatory')
if mibBuilder.loadTexts: nlLlc2HostGroup.setDescription('Traffic group associated with LLC2 interface, always 1 for non-FRX7000.')
nlLlc2TermConnectionTable = MibTable((1, 3, 6, 1, 4, 1, 173, 7, 6, 2), )
if mibBuilder.loadTexts: nlLlc2TermConnectionTable.setStatus('mandatory')
if mibBuilder.loadTexts: nlLlc2TermConnectionTable.setDescription('A Table to describe LLC2 Terminated Host Connections.')
nlLlc2TermConnectionEntry = MibTableRow((1, 3, 6, 1, 4, 1, 173, 7, 6, 2, 1), ).setIndexNames((0, "NETLINK-SPECIFIC-MIB", "nlLlc2HostGroup"), (0, "NETLINK-SPECIFIC-MIB", "nlLlc2HostIndex"), (0, "NETLINK-SPECIFIC-MIB", "nlLlc2TermConnectionSequence"))
if mibBuilder.loadTexts: nlLlc2TermConnectionEntry.setStatus('mandatory')
if mibBuilder.loadTexts: nlLlc2TermConnectionEntry.setDescription('An LLC2 Terminated Host Connection entry containing objects relating to LLC2 terminated host connections.')
nlLlc2TermConnectionSequence = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 6, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 16))).setMaxAccess("readonly")
if mibBuilder.loadTexts: nlLlc2TermConnectionSequence.setStatus('mandatory')
if mibBuilder.loadTexts: nlLlc2TermConnectionSequence.setDescription('LLC2 Host connection Sequence number')
nlLlc2TermConnectionHSAP = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 6, 2, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(4, 254)).clone(4)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlLlc2TermConnectionHSAP.setStatus('mandatory')
if mibBuilder.loadTexts: nlLlc2TermConnectionHSAP.setDescription('Service Access point used to connect to the Host')
nlLlc2TermConnectionLocalSubscriberId = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 6, 2, 1, 3), NlSubscriberAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlLlc2TermConnectionLocalSubscriberId.setStatus('mandatory')
if mibBuilder.loadTexts: nlLlc2TermConnectionLocalSubscriberId.setDescription("LLC2 Host Connection's Local Subscriber Id")
nlLlc2TermConnectionRemoteSubscriberId = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 6, 2, 1, 4), NlSubscriberAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlLlc2TermConnectionRemoteSubscriberId.setStatus('mandatory')
if mibBuilder.loadTexts: nlLlc2TermConnectionRemoteSubscriberId.setDescription("LLC2 Host Connection's Remote Subscriber Id")
nlLlc2TermConnectionRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 6, 2, 1, 5), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlLlc2TermConnectionRowStatus.setStatus('mandatory')
if mibBuilder.loadTexts: nlLlc2TermConnectionRowStatus.setDescription("This variable is used to manage the creation and deletion of conceptual rows in the nlLlc2TermConnectionTable and follows the SNMPv2 RowStatus conventions by supporting the following values: - `active', which indicates that the conceptual row is available for use by the managed device. For a management protocol set operation, a genErr response is returned when the row does not exist. - `createAndGo', which is supplied by a management station wishing to create a new instance of a conceptual row and to have its status automatically set to active, making it available for use by the managed device. For a management protocol set operation, a genErr response is returned when the row already exists. - `destroy', which is supplied by a management station wishing to delete all of the instances associated with an existing conceptual row. Note that all of the above values may be specified in a management protocol set operation, and only the 'active' value will be returned in response to a management protocol retrieval operation. For a management protocol set operation, if other variable bindings are included in the same PDU, then a genErr response is returned.")
# --- nlLlc2OrigConnectionTable (1.3.6.1.4.1.173.7.6.3) ----------------------
# LLC2 Originated Host Connections.  Rows are indexed by
# (nlLlc2HostGroup, nlLlc2HostIndex, nlLlc2OrigConnectionSequence); the index
# objects are referenced by their string names within this module.
nlLlc2OrigConnectionTable = MibTable((1, 3, 6, 1, 4, 1, 173, 7, 6, 3), )
if mibBuilder.loadTexts: nlLlc2OrigConnectionTable.setStatus('mandatory')
if mibBuilder.loadTexts: nlLlc2OrigConnectionTable.setDescription('A Table to describe LLC2 Originated Host Connections.')
nlLlc2OrigConnectionEntry = MibTableRow((1, 3, 6, 1, 4, 1, 173, 7, 6, 3, 1), ).setIndexNames((0, "NETLINK-SPECIFIC-MIB", "nlLlc2HostGroup"), (0, "NETLINK-SPECIFIC-MIB", "nlLlc2HostIndex"), (0, "NETLINK-SPECIFIC-MIB", "nlLlc2OrigConnectionSequence"))
if mibBuilder.loadTexts: nlLlc2OrigConnectionEntry.setStatus('mandatory')
if mibBuilder.loadTexts: nlLlc2OrigConnectionEntry.setDescription('An LLC2 Originated Host Connection entry containing objects relating to LLC2 originated host connections.')
# Sequence number index column (1..16), read-only.
nlLlc2OrigConnectionSequence = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 6, 3, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 16))).setMaxAccess("readonly")
if mibBuilder.loadTexts: nlLlc2OrigConnectionSequence.setStatus('mandatory')
if mibBuilder.loadTexts: nlLlc2OrigConnectionSequence.setDescription('LLC2 Host connection Sequence number')
# Host SAP (4..254, default 4), writable.
nlLlc2OrigConnectionHSAP = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 6, 3, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(4, 254)).clone(4)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlLlc2OrigConnectionHSAP.setStatus('mandatory')
if mibBuilder.loadTexts: nlLlc2OrigConnectionHSAP.setDescription('Service Access point used to connect to the Host')
# Enumerated remote device type: sdlc(1) or llc2(2); default llc2.
nlLlc2OrigConnectionType = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 6, 3, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("sdlc", 1), ("llc2", 2))).clone('llc2')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlLlc2OrigConnectionType.setStatus('mandatory')
if mibBuilder.loadTexts: nlLlc2OrigConnectionType.setDescription('Specifies Remote subscriber device type')
nlLlc2OrigConnectionLocalSubscriberId = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 6, 3, 1, 4), NlSubscriberAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlLlc2OrigConnectionLocalSubscriberId.setStatus('mandatory')
if mibBuilder.loadTexts: nlLlc2OrigConnectionLocalSubscriberId.setDescription("LLC2 Host Connection's Local Subscriber Id")
nlLlc2OrigConnectionRemoteSubscriberId = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 6, 3, 1, 5), NlSubscriberAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlLlc2OrigConnectionRemoteSubscriberId.setStatus('mandatory')
if mibBuilder.loadTexts: nlLlc2OrigConnectionRemoteSubscriberId.setDescription("LLC2 Host Connection's Remote Subscriber Id")
# Columns 6-9 are retained for wire compatibility but the MIB marks their
# content obsolete ("This field is obsolete").
nlLlc2OrigConnectionIDBLK = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 6, 3, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nlLlc2OrigConnectionIDBLK.setStatus('mandatory')
if mibBuilder.loadTexts: nlLlc2OrigConnectionIDBLK.setDescription('This field is obsolete')
nlLlc2OrigConnectionIDNUM = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 6, 3, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nlLlc2OrigConnectionIDNUM.setStatus('mandatory')
if mibBuilder.loadTexts: nlLlc2OrigConnectionIDNUM.setDescription('This field is obsolete')
nlLlc2OrigConnectionMAXDATA = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 6, 3, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nlLlc2OrigConnectionMAXDATA.setStatus('mandatory')
if mibBuilder.loadTexts: nlLlc2OrigConnectionMAXDATA.setDescription('This field is obsolete')
nlLlc2OrigConnectionMAXIN = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 6, 3, 1, 9), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nlLlc2OrigConnectionMAXIN.setStatus('mandatory')
if mibBuilder.loadTexts: nlLlc2OrigConnectionMAXIN.setDescription('This field is obsolete')
# RowStatus column for row creation/deletion (SNMPv2 RowStatus convention).
nlLlc2OrigConnectionRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 6, 3, 1, 10), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlLlc2OrigConnectionRowStatus.setStatus('mandatory')
if mibBuilder.loadTexts: nlLlc2OrigConnectionRowStatus.setDescription("This variable is used to manage the creation and deletion of conceptual rows in the nlLlc2OrigConnectionTable and follows the SNMPv2 RowStatus conventions by supporting the following values: - `active', which indicates that the conceptual row is available for use by the managed device. For a management protocol set operation, a genErr response is returned when the row does not exist. - `createAndGo', which is supplied by a management station wishing to create a new instance of a conceptual row and to have its status automatically set to active, making it available for use by the managed device. For a management protocol set operation, a genErr response is returned when the row already exists. - `destroy', which is supplied by a management station wishing to delete all of the instances associated with an existing conceptual row. Note that all of the above values may be specified in a management protocol set operation, and only the 'active' value will be returned in response to a management protocol retrieval operation. For a management protocol set operation, if other variable bindings are included in the same PDU, then a genErr response is returned.")
# Scalar: next host record number to allocate (clone(1) sets the default).
nlLlc2NextHostNumber = MibScalar((1, 3, 6, 1, 4, 1, 173, 7, 6, 4), Integer32().clone(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: nlLlc2NextHostNumber.setStatus('mandatory')
if mibBuilder.loadTexts: nlLlc2NextHostNumber.setDescription('The host number to be used for the next created host record, always 1 on non-FRX7000.')
# --- status subtree (1.3.6.1.4.1.173.7.7) -----------------------------------
# Physical-port pin status.  pinStatusTable rows are indexed by
# (portPinRlp, portPinPort); portPinStatus packs one status octet per pin.
status = MibIdentifier((1, 3, 6, 1, 4, 1, 173, 7, 7))
pinStatusTable = MibTable((1, 3, 6, 1, 4, 1, 173, 7, 7, 4), )
if mibBuilder.loadTexts: pinStatusTable.setStatus('mandatory')
if mibBuilder.loadTexts: pinStatusTable.setDescription('A Table to describe the status of each pin on a physical port. This table is indexed by RLP and Port')
portPinEntry = MibTableRow((1, 3, 6, 1, 4, 1, 173, 7, 7, 4, 1), ).setIndexNames((0, "NETLINK-SPECIFIC-MIB", "portPinRlp"), (0, "NETLINK-SPECIFIC-MIB", "portPinPort"))
if mibBuilder.loadTexts: portPinEntry.setStatus('mandatory')
if mibBuilder.loadTexts: portPinEntry.setDescription('A port pin entry containing objects relating to ports.')
portPinRlp = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 7, 4, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: portPinRlp.setStatus('mandatory')
if mibBuilder.loadTexts: portPinRlp.setDescription('The RLP number on the node')
portPinPort = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 7, 4, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: portPinPort.setStatus('mandatory')
if mibBuilder.loadTexts: portPinPort.setDescription('The Port number on the RLP. Port numbers 0-7 are known to be physical ports.')
# One octet per pin: 00=unknown, 01=active, 02=inactive, 03=unused
# (per the DESCRIPTION text below).
portPinStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 7, 4, 1, 3), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: portPinStatus.setStatus('mandatory')
if mibBuilder.loadTexts: portPinStatus.setDescription('The status of all pins for this physical port. There is one octet value for each of the pins. Each octet value is defined as follows: 00 = unknown 01 = active 02 = inactive 03 = unused ')
# --- statistics subtree (1.3.6.1.4.1.173.7.8) -------------------------------
# Per-RLP statistics.  rlpStatsTable rows are indexed by rlpStatsIndex;
# all columns are read-only Counter32 (or the index Integer32).
statistics = MibIdentifier((1, 3, 6, 1, 4, 1, 173, 7, 8))
statGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 173, 7, 8, 1))
rlpStatsTable = MibTable((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 2), )
if mibBuilder.loadTexts: rlpStatsTable.setStatus('mandatory')
if mibBuilder.loadTexts: rlpStatsTable.setDescription('A Table to describe Statistics on each RLP')
rlpStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 2, 1), ).setIndexNames((0, "NETLINK-SPECIFIC-MIB", "rlpStatsIndex"))
if mibBuilder.loadTexts: rlpStatsEntry.setStatus('mandatory')
if mibBuilder.loadTexts: rlpStatsEntry.setDescription('An RLP statistics entry containing objects relating to RLP statistics.')
rlpStatsIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 2, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlpStatsIndex.setStatus('mandatory')
if mibBuilder.loadTexts: rlpStatsIndex.setDescription('The RLP number')
rlpStatsQMessages = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 2, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlpStatsQMessages.setStatus('mandatory')
if mibBuilder.loadTexts: rlpStatsQMessages.setDescription('The number of frames queued on the RLP')
# NOTE(review): modelled as Counter32 although the description says it is a
# percentage snapshot ("at the moment of inquiry") — type comes from the MIB.
rlpStatsUsedBuffers = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 2, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlpStatsUsedBuffers.setStatus('mandatory')
if mibBuilder.loadTexts: rlpStatsUsedBuffers.setDescription('The percentage of total buffers available being used at the moment of inquiry')
rlpStatsInFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 2, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlpStatsInFrames.setStatus('mandatory')
if mibBuilder.loadTexts: rlpStatsInFrames.setDescription('The number of frames received.')
rlpStatsOutFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 2, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlpStatsOutFrames.setStatus('mandatory')
if mibBuilder.loadTexts: rlpStatsOutFrames.setDescription('The number of frames transmitted.')
rlpStatsFrameRejects = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 2, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlpStatsFrameRejects.setStatus('mandatory')
if mibBuilder.loadTexts: rlpStatsFrameRejects.setDescription('The number of frames rejected.')
rlpStatsFrameRetransmits = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 2, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlpStatsFrameRetransmits.setStatus('mandatory')
if mibBuilder.loadTexts: rlpStatsFrameRetransmits.setDescription('The number of frames retransmitted . If this value is consistently greater than zero and there are no other problems, then increase the retransmission parameter.')
# --- portStatsTable (1.3.6.1.4.1.173.7.8.1.3) -------------------------------
# Per-port statistics, indexed by (portStatsRlpIndex, portStatsIndex).
# NOTE(review): the table's own description says "on each RLP" although this
# table is per-port; the text is carried verbatim from the MIB.
portStatsTable = MibTable((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 3), )
if mibBuilder.loadTexts: portStatsTable.setStatus('mandatory')
if mibBuilder.loadTexts: portStatsTable.setDescription('A Table to describe Statistics on each RLP')
portStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 3, 1), ).setIndexNames((0, "NETLINK-SPECIFIC-MIB", "portStatsRlpIndex"), (0, "NETLINK-SPECIFIC-MIB", "portStatsIndex"))
if mibBuilder.loadTexts: portStatsEntry.setStatus('mandatory')
if mibBuilder.loadTexts: portStatsEntry.setDescription('A port statistics entry containing objects relating to port statistics.')
portStatsRlpIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 3, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: portStatsRlpIndex.setStatus('mandatory')
if mibBuilder.loadTexts: portStatsRlpIndex.setDescription('The RLP number')
portStatsIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 3, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: portStatsIndex.setStatus('mandatory')
if mibBuilder.loadTexts: portStatsIndex.setDescription('The Port number')
portStatsInFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 3, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: portStatsInFrames.setStatus('mandatory')
if mibBuilder.loadTexts: portStatsInFrames.setDescription('The number of frames received.')
portStatsOutFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 3, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: portStatsOutFrames.setStatus('mandatory')
if mibBuilder.loadTexts: portStatsOutFrames.setDescription('The number of frames transmitted.')
portStatsFrameRetrans = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 3, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: portStatsFrameRetrans.setStatus('mandatory')
if mibBuilder.loadTexts: portStatsFrameRetrans.setDescription('The number of frames retransmitted.')
# NOTE(review): the typos "probelms" (twice) and "ration" below originate in
# the MIB DESCRIPTION text; left as-is so this mirror matches the MIB source.
portStatsFCSErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 3, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: portStatsFCSErrors.setStatus('mandatory')
if mibBuilder.loadTexts: portStatsFCSErrors.setDescription('The number of Frame Check Sequence (FCS) errors. A value consistently greater than zero indicates line probelms.')
portStatsLogicalRejects = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 3, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: portStatsLogicalRejects.setStatus('mandatory')
if mibBuilder.loadTexts: portStatsLogicalRejects.setDescription('The sum of Rejects and Frame Rejects. A value consistently greater than zero indicates line probelms.')
portStatsInPercentUtils = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 3, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: portStatsInPercentUtils.setStatus('mandatory')
if mibBuilder.loadTexts: portStatsInPercentUtils.setDescription('The ratio of non-flag characters to total characters being received by the port.')
portStatsOutPercentUtils = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 3, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: portStatsOutPercentUtils.setStatus('mandatory')
if mibBuilder.loadTexts: portStatsOutPercentUtils.setDescription('The ration of non-flag characters to total characters being transmitted by the port.')
# --- statFrame subtree / frStatsTable (1.3.6.1.4.1.173.7.8.1.4.1) -----------
# Frame Relay frame-level counters per port, indexed by
# (frStatsRlpIndex, frStatsPortIndex).  Columns count DE/FECN/BECN-marked
# frames, LMI and Annex-D management frames, and total discards.
statFrame = MibIdentifier((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 4))
frStatsTable = MibTable((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 4, 1), )
if mibBuilder.loadTexts: frStatsTable.setStatus('mandatory')
if mibBuilder.loadTexts: frStatsTable.setDescription('A Table to describe Frame Relay Frame Level Statistics on each Port')
frStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 4, 1, 1), ).setIndexNames((0, "NETLINK-SPECIFIC-MIB", "frStatsRlpIndex"), (0, "NETLINK-SPECIFIC-MIB", "frStatsPortIndex"))
if mibBuilder.loadTexts: frStatsEntry.setStatus('mandatory')
if mibBuilder.loadTexts: frStatsEntry.setDescription('A frame relay port statistics entry containing objects relating to frame relay port statistics.')
frStatsRlpIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 4, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: frStatsRlpIndex.setStatus('mandatory')
if mibBuilder.loadTexts: frStatsRlpIndex.setDescription('The RLP Number.')
frStatsPortIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 4, 1, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: frStatsPortIndex.setStatus('mandatory')
if mibBuilder.loadTexts: frStatsPortIndex.setDescription('The Port number.')
frStatsTxDEFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 4, 1, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: frStatsTxDEFrames.setStatus('mandatory')
if mibBuilder.loadTexts: frStatsTxDEFrames.setDescription('The number of frames transmitted on the port with the Discard Eligibility bit set.')
frStatsRxDEFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 4, 1, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: frStatsRxDEFrames.setStatus('mandatory')
if mibBuilder.loadTexts: frStatsRxDEFrames.setDescription('The number of frames received on the port with the discard eligibility bit set.')
frStatsTxFECNFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 4, 1, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: frStatsTxFECNFrames.setStatus('mandatory')
if mibBuilder.loadTexts: frStatsTxFECNFrames.setDescription('The of number of frames transmitted on the port with FECN.')
frStatsRxFECNFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 4, 1, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: frStatsRxFECNFrames.setStatus('mandatory')
if mibBuilder.loadTexts: frStatsRxFECNFrames.setDescription('The number of frames received on the port with FECN.')
frStatsTxBECNFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 4, 1, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: frStatsTxBECNFrames.setStatus('mandatory')
if mibBuilder.loadTexts: frStatsTxBECNFrames.setDescription('The number of frames transmitted on the port with BECN.')
frStatsRxBECNFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 4, 1, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: frStatsRxBECNFrames.setStatus('mandatory')
if mibBuilder.loadTexts: frStatsRxBECNFrames.setDescription('The number of frames received on the port with BECN.')
frStatsTxLMIFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 4, 1, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: frStatsTxLMIFrames.setStatus('mandatory')
if mibBuilder.loadTexts: frStatsTxLMIFrames.setDescription('The number of LMI frames transmitted on the port.')
frStatsRxLMIFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 4, 1, 1, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: frStatsRxLMIFrames.setStatus('mandatory')
if mibBuilder.loadTexts: frStatsRxLMIFrames.setDescription('The number of LMI frames received on the port.')
frStatsTxANXDFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 4, 1, 1, 11), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: frStatsTxANXDFrames.setStatus('mandatory')
if mibBuilder.loadTexts: frStatsTxANXDFrames.setDescription('The number of AnnexD frames transmitted on the port')
frStatsRxANXDFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 4, 1, 1, 12), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: frStatsRxANXDFrames.setStatus('mandatory')
if mibBuilder.loadTexts: frStatsRxANXDFrames.setDescription('The number of AnnexD frames received on the port')
frStatsTotDiscFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 4, 1, 1, 13), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: frStatsTotDiscFrames.setStatus('mandatory')
if mibBuilder.loadTexts: frStatsTotDiscFrames.setDescription('The total number of discarded frames on the port.')
# --- x25TxStatsTable (1.3.6.1.4.1.173.7.8.1.4.2) ----------------------------
# X.25 frame-level TRANSMIT counters per port, indexed by
# (x25TxRlpIndex, x25TxPortIndex).  One Counter32 per LAPB frame type
# (SABM, UA, DISC, DM, FRMR, REJ, RR, RNR, INFO).  Mirrors x25RxStatsTable.
x25TxStatsTable = MibTable((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 4, 2), )
if mibBuilder.loadTexts: x25TxStatsTable.setStatus('mandatory')
if mibBuilder.loadTexts: x25TxStatsTable.setDescription('A Table to describe X25 Frame Level Txmit Statistics on each Port')
x25TxStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 4, 2, 1), ).setIndexNames((0, "NETLINK-SPECIFIC-MIB", "x25TxRlpIndex"), (0, "NETLINK-SPECIFIC-MIB", "x25TxPortIndex"))
if mibBuilder.loadTexts: x25TxStatsEntry.setStatus('mandatory')
if mibBuilder.loadTexts: x25TxStatsEntry.setDescription('An X25 frame level transmit statistics entry containing objects relating to x25 frame level transmit statistics.')
x25TxRlpIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 4, 2, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: x25TxRlpIndex.setStatus('mandatory')
if mibBuilder.loadTexts: x25TxRlpIndex.setDescription('The RLP Number.')
x25TxPortIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 4, 2, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: x25TxPortIndex.setStatus('mandatory')
if mibBuilder.loadTexts: x25TxPortIndex.setDescription('The Port number.')
x25TxSABMFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 4, 2, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: x25TxSABMFrames.setStatus('mandatory')
if mibBuilder.loadTexts: x25TxSABMFrames.setDescription("The number of SABM's transmitted on the port.")
x25TxUAFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 4, 2, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: x25TxUAFrames.setStatus('mandatory')
if mibBuilder.loadTexts: x25TxUAFrames.setDescription('The number of UA Frames transmitted on the port.')
x25TxDISCFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 4, 2, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: x25TxDISCFrames.setStatus('mandatory')
if mibBuilder.loadTexts: x25TxDISCFrames.setDescription('The number of DISC Frames transmitted on the port.')
x25TxDMFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 4, 2, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: x25TxDMFrames.setStatus('mandatory')
if mibBuilder.loadTexts: x25TxDMFrames.setDescription('The number of DM frames transmitted on the port.')
x25TxFRMRFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 4, 2, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: x25TxFRMRFrames.setStatus('mandatory')
if mibBuilder.loadTexts: x25TxFRMRFrames.setDescription('The of Number of FRMR frames transmitted on the port.')
x25TxREJFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 4, 2, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: x25TxREJFrames.setStatus('mandatory')
if mibBuilder.loadTexts: x25TxREJFrames.setDescription('The number of Reject Frames transmitted on the port.')
x25TxRRFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 4, 2, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: x25TxRRFrames.setStatus('mandatory')
if mibBuilder.loadTexts: x25TxRRFrames.setDescription('The number of RR frames transmitted on the port.')
x25TxRNRFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 4, 2, 1, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: x25TxRNRFrames.setStatus('mandatory')
if mibBuilder.loadTexts: x25TxRNRFrames.setDescription('The number of RNR Frames transmitted on the port.')
x25TxINFOFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 4, 2, 1, 11), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: x25TxINFOFrames.setStatus('mandatory')
if mibBuilder.loadTexts: x25TxINFOFrames.setDescription('The number of INFO Frames transmitted on the port')
# --- x25RxStatsTable (1.3.6.1.4.1.173.7.8.1.4.3) ----------------------------
# X.25 frame-level RECEIVE counters per port, indexed by
# (x25RxRlpIndex, x25RxPortIndex).  Column-for-column mirror of
# x25TxStatsTable with "received" counters.
x25RxStatsTable = MibTable((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 4, 3), )
if mibBuilder.loadTexts: x25RxStatsTable.setStatus('mandatory')
if mibBuilder.loadTexts: x25RxStatsTable.setDescription('A Table to describe X25 Frame Level Rxmit Statistics on each Port')
x25RxStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 4, 3, 1), ).setIndexNames((0, "NETLINK-SPECIFIC-MIB", "x25RxRlpIndex"), (0, "NETLINK-SPECIFIC-MIB", "x25RxPortIndex"))
if mibBuilder.loadTexts: x25RxStatsEntry.setStatus('mandatory')
if mibBuilder.loadTexts: x25RxStatsEntry.setDescription('An X25 frame level receive statistics entry containing objects relating to x25 frame level receive statistics.')
x25RxRlpIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 4, 3, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: x25RxRlpIndex.setStatus('mandatory')
if mibBuilder.loadTexts: x25RxRlpIndex.setDescription('The RLP Number.')
x25RxPortIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 4, 3, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: x25RxPortIndex.setStatus('mandatory')
if mibBuilder.loadTexts: x25RxPortIndex.setDescription('The Port number.')
x25RxSABMFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 4, 3, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: x25RxSABMFrames.setStatus('mandatory')
if mibBuilder.loadTexts: x25RxSABMFrames.setDescription("The number of SABM's received on the port.")
x25RxUAFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 4, 3, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: x25RxUAFrames.setStatus('mandatory')
if mibBuilder.loadTexts: x25RxUAFrames.setDescription('The number of UA Frames received on the port.')
x25RxDISCFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 4, 3, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: x25RxDISCFrames.setStatus('mandatory')
if mibBuilder.loadTexts: x25RxDISCFrames.setDescription('The number of DISC Frames received on the port.')
x25RxDMFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 4, 3, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: x25RxDMFrames.setStatus('mandatory')
if mibBuilder.loadTexts: x25RxDMFrames.setDescription('The number of DM frames received on the port.')
x25RxFRMRFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 4, 3, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: x25RxFRMRFrames.setStatus('mandatory')
if mibBuilder.loadTexts: x25RxFRMRFrames.setDescription('The of Number of FRMR frames received on the port.')
x25RxREJFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 4, 3, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: x25RxREJFrames.setStatus('mandatory')
if mibBuilder.loadTexts: x25RxREJFrames.setDescription('The number of Reject Frames received on the port.')
x25RxRRFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 4, 3, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: x25RxRRFrames.setStatus('mandatory')
if mibBuilder.loadTexts: x25RxRRFrames.setDescription('The number of RR frames received on the port.')
x25RxRNRFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 4, 3, 1, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: x25RxRNRFrames.setStatus('mandatory')
if mibBuilder.loadTexts: x25RxRNRFrames.setDescription('The number of RNR Frames received on the port.')
x25RxINFOFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 4, 3, 1, 11), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: x25RxINFOFrames.setStatus('mandatory')
if mibBuilder.loadTexts: x25RxINFOFrames.setDescription('The number of INFO Frames received on the port')
# --- statBag / statIp / statT1 subtrees -------------------------------------
# statBag (…8.1.5) and statIp (…8.1.6) are bare OID identifiers; any children
# are defined elsewhere in this module.  statT1 (…8.1.7) holds t1StatsTable:
# T1 line statistics indexed by (t1StatsRlpIndex, t1StatsPortIndex).
statBag = MibIdentifier((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 5))
statIp = MibIdentifier((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 6))
statT1 = MibIdentifier((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 7))
t1StatsTable = MibTable((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 7, 1), )
if mibBuilder.loadTexts: t1StatsTable.setStatus('mandatory')
if mibBuilder.loadTexts: t1StatsTable.setDescription('A Table to describe T1 Statistics.')
t1StatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 7, 1, 1), ).setIndexNames((0, "NETLINK-SPECIFIC-MIB", "t1StatsRlpIndex"), (0, "NETLINK-SPECIFIC-MIB", "t1StatsPortIndex"))
if mibBuilder.loadTexts: t1StatsEntry.setStatus('mandatory')
if mibBuilder.loadTexts: t1StatsEntry.setDescription('A T1 port statistics entry containing objects relating to T1 port statistics.')
t1StatsRlpIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 7, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: t1StatsRlpIndex.setStatus('mandatory')
if mibBuilder.loadTexts: t1StatsRlpIndex.setDescription('The RLP Number.')
t1StatsPortIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 7, 1, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: t1StatsPortIndex.setStatus('mandatory')
if mibBuilder.loadTexts: t1StatsPortIndex.setDescription('The Port Number.')
t1StatsRcvFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 7, 1, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: t1StatsRcvFrames.setStatus('mandatory')
if mibBuilder.loadTexts: t1StatsRcvFrames.setDescription('Total received frames.')
t1StatsXmitFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 7, 1, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: t1StatsXmitFrames.setStatus('mandatory')
if mibBuilder.loadTexts: t1StatsXmitFrames.setDescription('Total transmitted frames.')
t1StatsLCVCnt = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 7, 1, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: t1StatsLCVCnt.setStatus('mandatory')
if mibBuilder.loadTexts: t1StatsLCVCnt.setDescription('Line code violation count.')
t1StatsPCVRErrs = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 7, 1, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: t1StatsPCVRErrs.setStatus('mandatory')
if mibBuilder.loadTexts: t1StatsPCVRErrs.setDescription('PCVR Bit error count.')
t1StatsOOSCnt = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 7, 1, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: t1StatsOOSCnt.setStatus('mandatory')
if mibBuilder.loadTexts: t1StatsOOSCnt.setDescription('Multiframe out of sync count.')
# T1 alarm counters (blue/yellow/red).
t1StatsBlueAlarms = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 7, 1, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: t1StatsBlueAlarms.setStatus('mandatory')
if mibBuilder.loadTexts: t1StatsBlueAlarms.setDescription('Blue alarm count.')
t1StatsYellowAlarms = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 7, 1, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: t1StatsYellowAlarms.setStatus('mandatory')
if mibBuilder.loadTexts: t1StatsYellowAlarms.setDescription('Yellow alarm count.')
t1StatsRedAlarms = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 7, 1, 1, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: t1StatsRedAlarms.setStatus('mandatory')
if mibBuilder.loadTexts: t1StatsRedAlarms.setDescription('Red alarm count.')
t1StatsRcvUsage = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 7, 1, 1, 11), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: t1StatsRcvUsage.setStatus('mandatory')
if mibBuilder.loadTexts: t1StatsRcvUsage.setDescription('Receive port utilization.')
t1StatsXmitUsage = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 7, 1, 1, 12), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: t1StatsXmitUsage.setStatus('mandatory')
if mibBuilder.loadTexts: t1StatsXmitUsage.setDescription('Transmit port utilization.')
t1StatsXmitAbortFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 7, 1, 1, 13), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: t1StatsXmitAbortFrames.setStatus('mandatory')
if mibBuilder.loadTexts: t1StatsXmitAbortFrames.setDescription('Total aborted transmit frames.')
t1StatsRcvAbortFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 7, 1, 1, 14), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: t1StatsRcvAbortFrames.setStatus('mandatory')
if mibBuilder.loadTexts: t1StatsRcvAbortFrames.setDescription('Total aborted receive frames.')
t1StatsRcvOverruns = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 7, 1, 1, 15), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: t1StatsRcvOverruns.setStatus('mandatory')
if mibBuilder.loadTexts: t1StatsRcvOverruns.setDescription('Total receive overruns.')
# NOTE(review): columns 16 and 17 carry the same MIB description
# ('Total bad receives.') even though the identifiers differ.
t1StatsRcvErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 7, 1, 1, 16), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: t1StatsRcvErrors.setStatus('mandatory')
if mibBuilder.loadTexts: t1StatsRcvErrors.setDescription('Total bad receives.')
t1StatsRcvChannelErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 7, 1, 1, 17), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: t1StatsRcvChannelErrors.setStatus('mandatory')
if mibBuilder.loadTexts: t1StatsRcvChannelErrors.setDescription('Total bad receives.')
# --- statDS0A subtree / ds0aStatsTable (1.3.6.1.4.1.173.7.8.1.8.1) ----------
# DS0A channel statistics, indexed by
# (ds0aStatsRlpIndex, ds0aStatsPortIndex, ds0aStatsChannelIndex).
statDS0A = MibIdentifier((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 8))
ds0aStatsTable = MibTable((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 8, 1), )
if mibBuilder.loadTexts: ds0aStatsTable.setStatus('mandatory')
if mibBuilder.loadTexts: ds0aStatsTable.setDescription('A Table to describe DS0A Statistics.')
ds0aStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 8, 1, 1), ).setIndexNames((0, "NETLINK-SPECIFIC-MIB", "ds0aStatsRlpIndex"), (0, "NETLINK-SPECIFIC-MIB", "ds0aStatsPortIndex"), (0, "NETLINK-SPECIFIC-MIB", "ds0aStatsChannelIndex"))
if mibBuilder.loadTexts: ds0aStatsEntry.setStatus('mandatory')
if mibBuilder.loadTexts: ds0aStatsEntry.setDescription('A DS0A port statistics entry containing objects relating to DS0A port statistics.')
ds0aStatsRlpIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 8, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ds0aStatsRlpIndex.setStatus('mandatory')
if mibBuilder.loadTexts: ds0aStatsRlpIndex.setDescription('The RLP Number.')
ds0aStatsPortIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 8, 1, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ds0aStatsPortIndex.setStatus('mandatory')
if mibBuilder.loadTexts: ds0aStatsPortIndex.setDescription('The Port Number.')
ds0aStatsChannelIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 8, 1, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ds0aStatsChannelIndex.setStatus('mandatory')
if mibBuilder.loadTexts: ds0aStatsChannelIndex.setDescription('The Channel Number.')
ds0aStatsXmitFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 8, 1, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ds0aStatsXmitFrames.setStatus('mandatory')
if mibBuilder.loadTexts: ds0aStatsXmitFrames.setDescription('Total transmitted frames.')
ds0aStatsRcvFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 8, 1, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ds0aStatsRcvFrames.setStatus('mandatory')
if mibBuilder.loadTexts: ds0aStatsRcvFrames.setDescription('Total received frames.')
ds0aStatsRcvAbortFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 8, 1, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ds0aStatsRcvAbortFrames.setStatus('mandatory')
if mibBuilder.loadTexts: ds0aStatsRcvAbortFrames.setDescription('Total receive aborted frames.')
ds0aStatsRcvOverruns = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 8, 1, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ds0aStatsRcvOverruns.setStatus('mandatory')
if mibBuilder.loadTexts: ds0aStatsRcvOverruns.setDescription('Total receive overruns.')
ds0aStatsRcvErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 8, 1, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ds0aStatsRcvErrors.setStatus('mandatory')
if mibBuilder.loadTexts: ds0aStatsRcvErrors.setDescription('Total receive errors.')
# --- Voice-over-Frame-Relay statistics subtree (...173.7.8.1.9) ----------
# Per-voice-port call counters, indexed by RLP and port number.  All
# columns are read-only Counter32 values.
statVoice = MibIdentifier((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 9))
voiceStatsTable = MibTable((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 9, 1), )
if mibBuilder.loadTexts: voiceStatsTable.setStatus('mandatory')
if mibBuilder.loadTexts: voiceStatsTable.setDescription('A Table to describe Voice over Frame Relay Statistics.')
# Row index: (voiceStatsRlpIndex, voiceStatsPortIndex).
voiceStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 9, 1, 1), ).setIndexNames((0, "NETLINK-SPECIFIC-MIB", "voiceStatsRlpIndex"), (0, "NETLINK-SPECIFIC-MIB", "voiceStatsPortIndex"))
if mibBuilder.loadTexts: voiceStatsEntry.setStatus('mandatory')
if mibBuilder.loadTexts: voiceStatsEntry.setDescription('A voice port statistics entry containing objects relating to voice port statistics.')
# Index columns (1-2).
voiceStatsRlpIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 9, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: voiceStatsRlpIndex.setStatus('mandatory')
if mibBuilder.loadTexts: voiceStatsRlpIndex.setDescription('The RLP Number.')
voiceStatsPortIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 9, 1, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: voiceStatsPortIndex.setStatus('mandatory')
if mibBuilder.loadTexts: voiceStatsPortIndex.setDescription('The Port Number.')
# Call counters (3-12): calls, accepts, clears, busy/timeout, congestion.
voiceStatsRxCalls = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 9, 1, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: voiceStatsRxCalls.setStatus('mandatory')
if mibBuilder.loadTexts: voiceStatsRxCalls.setDescription('The Number of Calls Received from the Network.')
voiceStatsTxCalls = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 9, 1, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: voiceStatsTxCalls.setStatus('mandatory')
if mibBuilder.loadTexts: voiceStatsTxCalls.setDescription('The Number of Calls Transmitted to the Network.')
voiceStatsRxCallsAccepts = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 9, 1, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: voiceStatsRxCallsAccepts.setStatus('mandatory')
if mibBuilder.loadTexts: voiceStatsRxCallsAccepts.setDescription('The Number of Call Accepts Received from the Network.')
voiceStatsTxCallsAccepts = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 9, 1, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: voiceStatsTxCallsAccepts.setStatus('mandatory')
if mibBuilder.loadTexts: voiceStatsTxCallsAccepts.setDescription('The Number of Call Accepts Transmitted to the Network.')
voiceStatsRxClears = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 9, 1, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: voiceStatsRxClears.setStatus('mandatory')
if mibBuilder.loadTexts: voiceStatsRxClears.setDescription('The Number of Call Receives cleared.')
voiceStatsTxClears = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 9, 1, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: voiceStatsTxClears.setStatus('mandatory')
if mibBuilder.loadTexts: voiceStatsTxClears.setDescription('The Number of Call Transmitts cleared.')
voiceStatsBusyCalls = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 9, 1, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: voiceStatsBusyCalls.setStatus('mandatory')
if mibBuilder.loadTexts: voiceStatsBusyCalls.setDescription('The Number of calls which received Busy indication.')
voiceStatsCallTimeouts = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 9, 1, 1, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: voiceStatsCallTimeouts.setStatus('mandatory')
if mibBuilder.loadTexts: voiceStatsCallTimeouts.setDescription('The Number of calls which transmitted time-out to the network.')
voiceStatsRxCongestions = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 9, 1, 1, 11), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: voiceStatsRxCongestions.setStatus('mandatory')
if mibBuilder.loadTexts: voiceStatsRxCongestions.setDescription('The number of congestion management frames Received.')
voiceStatsTxCongestions = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 1, 9, 1, 1, 12), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: voiceStatsTxCongestions.setStatus('mandatory')
if mibBuilder.loadTexts: voiceStatsTxCongestions.setDescription('The number of congestion management frames Transmitted.')
# --- Statistics thresholds subtree (...173.7.8.2) ------------------------
# Per-RLP threshold table, indexed by RLP number; all columns read-only.
statThresh = MibIdentifier((1, 3, 6, 1, 4, 1, 173, 7, 8, 2))
rlpThreshTable = MibTable((1, 3, 6, 1, 4, 1, 173, 7, 8, 2, 1), )
if mibBuilder.loadTexts: rlpThreshTable.setStatus('mandatory')
if mibBuilder.loadTexts: rlpThreshTable.setDescription('A Table to describe Statistic thresholds on each RLP')
# Row index: (rlpThreshRlpIndex).
rlpThreshEntry = MibTableRow((1, 3, 6, 1, 4, 1, 173, 7, 8, 2, 1, 1), ).setIndexNames((0, "NETLINK-SPECIFIC-MIB", "rlpThreshRlpIndex"))
if mibBuilder.loadTexts: rlpThreshEntry.setStatus('mandatory')
if mibBuilder.loadTexts: rlpThreshEntry.setDescription('An RLP statistics threshold entry containing objects relating to RLP statistics thresholds.')
rlpThreshRlpIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 2, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlpThreshRlpIndex.setStatus('mandatory')
if mibBuilder.loadTexts: rlpThreshRlpIndex.setDescription('The RLP Number.')
rlpThreshPercntBufInUse = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 2, 1, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlpThreshPercntBufInUse.setStatus('mandatory')
if mibBuilder.loadTexts: rlpThreshPercntBufInUse.setDescription('The threshold for Percent of Buffers in use on the RLP')
# Threshold for the message queue length on the RLP (column 3).
# FIX: the generated description was a verbatim copy of the
# rlpThreshPercntBufInUse text ("Percent of Buffers in use"), which does
# not describe this object; corrected to match the object's name.
rlpThreshMsgQueueLen = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 2, 1, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlpThreshMsgQueueLen.setStatus('mandatory')
if mibBuilder.loadTexts: rlpThreshMsgQueueLen.setDescription('The threshold for the message queue length on the RLP')
# Per-RLP frame-rate thresholds (columns 4-7): received, transmitted,
# rejected and retransmitted frames per second.
rlpThreshRxFramesPerSec = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 2, 1, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlpThreshRxFramesPerSec.setStatus('mandatory')
if mibBuilder.loadTexts: rlpThreshRxFramesPerSec.setDescription('The threshold for Rcvd frames per second on the RLP')
rlpThreshTxFramesPerSec = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 2, 1, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlpThreshTxFramesPerSec.setStatus('mandatory')
if mibBuilder.loadTexts: rlpThreshTxFramesPerSec.setDescription('The threshold for Txmitted frames per second on the RLP')
rlpThreshRejFramesPerSec = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 2, 1, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlpThreshRejFramesPerSec.setStatus('mandatory')
if mibBuilder.loadTexts: rlpThreshRejFramesPerSec.setDescription('The threshold for Rejected Frames per second on the RLP')
rlpThreshRtxFramesPerSec = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 2, 1, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlpThreshRtxFramesPerSec.setStatus('mandatory')
if mibBuilder.loadTexts: rlpThreshRtxFramesPerSec.setDescription('The threshold for Retransmitted frames per second on the RLP')
# --- Per-port statistics threshold table (...173.7.8.2.2) ----------------
# Indexed by RLP and port number; all columns are read-only Integer32
# threshold values.
portThreshTable = MibTable((1, 3, 6, 1, 4, 1, 173, 7, 8, 2, 2), )
if mibBuilder.loadTexts: portThreshTable.setStatus('mandatory')
if mibBuilder.loadTexts: portThreshTable.setDescription('A Table to describe Statistic thresholds on each port')
# Row index: (portThreshRlpIndex, portThreshIndex).
portThreshEntry = MibTableRow((1, 3, 6, 1, 4, 1, 173, 7, 8, 2, 2, 1), ).setIndexNames((0, "NETLINK-SPECIFIC-MIB", "portThreshRlpIndex"), (0, "NETLINK-SPECIFIC-MIB", "portThreshIndex"))
if mibBuilder.loadTexts: portThreshEntry.setStatus('mandatory')
if mibBuilder.loadTexts: portThreshEntry.setDescription('A port statistics threshold entry containing objects relating to port statistics thresholds.')
# Index columns (1-2).
portThreshRlpIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 2, 2, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: portThreshRlpIndex.setStatus('mandatory')
if mibBuilder.loadTexts: portThreshRlpIndex.setDescription('The RLP Number.')
portThreshIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 2, 2, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: portThreshIndex.setStatus('mandatory')
if mibBuilder.loadTexts: portThreshIndex.setDescription('The Port Number.')
# Threshold columns (3-11): frame rates, error rates, ratios, utilization.
portThreshRxFramesPerSec = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 2, 2, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: portThreshRxFramesPerSec.setStatus('mandatory')
if mibBuilder.loadTexts: portThreshRxFramesPerSec.setDescription('The threshold for Rcvd frames per second on the port.')
portThreshTxFramesPerSec = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 2, 2, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: portThreshTxFramesPerSec.setStatus('mandatory')
if mibBuilder.loadTexts: portThreshTxFramesPerSec.setDescription('The threshold for Txmitted frames per second on the port.')
portThreshRtxFramesPerSec = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 2, 2, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: portThreshRtxFramesPerSec.setStatus('mandatory')
if mibBuilder.loadTexts: portThreshRtxFramesPerSec.setDescription('The threshold for Retransmitted frames per second on the port.')
portThreshFCSErrPerSec = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 2, 2, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: portThreshFCSErrPerSec.setStatus('mandatory')
if mibBuilder.loadTexts: portThreshFCSErrPerSec.setDescription('The threshold for FCS errors per second on the port.')
portThreshLogRejPerSec = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 2, 2, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: portThreshLogRejPerSec.setStatus('mandatory')
if mibBuilder.loadTexts: portThreshLogRejPerSec.setDescription('The threshold for Logical Rejects per second on the port.')
portThreshTxErrorRatio = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 2, 2, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: portThreshTxErrorRatio.setStatus('mandatory')
if mibBuilder.loadTexts: portThreshTxErrorRatio.setDescription('The threshold for Txmit error ratio on the port.')
portThreshRxErrorRatio = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 2, 2, 1, 9), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: portThreshRxErrorRatio.setStatus('mandatory')
if mibBuilder.loadTexts: portThreshRxErrorRatio.setDescription('The threshold for Rcv error ratio on the port.')
portThreshTxPercentUtl = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 2, 2, 1, 10), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: portThreshTxPercentUtl.setStatus('mandatory')
if mibBuilder.loadTexts: portThreshTxPercentUtl.setDescription('The threshold for Txmit percent utilization on the port.')
portThreshRxPercentUtl = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 8, 2, 2, 1, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: portThreshRxPercentUtl.setStatus('mandatory')
if mibBuilder.loadTexts: portThreshRxPercentUtl.setDescription('The threshold for Rcv percent utilization on the port.')
# --- Bridge configuration subtree (...173.7.9) ---------------------------
# Scalar objects controlling bridging behaviour.  Admin/Oper pairs follow
# the usual convention: the Admin object is read-write, the matching Oper
# object is the read-only operational value.
bridge = MibIdentifier((1, 3, 6, 1, 4, 1, 173, 7, 9))
bridgeAdminVirtualLANID = MibScalar((1, 3, 6, 1, 4, 1, 173, 7, 9, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4095))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: bridgeAdminVirtualLANID.setStatus('mandatory')
if mibBuilder.loadTexts: bridgeAdminVirtualLANID.setDescription('LAN id inserted into token ring RIF field representing internal virtual ring, for LLC2 or bridged traffic')
bridgeOperVirtualLANID = MibScalar((1, 3, 6, 1, 4, 1, 173, 7, 9, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4095))).setMaxAccess("readonly")
if mibBuilder.loadTexts: bridgeOperVirtualLANID.setStatus('mandatory')
if mibBuilder.loadTexts: bridgeOperVirtualLANID.setDescription('LAN id inserted into token ring RIF field representing internal virtual ring, for LLC2 or bridged traffic')
# Enumerated no(1)/yes(2) flags; defaults set via .clone(...).
bridgeEnabled = MibScalar((1, 3, 6, 1, 4, 1, 173, 7, 9, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('yes')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: bridgeEnabled.setStatus('mandatory')
if mibBuilder.loadTexts: bridgeEnabled.setDescription('When yes the spanning tree algorithm is in effect and all traffic types may be bridged. When no only llc/2 terminated traffic is allowed on bridge interfaces.')
bridgeMaxSizeForwardingTable = MibScalar((1, 3, 6, 1, 4, 1, 173, 7, 9, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(128, 65535)).clone(65535)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: bridgeMaxSizeForwardingTable.setStatus('mandatory')
if mibBuilder.loadTexts: bridgeMaxSizeForwardingTable.setDescription('Number of entries allowed in the bridge forwarding table')
bridgeIPEnabled = MibScalar((1, 3, 6, 1, 4, 1, 173, 7, 9, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('no')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: bridgeIPEnabled.setStatus('mandatory')
if mibBuilder.loadTexts: bridgeIPEnabled.setDescription('When yes, and BRIDGING ENABLED, IP traffic will be bridged, otherwise IP traffic will be routed.')
bridgeIPXEnabled = MibScalar((1, 3, 6, 1, 4, 1, 173, 7, 9, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("no", 1), ("yes", 2))).clone('no')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: bridgeIPXEnabled.setStatus('mandatory')
if mibBuilder.loadTexts: bridgeIPXEnabled.setDescription('When yes, and BRIDGING ENABLED, IPX traffic will be bridged, otherwise IPX traffic will be routed.')
# Source-route bridge ID (0..15), admin/oper pair.
bridgeAdminSRBID = MibScalar((1, 3, 6, 1, 4, 1, 173, 7, 9, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 15))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: bridgeAdminSRBID.setStatus('mandatory')
if mibBuilder.loadTexts: bridgeAdminSRBID.setDescription('bridge ID to be inserted into token ring RIF field, this bridge')
bridgeOperSRBID = MibScalar((1, 3, 6, 1, 4, 1, 173, 7, 9, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 15))).setMaxAccess("readonly")
if mibBuilder.loadTexts: bridgeOperSRBID.setStatus('mandatory')
if mibBuilder.loadTexts: bridgeOperSRBID.setDescription('bridge ID to be inserted into token ring RIF field, this bridge')
bridgeDefaultEthernetFrameType = MibScalar((1, 3, 6, 1, 4, 1, 173, 7, 9, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("type-II", 1), ("ieee8023", 2))).clone('type-II')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: bridgeDefaultEthernetFrameType.setStatus('mandatory')
if mibBuilder.loadTexts: bridgeDefaultEthernetFrameType.setDescription('default frame type to be used when forwarding traffic on ethernet interfaces.')
# --- Netlink IP subtree (...173.7.11): RIP version selection scalar ------
ipNl = MibIdentifier((1, 3, 6, 1, 4, 1, 173, 7, 11))
nlIpDefaultRIPVersion = MibScalar((1, 3, 6, 1, 4, 1, 173, 7, 11, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("ripVersion1", 1), ("rip1Compatible", 2), ("ripVersion2", 3))).clone('ripVersion1')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nlIpDefaultRIPVersion.setStatus('mandatory')
if mibBuilder.loadTexts: nlIpDefaultRIPVersion.setDescription('The version of RIP that will be used by IP.')
# --- Voice configuration subtree (...173.7.12) ---------------------------
# System-wide voice scalars followed by the speed-dial mapping table.
voice = MibIdentifier((1, 3, 6, 1, 4, 1, 173, 7, 12))
voiceSystemVoiceNodeNum = MibScalar((1, 3, 6, 1, 4, 1, 173, 7, 12, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 9999)).clone(1)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: voiceSystemVoiceNodeNum.setStatus('mandatory')
if mibBuilder.loadTexts: voiceSystemVoiceNodeNum.setDescription('The number that identifies this node.')
# Enumerated ring voltage/frequency combinations; default v80-hz-20-00.
voiceSystemRingVolFreq = MibScalar((1, 3, 6, 1, 4, 1, 173, 7, 12, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("v75-hz-16-66", 1), ("v80-hz-20-00", 2), ("v75-hz-25-00", 3), ("v60-hz-50-00", 4), ("v75-hz-50-00", 5))).clone('v80-hz-20-00')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: voiceSystemRingVolFreq.setStatus('mandatory')
if mibBuilder.loadTexts: voiceSystemRingVolFreq.setDescription('The frequency and voltage of the telephone ring circuit. In two wire OPX mode, the node provides ring voltage to the telephone when it is called by a remote unit.')
voiceSystemCountryCode = MibScalar((1, 3, 6, 1, 4, 1, 173, 7, 12, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 999)).clone(1)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: voiceSystemCountryCode.setStatus('mandatory')
if mibBuilder.loadTexts: voiceSystemCountryCode.setDescription('The telephone dialing prefix code that identifies the country in which the node is installed.')
voiceSystemDialDigits = MibScalar((1, 3, 6, 1, 4, 1, 173, 7, 12, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(2, 4)).clone(3)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: voiceSystemDialDigits.setStatus('mandatory')
if mibBuilder.loadTexts: voiceSystemDialDigits.setDescription('The number of digits that will be used in the speed dialing scheme, as well as the number of digits one can specify for the Auto Dial feature. This requires rebooting for the change to take effect.')
# Min/max digitization rates share the same enumeration; min defaults to
# bps-4800 and max to bps-64000.
voiceSystemVoiceRatesMin = MibScalar((1, 3, 6, 1, 4, 1, 173, 7, 12, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("bps-4800", 1), ("bps-8000", 2), ("bps-32000", 3), ("bps-64000", 4))).clone('bps-4800')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: voiceSystemVoiceRatesMin.setStatus('mandatory')
if mibBuilder.loadTexts: voiceSystemVoiceRatesMin.setDescription('The minimum operating rate of all voice channels when congestion occurs.')
voiceSystemVoiceRatesMax = MibScalar((1, 3, 6, 1, 4, 1, 173, 7, 12, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("bps-4800", 1), ("bps-8000", 2), ("bps-32000", 3), ("bps-64000", 4))).clone('bps-64000')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: voiceSystemVoiceRatesMax.setStatus('mandatory')
if mibBuilder.loadTexts: voiceSystemVoiceRatesMax.setDescription('The maximum operating digitization rate of all voice channels when there is no congestion.')
voiceSystemExtDialDigits = MibScalar((1, 3, 6, 1, 4, 1, 173, 7, 12, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 16))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: voiceSystemExtDialDigits.setStatus('mandatory')
if mibBuilder.loadTexts: voiceSystemExtDialDigits.setDescription('The number of extended dial digits that can be defined in the speed map table and/or entered by the user of attached equipment as part of a dial string. This requires rebooting for the change to take effect.')
# Speed-dial table (...173.7.12.8), indexed by the dialed digit string;
# rows are created/deleted through voiceSpeedDialRowStatus.
voiceSpeedDialTable = MibTable((1, 3, 6, 1, 4, 1, 173, 7, 12, 8), )
if mibBuilder.loadTexts: voiceSpeedDialTable.setStatus('mandatory')
if mibBuilder.loadTexts: voiceSpeedDialTable.setDescription('A list of speed-dial numbers.')
voiceSpeedDialEntry = MibTableRow((1, 3, 6, 1, 4, 1, 173, 7, 12, 8, 1), ).setIndexNames((0, "NETLINK-SPECIFIC-MIB", "voiceSpeedDialDigits"))
if mibBuilder.loadTexts: voiceSpeedDialEntry.setStatus('mandatory')
if mibBuilder.loadTexts: voiceSpeedDialEntry.setDescription('A Voice Speed Dial entry containing objects relating to the speed-dial number that are configurable by the user.')
voiceSpeedDialDigits = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 12, 8, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 4))).setMaxAccess("readonly")
if mibBuilder.loadTexts: voiceSpeedDialDigits.setStatus('mandatory')
if mibBuilder.loadTexts: voiceSpeedDialDigits.setDescription('The speed-dial number. A question mark wildcard can be used for any digit, and an asterisk wildcard can be used for any number of digits.')
voiceSpeedDialLongDialMap = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 12, 8, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(6, 6)).setFixedLength(6)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: voiceSpeedDialLongDialMap.setStatus('mandatory')
if mibBuilder.loadTexts: voiceSpeedDialLongDialMap.setDescription('The number for which the speed-dial number is a shortcut.')
voiceSpeedDialExtDialStr = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 12, 8, 1, 3), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 16))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: voiceSpeedDialExtDialStr.setStatus('mandatory')
if mibBuilder.loadTexts: voiceSpeedDialExtDialStr.setDescription('The extended set of digits that will be forwarded with the call if portVoiceOperExtDigitsSource in the physical port record is set to Map.')
voiceSpeedDialRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 173, 7, 12, 8, 1, 4), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: voiceSpeedDialRowStatus.setStatus('mandatory')
if mibBuilder.loadTexts: voiceSpeedDialRowStatus.setDescription("This variable is used to manage the creation and deletion of conceptual rows in the voiceSpeedDialTable and follows the SNMPv2 RowStatus conventions by supporting the following values: - `active', which indicates that the conceptual row is available for use by the managed device. For a management protocol set operation, a genErr response is returned when the row does not exist. - `createAndGo', which is supplied by a management station wishing to create a new instance of a conceptual row and to have its status automatically set to active, making it available for use by the managed device. For a management protocol set operation, a genErr response is returned when the row already exists. - `destroy', which is supplied by a management station wishing to delete all of the instances associated with an existing conceptual row. Note that all of the above values may be specified in a management protocol set operation, and only the 'active' value will be returned in response to a management protocol retrieval operation. For a management protocol set operation, if other variable bindings are included in the same PDU, then a genErr response is returned.")
# Export all objects defined by this module under the module name
# "NETLINK-SPECIFIC-MIB" so other pysnmp MIB modules can import them.
# The code generator splits the exports across multiple exportSymbols
# calls; this is the first such call.
mibBuilder.exportSymbols("NETLINK-SPECIFIC-MIB", portT1AdminFrameModelSelect=portT1AdminFrameModelSelect, portT1OperEntry=portT1OperEntry, ipxServConfigServNetworkAddress=ipxServConfigServNetworkAddress, nlLlc2HostN2ExpiredT1LPDUCount=nlLlc2HostN2ExpiredT1LPDUCount, nlLocalSubscriberRouteEntry=nlLocalSubscriberRouteEntry, nlIfLlc2FrLANID=nlIfLlc2FrLANID, nlIfLlc2LANRowStatus=nlIfLlc2LANRowStatus, nlLlc2HostPriority=nlLlc2HostPriority, portFrEntry=portFrEntry, ipxInterfaceMaxTransUnit=ipxInterfaceMaxTransUnit, nlLlc2HostEntry=nlLlc2HostEntry, ipxInterfacePeriodicRIPTimer=ipxInterfacePeriodicRIPTimer, nlLlc2OrigConnectionEntry=nlLlc2OrigConnectionEntry, ipxServConfigEntry=ipxServConfigEntry, ipxStaticRouteConfigNetNum=ipxStaticRouteConfigNetNum, portPhyX25AdminTable=portPhyX25AdminTable, portSdlcOperGenerateClock=portSdlcOperGenerateClock, t1StatsOOSCnt=t1StatsOOSCnt, portX25AdminEntry=portX25AdminEntry, portSdlcAdminRowStatus=portSdlcAdminRowStatus, portSdlcAdminSpeed=portSdlcAdminSpeed, statBag=statBag, portDLCIPortIndex=portDLCIPortIndex, tpAdrEntry=tpAdrEntry, portBsciAdminBlockCheck=portBsciAdminBlockCheck, llc2=llc2, portSdlcAdminDisableRequestDisconnect=portSdlcAdminDisableRequestDisconnect, x25TxREJFrames=x25TxREJFrames, nlIfVoiceTable=nlIfVoiceTable, portX25AdminFastSelAccpt=portX25AdminFastSelAccpt, frStatsPortIndex=frStatsPortIndex, portX25AdminRevChgAccpt=portX25AdminRevChgAccpt, portVoiceAdminAutoDialNumber=portVoiceAdminAutoDialNumber, portBsciOperStartSynchChars=portBsciOperStartSynchChars, nlLocalSubscriberRedirectOO=nlLocalSubscriberRedirectOO, nlLocalSubscriberId=nlLocalSubscriberId, portSdlcOperConnector=portSdlcOperConnector, nlLocalSubscriberAlgorithm=nlLocalSubscriberAlgorithm, bsciDevOperTable=bsciDevOperTable, bsciSubscrAdminSequence=bsciSubscrAdminSequence, portSdlcOperEntry=portSdlcOperEntry, bsciDevAdminTransTextSupported=bsciDevAdminTransTextSupported, portSdlcOperLPDASupport=portSdlcOperLPDASupport, 
nlLocalSubscriberName=nlLocalSubscriberName, nlIfIpSourceRlp=nlIfIpSourceRlp, ds0aStatsEntry=ds0aStatsEntry, node=node, bsciSubscrAdminConnectionID=bsciSubscrAdminConnectionID, nsNTNeighStat=nsNTNeighStat, portBsciAdminMaxBytesPerFrame=portBsciAdminMaxBytesPerFrame, ipxInterfaceSAPAgeTimer=ipxInterfaceSAPAgeTimer, lSSdlcLlc2AdminLanN3=lSSdlcLlc2AdminLanN3, portVoiceAdminToneType=portVoiceAdminToneType, portPhyX25AdminConnector=portPhyX25AdminConnector, portT1AdminEntry=portT1AdminEntry, nlIfVoiceLocalNodePort=nlIfVoiceLocalNodePort, nlIfIpProxyARP=nlIfIpProxyARP, lSSdlcLlc2OperLanN3=lSSdlcLlc2OperLanN3, bsciDevAdminDisableStatusRequest=bsciDevAdminDisableStatusRequest, lSSdlcAdminL2DatMode=lSSdlcAdminL2DatMode, portPhyX25AdminRcvClockFromDTE=portPhyX25AdminRcvClockFromDTE, bsciSubscrOperRemoteID=bsciSubscrOperRemoteID, portBsciOperDataMode=portBsciOperDataMode, portSdlcOperSpeed=portSdlcOperSpeed, portX25OperMaxThruptClass=portX25OperMaxThruptClass, portFrRowStatus=portFrRowStatus, lSSdlcLlc2OperTable=lSSdlcLlc2OperTable, portVoiceOperJitter=portVoiceOperJitter, nlIfLlc2LANPort=nlIfLlc2LANPort, rlpStatsUsedBuffers=rlpStatsUsedBuffers, rlpThreshEntry=rlpThreshEntry, portFrRlpIndex=portFrRlpIndex, rlpThreshRlpIndex=rlpThreshRlpIndex, lSSdlcAdminLlc2Conversion=lSSdlcAdminLlc2Conversion, bsciDevOperSingleUserVC=bsciDevOperSingleUserVC, portBsciAdminActivateConnectionWithoutPoll=portBsciAdminActivateConnectionWithoutPoll, nlLocalSubscriberPriority=nlLocalSubscriberPriority, x25TxStatsEntry=x25TxStatsEntry, portPhyX25OperTrunkGroup=portPhyX25OperTrunkGroup, nlLocalSubscriberRedirectBusy=nlLocalSubscriberRedirectBusy, bsciDevOperTransTextSupported=bsciDevOperTransTextSupported, portBsciOperEndPadChars=portBsciOperEndPadChars, portPhyX25AdminDisconnectTimer=portPhyX25AdminDisconnectTimer, ipxInterfaceLanCardNumber=ipxInterfaceLanCardNumber, nlLlc2HostGroup=nlLlc2HostGroup, portSdlcGroup=portSdlcGroup, portT1OperBlockedPortFlag=portT1OperBlockedPortFlag, 
portPinEntry=portPinEntry, rlpThreshRxFramesPerSec=rlpThreshRxFramesPerSec, ipxServConfigGatewayAddress=ipxServConfigGatewayAddress, portT1OperTable=portT1OperTable, lSSdlcAdminLocalSub=lSSdlcAdminLocalSub, portVoiceOperSuppression=portVoiceOperSuppression, nlLocalSubscriberTable=nlLocalSubscriberTable, lSSdlcOperRetryCount=lSSdlcOperRetryCount, ipxInterfaceNetworkAddress=ipxInterfaceNetworkAddress, statFrame=statFrame, portStatsFCSErrors=portStatsFCSErrors, portSdlcAdminCommit=portSdlcAdminCommit, frStatsTable=frStatsTable, portVoiceOperSLTTimeout=portVoiceOperSLTTimeout, portSdlcAdminMAXOut=portSdlcAdminMAXOut, portFrOutgoingRateControl=portFrOutgoingRateControl, portSdlcAdminGenerateClock=portSdlcAdminGenerateClock, nlIfTableIndex=nlIfTableIndex, portBsciAdminTable=portBsciAdminTable, nlIfLlc2LANType=nlIfLlc2LANType, nlLocalSubscriberRedirAddr=nlLocalSubscriberRedirAddr, lSSdlcAdminRemoteSub=lSSdlcAdminRemoteSub, portVoiceAdminLongDialPrefix=portVoiceAdminLongDialPrefix, portBsciOperClearVCOnLastDeviceDown=portBsciOperClearVCOnLastDeviceDown, ipxInterfaceSVCRetryTimer=ipxInterfaceSVCRetryTimer, t1StatsRcvUsage=t1StatsRcvUsage, nlIfLlc2FrPort=nlIfLlc2FrPort, nlIfIpICMPAddRoutes=nlIfIpICMPAddRoutes, rlpThreshTable=rlpThreshTable, portThreshTxErrorRatio=portThreshTxErrorRatio, portBsciOperPollInterval=portBsciOperPollInterval, frStatsTxBECNFrames=frStatsTxBECNFrames, portFrMaxBytesPerFrame=portFrMaxBytesPerFrame, nlLocalSubscriberRedirEntry=nlLocalSubscriberRedirEntry, nlIfIpRowStatus=nlIfIpRowStatus, nlIfVoiceFrameRelayDLCI=nlIfVoiceFrameRelayDLCI, nlIfIpSecondaryAddrRowStatus=nlIfIpSecondaryAddrRowStatus, bsciSubscrAdminAutocallRtyTimer=bsciSubscrAdminAutocallRtyTimer, nlIfTableOid=nlIfTableOid, sdlc=sdlc, portSdlcOperCommit=portSdlcOperCommit, lSSdlcOperTable=lSSdlcOperTable, nlIfLlc2LANBlockedPortFlag=nlIfLlc2LANBlockedPortFlag, bridgeOperSRBID=bridgeOperSRBID, nodeTrapText=nodeTrapText, lSSdlcLlc2AdminLanN2=lSSdlcLlc2AdminLanN2, 
portLogicalX25OperRfc1490=portLogicalX25OperRfc1490, nlLlc2HostInterface=nlLlc2HostInterface, nodeBagTable=nodeBagTable, voiceStatsTxCalls=voiceStatsTxCalls, nlIfVoiceFrameRelayPort=nlIfVoiceFrameRelayPort, x25TxRNRFrames=x25TxRNRFrames, lSSdlcLlc2AdminLocalSap=lSSdlcLlc2AdminLocalSap, nlLlc2HostT2RecvAckTimer=nlLlc2HostT2RecvAckTimer, nlLlc2OrigConnectionHSAP=nlLlc2OrigConnectionHSAP, nlLocalSubscriberEntry=nlLocalSubscriberEntry, sdlcLSAdminEntry=sdlcLSAdminEntry, portStatsTable=portStatsTable, portVoiceAdminLevelOut=portVoiceAdminLevelOut, x25RxDISCFrames=x25RxDISCFrames, portX25AdminMaxThruptClass=portX25AdminMaxThruptClass, portT1OperNRZI=portT1OperNRZI, portVoiceOperLevelOut=portVoiceOperLevelOut, portVoiceOperTETimer=portVoiceOperTETimer, nlLlc2HostRowStatus=nlLlc2HostRowStatus, portFrBandwidthAllocation=portFrBandwidthAllocation, portBsciAdminNAKRetryCount=portBsciAdminNAKRetryCount, t1StatsXmitAbortFrames=t1StatsXmitAbortFrames, portVoiceAdminAutoPoll=portVoiceAdminAutoPoll, lSSdlcOperEntry=lSSdlcOperEntry, portVoiceAdminAutoPollTimer=portVoiceAdminAutoPollTimer, nlIfLlc2FrRowStatus=nlIfLlc2FrRowStatus, portDLCIOutgoingBe=portDLCIOutgoingBe, voice=voice, portX25OperDefPktSize=portX25OperDefPktSize, portVoiceOperBreakRatio=portVoiceOperBreakRatio, ipxNodeDefaultConfigNetworkAddress=ipxNodeDefaultConfigNetworkAddress, ipxInterfaceSVCIdleTimer=ipxInterfaceSVCIdleTimer, voiceStatsBusyCalls=voiceStatsBusyCalls, portThreshRlpIndex=portThreshRlpIndex, bsciDevOperDisableStatusRequest=bsciDevOperDisableStatusRequest, nlLlc2HostIndex=nlLlc2HostIndex, portLogicalX25AdminEntry=portLogicalX25AdminEntry, nlIfLlc2LANTable=nlIfLlc2LANTable, ds0aStatsTable=ds0aStatsTable, portVoiceAdminDTMFOffDuration=portVoiceAdminDTMFOffDuration, ds0aStatsRcvErrors=ds0aStatsRcvErrors, lSSdlcLlc2AdminEntry=lSSdlcLlc2AdminEntry, ipxInterfaceRIPAgeTimer=ipxInterfaceRIPAgeTimer, x25TxPortIndex=x25TxPortIndex, bsciDevOperDeviceUnitID=bsciDevOperDeviceUnitID, 
nlIfLlc2FrInterface=nlIfLlc2FrInterface, portVoiceOperInterface=portVoiceOperInterface, nlIfIpTable=nlIfIpTable, nlLlc2NextHostNumber=nlLlc2NextHostNumber, ipxNodeDefaultConfigRIPSAPGap=ipxNodeDefaultConfigRIPSAPGap, portX25OperLocChgPrev=portX25OperLocChgPrev, frStatsTxLMIFrames=frStatsTxLMIFrames, portVoiceAdminForwardDelay=portVoiceAdminForwardDelay, nlLlc2TermConnectionSequence=nlLlc2TermConnectionSequence, nlIfIpUnnumberedIf=nlIfIpUnnumberedIf, t1StatsBlueAlarms=t1StatsBlueAlarms, portBsciAdminEndToEndAck=portBsciAdminEndToEndAck, x25RxRlpIndex=x25RxRlpIndex, portT1OperGenerateClock=portT1OperGenerateClock, portThreshIndex=portThreshIndex, nlIfPortStatus=nlIfPortStatus, voiceStatsEntry=voiceStatsEntry, portBsciOperEndToEndAck=portBsciOperEndToEndAck, portPhyX25AdminTrunkGroup=portPhyX25AdminTrunkGroup, bsciSubscrOperAutocallMaxRtry=bsciSubscrOperAutocallMaxRtry, ds0aStatsXmitFrames=ds0aStatsXmitFrames, portX25OperEntry=portX25OperEntry, local=local, bsciSubscrAdminRemoteID=bsciSubscrAdminRemoteID, nlIfLlc2LANRlp=nlIfLlc2LANRlp, x25RxUAFrames=x25RxUAFrames, nodeAlmTable=nodeAlmTable, ipxInterfaceBandwidthAllocGroup=ipxInterfaceBandwidthAllocGroup, portSdlcOperRcvClockFromDTE=portSdlcOperRcvClockFromDTE, ipxInterfaceNetBIOSEnabled=ipxInterfaceNetBIOSEnabled, lSSdlcAdminAutoCall=lSSdlcAdminAutoCall, nlIfIpMtu=nlIfIpMtu, bridgeOperVirtualLANID=bridgeOperVirtualLANID, portVoiceAdminForwardedType=portVoiceAdminForwardedType, nlIfLlc2Interfaces=nlIfLlc2Interfaces, portX25AdminCUGOutAccess=portX25AdminCUGOutAccess, nlIfType=nlIfType, portFrBackupUseOnly=portFrBackupUseOnly, ipxConfigInterface=ipxConfigInterface, portStatsRlpIndex=portStatsRlpIndex, portThreshTxFramesPerSec=portThreshTxFramesPerSec, t1StatsRedAlarms=t1StatsRedAlarms, portVoiceAdminSuppression=portVoiceAdminSuppression, portX25AdminDefPktSize=portX25AdminDefPktSize, portVoiceAdminDTMF=portVoiceAdminDTMF, nlLlc2OrigConnectionIDNUM=nlLlc2OrigConnectionIDNUM, portX25OperOutCallBar=portX25OperOutCallBar, 
nlIfLlc2FrTable=nlIfLlc2FrTable, nsStatus=nsStatus, x25TxRlpIndex=x25TxRlpIndex, ipxInterfaceNetBIOSHops=ipxInterfaceNetBIOSHops, nlLlc2OrigConnectionMAXIN=nlLlc2OrigConnectionMAXIN, portT1OperFramingMode=portT1OperFramingMode, lSSdlcLlc2OperLocalSap=lSSdlcLlc2OperLocalSap, t1StatsXmitFrames=t1StatsXmitFrames, lSSdlcOperLocalSub=lSSdlcOperLocalSub, netlink=netlink, t1StatsRcvAbortFrames=t1StatsRcvAbortFrames, rlpStatsEntry=rlpStatsEntry, portPhyX25AdminDialOut=portPhyX25AdminDialOut, portPhyX25AdminSpeed=portPhyX25AdminSpeed, nlIfLlc2FrBAG=nlIfLlc2FrBAG, nlIfLlc2FrRlp=nlIfLlc2FrRlp, portLogicalX25AdminCxnPriority=portLogicalX25AdminCxnPriority, nodeSNMPGroup=nodeSNMPGroup, nlIfIpInverseARP=nlIfIpInverseARP, t1StatsRcvOverruns=t1StatsRcvOverruns, ipxStaticRouteConfigRowStatus=ipxStaticRouteConfigRowStatus, statT1=statT1, portSdlcOperPacketSize=portSdlcOperPacketSize, ipxInterfaceMaxVC=ipxInterfaceMaxVC, nlLlc2TermConnectionTable=nlLlc2TermConnectionTable, nlIfIpRIPSupport=nlIfIpRIPSupport, portBsciOperNoResponseRetryCount=portBsciOperNoResponseRetryCount, nlLocalSubscriberRouteTable=nlLocalSubscriberRouteTable, ipxInterfaceNumber=ipxInterfaceNumber)
mibBuilder.exportSymbols("NETLINK-SPECIFIC-MIB", nlLlc2OrigConnectionType=nlLlc2OrigConnectionType, bsciDevAdminPrinterAttached=bsciDevAdminPrinterAttached, portT1AdminTable=portT1AdminTable, tpAddress=tpAddress, portBsciAdminSlowPollRetryFreq=portBsciAdminSlowPollRetryFreq, lSSdlcOperLPDAResourceID=lSSdlcOperLPDAResourceID, portPhyX25OperGenerateClock=portPhyX25OperGenerateClock, lSSdlcLlc2OperLanT1=lSSdlcLlc2OperLanT1, subscriber=subscriber, portX25OperBlockedFlag=portX25OperBlockedFlag, portBsciAdminSlowPollRetryCount=portBsciAdminSlowPollRetryCount, nlIfVoicePeerNodeType=nlIfVoicePeerNodeType, bsciSubscrAdminAutocall=bsciSubscrAdminAutocall, nlIfIpDestAddress=nlIfIpDestAddress, voiceSpeedDialExtDialStr=voiceSpeedDialExtDialStr, nlIfIpSourceSub=nlIfIpSourceSub, ipxInterfaceRowStatus=ipxInterfaceRowStatus, t1StatsTable=t1StatsTable, portX25AdminDefWinSize=portX25AdminDefWinSize, nlIfIpSecondaryAddrNetworkMask=nlIfIpSecondaryAddrNetworkMask, nlIfVoicePeerNodePort=nlIfVoicePeerNodePort, nlIfLlc2FrHostMACAddress=nlIfLlc2FrHostMACAddress, portVoiceAdminPortIndex=portVoiceAdminPortIndex, portSdlcOperNrz=portSdlcOperNrz, frStatsRxDEFrames=frStatsRxDEFrames, portBsciOperActivateConnectionWithoutPoll=portBsciOperActivateConnectionWithoutPoll, portVoiceOperDTMFOnDuration=portVoiceOperDTMFOnDuration, nsNeighTable=nsNeighTable, portVoiceOperLongDialPrefix=portVoiceOperLongDialPrefix, nlIfTable=nlIfTable, portBsciOperMultidrop=portBsciOperMultidrop, ipxInterfaceSAPEnabled=ipxInterfaceSAPEnabled, portFrBackupRowStatus=portFrBackupRowStatus, portDLCIOutgoingBc=portDLCIOutgoingBc, portBsciOperErrorRetransmitCount=portBsciOperErrorRetransmitCount, bsciSubscrOperAutocallRtyTimer=bsciSubscrOperAutocallRtyTimer, lSSdlcOperRetryTime=lSSdlcOperRetryTime, lSSdlcLlc2AdminIdnum=lSSdlcLlc2AdminIdnum, ipxInterfaceSourceSubscriber=ipxInterfaceSourceSubscriber, nlLlc2OrigConnectionSequence=nlLlc2OrigConnectionSequence, nlLlc2HostT1ReplyTimer=nlLlc2HostT1ReplyTimer, 
NlSubscriberAddress=NlSubscriberAddress, portStatsOutPercentUtils=portStatsOutPercentUtils, ipxInterfacePeriodicSAPTimer=ipxInterfacePeriodicSAPTimer, portVoiceOperBlockedFlag=portVoiceOperBlockedFlag, lSSdlcLlc2OperSuppressXID=lSSdlcLlc2OperSuppressXID, nlIfVoiceRowStatus=nlIfVoiceRowStatus, frStatsRlpIndex=frStatsRlpIndex, portBsciAdminRowStatus=portBsciAdminRowStatus, portT1OperFrameModelSelect=portT1OperFrameModelSelect, voiceStatsTxCallsAccepts=voiceStatsTxCallsAccepts, nlLlc2TermConnectionRemoteSubscriberId=nlLlc2TermConnectionRemoteSubscriberId, portVoiceOperRlpIndex=portVoiceOperRlpIndex, portVoiceOperNumDigitsDelete=portVoiceOperNumDigitsDelete, portVoiceAdminEntry=portVoiceAdminEntry, portVoiceAdminSpeed=portVoiceAdminSpeed, rlpMemorySize=rlpMemorySize, nlIfVoiceEnableFragment=nlIfVoiceEnableFragment, nsNTNeigh=nsNTNeigh, portSdlcAdminNrz=portSdlcAdminNrz, portSdlcAdminLPDASupport=portSdlcAdminLPDASupport, portFrBackupPort=portFrBackupPort, lSSdlcAdminLPDAResourceID=lSSdlcAdminLPDAResourceID, lSSdlcLlc2OperLocalMac=lSSdlcLlc2OperLocalMac, nlIfIpPriority=nlIfIpPriority, portStatsLogicalRejects=portStatsLogicalRejects, rlpStatsFrameRejects=rlpStatsFrameRejects, nlIfRlp=nlIfRlp, bsciSubscrAdminLocalID=bsciSubscrAdminLocalID, ipxServConfigRowStatus=ipxServConfigRowStatus, nlLocalSubscriberRedirRowStatus=nlLocalSubscriberRedirRowStatus, portFrConnector=portFrConnector, portBsciAdminRcvClockFromDTE=portBsciAdminRcvClockFromDTE, x25TxStatsTable=x25TxStatsTable, nlLlc2HostAccess=nlLlc2HostAccess, portVoiceOperAutoPollTimer=portVoiceOperAutoPollTimer, portX25AdminTable=portX25AdminTable, frStatsRxLMIFrames=frStatsRxLMIFrames, nlLlc2HostMACAddress=nlLlc2HostMACAddress, portDLCIBackupGroup=portDLCIBackupGroup, lSSdlcLlc2AdminSuppressXID=lSSdlcLlc2AdminSuppressXID, nlLlc2TermConnectionRowStatus=nlLlc2TermConnectionRowStatus, rlpEntry=rlpEntry, portBsciOperAnswerNonConfigured=portBsciOperAnswerNonConfigured, portBsciOperBlockCheck=portBsciOperBlockCheck, 
portVoiceOperDTMF=portVoiceOperDTMF, portX25AdminOutCallBar=portX25AdminOutCallBar, portX25OperFlowCtrlNeg=portX25OperFlowCtrlNeg, nlIpDefaultRIPVersion=nlIpDefaultRIPVersion, portX25AdminMaxWinSize=portX25AdminMaxWinSize, t1StatsRlpIndex=t1StatsRlpIndex, portX25OperCUGIndex=portX25OperCUGIndex, nlIfLlc2FrPriority=nlIfLlc2FrPriority, portX25AdminInCallBar=portX25AdminInCallBar, portSdlcAdminPacketSize=portSdlcAdminPacketSize, portPhyX25AdminInactivityTimer=portPhyX25AdminInactivityTimer, lSSdlcLlc2OperBAG=lSSdlcLlc2OperBAG, statThresh=statThresh, nlIfLlc2LANPriority=nlIfLlc2LANPriority, portFrBackupEntry=portFrBackupEntry, portT1AdminProtocolFraming=portT1AdminProtocolFraming, lSSdlcLlc2AdminTable=lSSdlcLlc2AdminTable, nlIfVoiceEntry=nlIfVoiceEntry, portVoiceOperAutoPoll=portVoiceOperAutoPoll, portDLCIPriority=portDLCIPriority, bsciDevAdminRowStatus=bsciDevAdminRowStatus, nlLlc2TermConnectionHSAP=nlLlc2TermConnectionHSAP, x25TxRRFrames=x25TxRRFrames, portFrBackupDLCI=portFrBackupDLCI, portThreshEntry=portThreshEntry, portX25AdminLocChgPrev=portX25AdminLocChgPrev, portVoiceAdminTable=portVoiceAdminTable, portX25OperMaxWinSize=portX25OperMaxWinSize, voiceStatsTxClears=voiceStatsTxClears, portBsciOperRetransmitInterval=portBsciOperRetransmitInterval, statDS0A=statDS0A, voiceStatsRlpIndex=voiceStatsRlpIndex, portVoiceOperTable=portVoiceOperTable, portBsciAdminEntry=portBsciAdminEntry, bsciSubscrAdminTable=bsciSubscrAdminTable, nlLlc2TermConnectionLocalSubscriberId=nlLlc2TermConnectionLocalSubscriberId, ipxStaticRouteConfigEntry=ipxStaticRouteConfigEntry, portSdlcOperMAXOut=portSdlcOperMAXOut, nodeTrapAdrTable=nodeTrapAdrTable, bsciSubscrAdminRowStatus=bsciSubscrAdminRowStatus, nlIfIpRIPDeltaUpdates=nlIfIpRIPDeltaUpdates, portT1OperLineBuildOut=portT1OperLineBuildOut, nlIfLlc2LANEntry=nlIfLlc2LANEntry, portVoiceAdminDTMFOnDuration=portVoiceAdminDTMFOnDuration, portLogicalX25AdminTable=portLogicalX25AdminTable, portBsciAdminStartSynchChars=portBsciAdminStartSynchChars, 
portBsciOperMaxBytesPerFrame=portBsciOperMaxBytesPerFrame, lSSdlcAdminRetryCount=lSSdlcAdminRetryCount, nsNodNum=nsNodNum, portFrLogicalDCE=portFrLogicalDCE, lSSdlcAdminRetryTime=lSSdlcAdminRetryTime, statistics=statistics, nlLocalSubscriberRowStatus=nlLocalSubscriberRowStatus, voiceStatsTable=voiceStatsTable, voiceStatsRxCalls=voiceStatsRxCalls, nlLlc2OrigConnectionTable=nlLlc2OrigConnectionTable, portX25OperThruptClassNeg=portX25OperThruptClassNeg, ipxInterfaceBlockedPortFlag=ipxInterfaceBlockedPortFlag, portVoiceAdminRowStatus=portVoiceAdminRowStatus, portPhyX25OperSetupTimer=portPhyX25OperSetupTimer, ipxServConfigServNodeAddress=ipxServConfigServNodeAddress, x25RxINFOFrames=x25RxINFOFrames, nlLocalSubscriberSystematicRedirect=nlLocalSubscriberSystematicRedirect, nlLlc2HostSrcMACAddressMask=nlLlc2HostSrcMACAddressMask, portT1OperLineEncoding=portT1OperLineEncoding, x25RxSABMFrames=x25RxSABMFrames, portBsciAdminNoResponseRetryCount=portBsciAdminNoResponseRetryCount, nlIfIpEntry=nlIfIpEntry, nlLocalSubscriberRedirTable=nlLocalSubscriberRedirTable, portVoiceAdminSLTTimeout=portVoiceAdminSLTTimeout, lSSdlcLlc2AdminLanTw=lSSdlcLlc2AdminLanTw, portVoiceAdminNumDigitsDelete=portVoiceAdminNumDigitsDelete, ipNl=ipNl, bridgeMaxSizeForwardingTable=bridgeMaxSizeForwardingTable, bsciDevAdminConnectionID=bsciDevAdminConnectionID, ipxServConfigServName=ipxServConfigServName, nlLocalSubscriberRouteConf=nlLocalSubscriberRouteConf, ipxStaticRouteConfigCircIndex=ipxStaticRouteConfigCircIndex, portPhyX25OperDialOut=portPhyX25OperDialOut, ds0aStatsRcvFrames=ds0aStatsRcvFrames, nsNumNeigh=nsNumNeigh, lSSdlcLlc2OperLanTi=lSSdlcLlc2OperLanTi, portDLCIIncomingBc=portDLCIIncomingBc, x25RxRRFrames=x25RxRRFrames, portSdlcOperMAXRetries=portSdlcOperMAXRetries, ipxInterfaceSourceDLCI=ipxInterfaceSourceDLCI, tpAdrFlag=tpAdrFlag, portPhyX25AdminRowStatus=portPhyX25AdminRowStatus, nlIfIpSourcePort=nlIfIpSourcePort, nlIfIpInterfaces=nlIfIpInterfaces, 
portBsciAdminTransTextSupported=portBsciAdminTransTextSupported, nlIfIpSecondaryAddrTable=nlIfIpSecondaryAddrTable, sdlcLSAddress=sdlcLSAddress, x25TxUAFrames=x25TxUAFrames, portPhyX25OperEntry=portPhyX25OperEntry, nlIfIpSourceDLCI=nlIfIpSourceDLCI, ipxServConfigTable=ipxServConfigTable, nlIfLlc2LANCard=nlIfLlc2LANCard, voiceSystemExtDialDigits=voiceSystemExtDialDigits, portThreshRxErrorRatio=portThreshRxErrorRatio, portBsciOperEntry=portBsciOperEntry, rlpStatsInFrames=rlpStatsInFrames, nlIfIpBAG=nlIfIpBAG, nlLocalSubscriberRedirIndex=nlLocalSubscriberRedirIndex, portT1Group=portT1Group, nlIfVoicePeerNodeNumber=nlIfVoicePeerNodeNumber, portX25OperFastSelAccpt=portX25OperFastSelAccpt, ipxConfig=ipxConfig, portLogicalX25AdminRowStatus=portLogicalX25AdminRowStatus, t1StatsXmitUsage=t1StatsXmitUsage, portVoiceOperAutoDial=portVoiceOperAutoDial, portBsciOperNAKRetryCount=portBsciOperNAKRetryCount, nlIfVoiceInterfaces=nlIfVoiceInterfaces, portSdlcAdminEntry=portSdlcAdminEntry, nlIfVoiceInterface=nlIfVoiceInterface, portDLCIIncomingCIR=portDLCIIncomingCIR, portVoiceAdminBreakRatio=portVoiceAdminBreakRatio, portVoiceOperExtDigitsSource=portVoiceOperExtDigitsSource, ipxInterfaceSerializationEnabled=ipxInterfaceSerializationEnabled, portBsciAdminSpeed=portBsciAdminSpeed, ipxInterfaceWanEnabled=ipxInterfaceWanEnabled, t1StatsPortIndex=t1StatsPortIndex, lSSdlcLlc2AdminLanT1=lSSdlcLlc2AdminLanT1, nlIfEntry=nlIfEntry, ipxServConfigInterveningNetworks=ipxServConfigInterveningNetworks, voiceStatsPortIndex=voiceStatsPortIndex, nlLlc2HostSessionType=nlLlc2HostSessionType, ipxInterfaceRIPEnabled=ipxInterfaceRIPEnabled, portLogicalX25OperEntry=portLogicalX25OperEntry, portDLCIBecnRecoveryCnt=portDLCIBecnRecoveryCnt, bsciSubscrOperLocalID=bsciSubscrOperLocalID, nlIfIpSecondaryAddrSequence=nlIfIpSecondaryAddrSequence, rlpStatsQMessages=rlpStatsQMessages, lSSdlcLlc2OperIdblk=lSSdlcLlc2OperIdblk, portX25OperCUGPref=portX25OperCUGPref, sdlcLSGroup=sdlcLSGroup, nsThisNode=nsThisNode, 
portFrBackupGroupTable=portFrBackupGroupTable, x25TxFRMRFrames=x25TxFRMRFrames, portFrPortIndex=portFrPortIndex, portVoiceOperFaxSupported=portVoiceOperFaxSupported, lSSdlcLlc2OperLanN2=lSSdlcLlc2OperLanN2, portX25AdminThruptClassNeg=portX25AdminThruptClassNeg, t1StatsEntry=t1StatsEntry, portSdlcOperDisableRequestDisconnect=portSdlcOperDisableRequestDisconnect, rlpMaxProtos=rlpMaxProtos, lSSdlcLlc2AdminLanTi=lSSdlcLlc2AdminLanTi, portLogicalX25AdminFrDlci=portLogicalX25AdminFrDlci, portPhyX25OperSpeed=portPhyX25OperSpeed, nlIfPhyPort=nlIfPhyPort, nlIfLlc2LANInterface=nlIfLlc2LANInterface, portPhyX25AdminEntry=portPhyX25AdminEntry, voiceSystemRingVolFreq=voiceSystemRingVolFreq, rlpThreshRtxFramesPerSec=rlpThreshRtxFramesPerSec, portDLCIEntry=portDLCIEntry, nlIfIpLANCard=nlIfIpLANCard, portFrT392Timer=portFrT392Timer, nlIfLlc2FrSessionType=nlIfLlc2FrSessionType, rlpStatsOutFrames=rlpStatsOutFrames, portVoiceAdminForwardedDigits=portVoiceAdminForwardedDigits, portBsciAdminNoResponseTimer=portBsciAdminNoResponseTimer, portVoiceAdminBlockedFlag=portVoiceAdminBlockedFlag, portX25OperMaxPktSize=portX25OperMaxPktSize, portBsciAdminPadType=portBsciAdminPadType, lSSdlcOperL2DatMode=lSSdlcOperL2DatMode, portVoiceOperLinkDownBusy=portVoiceOperLinkDownBusy, t1StatsRcvFrames=t1StatsRcvFrames, portFrBlockedFlag=portFrBlockedFlag, nlIfIpRouteMetric=nlIfIpRouteMetric)
mibBuilder.exportSymbols("NETLINK-SPECIFIC-MIB", bridgeDefaultEthernetFrameType=bridgeDefaultEthernetFrameType, portBsciOperFullDuplex=portBsciOperFullDuplex, portPhyX25AdminGenerateClock=portPhyX25AdminGenerateClock, portBsciOperTable=portBsciOperTable, portSdlcAdminIdleFillChar=portSdlcAdminIdleFillChar, lSSdlcLlc2AdminLocalMac=lSSdlcLlc2AdminLocalMac, portPhyX25OperInactivityTimer=portPhyX25OperInactivityTimer, portLogicalX25OperTable=portLogicalX25OperTable, portLogicalX25OperBAG=portLogicalX25OperBAG, portX25OperTable=portX25OperTable, lSSdlcLlc2AdminLanT2=lSSdlcLlc2AdminLanT2, portSdlcAdminL1Duplex=portSdlcAdminL1Duplex, bridgeIPEnabled=bridgeIPEnabled, portBsciOperCallInfoInRequestPacket=portBsciOperCallInfoInRequestPacket, nlLlc2TermConnectionEntry=nlLlc2TermConnectionEntry, portVoiceAdminLinkDownBusy=portVoiceAdminLinkDownBusy, rlpThreshMsgQueueLen=rlpThreshMsgQueueLen, ipxInterfaceTransportTime=ipxInterfaceTransportTime, tpAdrSLev=tpAdrSLev, nlIfIpSecondaryAddrBroadcastAddress=nlIfIpSecondaryAddrBroadcastAddress, portVoiceOperCallTimer=portVoiceOperCallTimer, portPhyX25AdminSetupTimer=portPhyX25AdminSetupTimer, portVoiceOperForwardDelay=portVoiceOperForwardDelay, portVoiceOperHuntGroup=portVoiceOperHuntGroup, portDLCIRowStatus=portDLCIRowStatus, rlpLIC1Type=rlpLIC1Type, portVoiceAdminTelephonyType=portVoiceAdminTelephonyType, bsciDevOperEntry=bsciDevOperEntry, nlIfIpSVCRetryTimer=nlIfIpSVCRetryTimer, nsEntry=nsEntry, portBsciAdminConnector=portBsciAdminConnector, voiceStatsRxCongestions=voiceStatsRxCongestions, rlpGroupNumber=rlpGroupNumber, portLogicalX25AdminRfc1490=portLogicalX25AdminRfc1490, portBsciAdminMultidrop=portBsciAdminMultidrop, lSSdlcOperLlc2Conversion=lSSdlcOperLlc2Conversion, frStatsEntry=frStatsEntry, nlIfIpSecondaryAddrSourceAddress=nlIfIpSecondaryAddrSourceAddress, portVoiceAdminRlpIndex=portVoiceAdminRlpIndex, nlIfIpSVCIdleTimer=nlIfIpSVCIdleTimer, portBsciOperBlockedFlag=portBsciOperBlockedFlag, 
portSdlcOperIdleFillChar=portSdlcOperIdleFillChar, nlIfIpNetworkMask=nlIfIpNetworkMask, portX25AdminBlockedFlag=portX25AdminBlockedFlag, portSdlcAdminTable=portSdlcAdminTable, portVoiceOperDialTimer=portVoiceOperDialTimer, nlIfIpSecondaryAddrRouteMetric=nlIfIpSecondaryAddrRouteMetric, portBsciOperUseEBCDIC=portBsciOperUseEBCDIC, portVoiceAdminDialTimer=portVoiceAdminDialTimer, portX25AdminFlowCtrlNeg=portX25AdminFlowCtrlNeg, portVoiceAdminLevelIn=portVoiceAdminLevelIn, nlLlc2OrigConnectionMAXDATA=nlLlc2OrigConnectionMAXDATA, portFrLLM=portFrLLM, bsciDevOperPrinterAttached=bsciDevOperPrinterAttached, portThreshRxFramesPerSec=portThreshRxFramesPerSec, portPhyX25AdminTrunkFlag=portPhyX25AdminTrunkFlag, voiceSystemDialDigits=voiceSystemDialDigits, ipxInterfaceRIPMaxSize=ipxInterfaceRIPMaxSize, portPinPort=portPinPort, frStatsRxANXDFrames=frStatsRxANXDFrames, portX25Group=portX25Group, ipxConfigRouting=ipxConfigRouting, t1StatsYellowAlarms=t1StatsYellowAlarms, nlLlc2HostN3NumberLPDUs=nlLlc2HostN3NumberLPDUs, portFrBackupRLP=portFrBackupRLP, ipxInterfaceFrameType=ipxInterfaceFrameType, nlLocalSubscriberRouteIndex=nlLocalSubscriberRouteIndex, portBsciAdminAnswerNonConfigured=portBsciAdminAnswerNonConfigured, portBsciAdminRetransmitInterval=portBsciAdminRetransmitInterval, portVoiceOperSampleDelay=portVoiceOperSampleDelay, portX25AdminCUGIndex=portX25AdminCUGIndex, portPhyX25OperTable=portPhyX25OperTable, lSSdlcAdminEntry=lSSdlcAdminEntry, portBsciOperNoResponseTimer=portBsciOperNoResponseTimer, portFrGenClock=portFrGenClock, portVoiceAdminTETimer=portVoiceAdminTETimer, t1StatsRcvErrors=t1StatsRcvErrors, nlLlc2HostRoutingSubscriberId=nlLlc2HostRoutingSubscriberId, ipxInterfaceEntry=ipxInterfaceEntry, status=status, ds0aStatsChannelIndex=ds0aStatsChannelIndex, rlpProtocol=rlpProtocol, portT1AdminBlockedPortFlag=portT1AdminBlockedPortFlag, rlpConfigTable=rlpConfigTable, portT1AdminGenerateClock=portT1AdminGenerateClock, portThreshTxPercentUtl=portThreshTxPercentUtl, 
lSSdlcOperAutoCall=lSSdlcOperAutoCall, nlLlc2OrigConnectionRemoteSubscriberId=nlLlc2OrigConnectionRemoteSubscriberId, bridgeAdminSRBID=bridgeAdminSRBID, portBsciOperMAXRetransmits=portBsciOperMAXRetransmits, ipxInterfaceSourceCard=ipxInterfaceSourceCard, nsNodTable=nsNodTable, portVoiceAdminExtDigitsSource=portVoiceAdminExtDigitsSource, portBsciOperTransTextSupported=portBsciOperTransTextSupported, x25TxINFOFrames=x25TxINFOFrames, portVoiceAdminAutoDial=portVoiceAdminAutoDial, portVoiceOperSpeed=portVoiceOperSpeed, bsciSubscrOperSequence=bsciSubscrOperSequence, portPinRlp=portPinRlp, x25RxRNRFrames=x25RxRNRFrames, portFrGroup=portFrGroup, portFrRcvClkFrmDTE=portFrRcvClkFrmDTE, portPhyX25OperDisconnectTimer=portPhyX25OperDisconnectTimer, portBsciOperConnector=portBsciOperConnector, nlIfIpBroadcastAddress=nlIfIpBroadcastAddress, portX25AdminCUGIncAccess=portX25AdminCUGIncAccess, bsciSubscrOperTable=bsciSubscrOperTable, portStatsOutFrames=portStatsOutFrames, portVoiceOperDTMFOffDuration=portVoiceOperDTMFOffDuration, portVoiceOperToneType=portVoiceOperToneType, nlIfIndex=nlIfIndex, rlpThreshTxFramesPerSec=rlpThreshTxFramesPerSec, portX25OperCUGOutAccess=portX25OperCUGOutAccess, frStatsRxFECNFrames=frStatsRxFECNFrames, ds0aStatsRcvAbortFrames=ds0aStatsRcvAbortFrames, bsciSubscrOperConnectionID=bsciSubscrOperConnectionID, portVoiceAdminCallTimer=portVoiceAdminCallTimer, voiceSpeedDialRowStatus=voiceSpeedDialRowStatus, frStatsTotDiscFrames=frStatsTotDiscFrames, portSdlcAdminRcvClockFromDTE=portSdlcAdminRcvClockFromDTE, statIp=statIp, voiceStatsCallTimeouts=voiceStatsCallTimeouts, voiceSystemCountryCode=voiceSystemCountryCode, nsMaxNeigh=nsMaxNeigh, nlIfIpType=nlIfIpType, x25RxStatsTable=x25RxStatsTable, portStatsIndex=portStatsIndex, nlInterfaces=nlInterfaces, portBsciOperSlowPollRetryFreq=portBsciOperSlowPollRetryFreq, portThreshLogRejPerSec=portThreshLogRejPerSec, nlIfIpMaxSVC=nlIfIpMaxSVC, portPinStatus=portPinStatus, portStatsInFrames=portStatsInFrames, 
x25TxSABMFrames=x25TxSABMFrames, bsciSubscrOperEntry=bsciSubscrOperEntry, lSSdlcLlc2OperLanT2=lSSdlcLlc2OperLanT2, voiceSpeedDialTable=voiceSpeedDialTable, portVoiceAdminFaxSupported=portVoiceAdminFaxSupported, portX25OperDefWinSize=portX25OperDefWinSize, lSSdlcLlc2AdminIdblk=lSSdlcLlc2AdminIdblk, ds0aStatsPortIndex=ds0aStatsPortIndex, voiceStatsRxCallsAccepts=voiceStatsRxCallsAccepts, voiceSystemVoiceNodeNum=voiceSystemVoiceNodeNum, portBsciAdminDataMode=portBsciAdminDataMode, portSdlcOperInactivityTimer=portSdlcOperInactivityTimer, ipxInterfacePortDiagEnabled=ipxInterfacePortDiagEnabled, portBsciAdminClearVCOnLastDeviceDown=portBsciAdminClearVCOnLastDeviceDown, frStatsTxFECNFrames=frStatsTxFECNFrames, ipxInterfaceWatchdogSpoofingEnabled=ipxInterfaceWatchdogSpoofingEnabled, bsciDevOperConnectionID=bsciDevOperConnectionID, portVoiceOperAutoDialNumber=portVoiceOperAutoDialNumber, portBsciOperSpeed=portBsciOperSpeed, bsciSubscrAdminAutocallMaxRtry=bsciSubscrAdminAutocallMaxRtry, portVoiceAdminInterface=portVoiceAdminInterface, pinStatusTable=pinStatusTable, nlLocalSubscriberRoutePort=nlLocalSubscriberRoutePort, bsciDevAdminDeviceUnitID=bsciDevAdminDeviceUnitID, ipxInterfacePeriodicRIPEnabled=ipxInterfacePeriodicRIPEnabled, rlpThreshPercntBufInUse=rlpThreshPercntBufInUse, nlIfIpPVCConnection=nlIfIpPVCConnection, bsciDevAdminTable=bsciDevAdminTable, lSSdlcAdminRowStatus=lSSdlcAdminRowStatus, portVoiceAdminMakeRatio=portVoiceAdminMakeRatio, rlpStatsTable=rlpStatsTable, t1StatsPCVRErrs=t1StatsPCVRErrs, portSdlcAdminPadType=portSdlcAdminPadType, portX25AdminMaxPktSize=portX25AdminMaxPktSize, nlIfIpDestSub=nlIfIpDestSub, portBsciAdminBlockedFlag=portBsciAdminBlockedFlag, x25RxREJFrames=x25RxREJFrames, voiceStatsRxClears=voiceStatsRxClears, voiceSpeedDialDigits=voiceSpeedDialDigits, portSdlcAdminMAXRetries=portSdlcAdminMAXRetries, portVoiceOperPortIndex=portVoiceOperPortIndex, ipxInterfaceSourcePort=ipxInterfaceSourcePort, 
portBsciAdminPollInterval=portBsciAdminPollInterval, snaDLC=snaDLC, portBsciAdminMAXRetransmits=portBsciAdminMAXRetransmits, ipxServConfigInterface=ipxServConfigInterface, portThreshRxPercentUtl=portThreshRxPercentUtl, portSdlcOperTable=portSdlcOperTable, bridgeAdminVirtualLANID=bridgeAdminVirtualLANID, portVoiceGroup=portVoiceGroup, lSSdlcLlc2OperPriority=lSSdlcLlc2OperPriority, tpAdrIdx=tpAdrIdx, lSSdlcLlc2AdminPriority=lSSdlcLlc2AdminPriority, t1StatsRcvChannelErrors=t1StatsRcvChannelErrors, portPhyX25OperConnector=portPhyX25OperConnector, portSdlcAdminConnector=portSdlcAdminConnector, portSdlcAdminInactivityTimer=portSdlcAdminInactivityTimer, nlLlc2OrigConnectionLocalSubscriberId=nlLlc2OrigConnectionLocalSubscriberId, nlLlc2HostTwNumberOutstanding=nlLlc2HostTwNumberOutstanding, portStatsEntry=portStatsEntry, nlIfLlc2FrEntry=nlIfLlc2FrEntry, portX25AdminCUGPref=portX25AdminCUGPref, frStatsTxDEFrames=frStatsTxDEFrames, ds0aStatsRcvOverruns=ds0aStatsRcvOverruns, voiceSpeedDialEntry=voiceSpeedDialEntry, portDLCIIndex=portDLCIIndex, nlIfLlc2FrBlockedPortFlag=nlIfLlc2FrBlockedPortFlag, portDLCIConfigTable=portDLCIConfigTable, nlLlc2HostBAG=nlLlc2HostBAG, portVoiceOperEntry=portVoiceOperEntry, portPhyX25OperRcvClockFromDTE=portPhyX25OperRcvClockFromDTE, portLogicalX25OperCxnPriority=portLogicalX25OperCxnPriority, rlpStatsFrameRetransmits=rlpStatsFrameRetransmits, portVoiceAdminHuntGroup=portVoiceAdminHuntGroup, frStatsTxANXDFrames=frStatsTxANXDFrames, bsciDevAdminSingleUserVC=bsciDevAdminSingleUserVC, portBsciOperGenerateClock=portBsciOperGenerateClock, rlpIndex=rlpIndex, portVoiceOperMakeRatio=portVoiceOperMakeRatio, portX25OperInCallBar=portX25OperInCallBar, portFrBackupWaitTimer=portFrBackupWaitTimer, statGroup=statGroup, t1StatsLCVCnt=t1StatsLCVCnt, ipxServConfigServSocketNumber=ipxServConfigServSocketNumber, ipxConfigNodeDefault=ipxConfigNodeDefault, portBsciAdminCallInfoInRequestPacket=portBsciAdminCallInfoInRequestPacket, 
portT1AdminFramingMode=portT1AdminFramingMode, portFrBackupProtEnab=portFrBackupProtEnab, lSSdlcAdminTable=lSSdlcAdminTable, portThreshTable=portThreshTable, portVoiceOperForwardedType=portVoiceOperForwardedType, nlIfIpInterface=nlIfIpInterface, nlIfIpRIPFullUpdates=nlIfIpRIPFullUpdates, nlIfLlc2FrFormat=nlIfLlc2FrFormat, lSSdlcLlc2OperEntry=lSSdlcLlc2OperEntry, sdlcLSAdminTable=sdlcLSAdminTable, portPhyX25OperTrunkFlag=portPhyX25OperTrunkFlag, portT1AdminNRZI=portT1AdminNRZI, portDLCIOutgoingCIR=portDLCIOutgoingCIR, rlpThreshRejFramesPerSec=rlpThreshRejFramesPerSec, ipxInterfaceType=ipxInterfaceType, ipxInterfacePeriodicSAPEnabled=ipxInterfacePeriodicSAPEnabled, nlLocalSubscriberRouteLP=nlLocalSubscriberRouteLP, nlLlc2OrigConnectionIDBLK=nlLlc2OrigConnectionIDBLK, lSSdlcLlc2OperLanTw=lSSdlcLlc2OperLanTw, nsNTNode=nsNTNode, x25RxDMFrames=x25RxDMFrames, portBsciGroup=portBsciGroup, ipxStaticRouteConfigRouter=ipxStaticRouteConfigRouter, portT1AdminLineBuildOut=portT1AdminLineBuildOut, nlLlc2HostTable=nlLlc2HostTable, ipxStaticRouteConfigTable=ipxStaticRouteConfigTable, bsciSubscrOperAutocall=bsciSubscrOperAutocall, voiceSystemVoiceRatesMin=voiceSystemVoiceRatesMin, portSdlcOperL1Duplex=portSdlcOperL1Duplex, portT1AdminLineEncoding=portT1AdminLineEncoding, nlLocalSubscriberRouteRowStatus=nlLocalSubscriberRouteRowStatus, portDLCIIncomingBe=portDLCIIncomingBe, frStatsRxBECNFrames=frStatsRxBECNFrames, nodeCfgTable=nodeCfgTable, portStatsFrameRetrans=portStatsFrameRetrans)
mibBuilder.exportSymbols("NETLINK-SPECIFIC-MIB", nlIfVoiceLocalNodeNumber=nlIfVoiceLocalNodeNumber, voiceSpeedDialLongDialMap=voiceSpeedDialLongDialMap, portBsciAdminGenerateClock=portBsciAdminGenerateClock, portDLCIBackupProtEnb=portDLCIBackupProtEnb, lSSdlcLlc2AdminBAG=lSSdlcLlc2AdminBAG, portFrConfigTable=portFrConfigTable, portBsciAdminErrorRetransmitCount=portBsciAdminErrorRetransmitCount, portStatsInPercentUtils=portStatsInPercentUtils, bsciDevAdminEntry=bsciDevAdminEntry, ipxInterfaceTable=ipxInterfaceTable, x25RxPortIndex=x25RxPortIndex, nlIfPort=nlIfPort, bsciDevOperControlUnitID=bsciDevOperControlUnitID, port=port, ipxNodeDefaultConfigRouterName=ipxNodeDefaultConfigRouterName, x25RxStatsEntry=x25RxStatsEntry, nlIfIpSourceAddress=nlIfIpSourceAddress, nsNeighEntry=nsNeighEntry, portLogicalX25AdminBAG=portLogicalX25AdminBAG, portVoiceAdminSampleDelay=portVoiceAdminSampleDelay, nlIfConnectorType=nlIfConnectorType, portThreshFCSErrPerSec=portThreshFCSErrPerSec, bridge=bridge, nodeModel=nodeModel, portT1OperProtocolFraming=portT1OperProtocolFraming, portVoiceOperForwardedDigits=portVoiceOperForwardedDigits, rlpGroupResponsibility=rlpGroupResponsibility, ipxInterfaceDestinationSubscriber=ipxInterfaceDestinationSubscriber, nlIfIpSecondaryAddrEntry=nlIfIpSecondaryAddrEntry, nlLlc2OrigConnectionRowStatus=nlLlc2OrigConnectionRowStatus, bridgeIPXEnabled=bridgeIPXEnabled, nlIfVoiceFrameRelayRlp=nlIfVoiceFrameRelayRlp, nlIfLlc2FrDLCI=nlIfLlc2FrDLCI, portDLCIRlpIndex=portDLCIRlpIndex, nlLlc2HostTiInactivityTimer=nlLlc2HostTiInactivityTimer, statVoice=statVoice, portVoiceOperLevelIn=portVoiceOperLevelIn, x25TxDMFrames=x25TxDMFrames, voiceSystemVoiceRatesMax=voiceSystemVoiceRatesMax, portFrBackupGroup=portFrBackupGroup, portBsciAdminFullDuplex=portBsciAdminFullDuplex, rlpStatus=rlpStatus, bsciSubscrAdminEntry=bsciSubscrAdminEntry, portSdlcOperPadType=portSdlcOperPadType, bsciDevAdminControlUnitID=bsciDevAdminControlUnitID, 
ipxInterfacePVCConnection=ipxInterfacePVCConnection, portBsciOperPadType=portBsciOperPadType, netstat=netstat, ipxServConfigServiceType=ipxServConfigServiceType, rlpStatsIndex=rlpStatsIndex, portLogicalX25OperFrDlci=portLogicalX25OperFrDlci, voiceStatsTxCongestions=voiceStatsTxCongestions, portT1AdminRowStatus=portT1AdminRowStatus, lSSdlcOperRemoteSub=lSSdlcOperRemoteSub, nlIfLlc2LANID=nlIfLlc2LANID, lSSdlcLlc2OperIdnum=lSSdlcLlc2OperIdnum, portBsciAdminEndPadChars=portBsciAdminEndPadChars, bridgeEnabled=bridgeEnabled, portVoiceOperTelephonyType=portVoiceOperTelephonyType, portBsciAdminUseEBCDIC=portBsciAdminUseEBCDIC, ds0aStatsRlpIndex=ds0aStatsRlpIndex, portX25OperCUGIncAccess=portX25OperCUGIncAccess, nlIfIpSecondaryAddrRIPSupport=nlIfIpSecondaryAddrRIPSupport, lSSdlcLlc2AdminRowStatus=lSSdlcLlc2AdminRowStatus, portFrSpeed=portFrSpeed, portBsciOperSlowPollRetryCount=portBsciOperSlowPollRetryCount, rlpLIC2Type=rlpLIC2Type, x25TxDISCFrames=x25TxDISCFrames, network=network, portBsciOperRcvClockFromDTE=portBsciOperRcvClockFromDTE, hwcard=hwcard, portVoiceAdminJitter=portVoiceAdminJitter, portThreshRtxFramesPerSec=portThreshRtxFramesPerSec, portX25OperRevChgAccpt=portX25OperRevChgAccpt, x25RxFRMRFrames=x25RxFRMRFrames)
| [
"dcwangmit01@gmail.com"
] | dcwangmit01@gmail.com |
a48bc369c0c167b51a2a6cd4319c5a7a18b0853b | 80301f1cffc5afce13256e2ecab6323c5df00194 | /en.3rd/py/T0311.py | a21106c22abe02dc3656c6ef6f6d66de672445ca | [] | no_license | ZhenjianYang/SoraVoiceScripts | c1ddf7c1bbcb933243754f9669bd6b75777c87b9 | 94a948090aba0f63b10b2c69dc845dc99c822fc4 | refs/heads/master | 2023-04-18T04:54:44.306652 | 2023-04-06T11:15:17 | 2023-04-06T11:15:17 | 103,167,541 | 43 | 11 | null | 2021-03-06T08:52:54 | 2017-09-11T17:36:55 | Python | UTF-8 | Python | false | false | 12,704 | py | from ED63RDScenarioHelper import *
def main():
SetCodePage("ms932")
# 洛连特
CreateScenaFile(
FileName = 'T0311 ._SN',
MapName = 'Rolent',
Location = 'T0311.x',
MapIndex = 1,
MapDefaultBGM = "ed60084",
Flags = 0,
EntryFunctionIndex = 0xFFFF,
Reserved = 0,
IncludedScenario = [
'',
'',
'',
'',
'',
'',
'',
''
],
)
BuildStringList(
'@FileName', # 8
'Joshua', # 9
'Target Camera', # 10
)
DeclEntryPoint(
Unknown_00 = 0,
Unknown_04 = 0,
Unknown_08 = 6000,
Unknown_0C = 4,
Unknown_0E = 0,
Unknown_10 = 0,
Unknown_14 = 9500,
Unknown_18 = -10000,
Unknown_1C = 0,
Unknown_20 = 0,
Unknown_24 = 0,
Unknown_28 = 2800,
Unknown_2C = 262,
Unknown_30 = 45,
Unknown_32 = 0,
Unknown_34 = 360,
Unknown_36 = 0,
Unknown_38 = 0,
Unknown_3A = 0,
InitScenaIndex = 0,
InitFunctionIndex = 0,
EntryScenaIndex = 0,
EntryFunctionIndex = 1,
)
AddCharChip(
'ED6_DT07/CH02750 ._CH', # 00
'ED6_DT06/CH20033 ._CH', # 01
'ED6_DT26/CH20338 ._CH', # 02
'ED6_DT26/CH20320 ._CH', # 03
'ED6_DT26/CH20787 ._CH', # 04
)
AddCharChipPat(
'ED6_DT07/CH02750P._CP', # 00
'ED6_DT06/CH20033P._CP', # 01
'ED6_DT26/CH20338P._CP', # 02
'ED6_DT26/CH20320P._CP', # 03
'ED6_DT26/CH20787P._CP', # 04
)
DeclNpc(
X = 44200,
Z = 240,
Y = 18540,
Direction = 0,
Unknown2 = 0,
Unknown3 = 1,
ChipIndex = 0x1,
NpcIndex = 0x1C5,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 0,
Z = 0,
Y = 0,
Direction = 0,
Unknown2 = 0,
Unknown3 = 0,
ChipIndex = 0x0,
NpcIndex = 0x80,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
ScpFunction(
"Function_0_112", # 00, 0
"Function_1_150", # 01, 1
"Function_2_15B", # 02, 2
"Function_3_748", # 03, 3
)
def Function_0_112(): pass
label("Function_0_112")
Jc((scpexpr(EXPR_PUSH_VALUE_INDEX, 0x42), scpexpr(EXPR_PUSH_LONG, 0x11), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_14F")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x4A0, 4)), scpexpr(EXPR_END)), "loc_13D")
OP_4F(0x1, (scpexpr(EXPR_PUSH_LONG, 0xB7), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
OP_A3(0x2504)
SetMapFlags(0x10000000)
Event(0, 3)
Jump("loc_14F")
label("loc_13D")
OP_4F(0x1, (scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
SetMapFlags(0x10000000)
Event(0, 2)
label("loc_14F")
Return()
# Function_0_112 end
def Function_1_150(): pass
label("Function_1_150")
OP_78(0x8C, 0x8C, 0xB4)
OP_82(0x80, 0x0)
OP_82(0x81, 0x0)
Return()
# Function_1_150 end
def Function_2_15B(): pass
label("Function_2_15B")
EventBegin(0x0)
FadeToDark(0, 0, -1)
OP_C4(0x0, 0x20000000)
OP_22(0x11C, 0x0, 0x64)
Sleep(10000)
OP_C4(0x0, 0x800)
SetMessageWindowPos(-1, -1, -1, -1)
AnonymousTalk( #0 op#A op#5
"\x18\x07\x00#35A#40WThe first week...\x05\x02",
)
CloseMessageWindow()
OP_56(0x0)
Sleep(1500)
OP_C4(0x1, 0x800)
OP_22(0x1A, 0x0, 0x64)
Sleep(1000)
OP_22(0x1B, 0x0, 0x64)
Sleep(1000)
OP_22(0x1A, 0x0, 0x64)
Sleep(1000)
OP_22(0x1B, 0x0, 0x64)
Sleep(1000)
OP_22(0x1A, 0x0, 0x64)
Sleep(1000)
OP_22(0x1B, 0x0, 0x64)
Sleep(1000)
OP_22(0x6, 0x0, 0x64)
Sleep(100)
OP_23(0x11C)
Sleep(1000)
Sleep(800)
SetChrName("Voice")
AnonymousTalk( #1
"\x07\x05#40WWell, hello there.\x02",
)
CloseMessageWindow()
OP_56(0x0)
SetChrName("Voice")
AnonymousTalk( #2
(
"\x07\x05#40WThere's no need to be so afraid.\x01",
"I am but a humble magician.\x02",
)
)
CloseMessageWindow()
OP_56(0x0)
SetChrName("Voice")
AnonymousTalk( #3
"\x07\x05#40WI will heal your broken heart for you.\x02",
)
CloseMessageWindow()
OP_56(0x0)
SetChrName("Voice")
AnonymousTalk( #4
(
"\x07\x05#40WProvided, of course...#500W\x01",
"#40WI am compensated.\x02",
)
)
CloseMessageWindow()
OP_56(0x0)
OP_59()
SetMessageWindowPos(72, 320, 56, 3)
OP_77(0xFF, 0xC8, 0x96, 0x0, 0x0)
SetChrPos(0x101, 8900, 0, 68780, 180)
SetChrPos(0x10, 8550, 500, 67500, 270)
SetChrSubChip(0x10, 0)
SetChrChipByIndex(0x10, 1)
ClearChrFlags(0x10, 0x80)
SetChrFlags(0x10, 0x2)
ClearChrFlags(0x10, 0x1)
OP_71(0x405, 0x0)
ExitThread()
OP_72(0x40A, 0x0)
ExitThread()
OP_6F(0xA, 60)
OP_6D(9420, 0, 68840, 0)
OP_67(0, 6000, -10000, 0)
OP_6B(3200, 0)
OP_6C(44000, 0)
OP_6E(280, 0)
OP_1D(0xB2)
Sleep(500)
def lambda_3A3():
OP_6B(3000, 5000)
ExitThread()
QueueWorkItem(0x11, 0, lambda_3A3)
def lambda_3B3():
OP_67(0, 5440, -10000, 5000)
ExitThread()
QueueWorkItem(0x11, 1, lambda_3B3)
FadeToBright(5000, 0)
OP_0D()
ChrTalk( #5
0x10,
(
"#303F#4P#40WUgh... Guh...\x02\x03",
"Ugh... #3S#20WGaaaaaah!\x02",
)
)
Sleep(500)
def lambda_411():
OP_9E(0xFE, 0xF, 0x0, 0x3E8, 0x7D0)
ExitThread()
QueueWorkItem(0x10, 3, lambda_411)
CloseMessageWindow()
ChrTalk( #6
0x101,
(
"#292F#5PJ-Joshua?! Are you okay?!\x02\x03",
"#293FOh, your temperature's gone up again!\x02",
)
)
CloseMessageWindow()
def lambda_47F():
OP_9E(0xFE, 0xF, 0x0, 0x3E8, 0x7D0)
ExitThread()
QueueWorkItem(0x10, 3, lambda_47F)
Sleep(1000)
ChrTalk( #7
0x101,
(
"#292F#5PStay still!\x02\x03",
"#295FWhere's a towel...? Where's a toweeel...?\x02",
)
)
CloseMessageWindow()
OP_8C(0x101, 270, 500)
Sleep(300)
ChrTalk( #8
0x101,
(
"#294F#5PWait a sec, okay? I'll be right back with\x01",
"some water!\x02",
)
)
CloseMessageWindow()
OP_62(0x101, 0x0, 1700, 0x28, 0x2B, 0x64, 0x0)
def lambda_548():
OP_8E(0xFE, 0x14AA, 0x0, 0x10CAC, 0x1388, 0x0)
ExitThread()
QueueWorkItem(0x101, 1, lambda_548)
WaitChrThread(0x101, 0x1)
def lambda_568():
OP_8E(0xFE, 0xBD6, 0x0, 0x10252, 0x1388, 0x0)
ExitThread()
QueueWorkItem(0x101, 1, lambda_568)
WaitChrThread(0x101, 0x1)
OP_22(0x6, 0x0, 0x64)
OP_63(0x101)
def lambda_590():
OP_8E(0xFE, 0xB7C, 0x0, 0xFBF4, 0x1388, 0x0)
ExitThread()
QueueWorkItem(0x101, 1, lambda_590)
def lambda_5AB():
OP_9F(0xFE, 0xFF, 0xFF, 0xFF, 0x0, 0x1F4)
ExitThread()
QueueWorkItem(0x101, 2, lambda_5AB)
WaitChrThread(0x101, 0x1)
def lambda_5C2():
OP_6B(2700, 10000)
ExitThread()
QueueWorkItem(0x11, 0, lambda_5C2)
def lambda_5D2():
OP_6D(9910, 0, 69150, 10000)
ExitThread()
QueueWorkItem(0x11, 1, lambda_5D2)
Sleep(3000)
ChrTalk( #9 op#A
0x10,
(
"#307F#11P#40W#26AKarin, I...\x02\x03",
"#60W#18AI'm...\x02",
)
)
CloseMessageWindow()
FadeToDark(2000, 0, -1)
OP_0D()
OP_77(0xFF, 0xFF, 0xFF, 0x0, 0x0)
Sleep(1000)
OP_C4(0x0, 0x800)
SetMessageWindowPos(-1, -1, -1, -1)
SetChrName("")
AnonymousTalk( #10
(
"\x18\x07\x0C#40WAll I could hear in my head during those days\x01",
"were the same words repeating endlessly like\x01",
"a broken record...\x02",
)
)
CloseMessageWindow()
OP_56(0x0)
SetChrName("")
AnonymousTalk( #11
(
"\x18\x07\x0C#40WYet somehow, I had no idea who was saying\x01",
"them...\x02",
)
)
CloseMessageWindow()
OP_56(0x0)
SetChrName("")
AnonymousTalk( #12
"\x18\x07\x0C#40WAll I knew was that...\x02",
)
CloseMessageWindow()
OP_56(0x0)
OP_59()
SetMessageWindowPos(72, 320, 56, 3)
Sleep(100)
OP_C4(0x1, 0x800)
SetMapFlags(0x2000000)
NewScene("ED6_DT21/T0300 ._SN", 100, 0, 0)
IdleLoop()
Return()
# Function_2_15B end
def Function_3_748(): pass
label("Function_3_748")
EventBegin(0x0)
FadeToDark(0, 0, -1)
OP_6D(147950, 0, 146040, 0)
OP_67(0, 6000, -10000, 0)
OP_6B(2900, 0)
OP_6C(45000, 0)
OP_6E(280, 0)
ClearChrFlags(0x10, 0x80)
SetChrFlags(0x10, 0x2)
ClearChrFlags(0x10, 0x1)
SetChrChipByIndex(0x10, 4)
SetChrSubChip(0x10, 0)
SetChrPos(0x10, 148030, 500, 144900, 180)
SetChrFlags(0x101, 0x4)
SetChrChipByIndex(0x101, 3)
SetChrSubChip(0x101, 0)
SetChrPos(0x101, 145600, 100, 145350, 90)
OP_6F(0x0, 15)
OP_70(0x0, 0xF)
SoundLoad(390)
FadeToBright(2000, 0)
OP_0D()
Sleep(1000)
OP_62(0x101, 0x0, 1700, 0x1C, 0x21, 0xFA, 0x0)
Sleep(3000)
SetChrSubChip(0x10, 1)
Sleep(200)
SetChrSubChip(0x10, 2)
Sleep(200)
SetChrSubChip(0x10, 10)
Sleep(500)
ChrTalk( #13
0x10,
(
"#1676F#11PI swear... Why does she do this every single\x01",
"night?\x02\x03",
"#1675FI wish she'd mind her own business...\x02",
)
)
CloseMessageWindow()
OP_59()
Fade(1000)
OP_22(0x186, 0x0, 0x64)
SetChrSubChip(0x10, 0)
SetChrChipByIndex(0x10, 0)
ClearChrFlags(0x10, 0x2)
SetChrFlags(0x10, 0x1)
SetChrPos(0x10, 146350, 0, 144240, 270)
Sleep(1000)
TurnDirection(0x10, 0x101, 400)
Sleep(300)
OP_62(0x10, 0x0, 1700, 0x18, 0x1B, 0xFA, 0x0)
Sleep(2000)
OP_63(0x10)
Sleep(300)
OP_63(0x101)
def lambda_909():
OP_8E(0xFE, 0x23A64, 0x0, 0x2350A, 0x3E8, 0x0)
ExitThread()
QueueWorkItem(0x10, 1, lambda_909)
WaitChrThread(0x10, 0x1)
Sleep(500)
FadeToDark(1000, 0, -1)
OP_0D()
Sleep(500)
OP_22(0x186, 0x0, 0x64)
SetChrSubChip(0x101, 0)
SetChrChipByIndex(0x101, 2)
SetChrFlags(0x101, 0x2)
SetChrFlags(0x101, 0x4)
SetChrFlags(0x101, 0x40)
ClearChrFlags(0x101, 0x1)
SetChrPos(0x101, 147790, 350, 145320, 270)
SetChrPos(0x10, 146220, 0, 144900, 90)
OP_62(0x101, 0xFFFFFED4, 1300, 0x1C, 0x21, 0xFA, 0x0)
Sleep(500)
FadeToBright(1000, 0)
OP_0D()
Sleep(1000)
OP_8C(0x10, 225, 400)
def lambda_9B0():
OP_8E(0xFE, 0x232D0, 0x0, 0x22876, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x10, 1, lambda_9B0)
WaitChrThread(0x10, 0x1)
OP_8C(0x10, 180, 500)
OP_22(0x6, 0x0, 0x64)
Sleep(500)
def lambda_9E1():
OP_8E(0xFE, 0x23258, 0x0, 0x220F6, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x10, 1, lambda_9E1)
def lambda_9FC():
OP_9F(0xFE, 0xFF, 0xFF, 0xFF, 0x0, 0x1F4)
ExitThread()
QueueWorkItem(0x10, 2, lambda_9FC)
FadeToDark(2000, 0, -1)
Sleep(1000)
OP_22(0x7, 0x0, 0x64)
OP_0D()
OP_A2(0x2504)
NewScene("ED6_DT21/T0301 ._SN", 100, 0, 0)
IdleLoop()
Return()
# Function_3_748 end
SaveToFile()
Try(main)
| [
"ZJ.Yang@qq.com"
] | ZJ.Yang@qq.com |
0cca402c23bbae04f908bfa3169dc41f789604ba | bcb877f449d74a1b30ab1e4b1b04c5091ad48c20 | /2020/day23.py | 65bc5c70c4f3c9dabb591c2ce7a23d15ef8cb56f | [
"MIT"
] | permissive | dimkarakostas/advent-of-code | c8366b9914e942cc4b4ec451713fae296314296a | 9c10ab540dc00b6356e25ac28351a3cefbe98300 | refs/heads/master | 2022-12-10T08:46:35.130563 | 2022-12-08T11:14:48 | 2022-12-08T11:14:48 | 160,544,569 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,712 | py | from collections import deque
problem_input = '463528179'
inp = deque([int(i) for i in problem_input])
curr = inp[0]
for move in range(100):
picked = []
for _ in range(3):
picked.append(inp[(inp.index(curr) + 1) % len(inp)])
inp.remove(picked[-1])
dest = curr - 1
while dest not in inp:
dest -= 1
if dest <= 0:
dest = max(inp)
if inp[-1] == dest:
for i in range(3):
inp.append(picked[i])
else:
dest_idx = inp.index(dest)
for i in range(2, -1, -1):
inp.insert(dest_idx+1, picked[i])
curr = inp[(inp.index(curr) + 1) % len(inp)]
outp = []
idx = inp.index(1) + 1
for i in range(len(inp) - 1):
outp.append(str(inp[(idx + i) % len(inp)]))
print('Part 1:', ''.join(outp))
class Node:
def __init__(self, val):
self.val = val
self.next = None
inp = list([int(i) for i in problem_input])
nodes = [Node(i) for i in range(1000001)]
for idx, item in enumerate(inp[:-1]):
nodes[item].next = nodes[inp[idx + 1]]
nodes[inp[-1]].next = nodes[max(inp) + 1]
for item in range(max(inp)+1, len(nodes)-1):
nodes[item].next = nodes[item+1]
nodes[len(nodes)-1].next = nodes[inp[0]]
curr = nodes[inp[0]]
for _ in range(10000000):
picked = [curr.next, curr.next.next, curr.next.next.next]
picked_vals = {i.val for i in picked}
curr.next = picked[-1].next
dest = curr.val - 1
while dest in picked_vals or dest == 0:
dest -= 1
if dest <= 0:
dest = len(nodes) - 1
picked[-1].next = nodes[dest].next
nodes[dest].next = picked[0]
curr = curr.next
print('Part 2:', nodes[1].next.val * nodes[1].next.next.val)
| [
"dimit.karakostas@gmail.com"
] | dimit.karakostas@gmail.com |
a90f9ca2409ed2a986641dc81ca62b4b7043b79d | 55c250525bd7198ac905b1f2f86d16a44f73e03a | /Python/Projects/sphinx/util/nodes.py | 40e49021c00292298f6c0fed6211e82b2d78ffca | [
"LicenseRef-scancode-other-permissive",
"BSD-3-Clause"
] | permissive | NateWeiler/Resources | 213d18ba86f7cc9d845741b8571b9e2c2c6be916 | bd4a8a82a3e83a381c97d19e5df42cbababfc66c | refs/heads/master | 2023-09-03T17:50:31.937137 | 2023-08-28T23:50:57 | 2023-08-28T23:50:57 | 267,368,545 | 2 | 1 | null | 2022-09-08T15:20:18 | 2020-05-27T16:18:17 | null | UTF-8 | Python | false | false | 130 | py | version https://git-lfs.github.com/spec/v1
oid sha256:5cbc9876159b9a3355e26db5ad2e756f478d71524439897687e9546b84674d14
size 22333
| [
"nateweiler84@gmail.com"
] | nateweiler84@gmail.com |
d9078b9975bba2f2a10fc34ce38bdc837fae2c0b | 0b134572e3ac3903ebb44df6d4138cbab9d3327c | /app/tests/cases_tests/test_widget.py | 59f149277f7d70ad19567ee2ac62dcb65f700d75 | [
"Apache-2.0"
] | permissive | comic/grand-challenge.org | 660de3bafaf8f4560317f1dfd9ae9585ec272896 | dac25f93b395974b32ba2a8a5f9e19b84b49e09d | refs/heads/main | 2023-09-01T15:57:14.790244 | 2023-08-31T14:23:04 | 2023-08-31T14:23:04 | 4,557,968 | 135 | 53 | Apache-2.0 | 2023-09-14T13:41:03 | 2012-06-05T09:26:39 | Python | UTF-8 | Python | false | false | 5,179 | py | import pytest
from django.core.exceptions import ValidationError
from guardian.shortcuts import assign_perm
from grandchallenge.cases.widgets import FlexibleImageField, WidgetChoices
from grandchallenge.components.models import ComponentInterface
from grandchallenge.core.guardian import get_objects_for_user
from grandchallenge.uploads.models import UserUpload
from tests.components_tests.factories import ComponentInterfaceFactory
from tests.factories import ImageFactory, UserFactory
from tests.uploads_tests.factories import UserUploadFactory
from tests.utils import get_view_for_user
@pytest.mark.django_db
def test_flexible_image_field_validation():
user = UserFactory()
upload1 = UserUploadFactory(creator=user)
upload2 = UserUploadFactory()
im1, im2 = ImageFactory.create_batch(2)
assign_perm("cases.view_image", user, im1)
ci = ComponentInterfaceFactory(kind=ComponentInterface.Kind.IMAGE)
field = FlexibleImageField(
image_queryset=get_objects_for_user(user, "cases.view_image"),
upload_queryset=UserUpload.objects.filter(creator=user).all(),
)
parsed_value_for_empty_data = field.widget.value_from_datadict(
data={}, name=ci.slug, files={}
)
decompressed_value_for_missing_value = field.widget.decompress(value=None)
assert not parsed_value_for_empty_data
assert decompressed_value_for_missing_value == [None, None]
parsed_value_for_image_with_permission = field.widget.value_from_datadict(
data={ci.slug: im1.pk}, name=ci.slug, files={}
)
decompressed_value_for_image_with_permission = field.widget.decompress(
im1.pk
)
assert (
parsed_value_for_image_with_permission
== decompressed_value_for_image_with_permission
== [
im1.pk,
None,
]
)
assert field.clean(parsed_value_for_image_with_permission) == im1
parsed_value_for_image_without_permission = (
field.widget.value_from_datadict(
data={ci.slug: im2.pk}, name=ci.slug, files={}
)
)
decompressed_value_for_image_without_permission = field.widget.decompress(
im2.pk
)
assert (
parsed_value_for_image_without_permission
== decompressed_value_for_image_without_permission
== [im2.pk, None]
)
with pytest.raises(ValidationError):
field.clean(parsed_value_for_image_without_permission)
parsed_value_for_upload_from_user = field.widget.value_from_datadict(
data={ci.slug: str(upload1.pk)}, name=ci.slug, files={}
)
decompressed_value_for_upload_from_user = field.widget.decompress(
str(upload1.pk)
)
assert (
parsed_value_for_upload_from_user
== decompressed_value_for_upload_from_user
== [None, [str(upload1.pk)]]
)
assert field.clean(parsed_value_for_upload_from_user).get() == upload1
parsed_value_from_upload_from_other_user = (
field.widget.value_from_datadict(
data={ci.slug: str(upload2.pk)}, name=ci.slug, files={}
)
)
decompressed_value_for_upload_from_other_user = field.widget.decompress(
str(upload2.pk)
)
assert (
parsed_value_from_upload_from_other_user
== decompressed_value_for_upload_from_other_user
== [None, [str(upload2.pk)]]
)
with pytest.raises(ValidationError):
field.clean(parsed_value_from_upload_from_other_user)
parsed_value_for_missing_value = field.widget.value_from_datadict(
data={ci.slug: "IMAGE_UPLOAD"}, name=ci.slug, files={}
)
decompressed_value_for_missing_value = field.widget.decompress(
"IMAGE_UPLOAD"
)
assert (
parsed_value_for_missing_value
== decompressed_value_for_missing_value
== [None, None]
)
with pytest.raises(ValidationError):
field.clean(parsed_value_for_missing_value)
@pytest.mark.django_db
def test_flexible_image_widget(client):
user = UserFactory()
ci = ComponentInterfaceFactory(kind=ComponentInterface.Kind.IMAGE)
response = get_view_for_user(
viewname="cases:select-image-widget",
client=client,
user=user,
data={
f"WidgetChoice-{ci.slug}": WidgetChoices.IMAGE_SEARCH.name,
"interface_slug": ci.slug,
},
)
assert '<input class="form-control" type="search"' in str(response.content)
response2 = get_view_for_user(
viewname="cases:select-image-widget",
client=client,
user=user,
data={
f"WidgetChoice-{ci.slug}": WidgetChoices.IMAGE_UPLOAD.name,
"interface_slug": ci.slug,
},
)
assert 'class="user-upload"' in str(response2.content)
response3 = get_view_for_user(
viewname="cases:select-image-widget",
client=client,
user=user,
data={
f"WidgetChoice-{ci.slug}": WidgetChoices.UNDEFINED.name,
"interface_slug": ci.slug,
},
)
assert response3.content == b""
| [
"noreply@github.com"
] | comic.noreply@github.com |
7ffa8e50447a97f8cbf4119dc67d8bb4a3c4aa2a | 82507189fa8aa54eec7151f9a246859052b5b30e | /dj_ajax_todo/urls.py | 8a79d2f91f6bdb27bab536426e9837e965d2f1e0 | [] | no_license | nouhben/django-ajax-todo | 96a02f21f53efcc1f8a9d32fcee3c500bcb89e74 | 00cc03db7ed12cee14b91618714b5c78429da77c | refs/heads/master | 2023-08-17T07:45:34.146971 | 2021-03-19T15:07:27 | 2021-03-19T15:07:27 | 266,007,953 | 0 | 0 | null | 2021-09-22T19:04:21 | 2020-05-22T03:20:14 | Python | UTF-8 | Python | false | false | 201 | py | from django.contrib import admin
from django.urls import path,include
urlpatterns = [
path('',include('todo.urls')),
path('todos/',include('todo.urls')),
path('admin/', admin.site.urls),
]
| [
"benkadi.nouh@icloud.com"
] | benkadi.nouh@icloud.com |
418630fcf5668a3eb772b0b802c176aaa1163c3f | 3b1efdd0aacc98738f3b8b9ee09c6ff59cccc14e | /ietf/sync/iana.py | 972f2dda680db38cfeb26184b86083d96d3de94c | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | unofficial-mirror/ietfdb | 15beb6bf17b1d4abb257ee656ac6b7488339d331 | ce54adb30dc7299c6eb4d42b9aa9d2c2929c1a81 | refs/heads/master | 2020-08-06T17:24:13.966746 | 2019-10-04T20:54:05 | 2019-10-04T20:54:05 | 213,088,920 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,343 | py | # Copyright The IETF Trust 2012-2019, All Rights Reserved
# -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, unicode_literals
import base64
import datetime
import email
import json
import re
from six.moves.urllib.request import Request, urlopen
from django.conf import settings
from django.utils.encoding import force_str
from django.utils.http import urlquote
import debug # pyflakes:ignore
from ietf.doc.mails import email_state_changed
from ietf.doc.models import Document, DocEvent, State, StateDocEvent, StateType
from ietf.doc.utils import add_state_change_event
from ietf.person.models import Person
from ietf.utils.mail import parseaddr
from ietf.utils.timezone import local_timezone_to_utc, email_time_to_local_timezone, utc_to_local_timezone
#PROTOCOLS_URL = "https://www.iana.org/protocols/"
#CHANGES_URL = "https://datatracker.dev.icann.org:8080/data-tracker/changes"
def fetch_protocol_page(url):
f = urlopen(settings.IANA_SYNC_PROTOCOLS_URL)
text = f.read()
f.close()
return text
def parse_protocol_page(text):
"""Parse IANA protocols page to extract referenced RFCs (as
rfcXXXX document names)."""
matches = re.findall('RFC [0-9]+', text)
res = set()
for m in matches:
res.add("rfc" + m[len("RFC "):])
return list(res)
def update_rfc_log_from_protocol_page(rfc_names, rfc_must_published_later_than):
"""Add notices to RFC history log that IANA is now referencing the RFC."""
system = Person.objects.get(name="(System)")
updated = []
docs = Document.objects.filter(docalias__name__in=rfc_names).exclude(
docevent__type="rfc_in_iana_registry").filter(
# only take those that were published after cutoff since we
# have a big bunch of old RFCs that we unfortunately don't have data for
docevent__type="published_rfc", docevent__time__gte=rfc_must_published_later_than
).distinct()
for d in docs:
e = DocEvent(doc=d, rev=d.rev)
e.by = system
e.type = "rfc_in_iana_registry"
e.desc = "IANA registries were updated to include %s" % d.display_name()
e.save()
updated.append(d)
return updated
def fetch_changes_json(url, start, end):
url += "?start=%s&end=%s" % (urlquote(local_timezone_to_utc(start).strftime("%Y-%m-%d %H:%M:%S")),
urlquote(local_timezone_to_utc(end).strftime("%Y-%m-%d %H:%M:%S")))
request = Request(url)
# HTTP basic auth
username = "ietfsync"
password = settings.IANA_SYNC_PASSWORD
request.add_header("Authorization", "Basic %s" % base64.encodestring("%s:%s" % (username, password)).replace("\n", ""))
f = urlopen(request)
text = f.read()
f.close()
return text
def parse_changes_json(text):
response = json.loads(text)
if "error" in response:
raise Exception("IANA server returned error: %s" % response["error"])
changes = response["changes"]
# do some rudimentary validation
for i in changes:
for f in ['doc', 'type', 'time']:
if f not in i:
raise Exception('Error in response: Field %s missing in input: %s - %s' % (f, json.dumps(i), json.dumps(changes)))
# a little bit of cleaning
i["doc"] = i["doc"].strip()
if i["doc"].startswith("https://www.ietf.org/internet-drafts/"):
i["doc"] = i["doc"][len("https://www.ietf.org/internet-drafts/"):]
# make sure we process oldest entries first
changes.sort(key=lambda c: c["time"])
return changes
def update_history_with_changes(changes, send_email=True):
"""Take parsed changes from IANA and apply them. Note that we
expect to get these chronologically sorted, otherwise the change
descriptions generated may not be right."""
# build up state lookup
states = {}
slookup = dict((s.slug, s)
for s in State.objects.filter(used=True, type=StateType.objects.get(slug="draft-iana-action")))
states["action"] = {
"": slookup["newdoc"],
"In Progress": slookup["inprog"],
"Open": slookup["inprog"],
"pre-approval In Progress": slookup["inprog"],
"Waiting on Authors": slookup["waitauth"],
"Author": slookup["waitauth"],
"Waiting on ADs": slookup["waitad"],
"Waiting on AD": slookup["waitad"],
"AD": slookup["waitad"],
"Waiting on WGC": slookup["waitwgc"],
"WGC": slookup["waitwgc"],
"Waiting on RFC-Editor": slookup["waitrfc"],
"Waiting on RFC Editor": slookup["waitrfc"],
"RFC-Editor": slookup["waitrfc"],
"RFC-Ed-ACK": slookup["rfcedack"],
"RFC-Editor-ACK": slookup["rfcedack"],
"Completed": slookup["rfcedack"],
"On Hold": slookup["onhold"],
"No IC": slookup["noic"],
}
slookup = dict((s.slug, s)
for s in State.objects.filter(used=True, type=StateType.objects.get(slug="draft-iana-review")))
states["review"] = {
"IANA Review Needed": slookup["need-rev"],
"IANA - Review Needed": slookup["need-rev"],
"IANA OK - Actions Needed": slookup["ok-act"],
"IANA OK - No Actions Needed": slookup["ok-noact"],
"IANA Not OK": slookup["not-ok"],
"IANA - Not OK": slookup["not-ok"],
"Version Changed - Review Needed": slookup["changed"],
}
# so it turns out IANA has made a mistake and are including some
# wrong states, we'll have to skip those
wrong_action_states = ("Waiting on Reviewer", "Review Complete", "Last Call",
"Last Call - Questions", "Evaluation", "Evaluation - Questions",
"With Reviewer", "IESG Notification Received", "Watiing on Last Call",
"IANA Comments Submitted", "Waiting on Last Call")
system = Person.objects.get(name="(System)")
added_events = []
warnings = []
for c in changes:
docname = c['doc']
timestamp = datetime.datetime.strptime(c["time"], "%Y-%m-%d %H:%M:%S")
timestamp = utc_to_local_timezone(timestamp) # timestamps are in UTC
if c['type'] in ("iana_state", "iana_review"):
if c['type'] == "iana_state":
kind = "action"
if c["state"] in wrong_action_states:
warnings.append("Wrong action state '%s' encountered in changes from IANA" % c["state"])
continue
else:
kind = "review"
if c["state"] not in states[kind]:
warnings.append("Unknown IANA %s state %s (%s)" % (kind, c["state"], timestamp))
continue
state = states[kind][c["state"]]
state_type = "draft-iana-%s" % kind
if state.slug in ("need-rev", "changed"):
# the Datatracker is the ultimate source of these
# states, so skip them
continue
e = StateDocEvent.objects.filter(type="changed_state", time=timestamp,
state_type=state_type, state=state)
if not e:
try:
doc = Document.objects.get(docalias__name=docname)
except Document.DoesNotExist:
warnings.append("Document %s not found" % docname)
continue
# the naive way of extracting prev_state here means
# that we assume these changes are cronologically
# applied
prev_state = doc.get_state(state_type)
e = add_state_change_event(doc, system, prev_state, state, timestamp=timestamp)
if e:
# for logging purposes
e.json = c
added_events.append(e)
if not StateDocEvent.objects.filter(doc=doc, time__gt=timestamp, state_type=state_type):
doc.set_state(state)
if e:
doc.save_with_history([e])
if send_email and (state != prev_state):
email_state_changed(None, doc, "IANA %s state changed to \"%s\"" % (kind, state.name),'doc_iana_state_changed')
return added_events, warnings
def find_document_name(text):
prefixes = ['draft','conflict-review','status-change','charter']
leading_delimiter_re = r'(?<![-a-zA-Z0-9])'
prefix_re = r'(%s)' % '|'.join(prefixes)
tail_re = r'(-[a-z0-9]+)+?(-\d\d\.txt)?'
trailing_delimiter_re = r'((?![-a-zA-Z0-9])|$)'
name_re = r'%s(%s%s)%s' % (leading_delimiter_re, prefix_re, tail_re, trailing_delimiter_re)
m = re.search(name_re,text)
return m and m.group(0).lower()
def strip_version_extension(text):
if re.search(r"\.\w{3}$", text): # strip off extension
text = text[:-4]
if re.search(r"-\d{2}$", text): # strip off revision
text = text[:-3]
return text
def parse_review_email(text):
msg = email.message_from_string(force_str(text))
# doc
doc_name = find_document_name(msg["Subject"]) or ""
doc_name = strip_version_extension(doc_name)
# date
review_time = datetime.datetime.now()
if "Date" in msg:
review_time = email_time_to_local_timezone(msg["Date"])
# by
by = None
name, __ = parseaddr(msg["From"])
if name.endswith(" via RT"):
name = name[:-len(" via RT")]
try:
by = Person.objects.get(alias__name=name, role__group__acronym="iana")
except Person.DoesNotExist:
pass
if not by:
by = Person.objects.get(name="(System)")
# comment
charset = msg.get_content_charset()
body = msg.get_payload(decode=True).decode(charset or 'utf-8').replace("\r", "")
begin_search = re.search(r'\(BEGIN\s+IANA\s+(LAST\s+CALL\s+)?COMMENTS?(\s*:\s*[a-zA-Z0-9-\.]*)?\s*\)',body)
end_search = re.search(r'\(END\s+IANA\s+(LAST\s+CALL\s+)?COMMENTS?\)',body)
if begin_search and end_search:
begin_string = begin_search.group(0)
end_string = end_search.group(0)
b = body.find(begin_string)
e = body.find(end_string)
comment = body[b + len(begin_string):e].strip()
embedded_name = strip_version_extension(find_document_name(begin_string) or "")
if embedded_name:
doc_name = embedded_name
else:
comment = ""
# strip leading IESG:
if comment.startswith("IESG:"):
comment = comment[len("IESG:"):].lstrip()
# strip ending Thanks, followed by signature
m = re.compile(r"^Thanks,\n\n", re.MULTILINE).search(comment)
if m:
comment = comment[:m.start()].rstrip()
m = re.search(r"<(.*)>", msg["From"])
if m:
comment = '(Via %s): %s' % ( m.group(1).strip() , comment )
return doc_name, review_time, by, comment
def add_review_comment(doc_name, review_time, by, comment):
if comment:
try:
e = DocEvent.objects.get(doc__name=doc_name, time=review_time, type="iana_review")
except DocEvent.DoesNotExist:
doc = Document.objects.get(name=doc_name)
e = DocEvent(doc=doc, rev=doc.rev, time=review_time, type="iana_review")
e.desc = comment
e.by = by
e.save()
| [
"henrik@levkowetz.com"
] | henrik@levkowetz.com |
f3f172e7faa2052659c93cf8e54e19741ae5832d | fde77c384d1bd94b5c36f885a18411acd10b7719 | /Code_back/Curve_Functions.py | 18eec2758f60ae82ffdd0563ac7d0cfab3ee373e | [] | no_license | liprin1129/Project | 599654fa74948dfa9b2f26facb747a77b3187876 | b99acd00142985fe6e0d639026d87b2b4f6c4498 | refs/heads/master | 2020-04-06T07:05:51.345801 | 2016-09-06T12:43:44 | 2016-09-06T12:43:44 | 65,019,854 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 36,058 | py | import numpy as np
# from scipy.optimize import curve_fit
from scipy.optimize import least_squares
import matplotlib.pyplot as plt
#import DC_Pickle as dcp
############################################
## Polynomial Functioin
############################################
def polynomial_curve(x, w0, w1):
    """First-order polynomial (straight line): w0 + w1 * x."""
    return w1 * x + w0
def polynomial_least(w, x, y):
    """Residuals of the linear model w[0] + w[1]*x against observations y.

    Shaped for scipy.optimize.least_squares: parameters first, data after.
    """
    intercept, slope = w[0], w[1]
    return (intercept + slope * x) - y
############################################
## Exponential Functioin
############################################
def exponential_curve2(x, a, b):
    """Two-parameter exponential model: a * e^(b*x)."""
    growth = np.exp(b * x)
    return a * growth
def exponential_curve3(x, a, b, c):
    """Three-parameter decaying exponential: a * e^(-b*x) + c."""
    decay = np.exp(-b * x)
    return c + a * decay
def exponential_curve4(x, a, b, c, d):
    """Horizontally shifted decaying exponential: a * e^(-b*(x+d)) + c."""
    shifted = x + d
    return a * np.exp(-b * shifted) + c
def exponential_least2(w, x, y):
    """Residuals of the 2-parameter exponential model w[0]*e^(w[1]*x) vs y."""
    prediction = w[0] * np.exp(w[1] * x)
    return prediction - y
def exponential_least3(w, x, y):
    """Residuals of the 3-parameter decay w[0]*e^(-w[1]*x) + w[2] vs y."""
    prediction = w[0] * np.exp(w[1] * -x) + w[2]
    return prediction - y
def exponential_least4(w, x, y):
    """Residuals of the shifted decay w[0]*e^(-w[1]*(x+w[3])) + w[2] vs y."""
    prediction = w[0] * np.exp(w[1] * -(x + w[3])) + w[2]
    return prediction - y
############################################
## Powerlaw Functioin
############################################
def powerlaw_curve2(x, w0, w1):
    """Two-parameter negative power law: -w0 * x^(-w1)."""
    return -w0 * np.power(x, -w1)
def powerlaw_curve3(x, w0, w1, w2):
    """Negative power law with vertical offset: -w0 * x^(-w1) + w2."""
    return w2 - w0 * np.power(x, -w1)
def powerlaw_curve4(x, w0, w1, w2, w3):
    """Shifted negative power law: -w0 * (x+w3)^(-w1) + w2."""
    base = x + w3
    return w2 - w0 * np.power(base, -w1)
def powerlaw_least2(w, x, y):
    """Residuals of the 2-parameter power law -w[0]*x^(-w[1]) vs y."""
    prediction = -w[0] * x ** (-w[1])
    return prediction - y
def powerlaw_least3(w, x, y):
    """Residuals of the offset power law -w[0]*x^(-w[1]) + w[2] vs y."""
    prediction = -w[0] * x ** (-w[1]) + w[2]
    return prediction - y
def powerlaw_least4(w, x, y):
    """Residuals of the shifted power law -w[0]*(x+w[3])^(-w[1]) + w[2] vs y."""
    prediction = -w[0] * (x + w[3]) ** (-w[1]) + w[2]
    return prediction - y
############################################
## About single curve fitting
############################################
def cost_Function(true_y, pred_y):
    """Half the sum of squared errors between observed and predicted values."""
    squared_errors = np.square(true_y - pred_y)
    return np.sum(squared_errors) / 2
def curve_Fitting(least_func, curve_func, x, y, seed, file_path, clt_num):
    """Fit ``curve_func`` to (x, y) via least squares and save a diagnostic plot.

    Parameters
    ----------
    least_func : callable
        Residual function ``least_func(w, x, y)`` handed to
        ``scipy.optimize.least_squares``.
    curve_func : callable
        Model ``curve_func(x, *params)`` used to draw the fitted curve.
    x, y : array-like
        Observed data points (y is the average score per attempt).
    seed : sequence of float
        Initial guess; its length must match the number of model parameters.
    file_path : str
        Path where the figure is written (PNG, 100 dpi).
    clt_num : int
        Cluster number, used only in the plot title.

    Returns
    -------
    (params, cost) : (ndarray, float)
        Optimised parameter vector and the final residual cost.
    """
    fig, ax = plt.subplots(1, 1, figsize=(6, 4))

    # Dense grid over the plotting range for a smooth fitted curve.
    # NOTE(review): starts at 0, which assumes curve_func is defined there
    # (the power-law models are not) -- confirm against callers.
    x_fit = np.linspace(0, 16, 100)

    # One fit plus argument unpacking replaces the former if/elif chain on
    # len(seed), which duplicated the fit call and left y_mean/cost unbound
    # (NameError) for any seed longer than 4 parameters.
    lsq = least_squares(least_func, seed, args=(x, y))
    y_mean = curve_func(x_fit, *lsq.x)
    cost = lsq.cost

    print(" - Curve Fitting Parameters: {0}".format(lsq.x))
    print(" - Curve Fitting Cost: {0}\n".format(cost))

    ax.plot(x, y, 'rx', label="average score")
    ax.plot(x_fit, y_mean, 'b-', label="curve fitting")

    ax.set_ylim([0, max(y)+0.2])
    ax.legend(fontsize=14)
    ax.set_title("Cluster {0} (Cost {1})".format(clt_num, round(cost, 2)))

    fig.savefig(file_path, dpi=100)
    return lsq.x, cost
############################################
## About multipe curve fitting
############################################
'''
def multi_curveFitting_2(least_func, avg, seed, min_range=5):
cost = []
#param1 = np.ones((n_param, 300))
#param2 = np.ones((n_param, 300))
x_range = np.linspace(1, 300, 300)
for n in range( int(300/min_range) - 1) : # iteration for all data
# print("iteration ", n)
x1 = x_range[:min_range*(n+1)]
x2 = x_range[min_range*(n+1):]
#print('\n\n - x1:', x1)
#print(' - x2:', x2)
y1 = avg[:min_range*(n+1)]
y2 = avg[min_range*(n+1):]
lsq1 = least_squares(least_func, seed, args=(x1, y1))
lsq2 = least_squares(least_func, seed, args=(x2, y2))
#param1[:, n] = lsq1.x
#param2[:, n] = lsq2.x
cost.append(lsq1.cost+lsq2.cost)
idx = np.argmin(cost)
return min_range*(idx+1)#, param1[:, idx], param2[:, idx]
def multi_curveFitting_3(least_func, avg, seed, min_range=5):
cost = []
break_point2 = []
#idx_mid2 = [] # save idx2(second change)
x_range = np.linspace(1, 300, 300)
end1 = 0
end2 = 0
first_idx = []
for n in range(int(300/min_range) - 2): # iteration for all data
# print("\n - iter{0}".format(n))
x_idx = 0
if x_idx == 0:
end1 = min_range*(n+1) # caculate the first range limit
x1 = x_range[:end1]
y1 = avg[:end1]
lsq1 = least_squares(least_func, seed, args=(x1, y1))
#print('x1', x1)
first_idx.append(end1)
second_idx = []
second_cost = []
for j in range(int( (300-(min_range*(n+2))) / min_range) ): # iteration for 2nd and 3rd x_range
# print("iter {0}-{1}".format(n, j))
end2 = min_range*(j+1) + end1 # caculate the second range limit
x2 = x_range[end1:end2]
y2 = avg[end1:end2]
lsq2 = least_squares(least_func, seed, args=(x2, y2))
#print('x2', x2)
x3 = x_range[end2:]
y3 = avg[end2:]
lsq3 = least_squares(least_func, seed, args=(x3, y3))
#print('x3', x3)
second_idx.append(end2) # save 2nd break points
second_cost.append(lsq1.cost + lsq2.cost + lsq3.cost) # save costs
break_point2.append(second_idx[np.argmin(second_cost)]) # get index where cost of remained x_ranges is minimum
cost.append(second_cost) # save costs
point1 = np.argmin(cost) # get array index of cost is minimum
point2 = break_point2[point1] # get index of 2nd break point
return min_range*(point1+1), point2
def multi_curveFitting_4(least_func, avg, seed, min_range=5):
cost = []
break_point2 = []
#idx_mid2 = [] # save idx2(second change)
x_range = np.linspace(1, 300, 300)
end1 = 0
end2 = 0
first_idx = []
for n in range(int(300/min_range) - 2): # iteration for all data
# print("\n - iter{0}".format(n))
x_idx = 0
if x_idx == 0:
end1 = min_range*(n+1) # caculate the first range limit
x1 = x_range[:end1]
y1 = avg[:end1]
lsq1 = least_squares(least_func, seed, args=(x1, y1))
#print('x1', x1)
first_idx.append(end1)
second_idx = []
second_cost = []
for j in range(int( (300-(min_range*(n+2))) / min_range) ): # iteration for 2nd and 3rd x_range
# print("iter {0}-{1}".format(n, j))
end2 = min_range*(j+1) + end1 # caculate the second range limit
x2 = x_range[end1:end2]
y2 = avg[end1:end2]
lsq2 = least_squares(least_func, seed, args=(x2, y2))
#print('x2', x2)
x3 = x_range[end2:]
y3 = avg[end2:]
lsq3 = least_squares(least_func, seed, args=(x3, y3))
#print('x3', x3)
second_idx.append(end2) # save 2nd break points
second_cost.append(lsq1.cost + lsq2.cost + lsq3.cost) # save costs
break_point2.append(second_idx[np.argmin(second_cost)]) # get index where cost of remained x_ranges is minimum
cost.append(second_cost) # save costs
point1 = np.argmin(cost) # get array index of cost is minimum
point2 = break_point2[point1] # get index of 2nd break point
return min_range*(point1+1), point2
'''
def curve_Matrix(y_data, least_func, seed=[1,1], window=10, piece=4):
    """Build candidate-segment matrices for piecewise least-squares fitting.

    Splits a fixed 300-sample x range (1..300) into `piece` contiguous pieces
    and, sliding each piece boundary in steps of `window` samples, fits
    `least_func` (a scipy.optimize.least_squares residual function) to every
    candidate segment.

    Returns (idx_matrix, err_matrix, len_matrix), each of shape
    (piece, count+1): per candidate segment its start x-value, its
    least-squares cost, and its length.

    NOTE(review): `seed` is a mutable default argument; it is only passed
    through to least_squares here, but replacing with `seed=None` + in-body
    default would be safer.
    NOTE(review): the dynamic variables x0..x{piece-1} are created by writing
    into locals() and read back via eval() -- this works in CPython (< 3.13)
    only because the extra keys survive frame-locals refreshes; a plain dict
    would be portable.
    """
    ## set initial x ranges
    x_range = np.linspace(1, 300, 300)
    y_range = y_data
    # Seed each piece i with the window [i*window, (i+1)*window); the last
    # piece takes everything that remains.
    for i in range(piece):
        if i < piece-1:
            locals()["x{0}".format(i)] = x_range[i*window:window*(i+1)]
            locals()["y{0}".format(i)] = y_range[i*window:window*(i+1)]
        else:
            locals()["x{0}".format(i)] = x_range[i*window:]
            locals()["y{0}".format(i)] = y_range[i*window:]
        # print("x:[{0}, {1}]".format(eval("x{0}".format(i))[0], eval("x{0}".format(i))[-1]))
        # print("y:[{0}, {1}]".format(eval("y{0}".format(i))[0], eval("y{0}".format(i))[-1]))
    ## make matrix
    # count+1 = number of window-sized shifts that fit in the last piece;
    # this fixes the column count of all three output matrices.
    count = 0
    mat_iter = len(eval("x{0}".format(piece-1)))
    while(mat_iter>window):
        mat_iter = mat_iter-window
        #print(mat_iter)
        count = count+1
    err_matrix = np.zeros([piece, count+1])
    idx_matrix = np.zeros([piece, count+1])
    len_matrix = np.zeros([piece, count+1])
    #print(np.shape(err_matrix))
    ## change window for pieces except first pieces
    # Walk from the last piece toward the front, shrinking each piece from its
    # left edge by `window` per step and recording a fit per position.
    for group in range(piece-1):
        #print("\n\n - pieace ", piece-group-1)
        #print(" - x{0}:\n".format(piece-group-1), "[{0}, {1}]".format(eval("x{0}".format(piece-group-1))[0], eval("x{0}".format(piece-group-1))[-1]))
        partition = 0
        ## save matrix
        lsq = least_squares(least_func, seed, args=(eval("x{0}".format(piece-group-1)), eval("y{0}".format(piece-group-1)))) # function fitting
        err_matrix[piece-group-1, partition] = lsq.cost # save cost
        idx_matrix[piece-group-1, partition] = eval("x{0}".format(piece-group-1))[0]#len(eval("x{0}".format(piece-group-1))) # save x values
        len_matrix[piece-group-1, partition] = len(eval("x{0}".format(piece-group-1))) # save x length
        #print("cost!!:", lsq.cost)
        # NOTE(review): `len(arr - window)` subtracts a scalar from the array
        # (length unchanged); this was almost certainly meant to be
        # `len(arr) - window > window` -- confirm intended loop bound.
        while( len(eval("x{0}".format(piece-group-1))-window) > window):
            locals()["x{0}".format(piece-group-1)] = eval("x{0}".format(piece-group-1))[window:] # drop `window` samples from the front of this piece (shift its start right)
            locals()["y{0}".format(piece-group-1)] = eval("y{0}".format(piece-group-1))[window:] # keep y aligned with the shifted x
            #print("[{0}, {1}]".format(eval("x{0}".format(piece-group-1))[0], eval("x{0}".format(piece-group-1))[-1]))
            partition = partition+1
            ## save matrix
            lsq = least_squares(least_func, seed, args=(eval("x{0}".format(piece-group-1)), eval("y{0}".format(piece-group-1)))) # function fitting
            err_matrix[piece-group-1, partition] = lsq.cost # save cost
            idx_matrix[piece-group-1, partition] = eval("x{0}".format(piece-group-1))[0]# len(eval("x{0}".format(piece-group-1))) # save x values
            len_matrix[piece-group-1, partition] = len(eval("x{0}".format(piece-group-1))) # save x length
            #print("cost!!:", lsq.cost)
        end = eval("x{0}".format(piece-group-1))[0]
        #print("end: ", end)
        locals()["x{0}".format(piece-group-2)] = x_range[(piece-group-2)*window:int(end-1)] # re-derive the preceding piece so it ends just before this piece starts
        locals()["y{0}".format(piece-group-2)] = y_range[(piece-group-2)*window:int(end-1)] # keep y aligned with the re-derived x
    ## change window for first piece
    i = 0
    #print("\n\n - piece 0")
    #print(" - x0:\n", "[{0}, {1}]".format(eval("x0")[0], eval("x0")[-1]))
    ## save matrix
    # NOTE(review): this fit still uses x{piece-group-1}/y{piece-group-1} with
    # the *stale* loop variable `group` from the loop above, yet the result is
    # stored into row 0 -- it likely should fit ("x0", "y0"); verify.
    lsq = least_squares(least_func, seed, args=(eval("x{0}".format(piece-group-1)), eval("y{0}".format(piece-group-1)))) # function fitting
    err_matrix[0, count] = lsq.cost # save cost
    idx_matrix[0, count] = eval("x0")[0]# len(eval("x0"))
    len_matrix[0, count] = len(eval("x0"))
    # NOTE(review): loop runs while len(x0) != window (truthiness of the
    # difference); if the length never lands exactly on `window`, or the
    # trimming below overshoots, this loop will not terminate -- confirm.
    while( len(eval("x0"))-window):
        end = eval("x0")[-1]
        locals()["x0"] = eval("x0")[:int(end-window)] # trim `window` samples off the end of piece 0
        locals()["y0"] = eval("y0")[:int(end-window)] # keep y aligned with the trimmed x
        #print("[{0}, {1}]".format(eval("x0")[0], eval("x0")[-1]))
        i = i+1
        ## save matrix
        lsq = least_squares(least_func, seed, args=(eval("x0"), eval("y0"))) # function fitting
        err_matrix[0, count-i] = lsq.cost # save cost
        idx_matrix[0, count-i] = eval("x0")[0]
        len_matrix[0, count-i] = len(eval("x0"))
        #print("cost!!:", lsq.cost)
    # print(err_matrix)
    return idx_matrix, err_matrix, len_matrix
def curve2_Fitting(idxM, lenM, errM):
    """Exhaustively pick one candidate segment per piece (2 pieces) and
    return the minimum-cost combination.

    idxM/lenM/errM are (groups x mat_iter) arrays holding, per candidate
    segment: its start x-value, its length, and its least-squares cost
    (as produced by curve_Matrix).

    Returns (min_cost, min_l, min_bp).  If no combination covers exactly
    300 samples with strictly increasing break points, min_cost is NaN and
    min_l/min_bp are NaN-filled lists of length `groups`.
    """
    from itertools import product  # local import keeps the block self-contained
    groups, mat_iter = np.shape(lenM)
    min_cost = float('nan')      # lowest total cost found so far
    min_l = [np.nan] * groups    # segment lengths of the best combination
    min_bp = [np.nan] * groups   # break points (start x-values) of the best combination
    found = False
    # product() enumerates candidate columns in the same order as the original
    # hand-unrolled nested loops (last index varies fastest).
    for combo in product(range(mat_iter), repeat=2):
        lens = tuple(lenM[g, c] for g, c in enumerate(combo))
        if sum(lens) != 300:     # pieces must cover the full 300-sample range
            continue
        bps = tuple(idxM[g, c] for g, c in enumerate(combo))
        if any(bps[g] >= bps[g + 1] for g in range(len(bps) - 1)):
            continue             # break points must be strictly increasing
        total = sum(errM[g, c] for g, c in enumerate(combo))
        if not found or total < min_cost:  # strict '<' keeps the first minimum on ties
            found = True
            min_cost = total
            min_l = lens
            min_bp = bps
    return min_cost, min_l, min_bp
def curve3_Fitting(idxM, lenM, errM):
    """Exhaustively pick one candidate segment per piece (3 pieces) and
    return the minimum-cost combination.

    idxM/lenM/errM are (groups x mat_iter) arrays holding, per candidate
    segment: its start x-value, its length, and its least-squares cost
    (as produced by curve_Matrix).

    Returns (min_cost, min_l, min_bp).  If no combination covers exactly
    300 samples with strictly increasing break points, min_cost is NaN and
    min_l/min_bp are NaN-filled lists of length `groups`.
    """
    from itertools import product  # local import keeps the block self-contained
    groups, mat_iter = np.shape(lenM)
    min_cost = float('nan')      # lowest total cost found so far
    min_l = [np.nan] * groups    # segment lengths of the best combination
    min_bp = [np.nan] * groups   # break points (start x-values) of the best combination
    found = False
    # product() enumerates candidate columns in the same order as the original
    # hand-unrolled nested loops (last index varies fastest).
    for combo in product(range(mat_iter), repeat=3):
        lens = tuple(lenM[g, c] for g, c in enumerate(combo))
        if sum(lens) != 300:     # pieces must cover the full 300-sample range
            continue
        bps = tuple(idxM[g, c] for g, c in enumerate(combo))
        if any(bps[g] >= bps[g + 1] for g in range(len(bps) - 1)):
            continue             # break points must be strictly increasing
        total = sum(errM[g, c] for g, c in enumerate(combo))
        if not found or total < min_cost:  # strict '<' keeps the first minimum on ties
            found = True
            min_cost = total
            min_l = lens
            min_bp = bps
    return min_cost, min_l, min_bp
def curve4_Fitting(idxM, lenM, errM):
    """Exhaustively pick one candidate segment per piece (4 pieces) and
    return the minimum-cost combination.

    idxM/lenM/errM are (groups x mat_iter) arrays holding, per candidate
    segment: its start x-value, its length, and its least-squares cost
    (as produced by curve_Matrix).

    Returns (min_cost, min_l, min_bp).  If no combination covers exactly
    300 samples with strictly increasing break points, min_cost is NaN and
    min_l/min_bp are NaN-filled lists of length `groups`.
    """
    from itertools import product  # local import keeps the block self-contained
    groups, mat_iter = np.shape(lenM)
    min_cost = float('nan')      # lowest total cost found so far
    min_l = [np.nan] * groups    # segment lengths of the best combination
    min_bp = [np.nan] * groups   # break points (start x-values) of the best combination
    found = False
    # product() enumerates candidate columns in the same order as the original
    # hand-unrolled nested loops (last index varies fastest).
    for combo in product(range(mat_iter), repeat=4):
        lens = tuple(lenM[g, c] for g, c in enumerate(combo))
        if sum(lens) != 300:     # pieces must cover the full 300-sample range
            continue
        bps = tuple(idxM[g, c] for g, c in enumerate(combo))
        if any(bps[g] >= bps[g + 1] for g in range(len(bps) - 1)):
            continue             # break points must be strictly increasing
        total = sum(errM[g, c] for g, c in enumerate(combo))
        if not found or total < min_cost:  # strict '<' keeps the first minimum on ties
            found = True
            min_cost = total
            min_l = lens
            min_bp = bps
    return min_cost, min_l, min_bp
def curve5_Fitting(idxM, lenM, errM):
    """Exhaustively pick one candidate segment per piece (5 pieces) and
    return the minimum-cost combination.

    idxM/lenM/errM are (groups x mat_iter) arrays holding, per candidate
    segment: its start x-value, its length, and its least-squares cost
    (as produced by curve_Matrix).

    Returns (min_cost, min_l, min_bp).  If no combination covers exactly
    300 samples with strictly increasing break points, min_cost is NaN and
    min_l/min_bp are NaN-filled lists of length `groups`.
    """
    from itertools import product  # local import keeps the block self-contained
    groups, mat_iter = np.shape(lenM)
    min_cost = float('nan')      # lowest total cost found so far
    min_l = [np.nan] * groups    # segment lengths of the best combination
    min_bp = [np.nan] * groups   # break points (start x-values) of the best combination
    found = False
    # product() enumerates candidate columns in the same order as the original
    # hand-unrolled nested loops (last index varies fastest).
    for combo in product(range(mat_iter), repeat=5):
        lens = tuple(lenM[g, c] for g, c in enumerate(combo))
        if sum(lens) != 300:     # pieces must cover the full 300-sample range
            continue
        bps = tuple(idxM[g, c] for g, c in enumerate(combo))
        if any(bps[g] >= bps[g + 1] for g in range(len(bps) - 1)):
            continue             # break points must be strictly increasing
        total = sum(errM[g, c] for g, c in enumerate(combo))
        if not found or total < min_cost:  # strict '<' keeps the first minimum on ties
            found = True
            min_cost = total
            min_l = lens
            min_bp = bps
    return min_cost, min_l, min_bp
def curve6_Fitting(idxM, lenM, errM):
    """Exhaustively pick one candidate segment per piece (6 pieces) and
    return the minimum-cost combination.

    idxM/lenM/errM are (groups x mat_iter) arrays holding, per candidate
    segment: its start x-value, its length, and its least-squares cost
    (as produced by curve_Matrix).

    Returns (min_cost, min_l, min_bp).  If no combination covers exactly
    300 samples with strictly increasing break points, min_cost is NaN and
    min_l/min_bp are NaN-filled lists of length `groups`.
    """
    from itertools import product  # local import keeps the block self-contained
    groups, mat_iter = np.shape(lenM)
    min_cost = float('nan')      # lowest total cost found so far
    min_l = [np.nan] * groups    # segment lengths of the best combination
    min_bp = [np.nan] * groups   # break points (start x-values) of the best combination
    found = False
    # product() enumerates candidate columns in the same order as the original
    # hand-unrolled nested loops (last index varies fastest).
    for combo in product(range(mat_iter), repeat=6):
        lens = tuple(lenM[g, c] for g, c in enumerate(combo))
        if sum(lens) != 300:     # pieces must cover the full 300-sample range
            continue
        bps = tuple(idxM[g, c] for g, c in enumerate(combo))
        if any(bps[g] >= bps[g + 1] for g in range(len(bps) - 1)):
            continue             # break points must be strictly increasing
        total = sum(errM[g, c] for g, c in enumerate(combo))
        if not found or total < min_cost:  # strict '<' keeps the first minimum on ties
            found = True
            min_cost = total
            min_l = lens
            min_bp = bps
    return min_cost, min_l, min_bp
def curve7_Fitting(idxM, lenM, errM):
    """Exhaustively pick one candidate segment per piece (7 pieces) and
    return the minimum-cost combination.

    idxM/lenM/errM are (groups x mat_iter) arrays holding, per candidate
    segment: its start x-value, its length, and its least-squares cost
    (as produced by curve_Matrix).

    Returns (min_cost, min_l, min_bp).  If no combination covers exactly
    300 samples with strictly increasing break points, min_cost is NaN and
    min_l/min_bp are NaN-filled lists of length `groups`.
    """
    from itertools import product  # local import keeps the block self-contained
    groups, mat_iter = np.shape(lenM)
    min_cost = float('nan')      # lowest total cost found so far
    min_l = [np.nan] * groups    # segment lengths of the best combination
    min_bp = [np.nan] * groups   # break points (start x-values) of the best combination
    found = False
    # product() enumerates candidate columns in the same order as the original
    # hand-unrolled nested loops (last index varies fastest).
    for combo in product(range(mat_iter), repeat=7):
        lens = tuple(lenM[g, c] for g, c in enumerate(combo))
        if sum(lens) != 300:     # pieces must cover the full 300-sample range
            continue
        bps = tuple(idxM[g, c] for g, c in enumerate(combo))
        if any(bps[g] >= bps[g + 1] for g in range(len(bps) - 1)):
            continue             # break points must be strictly increasing
        total = sum(errM[g, c] for g, c in enumerate(combo))
        if not found or total < min_cost:  # strict '<' keeps the first minimum on ties
            found = True
            min_cost = total
            min_l = lens
            min_bp = bps
    return min_cost, min_l, min_bp
def curve8_Fitting(idxM, lenM, errM):
    """Exhaustively pick one candidate segment per piece (8 pieces) and
    return the minimum-cost combination.

    idxM/lenM/errM are (groups x mat_iter) arrays holding, per candidate
    segment: its start x-value, its length, and its least-squares cost
    (as produced by curve_Matrix).

    Returns (min_cost, min_l, min_bp).  If no combination covers exactly
    300 samples with strictly increasing break points, min_cost is NaN and
    min_l/min_bp are NaN-filled lists of length `groups`.
    """
    from itertools import product  # local import keeps the block self-contained
    groups, mat_iter = np.shape(lenM)
    min_cost = float('nan')      # lowest total cost found so far
    min_l = [np.nan] * groups    # segment lengths of the best combination
    min_bp = [np.nan] * groups   # break points (start x-values) of the best combination
    found = False
    # product() enumerates candidate columns in the same order as the original
    # hand-unrolled nested loops (last index varies fastest).
    for combo in product(range(mat_iter), repeat=8):
        lens = tuple(lenM[g, c] for g, c in enumerate(combo))
        if sum(lens) != 300:     # pieces must cover the full 300-sample range
            continue
        bps = tuple(idxM[g, c] for g, c in enumerate(combo))
        if any(bps[g] >= bps[g + 1] for g in range(len(bps) - 1)):
            continue             # break points must be strictly increasing
        total = sum(errM[g, c] for g, c in enumerate(combo))
        if not found or total < min_cost:  # strict '<' keeps the first minimum on ties
            found = True
            min_cost = total
            min_l = lens
            min_bp = bps
    return min_cost, min_l, min_bp
def curve9_Fitting(idxM, lenM, errM):
    """Exhaustively pick one candidate segment per piece (9 pieces) and
    return the minimum-cost combination.

    idxM/lenM/errM are (groups x mat_iter) arrays holding, per candidate
    segment: its start x-value, its length, and its least-squares cost
    (as produced by curve_Matrix).

    Returns (min_cost, min_l, min_bp).  If no combination covers exactly
    300 samples with strictly increasing break points, min_cost is NaN and
    min_l/min_bp are NaN-filled lists of length `groups`.
    """
    from itertools import product  # local import keeps the block self-contained
    groups, mat_iter = np.shape(lenM)
    min_cost = float('nan')      # lowest total cost found so far
    min_l = [np.nan] * groups    # segment lengths of the best combination
    min_bp = [np.nan] * groups   # break points (start x-values) of the best combination
    found = False
    # product() enumerates candidate columns in the same order as the original
    # hand-unrolled nested loops (last index varies fastest).
    for combo in product(range(mat_iter), repeat=9):
        lens = tuple(lenM[g, c] for g, c in enumerate(combo))
        if sum(lens) != 300:     # pieces must cover the full 300-sample range
            continue
        bps = tuple(idxM[g, c] for g, c in enumerate(combo))
        if any(bps[g] >= bps[g + 1] for g in range(len(bps) - 1)):
            continue             # break points must be strictly increasing
        total = sum(errM[g, c] for g, c in enumerate(combo))
        if not found or total < min_cost:  # strict '<' keeps the first minimum on ties
            found = True
            min_cost = total
            min_l = lens
            min_bp = bps
    return min_cost, min_l, min_bp
def curve10_Fitting(idxM, lenM, errM):
    """Exhaustively pick one candidate segment per piece (10 pieces) and
    return the minimum-cost combination.

    idxM/lenM/errM are (groups x mat_iter) arrays holding, per candidate
    segment: its start x-value, its length, and its least-squares cost
    (as produced by curve_Matrix).

    Returns (min_cost, min_l, min_bp).  If no combination covers exactly
    300 samples with strictly increasing break points, min_cost is NaN and
    min_l/min_bp are NaN-filled lists of length `groups`.
    """
    from itertools import product  # local import keeps the block self-contained
    groups, mat_iter = np.shape(lenM)
    min_cost = float('nan')      # lowest total cost found so far
    min_l = [np.nan] * groups    # segment lengths of the best combination
    min_bp = [np.nan] * groups   # break points (start x-values) of the best combination
    found = False
    # product() enumerates candidate columns in the same order as the original
    # hand-unrolled nested loops (last index varies fastest).
    for combo in product(range(mat_iter), repeat=10):
        lens = tuple(lenM[g, c] for g, c in enumerate(combo))
        if sum(lens) != 300:     # pieces must cover the full 300-sample range
            continue
        bps = tuple(idxM[g, c] for g, c in enumerate(combo))
        if any(bps[g] >= bps[g + 1] for g in range(len(bps) - 1)):
            continue             # break points must be strictly increasing
        total = sum(errM[g, c] for g, c in enumerate(combo))
        if not found or total < min_cost:  # strict '<' keeps the first minimum on ties
            found = True
            min_cost = total
            min_l = lens
            min_bp = bps
    return min_cost, min_l, min_bp
def multCurve_Fitting(y, lf, s=None, w=50, p=3): # y=data, lf=least square residual function, s=seed, w=window, p=piece count
    """Fit `y` with `p` piecewise curves and return the best partition.

    Builds the candidate-segment matrices with curve_Matrix, then dispatches
    to the matching curve{p}_Fitting search (p in 2..10).

    Returns (cost, min_length, min_indice) as produced by curve{p}_Fitting.

    Raises ValueError for an unsupported piece count; the original if/elif
    chain silently fell through and crashed later with an unbound `cost`
    (NameError).
    """
    if s is None:  # avoid a shared mutable default for the seed
        s = [1, 1]
    idx_matrix, err_matrix, len_matrix = curve_Matrix(y, lf, seed=s, window=w, piece=p)
    # Dispatch table replaces the 9-branch if/elif chain.
    fitters = {
        2: curve2_Fitting, 3: curve3_Fitting, 4: curve4_Fitting,
        5: curve5_Fitting, 6: curve6_Fitting, 7: curve7_Fitting,
        8: curve8_Fitting, 9: curve9_Fitting, 10: curve10_Fitting,
    }
    if p not in fitters:
        raise ValueError("piece count p must be between 2 and 10, got {0}".format(p))
    cost, min_length, min_indice = fitters[p](idx_matrix, len_matrix, err_matrix)
    return cost, min_length, min_indice
| [
"liprin1129@gmail.com"
] | liprin1129@gmail.com |
6360a786f13684708b89f005c7278215a84639df | 64ccbe2ce2176ecda231f20f1341af8368b30fd4 | /dnd/views.py | 827aab3f1ac43ce0f91eb5e89012df97ddfae18b | [] | no_license | dabslee/D-D5e-Tools | 4a6a4d4d228eba68d9750b1145ec3195dbd03db5 | 1f6f3d3b6ceac17d077ad2a29e24553e07447095 | refs/heads/master | 2023-08-15T17:27:12.336245 | 2021-10-09T02:26:48 | 2021-10-09T02:26:48 | 414,007,862 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 183 | py | from django.shortcuts import render
# Create your views here.
def home(request):
context = {
"current_page": "home"
}
return render(request, "home.html", context) | [
"brandon.sangmin.lee@gmail.com"
] | brandon.sangmin.lee@gmail.com |
10eeb1a0a39540332aac618a4f134f9b13a15a4b | bfb591a1d5166d3b56f2e8fb7f2aa52cdbfd4831 | /questions/q219_partition_equal_sum_subset/recursive.py | 6a32060facebff16a805ab0abcb96cf3f09ed2e2 | [
"MIT"
] | permissive | aadhityasw/Competitive-Programs | 8a5543995ae6ae2adae7ebaa9cb41b222d8372f1 | 3d233b665a3b2a9d0b1245f7de688b725d258b1c | refs/heads/master | 2022-01-16T19:36:48.332134 | 2022-01-03T18:14:45 | 2022-01-03T18:14:45 | 192,506,814 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,307 | py | class Solution:
    def recursiveSearch(self, summ, n) :
        """Recursively decide whether a subset of self.arr[0..n] can be taken
        out of the running total `summ` so that it reaches self.required
        (half the full array sum) or 0.

        Returns 1 on success; 0 on failure or for an already-visited state.
        `summ` starts at the full array sum and shrinks as elements are moved
        to the other partition.
        """
        # Base cases: running total hit the target (or shrank to zero).
        if summ == self.required or summ == 0 :
            return 1
        if n < 0 :
            return 0
        # Memoization: this (summ, n) state was already explored; treat as failure.
        if self.visited[summ][n] :
            return 0
        self.visited[summ][n] = True
        # NOTE(review): by Python operator precedence this parses as
        #   (skip or take) if summ > self.arr[n] else False
        # so when summ <= self.arr[n] the element-skipping branch is never
        # attempted either -- looks like a bug; verify against expected output.
        # The strict '>' also forbids taking an element equal to `summ`.
        return (
            self.recursiveSearch(summ, n-1) or
            self.recursiveSearch((summ - self.arr[n]), n-1) if summ > self.arr[n] else False
        )
    def equalPartition(self, N, arr):
        """Return 1 if `arr` (length N) can be split into two subsets with
        equal sums, else 0 (delegates to recursiveSearch)."""
        self.arr = arr
        # Find the sum of the array
        summ = sum(arr)
        # If the sum is odd, then return false
        if summ % 2 != 0 :
            return 0
        # This is the number we need to obtain during the computation
        self.required = summ // 2
        # Form the DP table: visited[s][i] marks (running-total, index)
        # states that have already been explored
        self.visited = [[False for _ in range(N)] for _ in range(summ+1)]
        # Perform a Recursive search starting from the full sum and last index
        return self.recursiveSearch(summ, N-1)
return self.recursiveSearch(summ, N-1)
if __name__ == '__main__':
    # Read T test cases; each case supplies N and then N space-separated integers.
    t = int(input())
    for _ in range(t):
        N = int(input())
        arr = input().split()
        for it in range(N):
            arr[it] = int(arr[it])
        ob = Solution()
        # equalPartition returns 1 when the array splits into two equal-sum subsets
        if (ob.equalPartition(N, arr) == 1):
            print("YES")
        else:
            print("NO")
| [
"aadhityas@gmail.com"
] | aadhityas@gmail.com |
c1c08ab9d53ffc2101a9a0097c30215b7a1559c5 | ef479ad92071445cf2478930094472f415e0410b | /08_mask_rcnn_inceptionv2/01_float32/03_weight_quantization.py | 25d757da7bc1a057144b1bfef373f076543e8bc2 | [
"MIT"
] | permissive | Chomolungma/PINTO_model_zoo | 6f379aaadf5ff6d42f859a6a80673c816de2c664 | df020dbdcfc4a79932da6885f0cb257ffc32e37b | refs/heads/master | 2021-04-23T04:58:02.213884 | 2020-03-20T10:59:12 | 2020-03-20T10:59:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 632 | py | import tensorflow as tf
### Tensorflow v2.1.0 - master - commit 8fdb834931fe62abaeab39fe6bf0bcc9499b25bf
# Weight Quantization - Input/Output=float32
converter = tf.lite.TFLiteConverter.from_saved_model('./saved_model')
converter.optimizations = [tf.lite.Optimize.OPTIMIZE_FOR_SIZE]
converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS, tf.lite.OpsSet.SELECT_TF_OPS]
tflite_quant_model = converter.convert()
with open('./mask_rcnn_inception_v2_coco_800_weight_quant.tflite', 'wb') as w:
w.write(tflite_quant_model)
print("Weight Quantization complete! - mask_rcnn_inception_v2_coco_800_weight_quant.tflite")
| [
"rmsdh122@yahoo.co.jp"
] | rmsdh122@yahoo.co.jp |
f73a19e9239492bf90244981ed07ce1af89650e9 | 60693b05f465e6755cf0e6f3d0cf6c78c716559a | /StateM/StateSplit.py | 2ba95883eb54e944a29e40208a5f76bbc0eac9be | [] | no_license | WXW322/ProrocolReverseTool | 6fe842f3787fbcb2dfb3dbc8d1ece66fed5593e9 | a619b51243f0844ac7ef6561bfd68a1f0856f794 | refs/heads/master | 2020-08-21T19:00:19.799383 | 2019-10-21T06:51:47 | 2019-10-21T06:51:47 | 216,224,178 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 872 | py | from common.readdata import *
class StateSplit:
    """Group a message sequence into rounds by traffic direction.

    A round is one run of messages from the first sender followed by the
    run of replies from the other side.
    """
    def __init__(self):
        pass
    def generateStates(self, messages):
        """Return [(source_ip, payload), ...] for each message.

        Relies on `get_ip`, `message.source` and `message.data` provided by
        the star import from common.readdata elsewhere in this module --
        their exact semantics are not visible here.
        """
        result = []
        for message in messages:
            result.append((get_ip(message.source), message.data))
        return result
    def splitByDire(self, messages):
        """Split `messages` into rounds: each round collects the payloads of
        one burst from the first sender plus the following burst from the peer.

        NOTE(review): raises IndexError on an empty `messages` list
        (ipMessages[0] below) -- confirm callers guarantee non-empty input.
        """
        ipMessages = self.generateStates(messages)
        splitResults = []
        i = 0
        sourceIp = ipMessages[0][0]  # IP of the first sender in the capture
        while(i < len(ipMessages)):
            tempResult = []
            # consume the first sender's burst...
            while(i < len(ipMessages) and ipMessages[i][0] == sourceIp):
                tempResult.append(ipMessages[i][1])
                i = i + 1
            # ...then the peer's burst; together they form one round
            while (i < len(ipMessages) and ipMessages[i][0] != sourceIp):
                tempResult.append(ipMessages[i][1])
                i = i + 1
            splitResults.append(tempResult)
        return splitResults
| [
"15895903730@163.com"
] | 15895903730@163.com |
11bc419b59eefd5c2b9fdaf4f7e22060ff2267a5 | dacf092e82b5cc841554178e5117c38fd0b28827 | /day24_program4/interface/admin_interface.py | 5372c7850010cca75f8f2bcefb6a71d576ef284a | [] | no_license | RainMoun/python_programming_camp | f9bbee707e7468a7b5d6633c2364f5dd75abc8a4 | f8e06cdd2e6174bd6986d1097cb580a6a3b7201f | refs/heads/master | 2020-04-15T11:27:09.680587 | 2019-04-06T02:21:14 | 2019-04-06T02:21:14 | 164,630,838 | 4 | 1 | null | null | null | null | UTF-8 | Python | false | false | 962 | py | import os
import socket
import time
from interface import common_interface
from conf import setting
def upload_file_interface(admin_name, file_path):  # upload a local file to the server
    """Upload the file at *file_path* to the file server on behalf of *admin_name*.

    Sends a header line 'post|<admin>|<file name>|<size>|<md5>' and then the
    raw file bytes in 1 KiB chunks to setting.SERVER_ADDRESS.

    Returns (True, msg) on success, (False, msg) if the file does not exist.
    The message strings are user-facing and intentionally left unchanged.
    """
    if not os.path.isfile(file_path):
        return False, '文件不存在'
    file_name = file_path.split('/')[-1]
    file_size = os.path.getsize(file_path)
    file_md5 = common_interface.get_file_md5(file_path)
    file_info = 'post|%s|%s|%s|%s' % (admin_name, file_name, file_size, file_md5)
    # `with` guarantees the socket and file are closed even if sending fails
    # (the original leaked both on any exception).
    with socket.socket() as sk:
        sk.connect(setting.SERVER_ADDRESS)
        sk.sendall(file_info.encode('utf-8'))
        # crude header/body separation: give the server time to consume the
        # header before the payload arrives (no framing protocol) -- kept
        # from the original implementation.
        time.sleep(1)
        has_sent = 0
        with open(file_path, 'rb') as f:
            while has_sent != file_size:
                data = f.read(1024)
                if not data:
                    # file shrank while sending; the original would loop forever here
                    break
                sk.sendall(data)
                has_sent += len(data)
    print('upload success')
    return True, '文件传输成功'
| [
"775653143@qq.com"
] | 775653143@qq.com |
6ce9ad0a770aeb4a328cfe74078d9b317be7a312 | 4bd555bc662b8182a2e7644976bfdb00ed5e1ebe | /PythonistaAppTemplate/PythonistaKit.framework/pylib/site-packages/Crypto/Hash/SHA512.py | b55501ce9f84e5d5351c540fb37cad4426a90f13 | [] | no_license | fhelmli/homeNOWG2 | a103df1ef97194dec9501dbda87ec1f7c111fb4a | e794fd87b296544542fd9dc7ac94c981c6312419 | refs/heads/master | 2020-04-04T13:40:20.417769 | 2019-01-30T21:41:04 | 2019-01-30T21:41:04 | 155,970,686 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,537 | py | #\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
# -*- coding: utf-8 -*-
#
# ===================================================================
# The contents of this file are dedicated to the public domain. To
# the extent that dedication to the public domain is not available,
# everyone is granted a worldwide, perpetual, royalty-free,
# non-exclusive license to exercise all rights associated with the
# contents of this file for any purpose whatsoever.
# No rights are reserved.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ===================================================================
"""SHA-512 cryptographic hash algorithm.
SHA-512 belongs to the SHA-2_ family of cryptographic hashes.
It produces the 512 bit digest of a message.
>>> from Crypto.Hash import SHA512
>>>
>>> h = SHA512.new()
>>> h.update(b'Hello')
>>> print h.hexdigest()
*SHA* stands for Secure Hash Algorithm.
.. _SHA-2: http://csrc.nist.gov/publications/fips/fips180-2/fips180-2.pdf
"""
_revision__ = "$Id$"
__all__ = ['new', 'digest_size', 'SHA512Hash' ]
from Crypto.Util.py3compat import *
from Crypto.Hash.hashalgo import HashAlgo
try:
import hashlib
hashFactory = hashlib.sha512
except ImportError:
import _SHA512
hashFactory = _SHA512
class SHA512Hash(HashAlgo):
    """Class that implements a SHA-512 hash
    :undocumented: block_size
    """
    #: ASN.1 Object identifier (OID)::
    #:
    #: id-sha512 OBJECT IDENTIFIER ::= {
    #: joint-iso-itu-t(2)
    #: country(16) us(840) organization(1) gov(101) csor(3) nistalgorithm(4) hashalgs(2) 3
    #: }
    #:
    #: This value uniquely identifies the SHA-512 algorithm.
    oid = b('\x06\x09\x60\x86\x48\x01\x65\x03\x04\x02\x03')
    digest_size = 64
    block_size = 128
    def __init__(self, data=None):
        # Delegate to HashAlgo with the module-level hashFactory
        # (hashlib.sha512 when available, otherwise the bundled _SHA512).
        HashAlgo.__init__(self, hashFactory, data)
    def new(self, data=None):
        # Return a fresh, independent SHA512Hash, optionally primed with `data`.
        return SHA512Hash(data)
def new(data=None):
    """Return a fresh instance of the hash object.
    :Parameters:
      data : byte string
        The very first chunk of the message to hash.
        It is equivalent to an early call to `SHA512Hash.update()`.
        Optional.
    :Return: A `SHA512Hash` object
    """
    # Equivalent to SHA512Hash(data): the intermediate instance only forwards
    # `data` to SHA512Hash.new().
    return SHA512Hash().new(data)
#: The size of the resulting hash in bytes.
digest_size = SHA512Hash.digest_size
#: The internal block size of the hash algorithm in bytes.
block_size = SHA512Hash.block_size
| [
"tberk@gmx.at"
] | tberk@gmx.at |
32d8dc9ef7307c6d3118b4af34978bf943b77ba4 | 951e433b25a25afeea4d9b45994a57e0a6044144 | /NowCoder/拼多多_回合制游戏.py | b9c5f768b6df142db72c09e965dc9fcc88c5528e | [] | no_license | EricaEmmm/CodePython | 7c401073e0a9b7cd15f9f4a553f0aa3db1a951a3 | d52aa2a0bf71b5e7934ee7bff70d593a41b7e644 | refs/heads/master | 2020-05-31T14:00:34.266117 | 2019-09-22T09:48:23 | 2019-09-22T09:48:23 | 190,318,878 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,350 | py | # 回合制游戏
# 你在玩一个回合制角色扮演的游戏。现在你在准备一个策略,以便在最短的回合内击败敌方角色。在战斗开始时,敌人拥有HP格血量。
# 当血量小于等于0时,敌人死去。一个缺乏经验的玩家可能简单地尝试每个回合都攻击。但是你知道辅助技能的重要性。
# 在你的每个回合开始时你可以选择以下两个动作之一:聚力或者攻击。
# 聚力会提高你下个回合攻击的伤害。
# 攻击会对敌人造成一定量的伤害。如果你上个回合使用了聚力,那这次攻击会对敌人造成buffedAttack点伤害。否则,会造成normalAttack点伤害。
# 给出血量HP和不同攻击的伤害,buffedAttack和normalAttack,返回你能杀死敌人的最小回合数。
#
# 输入描述:
# 第一行是一个数字HP
# 第二行是一个数字normalAttack
# 第三行是一个数字buffedAttack
# 1 <= HP,buffedAttack,normalAttack <= 10^9
# 输出描述:
# 输出一个数字表示最小回合数
#
# 输入例子1:
# 13
# 3
# 5
# 输出例子1:
# 5
# buff=0,表示上个回合攻击了,这个回合只能normalAttack
# buff=1,表示上个回合聚力了,这个回合只能normalAttack
def helper(HP, normalAttack, buffedAttack, buff):
if HP <= 0:
return
if buff:
return min(helper(HP-normalAttack), normalAttack, buffedAttack, 1)
if __name__ == '__main__':
HP = int(input())
normalAttack = int(input())
buffedAttack = int(input())
if normalAttack >= HP:
print(1)
res = 0
if normalAttack * 2 >= buffedAttack: # 当蓄力的伤害不高于普通伤害的二倍的时候,全用普通伤害
# while HP > 0:
# res += 1
# HP -= normalAttack
# print(res)
res = HP // normalAttack
if HP-res*normalAttack > 0:
print(res+1)
else:
print(res)
else:
# while HP > normalAttack:
# res += 2
# HP -= buffedAttack
# if HP <= 0:
# print(res)
# else:
# print(res+1)
res = HP // buffedAttack
if HP-res*buffedAttack > normalAttack:
print(2*(res+1))
elif HP-res*buffedAttack > 0:
print(2*res+1)
else:
print(2*res)
| [
"1016920795@qq.com"
] | 1016920795@qq.com |
dffaae8ce39b4f30265ffa47cde93f3253f1e1f9 | 977c82ec23f2f8f2b0da5c57984826e16a22787d | /src/IceRayPy/core/material/instruction/label/color/temp.py | 13f937e7410e04a579f1748b267fc79c264470c5 | [
"MIT-0"
] | permissive | dmilos/IceRay | 47ce08e2920171bc20dbcd6edcf9a6393461c33e | 84fe8d90110c5190c7f58c4b2ec3cdae8c7d86ae | refs/heads/master | 2023-04-27T10:14:04.743094 | 2023-04-20T14:33:45 | 2023-04-20T15:07:18 | 247,471,987 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 24 | py | _BEGIN = 36
_END = 72 | [
"dmilos@gmail.com"
] | dmilos@gmail.com |
74f00f42f7ad17c83df242a9a67d18dd57e80f6d | 146143036ef6a0bf2346eef9881db3ac85059008 | /kdl_wagtail/people/migrations/0002_rename_field_summary_on_person_to_introduction.py | 04f158bb7929b972babedece17961f9fae723c7f | [
"MIT"
] | permissive | kingsdigitallab/django-kdl-wagtail | bc5a26420fcef3a25c6a42b0c5bb5f8643d32951 | 457623a35057f88ee575397ac2c68797f35085e1 | refs/heads/master | 2023-01-06T02:43:48.073289 | 2021-10-08T19:25:56 | 2021-10-08T19:25:56 | 168,135,935 | 3 | 0 | MIT | 2022-12-26T20:39:03 | 2019-01-29T10:27:05 | Python | UTF-8 | Python | false | false | 370 | py | # Generated by Django 2.1.5 on 2019-01-31 10:04
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('kdl_wagtail_people', '0001_initial'),
]
operations = [
migrations.RenameField(
model_name='person',
old_name='summary',
new_name='introduction',
),
]
| [
"jmvieira@gmail.com"
] | jmvieira@gmail.com |
e8db7174a5e58d829efa2ed8646b01398663afb2 | 1b8d162160f5ab6d6a6b8940b8ab83b482abb409 | /tests/aggregation/test_max.py | d32d10c32315d5359c79abb9b11b2e441941fdc9 | [
"Apache-2.0"
] | permissive | jlinn/pylastica | f81e438a109dfe06adc7e9b70fdf794c5d01a53f | 0fbf68ed3e17d665e3cdf1913444ebf1f72693dd | refs/heads/master | 2020-05-19T14:07:38.794717 | 2014-07-23T23:43:00 | 2014-07-23T23:43:00 | 10,442,284 | 5 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,063 | py | from pylastica.aggregation.max import Max
from pylastica.document import Document
from pylastica.query.query import Query
from pylastica.script import Script
from tests.base import Base
__author__ = 'Joe Linn'
import unittest
class MaxTest(unittest.TestCase, Base):
def setUp(self):
super(MaxTest, self).setUp()
self._index = self._create_index("test_aggregation_max")
docs = [
Document("1", {"price": 5}),
Document("2", {"price": 8}),
Document("3", {"price": 1}),
Document("4", {"price": 3})
]
self._index.get_doc_type("test").add_documents(docs)
self._index.refresh()
def tearDown(self):
super(MaxTest, self).tearDown()
self._index.delete()
def test_to_dict(self):
expected = {
"max": {
"field": "price",
"script": "_value * conversion_rate",
"params": {
"conversion_rate": 1.2
}
},
"aggs": {
"subagg": {"max": {"field": "foo"}}
}
}
agg = Max("min_price_in_euros")
agg.set_field("price")
agg.set_script(Script("_value * conversion_rate", {"conversion_rate": 1.2}))
agg.add_aggregation(Max("subagg").set_field("foo"))
self.assertEqual(expected, agg.to_dict())
def test_max_aggregation(self):
agg = Max("min_price")
agg.set_field("price")
query = Query()
query.add_aggregation(agg)
results = self._index.get_doc_type("test").search(query).aggregations["min_price"]
self.assertAlmostEqual(8, results["value"])
# test using a script
agg.set_script(Script("_value * conversion_rate", {"conversion_rate": 1.2}))
query = Query()
query.add_aggregation(agg)
results = self._index.get_doc_type("test").search(query).aggregations["min_price"]
self.assertEqual(8 * 1.2, results["value"])
if __name__ == '__main__':
unittest.main()
| [
"joe@venturocket.com"
] | joe@venturocket.com |
938d2387acdadda18fcff71135fa9bb41e9d267b | 232411e5462f43b38abd3f0e8078e9acb5a9fde7 | /build/rotors_description/catkin_generated/pkg.develspace.context.pc.py | d9fa71b7cb86e4b8eb08039501fa05b1632357d6 | [] | no_license | Fengaleng/feng_ws | 34d8ee5c208f240c24539984bc9d692ddf1808f9 | a9fa2e871a43146a3a85a0c0a9481793539654eb | refs/heads/main | 2023-03-29T17:25:42.110485 | 2021-03-30T14:50:24 | 2021-03-30T14:50:24 | 353,034,507 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 406 | py | # generated from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "".split(';') if "" != "" else []
PROJECT_CATKIN_DEPENDS = "".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "rotors_description"
PROJECT_SPACE_DIR = "/home/fechec/feng_ws/devel/.private/rotors_description"
PROJECT_VERSION = "2.2.3"
| [
"chenfy1998@gmail.com"
] | chenfy1998@gmail.com |
b35f61492670ffc4e2044973d754b52e2c12fc65 | 4e30c855c253cc1d972d29e83edb9d5ef662d30a | /invoice/models.py | 10e28aa5941aea567f4bca34e53e910b3c7e20b5 | [
"MIT"
] | permissive | rajeshr188/django-onex | 8b531fc2f519d004d1da64f87b10ffacbd0f2719 | 0a190ca9bcf96cf44f7773686205f2c1f83f3769 | refs/heads/master | 2023-08-21T22:36:43.898564 | 2023-08-15T12:08:24 | 2023-08-15T12:08:24 | 163,012,755 | 2 | 0 | NOASSERTION | 2023-07-22T09:47:28 | 2018-12-24T17:46:35 | Python | UTF-8 | Python | false | false | 541 | py | from django.db import models
from django.utils import timezone
# Create your models here.
class voucher(models.Model):
pass
class PaymentTerm(models.Model):
name = models.CharField(max_length=30)
description = models.TextField()
due_days = models.PositiveSmallIntegerField()
discount_days = models.PositiveSmallIntegerField()
discount = models.DecimalField(max_digits=10, decimal_places=2)
class Meta:
ordering = ("due_days",)
def __str__(self):
return f"{self.name} ({self.due_days})"
| [
"rajeshrathodh@gmail.com"
] | rajeshrathodh@gmail.com |
f80f0be06fc4b0b545009feebda90b8c1dfed845 | ea48ef0588c104e49a7ebec5bd8dc359fdeb6674 | /mini_web_frame/deme/django_orm模型.py | 4f0825357545e2d50dbb4344b17e936e298facf0 | [] | no_license | Jizishuo/django--text | c0d58d739ef643c7f3793fbead19302778670368 | 152a5c99e7a16a75fda2f1f85edcfdce9274c9c2 | refs/heads/master | 2020-04-01T10:39:18.131551 | 2018-11-18T13:31:59 | 2018-11-18T13:31:59 | 153,125,799 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,053 | py | '''
class ModelMetaclass(type):
def __new__(cls, name, bases, attrs): #name=user,bases=(),attrs={'uid':(..)...}
mappings = dict()
#判断是否需要保存
#例如type('AAA', (), {'num ':1, 'num2':2}) 创建类
for k, v in attrs.items():
if isinstance(v, tuple):
#print("mappings %s--->%s" % (k, v))
mappings[k] = v
#删除这些存在字典中的属性
for k in mappings.keys():
attrs.pop(k)
#将之前的uid/name..以及对应的对象引用,类名
attrs['__mappings__'] = mappings #雷属性与列名字的映射关系
attrs['__table__'] = name
#tuple(attrs)
return type.__new__(cls, name, bases, attrs)
class User(metaclass=ModelMetaclass):
uid = ('uid', 'int unsigned')
name = ('username', 'varchar(30)')
email = ('email', 'varchar(30)')
password = ('password', 'varchar(30)')
#经过modelmetaclas后变
#__mappings__ = {
#'uid' : ('uid', 'int unsigned'),
#'name' : ('username', 'varchar(30)'),
#'email' : ('email', 'varchar(30)'),
#'password' : ('password', 'varchar(30)'),
#}
#__table__ = 'User'
def __init__(self, **kwargs):
for name, value in kwargs.items():
setattr(self, name, value)
#uid=123 name=uid,value=123的name=value...
def save(self):
fields = []
args = []
for k, v in self.__mappings__.items():
fields.append(v[0])
args.append(getattr(self, k, None))
#这个不完美
#sql = 'insert into %s (%s) values (%s)' % (self.__table__, ','.join(fields), ','.join([str(i) for i in args]))
# insert into User (uid,username,email,password) values (123,root,xxx@xxx,xxxx)
args_temp = list()
for temp in args: #['123', "'root'", "'xxx@xxx'", "'xxxx'"]
#判断插入的如果是数字
if isinstance(temp, int):
args_temp.append(str(temp))
elif isinstance(temp, str):
args_temp.append("""'%s'""" % temp)
print(args_temp)
sql = 'insert into %s (%s) values (%s)' % (self.__table__, ','.join(fields), ','.join(args_temp))
print(sql)
#insert into User (uid,username,email,password) values (123,'root','xxx@xxx','xxxx')
u = User(uid=123, name='root', email='xxx@xxx', password='xxxx')
u.save()
'''
class ModelMetaclass(type):
def __new__(cls, name, bases, attrs): #name=user,bases=(),attrs={'uid':(..)...}
mappings = dict()
#判断是否需要保存
#例如type('AAA', (), {'num ':1, 'num2':2}) 创建类
for k, v in attrs.items():
if isinstance(v, tuple):
#print("mappings %s--->%s" % (k, v))
mappings[k] = v
#删除这些存在字典中的属性
for k in mappings.keys():
attrs.pop(k)
#将之前的uid/name..以及对应的对象引用,类名
attrs['__mappings__'] = mappings #雷属性与列名字的映射关系
attrs['__table__'] = name
#tuple(attrs)
return type.__new__(cls, name, bases, attrs)
class Model(metaclass=ModelMetaclass):
#经过modelmetaclas后变
#__mappings__ = {
#'uid' : ('uid', 'int unsigned'),
#'name' : ('username', 'varchar(30)'),
#'email' : ('email', 'varchar(30)'),
#'password' : ('password', 'varchar(30)'),
#}
#__table__ = 'User'
def __init__(self, **kwargs):
for name, value in kwargs.items():
setattr(self, name, value)
#uid=123 name=uid,value=123的name=value...
def save(self):
fields = []
args = []
for k, v in self.__mappings__.items():
fields.append(v[0])
args.append(getattr(self, k, None))
#这个不完美
#sql = 'insert into %s (%s) values (%s)' % (self.__table__, ','.join(fields), ','.join([str(i) for i in args]))
# insert into User (uid,username,email,password) values (123,root,xxx@xxx,xxxx)
args_temp = list()
for temp in args: #['123', "'root'", "'xxx@xxx'", "'xxxx'"]
#判断插入的如果是数字
if isinstance(temp, int):
args_temp.append(str(temp))
elif isinstance(temp, str):
args_temp.append("""'%s'""" % temp)
print(args_temp)
sql = 'insert into %s (%s) values (%s)' % (self.__table__, ','.join(fields), ','.join(args_temp))
print(sql)
#insert into User (uid,username,email,password) values (123,'root','xxx@xxx','xxxx')
class User(Model):
uid = ('uid', 'int unsigned')
name = ('username', 'varchar(30)')
email = ('email', 'varchar(30)')
password = ('password', 'varchar(30)')
u = User(uid=123, name='root', email='xxx@xxx', password='xxxx')
u.save() | [
"948369894@qq.com"
] | 948369894@qq.com |
7744e21b09e713d7eb3c26adcc7d76080eb9c847 | 620323fc090cebaf7aca456ff3f7fbbe1e210394 | /qt__pyqt__pyside__pyqode/example_QStackedWidget/use_listwidget.py | df42105e6af3e29d463d9aa479d1ebed22cc55a4 | [
"CC-BY-4.0"
] | permissive | gil9red/SimplePyScripts | bd2733372728bf9b9f00570e90316fa12116516b | 773c2c9724edd8827a1dbd91694d780e03fcb05a | refs/heads/master | 2023-08-31T04:26:09.120173 | 2023-08-30T17:22:59 | 2023-08-30T17:22:59 | 22,650,442 | 157 | 46 | null | 2023-09-08T17:51:33 | 2014-08-05T16:19:52 | Python | UTF-8 | Python | false | false | 1,025 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = "ipetrash"
from PyQt5.QtWidgets import (
QApplication,
QHBoxLayout,
QWidget,
QLabel,
QStackedWidget,
QListWidget,
)
class MainWindow(QWidget):
def __init__(self):
super().__init__()
self.stacked_widget = QStackedWidget()
self.stacked_widget.addWidget(QLabel("1234"))
self.stacked_widget.addWidget(QLabel("ABCD"))
self.stacked_widget.addWidget(QLabel("FOO_BAR"))
self.control_list = QListWidget()
self.control_list.addItems(["1234", "ABCD", "FOO_BAR"])
self.control_list.setFixedWidth(80)
self.control_list.clicked.connect(
lambda index: self.stacked_widget.setCurrentIndex(index.row())
)
main_layout = QHBoxLayout(self)
main_layout.addWidget(self.control_list)
main_layout.addWidget(self.stacked_widget)
if __name__ == "__main__":
app = QApplication([])
mw = MainWindow()
mw.show()
app.exec()
| [
"ilya.petrash@inbox.ru"
] | ilya.petrash@inbox.ru |
f9aba00ebbe58ef41e41b7374c770514fad67a28 | 6dedbcff0af848aa979574426ad9fa3936be5c4a | /cengal/parallel_execution/coroutines/coro_standard_services/shutdown_on_keyboard_interrupt/versions/v_0/__init__.py | 5e2395584c9ba7589876333ee84c164b51c2af0c | [
"Apache-2.0"
] | permissive | FI-Mihej/Cengal | 558d13541865e22006431bd1a1410ad57261484a | d36c05f4c90dfdac7296e87cf682df2f4d367e4b | refs/heads/master | 2023-06-08T00:39:39.414352 | 2023-06-05T21:35:50 | 2023-06-05T21:35:50 | 68,829,562 | 10 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,252 | py | #!/usr/bin/env python
# coding=utf-8
# Copyright © 2012-2023 ButenkoMS. All rights reserved. Contacts: <gtalk@butenkoms.space>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .shutdown_on_keyboard_interrupt import *
"""
Module Docstring
Docstrings: http://www.python.org/dev/peps/pep-0257/
"""
__author__ = "ButenkoMS <gtalk@butenkoms.space>"
__copyright__ = "Copyright © 2012-2023 ButenkoMS. All rights reserved. Contacts: <gtalk@butenkoms.space>"
__credits__ = ["ButenkoMS <gtalk@butenkoms.space>", ]
__license__ = "Apache License, Version 2.0"
__version__ = "3.2.6"
__maintainer__ = "ButenkoMS <gtalk@butenkoms.space>"
__email__ = "gtalk@butenkoms.space"
# __status__ = "Prototype"
__status__ = "Development"
# __status__ = "Production"
| [
"gtalk@butenkoms.space"
] | gtalk@butenkoms.space |
8f0c91c576598b507ff7652c740be6f6b6462ec9 | 65d93b3db37f488356faa1789f1001f17191e345 | /isi_mip/climatemodels/migrations/0102_auto_20190408_1308.py | 9581823e6bd88e440f166ad4dfe4f5df2ee91833 | [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | ISI-MIP/isimip | b4a19310dd772356eef87259783084836107cf4a | c2a78c727337e38f3695031e00afd607da7d6dcb | refs/heads/master | 2021-09-14T15:42:14.453031 | 2021-05-25T09:33:45 | 2021-05-25T09:33:45 | 237,446,232 | 0 | 0 | MIT | 2020-01-31T14:27:04 | 2020-01-31T14:27:03 | null | UTF-8 | Python | false | false | 914 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2019-04-08 11:08
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('climatemodels', '0101_impactmodel_simulation_round_specific_description'),
]
operations = [
migrations.RemoveField(
model_name='impactmodel',
name='model_output_license',
),
migrations.AlterField(
model_name='baseimpactmodel',
name='short_description',
field=models.TextField(blank=True, default='', help_text='This short description should assist other researchers in getting an understanding of your model, including the main differences between model versions used for different ISIMIP simulation rounds.', null=True, verbose_name='Short model description (all rounds)'),
),
]
| [
"hi@brueck.io"
] | hi@brueck.io |
f56291670cf5bac55339f2575f4ddc76147d4946 | dee7bbdddeae675f27bce0c9b79d972026bf388b | /Django/p5_connection_project/p5_connection_project/wsgi.py | 1332f5638464991446970a47345707c8c06d7043 | [] | no_license | KevinMichaelCamp/CodingDojo-Python | 529815dcef102ef4b9bbb18d0ec572172de5d2c4 | 30674b52423e0724908b3ab9930d3d80c807581d | refs/heads/master | 2022-12-10T13:20:22.723313 | 2021-02-18T02:22:00 | 2021-02-18T02:22:00 | 151,661,213 | 0 | 1 | null | 2022-12-08T03:03:05 | 2018-10-05T02:24:57 | Python | UTF-8 | Python | false | false | 419 | py | """
WSGI config for p5_connection_project project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'p5_connection_project.settings')
application = get_wsgi_application()
| [
"kevinmichaelcamp@gmail.com"
] | kevinmichaelcamp@gmail.com |
b524bc653594cd96b5b8faf8c3e1de45836d35c0 | 3f788f1dcb325f403a21a3cc0f3a547b48a9492a | /packages/levylab_lib_lockin_multichannel/levylab_lib_lockin_multichannel-2.15.3.17.spec | 5d7a576be7e4b2f531fe10aedac52228675edd97 | [
"BSD-3-Clause"
] | permissive | levylabpitt/levylabpitt.github.io | e01fa402e7bbb59f4b99f979735021271f55d48b | 96f1e0d7064526ab5908e9a26721adf306331516 | refs/heads/master | 2023-08-24T15:46:32.342077 | 2023-08-10T04:28:08 | 2023-08-10T04:28:08 | 183,644,047 | 0 | 3 | null | null | null | null | UTF-8 | Python | false | false | 42,458 | spec | [Package]
Name="levylab_lib_lockin_multichannel"
Version="2.15.3.17"
Release=""
ID=2f46dc7080f325cb2a71fe3d40ec3287
File Format="vip"
Format Version="2017"
Display Name="Multichannel Lockin"
[Description]
Description="Multichannel Lockin for National Instruments' Dynamic Signal Acquisition hardware (4431, 4461, 4462).\0A- This version is configured to handle multiple cards for simultaneous, synchronized AI/AO.\0A- The analog outputs can be configured to output sine, square, sawtooth, or triangle functions with (swept) DC offsets.\0A- Each of the analog inputs can be demodulated at multiple frequencies."
Summary=""
License="BSD-3"
Copyright="Copyright (c) 2022, LevyLab"
Distribution=""
Vendor="LevyLab"
URL="https://github.com/levylabpitt/Multichannel-Lockin"
Packager="Patrick Irvin"
Demo="FALSE"
Release Notes="- Issue #63: Implement arbitrary sweep table support.\0A - Rudimentary tables can be created inside the UI (e.g., [0,1,2,0])\0A - More complicated arbitrary waveforms can be loaded via the API "setSweepTable" method\0A- Issue #64: implement output sampling rate to avoid memory overflow errors during sweep mode\0A- Issue #65: better support for USB-4431 Devices\0A- Fix issues with measuring offset; updated notch filter\0A- Updated waveguide model (AI1 gain)\0A- Miscellaneous bugfixes and UI improvements (add show/hide DAQ UI button)\0A- Update to Instrument Framework v1.12.5\0A\0A#### Dependencies\0A- Please ensure that [Visual C++ Redistributable Packages for Visual Studio 2013 (32 bit / x86)](https://www.microsoft.com/en-us/download/details.aspx?id=40784) is installed.\0A"
System Package="FALSE"
Sub Package="FALSE"
License Agreement="TRUE"
[LabVIEW]
close labview before install="FALSE"
restart labview after install="FALSE"
skip mass compile after install="FALSE"
[Platform]
Exclusive_LabVIEW_Version="LabVIEW x86>=19.0"
Exclusive_LabVIEW_System="ALL"
Exclusive_OS="ALL"
[Script VIs]
PreInstall=""
PostInstall=""
PreUninstall=""
PostUninstall=""
Verify=""
PreBuild=""
PostBuild=""
[Dependencies]
AutoReqProv=FALSE
Requires="levylab_lib_levylab_instruments>=1.12.5.3"
Conflicts=""
[Activation]
License File=""
Licensed Library=""
[Files]
Num File Groups="3"
Sub-Packages=""
Namespaces=""
[File Group 0]
Target Dir="<application>"
Replace Mode="Always"
Num Files=408
File 0="user.lib/LevyLab/Lockin-Multichannel/Multichannel Lockin.lvproj"
File 1="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/Instrument.Lockin.AppLauncher.vi"
File 2="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/Instrument.Lockin.lvclass"
File 3="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/Instrument.Lockin.TestLauncher.vi"
File 4="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/Process.vi"
File 5="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/Typedefs/Lockin.Dependencies--Cluster.ctl"
File 6="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/Typedefs/Lockin.getAllData--Cluster.ctl"
File 7="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/Typedefs/Offset Mode--Enum.ctl"
File 8="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/Typedefs/REF Parameter--Enum.ctl"
File 9="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/Typedefs/Results Type--Enum.ctl"
File 10="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/Typedefs/Select All--Boolean.ctl"
File 11="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/Typedefs/Select None--Boolean.ctl"
File 12="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/Typedefs/images/select-all-13-FALSE.png"
File 13="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/Typedefs/images/select-all-13-TRUE.png"
File 14="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/Typedefs/images/select-none-13-FALSE.png"
File 15="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/Typedefs/images/select-none-13-TRUE.png"
File 16="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/Typedefs/API/Lockin.Commands--Enum.ctl"
File 17="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/Typedefs/API/Lockin.Commands.Local--Enum.ctl"
File 18="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/Typedefs/API/Lockin.setAO_Amp--cluster.ctl"
File 19="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/Typedefs/API/Lockin.setAO_DC--cluster.ctl"
File 20="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/Typedefs/API/Lockin.setAO_f--cluster.ctl"
File 21="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/Typedefs/API/Lockin.setAO_function--cluster.ctl"
File 22="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/Typedefs/API/Lockin.setAO_phi--cluster.ctl"
File 23="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/Typedefs/API/Lockin.setAUX--cluster.ctl"
File 24="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/Typedefs/API/Lockin.setREF_f--cluster.ctl"
File 25="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/Typedefs/API/Lockin.setREF_phi--cluster.ctl"
File 26="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/Typedefs/API/Lockin.setREF_RollOff--cluster.ctl"
File 27="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/Typedefs/API/Lockin.setREF_TC--cluster.ctl"
File 28="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/Private/Format Status String.vi"
File 29="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/Private/formatSweepResults.vi"
File 30="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/Private/Lockin.Client (Local).vim"
File 31="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/Private/Lockin.Client.vim"
File 32="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/Private/Parse Result Key.vi"
File 33="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/Private/Retry Timeout.vi"
File 34="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/Private/Update AO Parameter.vi"
File 35="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/Private/Update REF Parameter.vi"
File 36="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/Overrides/Configuration Window.vi"
File 37="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/Overrides/enumerateStaticDependencies.vi"
File 38="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/Overrides/Get SMO Name.vi"
File 39="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/Overrides/Get SMO Port.vi"
File 40="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/Overrides/Get SMO Public API.vi"
File 41="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/Overrides/Handle Command.vi"
File 42="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/Overrides/Handle getAll.vi"
File 43="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/Overrides/Read Configuration File.vi"
File 44="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/Overrides/Write Configuration File.vi"
File 45="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/DAQ-public/config not UI to UI (AO and AI).vi"
File 46="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/DAQ-public/config UI to not UI (AO and AI).vi"
File 47="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/DAQ-private/FloatApprox.vi"
File 48="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/DAQ-private/FloatApproxPoint1Percent.vi"
File 49="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/DAQ-private/Limit AO Amplitude.vi"
File 50="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/DAQ-private/PS Chart.vi"
File 51="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/DAQ-private/Replace Sweep Waveforms.vi"
File 52="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/API (Class)/Read Instrument.DAQ.vi"
File 53="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/API/Close.vi"
File 54="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/API/getAIwfm.vi"
File 55="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/API/getAOwfm.vi"
File 56="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/API/getAUX.vi"
File 57="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/API/getInputGain.vi"
File 58="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/API/getOutputGain.vi"
File 59="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/API/getResults.vi"
File 60="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/API/getSampling.vi"
File 61="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/API/getSamplingMode.vi"
File 62="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/API/getSweepData.vi"
File 63="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/API/getSweepResults.vi"
File 64="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/API/getSweepWaveforms.vi"
File 65="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/API/Open.vi"
File 66="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/API/setAUX.vi"
File 67="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/API/setDAQ.vi"
File 68="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/API/setInputGain.vi"
File 69="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/API/setOffset.vi"
File 70="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/API/setOutputGain.vi"
File 71="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/API/setSampling.vi"
File 72="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/API/setSamplingMode.vi"
File 73="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/API/setSweepConfiguration.vi"
File 74="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/API/setSweepTable.vim"
File 75="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/API/setSweepTime.vi"
File 76="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/API/State/getStatus.vi"
File 77="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/API/State/resetDAQ.vi"
File 78="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/API/State/setState.vi"
File 79="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/API/State/startDAQ.vi"
File 80="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/API/State/startSweep.vi"
File 81="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/API/State/stopDAQ.vi"
File 82="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/API/State/stopSweep.vi"
File 83="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/API/REF/getREF.vi"
File 84="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/API/REF/setREF.vi"
File 85="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/API/REF/setREF_f.vi"
File 86="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/API/REF/setREF_phi.vi"
File 87="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/API/REF/setREF_RollOff.vi"
File 88="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/API/REF/setREF_TC.vi"
File 89="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/API/Macros/startIVAndWait.vi"
File 90="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/API/Macros/startIVSweepAndWait.vi"
File 91="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/API/Macros/startSweepAndWait.vi"
File 92="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/API/Macros/waitForNewResults.vi"
File 93="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/API/Macros/waitForNewWaveforms.vi"
File 94="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/API/Macros/waitStatusCreated.vi"
File 95="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/API/Macros/waitStatusIdle.vi"
File 96="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/API/Macros/waitStatusStarted.vi"
File 97="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/API/Macros/waitStatusStopped.vi"
File 98="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/API/Macros/waitStatusSweepingStarted.vi"
File 99="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/API/Macros/waitStatusSweepingStopped.vi"
File 100="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/API/Data/Parse Results (all).vi"
File 101="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/API/Data/Parse Results (single).vi"
File 102="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/API/AO/getAO.vi"
File 103="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/API/AO/setAO.vi"
File 104="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/API/AO/setAO_Amp.vi"
File 105="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/API/AO/setAO_DC.vi"
File 106="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/API/AO/setAO_f.vi"
File 107="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/API/AO/setAO_function.vi"
File 108="user.lib/LevyLab/Lockin-Multichannel/Instrument.Lockin/API/AO/setAO_phi.vi"
File 109="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/4461-SIMULATEDDEVICES.NCE"
File 110="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/4461-SIMULATEDDEVICES_20220813.NCE"
File 111="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Instrument.DAQ Activity Tree.vi"
File 112="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Instrument.DAQ.lvclass"
File 113="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Instrument.DAQ.TestLauncher.vi"
File 114="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Process.vi"
File 115="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Waveguide Model/Waveguide Model--Cluster.ctl"
File 116="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Waveguide Model/Waveguide Model.vi"
File 117="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/AI.Channel--Cluster--Array.ctl"
File 118="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/AI.Channel--Cluster.ctl"
File 119="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/AO.Channel--Cluster--Array.ctl"
File 120="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/AO.Channel--Cluster.ctl"
File 121="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/AO.Channel.Function--Enum.ctl"
File 122="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/Channel.Gain--Cluster--Array.ctl"
File 123="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/Channel.Gain--Cluster.ctl"
File 124="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/Channel.GainsAndOffsets--Cluster.ctl"
File 125="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/Configuration--Cluster.ctl"
File 126="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/Configuration.Channels--Cluster.ctl"
File 127="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/Configuration.DAQ--Cluster.ctl"
File 128="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/Configuration.LimitAmplitude--Cluster.ctl"
File 129="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/Configuration.LinkFrequencies--Cluster.ctl"
File 130="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/Configuration.Prefilter--Cluster.ctl"
File 131="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/DAQ.Calibration--Cluster--Array.ctl"
File 132="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/DAQ.Calibration--Cluster.ctl"
File 133="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/DAQ.Config.AI--Cluster--Array.ctl"
File 134="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/DAQ.Config.AI--Cluster.ctl"
File 135="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/DAQ.Config.AI.Coupling--enum.ctl"
File 136="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/DAQ.Config.AI.Range--enum.ctl"
File 137="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/DAQ.Config.AO--Cluster--Array.ctl"
File 138="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/DAQ.Config.AO--Cluster.ctl"
File 139="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/DAQ.Config.AO.Range--enum.ctl"
File 140="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/DAQ.Configuration--Cluster--Array.ctl"
File 141="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/DAQ.Configuration--Cluster.ctl"
File 142="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/DAQ.Sampling--Cluster.ctl"
File 143="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/DAQ.Sampling.Fs Mode--enum.ctl"
File 144="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/DAQ.Sampling.Sample Mode--Ring.ctl"
File 145="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/DAQ.Sampling.Sampling--Cluster.ctl"
File 146="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/DAQ.Type--enum.ctl"
File 147="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/Get State-enum.ctl"
File 148="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/PrivateEvents--Cluster.ctl"
File 149="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/PrivateEvents--DAQ.autoConfigureDAQ.ctl"
File 150="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/PrivateEvents--DAQ.measureOffset.ctl"
File 151="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/PrivateEvents--DAQ.setAI.ctl"
File 152="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/PrivateEvents--DAQ.setAIconfig.ctl"
File 153="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/PrivateEvents--DAQ.setAO.ctl"
File 154="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/PrivateEvents--DAQ.setAOconfig.ctl"
File 155="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/PrivateEvents--DAQ.setCalibration.ctl"
File 156="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/PrivateEvents--DAQ.setInputGain.ctl"
File 157="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/PrivateEvents--DAQ.setLimitAmplitude.ctl"
File 158="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/PrivateEvents--DAQ.setLinkFrequencies.ctl"
File 159="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/PrivateEvents--DAQ.setOutputGain.ctl"
File 160="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/PrivateEvents--DAQ.setPreFilter.ctl"
File 161="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/PrivateEvents--DAQ.setREF.ctl"
File 162="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/PrivateEvents--DAQ.setSampling.ctl"
File 163="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/PrivateEvents--DAQ.setState.ctl"
File 164="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/PrivateEvents--DAQ.setSweep.ctl"
File 165="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/PrivateEvents--DAQ.showUI.ctl"
File 166="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/PrivateEvents--DAQ.zeroOffset.ctl"
File 167="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/PrivateEvents--DAQ.zeroPhase.ctl"
File 168="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/PublicEvents--Cluster.ctl"
File 169="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/PublicEvents--DAQ.getAIChannels.ctl"
File 170="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/PublicEvents--DAQ.getAOChannels.ctl"
File 171="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/PublicEvents--DAQ.getCalibration.ctl"
File 172="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/PublicEvents--DAQ.getDAQconfig.ctl"
File 173="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/PublicEvents--DAQ.getDAQState.ctl"
File 174="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/PublicEvents--DAQ.getInputGain.ctl"
File 175="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/PublicEvents--DAQ.getInputOffset.ctl"
File 176="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/PublicEvents--DAQ.getOutputGain.ctl"
File 177="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/PublicEvents--DAQ.getREFChannels.ctl"
File 178="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/PublicEvents--DAQ.getResults.ctl"
File 179="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/PublicEvents--DAQ.getSampling.ctl"
File 180="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/PublicEvents--DAQ.getSweepChannels.ctl"
File 181="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/PublicEvents--DAQ.getSweepWaveforms.ctl"
File 182="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/PublicEvents--DAQ.getWaveforms.ctl"
File 183="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/REF.Channel--Cluster--Array.ctl"
File 184="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/REF.Channel--Cluster.ctl"
File 185="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/Results--Cluster.ctl"
File 186="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/Set State-enum.ctl"
File 187="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/Simulation Mode--enum.ctl"
File 188="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/Sweep.Channel--Cluster--Array.ctl"
File 189="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/Sweep.Channel--Cluster.ctl"
File 190="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/Sweep.Configuration--Cluster.ctl"
File 191="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Typedefs/Sweep.Pattern--Enum.ctl"
File 192="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Tests/Test DAQ Synchronize.vi"
File 193="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Tests/Test LockinEngine.vi"
File 194="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Tests/Test PLL.vi"
File 195="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Tests/Test Prefilters.vi"
File 196="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Tests/Test Serialize.vi"
File 197="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Tests/Test Sweep.vi"
File 198="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Sweep/Append Multiple Waveforms.vi"
File 199="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Sweep/Calcualte N_total and Updates.vi"
File 200="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Sweep/Create DC Waveforms.vi"
File 201="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Sweep/Generate Sweep Pattern.vi"
File 202="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Sweep/Generator DC Sweep Manager.vi"
File 203="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Sweep/Get Sweep Pattern Subset.vi"
File 204="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Sweep/Test New Sweep.vi"
File 205="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/private/AI Array Add or Remove.vi"
File 206="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/private/AO Array Add or Remove.vi"
File 207="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/private/DAQ.getAIChannels.vi"
File 208="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/private/DAQ.getAOChannels.vi"
File 209="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/private/DAQ.getCalibration.vi"
File 210="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/private/DAQ.getDAQconfig.vi"
File 211="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/private/DAQ.getDAQState.vi"
File 212="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/private/DAQ.getInputGain.vi"
File 213="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/private/DAQ.getInputOffset.vi"
File 214="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/private/DAQ.getOutputGain.vi"
File 215="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/private/DAQ.GetPrivateEvents.vi"
File 216="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/private/DAQ.getREFChannels.vi"
File 217="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/private/DAQ.getResults.vi"
File 218="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/private/DAQ.getSampling.vi"
File 219="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/private/DAQ.getSweepChannels.vi"
File 220="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/private/DAQ.getSweepWaveforms.vi"
File 221="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/private/DAQ.getWaveforms.vi"
File 222="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/private/DAQ.Sampling.Coerce Timing.vi"
File 223="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/private/DAQ.Sampling.Configure.vi"
File 224="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/private/DAQ.Sampling.Find Integer Period Fs and Ns.vi"
File 225="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/private/Decimate DFD.vi"
File 226="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/private/Gain Add or Remove.vi"
File 227="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/private/Index Waveform Array Elements.vi"
File 228="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/private/Initialize AI Channels.vi"
File 229="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/private/Initialize AO Channels.vi"
File 230="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/private/Initialize Input Gain.vi"
File 231="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/private/Initialize Input Offset.vi"
File 232="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/private/Initialize Ouput Gain.vi"
File 233="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/private/Initialize REF Channels.vi"
File 234="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/private/Initialize Sweep Channels.vi"
File 235="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/private/REF Array Add or Remove.vi"
File 236="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/private/Set all f same.vi"
File 237="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/private/Waveform Array to Cluster.vi"
File 238="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/private/Waveform Cluster to Array.vi"
File 239="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Methods (overrides)/CreatePrivateEvents.vi"
File 240="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Methods (overrides)/CreatePublicEvents.vi"
File 241="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Methods (overrides)/DestroyPrivateEvents.vi"
File 242="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/Methods (overrides)/DestroyPublicEvents.vi"
File 243="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/4461 Autophase.vi"
File 244="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/4461 Calibrate Phase.vi"
File 245="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/4461 Replace AO Ch.vi"
File 246="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/Auto Configure DAQ.vi"
File 247="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/DAQ.44xx.Get AI Min and Max.vi"
File 248="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/DAQ.44xx.Get AO Min and Max.vi"
File 249="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/DAQ.44xx.Get Export Triggers.vi"
File 250="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/DAQ.44xx.Get Master and Slave Tasks.vi"
File 251="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/DAQ.44xx.Get Product Type from Device.vi"
File 252="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/DAQ.44xx.Get Product Type from Task.vi"
File 253="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/DAQ.44xx.Get Ref Clk.vi"
File 254="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/DAQ.44xx.Get Terminal Name with Device Prefix.vi"
File 255="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/DAQ.44xx.MIN MAX Fs.vi"
File 256="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/DAQ.Calibrate.vi"
File 257="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/DAQ.Clear.vi"
File 258="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/DAQ.Create AI Tasks.vi"
File 259="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/DAQ.Create AO Tasks.vi"
File 260="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/DAQ.Create Sample Clock.vi"
File 261="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/DAQ.Create.vi"
File 262="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/DAQ.DAQmx Asynch Read.vi"
File 263="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/DAQ.DAQmx Asynch Write.vi"
File 264="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/DAQ.DAQmx EventRegistration.vi"
File 265="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/DAQ.ErrorHandler.vi"
File 266="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/DAQ.Get Calibration.vi"
File 267="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/DAQ.PreStartWriteBuffer.vi"
File 268="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/DAQ.Read AI and Write AO.vi"
File 269="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/DAQ.Read AI.vi"
File 270="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/DAQ.Simulate Noisy AI.vi"
File 271="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/DAQ.Simulate Waveguide.vi"
File 272="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/DAQ.Start.AI.vi"
File 273="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/DAQ.Start.AO.vi"
File 274="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/DAQ.Start.vi"
File 275="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/DAQ.Stop.vi"
File 276="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/DAQ.TimedLoop.vi"
File 277="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/DAQ.Trigger.vi"
File 278="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/DAQ.Write AO.vi"
File 279="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/DAQmx.Activity Tree.vi"
File 280="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/Enumerate Chassis.vi"
File 281="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/Generator.Create AO DC waveforms.vi"
File 282="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/Generator.Create AO waveforms.vi"
File 283="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/Generator.Create Reference Waveforms.vi"
File 284="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/Generator.Synthesize Waveforms.vi"
File 285="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/Get Waveform AI Channel.vi"
File 286="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/Get Waveform Quadrature.vi"
File 287="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/Get Waveform Reference Channel.vi"
File 288="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/Lockin.Engine.vi"
File 289="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/Lockin.Low Pass Filter (subVI).vi"
File 290="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/Lockin.Low Pass Filter.vi"
File 291="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/Lockin.Mixer.vi"
File 292="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/Multitone Eval.vi"
File 293="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/PLL_PID.vi"
File 294="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/Reduce Fs.vi"
File 295="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/Set Waveform AI Channel.vi"
File 296="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/Set Waveform Quadrature.vi"
File 297="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/Set Waveform Reference Channel.vi"
File 298="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/Sort Devices.vi"
File 299="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/Test Multitone.vi"
File 300="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/Waveform Circular Buffer.vi"
File 301="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/support/Clip AO Waveforms.vi"
File 302="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/support/Coerce Fs.vi"
File 303="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/support/Decimate Waveforms.vi"
File 304="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/support/Decimate.vi"
File 305="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/support/Divide Input Gain.vi"
File 306="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/support/Divide Output Gain.vi"
File 307="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/support/Factorize Integer.vi"
File 308="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/support/Fs_to_FilterDelay.vi"
File 309="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/support/Get DAQ Offset.vi"
File 310="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/support/lockin - replace AO DC.vi"
File 311="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/support/lockin - replace AO Sweep Indicator.vi"
File 312="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/support/lockin - Restore Saved AO.vi"
File 313="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/support/lockin - Save AO.vi"
File 314="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/support/lockin - Set REF f equal AO f.vi"
File 315="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/support/lockin - Set REF phase equals zero.vi"
File 316="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/support/lockin - zero amplitude.vi"
File 317="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/support/Multiply Input Gain.vi"
File 318="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/support/Multiply Output Gain.vi"
File 319="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/support/Prefilter.60Hz_Notch.vi"
File 320="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/support/Prefilter.Low Pass.vi"
File 321="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/support/Resample.vi"
File 322="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/support/Set DAQ Offset.vi"
File 323="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/support/Simple PID Array.vi"
File 324="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/DAQ/support/Subtract DAQ Offset.vi"
File 325="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/API/DAQ.autoConfigureDAQ.vi"
File 326="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/API/DAQ.Get Number of Channels from Channel String.vi"
File 327="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/API/DAQ.GetPublicEvents.vi"
File 328="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/API/DAQ.measureOffset.vi"
File 329="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/API/DAQ.setAI.vi"
File 330="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/API/DAQ.setAIconfig.vi"
File 331="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/API/DAQ.setAO.vi"
File 332="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/API/DAQ.setAOconfig.vi"
File 333="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/API/DAQ.setCalibration.vi"
File 334="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/API/DAQ.setInputGain.vi"
File 335="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/API/DAQ.setLimitAmplitude.vi"
File 336="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/API/DAQ.setLinkFrequencies.vi"
File 337="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/API/DAQ.setOutputGain.vi"
File 338="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/API/DAQ.setPreFilter.vi"
File 339="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/API/DAQ.setREF.vi"
File 340="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/API/DAQ.setSampling.vi"
File 341="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/API/DAQ.setstate.vi"
File 342="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/API/DAQ.setSweep.vi"
File 343="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/API/DAQ.showUI.vi"
File 344="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/API/DAQ.zeroOffset.vi"
File 345="user.lib/LevyLab/Lockin-Multichannel/Instrument.DAQ/API/DAQ.zeroPhase.vi"
File 346="user.lib/LevyLab/Lockin-Multichannel/Instrument UI.Lockin/Instrument UI.Lockin.lvclass"
File 347="user.lib/LevyLab/Lockin-Multichannel/Instrument UI.Lockin/Instrument UI.Lockin.TestLauncher.vi"
File 348="user.lib/LevyLab/Lockin-Multichannel/Instrument UI.Lockin/Process.vi"
File 349="user.lib/LevyLab/Lockin-Multichannel/Instrument UI.Lockin/Typedefs/Display-Enum.ctl"
File 350="user.lib/LevyLab/Lockin-Multichannel/Instrument UI.Lockin/Typedefs/Tiny Ring.ctl"
File 351="user.lib/LevyLab/Lockin-Multichannel/Instrument UI.Lockin/Typedefs/UI.Configuration--Cluster.ctl"
File 352="user.lib/LevyLab/Lockin-Multichannel/Instrument UI.Lockin/Typedefs/UI.References--Cluster.ctl"
File 353="user.lib/LevyLab/Lockin-Multichannel/Instrument UI.Lockin/Typedefs/UI.References.Booleans--Cluster.ctl"
File 354="user.lib/LevyLab/Lockin-Multichannel/Instrument UI.Lockin/Typedefs/UI.References.Clusters--Cluster.ctl"
File 355="user.lib/LevyLab/Lockin-Multichannel/Instrument UI.Lockin/Typedefs/UI.References.Graphs--Cluster.ctl"
File 356="user.lib/LevyLab/Lockin-Multichannel/Instrument UI.Lockin/Typedefs/X Axis-Enum.ctl"
File 357="user.lib/LevyLab/Lockin-Multichannel/Instrument UI.Lockin/Typedefs/Y Axis Input-Enum.ctl"
File 358="user.lib/LevyLab/Lockin-Multichannel/Instrument UI.Lockin/Typedefs/Y Axis Output-Enum.ctl"
File 359="user.lib/LevyLab/Lockin-Multichannel/Instrument UI.Lockin/Tests/sandbox.vi"
File 360="user.lib/LevyLab/Lockin-Multichannel/Instrument UI.Lockin/private/Decimate Waveforms.vi"
File 361="user.lib/LevyLab/Lockin-Multichannel/Instrument UI.Lockin/private/Dictionary to Chart.vi"
File 362="user.lib/LevyLab/Lockin-Multichannel/Instrument UI.Lockin/private/Dictionary to Indicator.vi"
File 363="user.lib/LevyLab/Lockin-Multichannel/Instrument UI.Lockin/private/Dictionary to Plot Names.vi"
File 364="user.lib/LevyLab/Lockin-Multichannel/Instrument UI.Lockin/private/Dictionary to XY Graph.vi"
File 365="user.lib/LevyLab/Lockin-Multichannel/Instrument UI.Lockin/private/Display n or dt.vi"
File 366="user.lib/LevyLab/Lockin-Multichannel/Instrument UI.Lockin/private/Get AI Channel Count.vi"
File 367="user.lib/LevyLab/Lockin-Multichannel/Instrument UI.Lockin/private/Get AI REF (f) StringsAndValues.vi"
File 368="user.lib/LevyLab/Lockin-Multichannel/Instrument UI.Lockin/private/Get AI REF StringsAndValues.vi"
File 369="user.lib/LevyLab/Lockin-Multichannel/Instrument UI.Lockin/private/Get AI StringsAndValues.vi"
File 370="user.lib/LevyLab/Lockin-Multichannel/Instrument UI.Lockin/private/Get AO Channel Count.vi"
File 371="user.lib/LevyLab/Lockin-Multichannel/Instrument UI.Lockin/private/Get AO StringsAndValues.vi"
File 372="user.lib/LevyLab/Lockin-Multichannel/Instrument UI.Lockin/private/Get Plot Visibility (WFChart).vi"
File 373="user.lib/LevyLab/Lockin-Multichannel/Instrument UI.Lockin/private/Get Plot Visibility (WFGraph).vi"
File 374="user.lib/LevyLab/Lockin-Multichannel/Instrument UI.Lockin/private/Get Plot Visibility.vi"
File 375="user.lib/LevyLab/Lockin-Multichannel/Instrument UI.Lockin/private/Get REF StringsAndValues.vi"
File 376="user.lib/LevyLab/Lockin-Multichannel/Instrument UI.Lockin/private/Multiply by Gain.vi"
File 377="user.lib/LevyLab/Lockin-Multichannel/Instrument UI.Lockin/private/Number to String Bar Graph.vi"
File 378="user.lib/LevyLab/Lockin-Multichannel/Instrument UI.Lockin/private/Parse Results (table).vi"
File 379="user.lib/LevyLab/Lockin-Multichannel/Instrument UI.Lockin/private/Remove Unused References.vi"
File 380="user.lib/LevyLab/Lockin-Multichannel/Instrument UI.Lockin/private/Set All Plot Visibility (WFChart).vi"
File 381="user.lib/LevyLab/Lockin-Multichannel/Instrument UI.Lockin/private/Set All Plot Visibility (WFGraph).vi"
File 382="user.lib/LevyLab/Lockin-Multichannel/Instrument UI.Lockin/private/Set All Plot Visibility (XYGraph).vi"
File 383="user.lib/LevyLab/Lockin-Multichannel/Instrument UI.Lockin/private/Set All Plot Visibility.vi"
File 384="user.lib/LevyLab/Lockin-Multichannel/Instrument UI.Lockin/private/Set AO AI Graph X Axis (WFGraph).vi"
File 385="user.lib/LevyLab/Lockin-Multichannel/Instrument UI.Lockin/private/Set AO Graph Y Axis (WFGraph).vi"
File 386="user.lib/LevyLab/Lockin-Multichannel/Instrument UI.Lockin/private/Set Legend (WDTWFGraph).vi"
File 387="user.lib/LevyLab/Lockin-Multichannel/Instrument UI.Lockin/private/Set Legend (WFChart).vi"
File 388="user.lib/LevyLab/Lockin-Multichannel/Instrument UI.Lockin/private/Set Legend (WFGraph).vi"
File 389="user.lib/LevyLab/Lockin-Multichannel/Instrument UI.Lockin/private/Set Legend (XYGraph).vi"
File 390="user.lib/LevyLab/Lockin-Multichannel/Instrument UI.Lockin/private/Set Legend.vi"
File 391="user.lib/LevyLab/Lockin-Multichannel/Instrument UI.Lockin/private/Set Plot Color (WDTWFGraph).vi"
File 392="user.lib/LevyLab/Lockin-Multichannel/Instrument UI.Lockin/private/Set Plot Color (WFChart).vi"
File 393="user.lib/LevyLab/Lockin-Multichannel/Instrument UI.Lockin/private/Set Plot Color (WFGraph).vi"
File 394="user.lib/LevyLab/Lockin-Multichannel/Instrument UI.Lockin/private/Set Plot Color (XYGraph).vi"
File 395="user.lib/LevyLab/Lockin-Multichannel/Instrument UI.Lockin/private/Set Plot Color.vi"
File 396="user.lib/LevyLab/Lockin-Multichannel/Instrument UI.Lockin/private/Set Plot Visibility (WDTWFGraph).vi"
File 397="user.lib/LevyLab/Lockin-Multichannel/Instrument UI.Lockin/private/Set Plot Visibility (WFChart).vi"
File 398="user.lib/LevyLab/Lockin-Multichannel/Instrument UI.Lockin/private/Set Plot Visibility (WFGraph).vi"
File 399="user.lib/LevyLab/Lockin-Multichannel/Instrument UI.Lockin/private/Set Plot Visibility (XYGraph).vi"
File 400="user.lib/LevyLab/Lockin-Multichannel/Instrument UI.Lockin/private/Set Plot Visibility.vi"
File 401="user.lib/LevyLab/Lockin-Multichannel/Instrument UI.Lockin/private/Spinner.vi"
File 402="user.lib/LevyLab/Lockin-Multichannel/Examples/Example_IV_Curves.vi"
File 403="user.lib/LevyLab/Lockin-Multichannel/Examples/Example_Lockin.vi"
File 404="user.lib/LevyLab/Lockin-Multichannel/Examples/Multi-Device Sync - DSA Analog Input and Output Using a Shared Timebase & Trigger with DAQmx/Get_Terminal_Name_with_Device_Prefix.vi"
File 405="user.lib/LevyLab/Lockin-Multichannel/Examples/Multi-Device Sync - DSA Analog Input and Output Using a Shared Timebase & Trigger with DAQmx/Multi-Device_Sync-AI_and_AO-Shared_Timebase_&_Trig-DSA.html"
File 406="user.lib/LevyLab/Lockin-Multichannel/Examples/Multi-Device Sync - DSA Analog Input and Output Using a Shared Timebase & Trigger with DAQmx/Multi-Device_Sync-AI_and_AO-Shared_Timebase_&_Trig-DSA.vi"
File 407="user.lib/LevyLab/Lockin-Multichannel/Examples/Multi-Device Sync - DSA Analog Input and Output Using a Shared Timebase & Trigger with DAQmx/Waveform_Buffer_Generation.vi"
[File Group 1]
Target Dir="<menus>/Categories/LevyLab"
Replace Mode="Always"
Num Files=1
File 0="functions_LevyLab_lib_Lockin_Multichannel.mnu"
[File Group 2]
Target Dir="<menus>/Categories/LevyLab"
Replace Mode="If Newer"
Num Files=1
File 0="dir.mnu"
| [
"p.irvin@levylab.org"
] | p.irvin@levylab.org |
a67d141be18a04831a14901f68ed831333063d86 | e959e0af4559447309f083e73010752e88f848c4 | /meta/DesignDataPackage/lib/python/avm/systemc/__init__.py | 0641f06e0c7370be5a7d03a8b73a083f0815ce5c | [
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"LicenseRef-scancode-other-permissive"
] | permissive | lefevre-fraser/openmeta-mms | 5621e5bd2f14f63c5f44e5130678f93dae87e3d3 | 08f3115e76498df1f8d70641d71f5c52cab4ce5f | refs/heads/master | 2022-04-21T13:39:07.209451 | 2020-04-14T19:56:34 | 2020-04-14T19:56:34 | 257,699,808 | 0 | 0 | NOASSERTION | 2020-04-21T19:48:04 | 2020-04-21T19:48:03 | null | UTF-8 | Python | false | false | 19,675 | py | # .\_systemc.py
# -*- coding: utf-8 -*-
# PyXB bindings for NM:acabe3c8394de3f41da11a8fb34cb58c8e1b3a5a
# Generated 2016-02-15 11:24:52.072000 by PyXB version 1.2.3
# Namespace systemc [xmlns:systemc]
import pyxb
import pyxb.binding
import pyxb.binding.saxer
import io
import pyxb.utils.utility
import pyxb.utils.domutils
import sys
# Unique identifier for bindings created at the same time
_GenerationUID = pyxb.utils.utility.UniqueIdentifier('urn:uuid:057f9670-d409-11e5-9520-7429af7917c0')
# Version of PyXB used to generate the bindings
_PyXBVersion = '1.2.3'
# Generated bindings are not compatible across PyXB versions
if pyxb.__version__ != _PyXBVersion:
raise pyxb.PyXBVersionError(_PyXBVersion)
# Import bindings for namespaces imported into schema
import pyxb.binding.datatypes
import avm as _ImportedBinding__avm
# NOTE: All namespace declarations are reserved within the binding
Namespace = pyxb.namespace.NamespaceForURI(u'systemc', create_if_missing=True)
Namespace.configureCategories(['typeBinding', 'elementBinding'])
def CreateFromDocument (xml_text, default_namespace=None, location_base=None):
"""Parse the given XML and use the document element to create a
Python instance.
@param xml_text An XML document. This should be data (Python 2
str or Python 3 bytes), or a text (Python 2 unicode or Python 3
str) in the L{pyxb._InputEncoding} encoding.
@keyword default_namespace The L{pyxb.Namespace} instance to use as the
default namespace where there is no default namespace in scope.
If unspecified or C{None}, the namespace of the module containing
this function will be used.
@keyword location_base: An object to be recorded as the base of all
L{pyxb.utils.utility.Location} instances associated with events and
objects handled by the parser. You might pass the URI from which
the document was obtained.
"""
if pyxb.XMLStyle_saxer != pyxb._XMLStyle:
dom = pyxb.utils.domutils.StringToDOM(xml_text)
return CreateFromDOM(dom.documentElement)
if default_namespace is None:
default_namespace = Namespace.fallbackNamespace()
saxer = pyxb.binding.saxer.make_parser(fallback_namespace=default_namespace, location_base=location_base)
handler = saxer.getContentHandler()
xmld = xml_text
if isinstance(xmld, unicode):
xmld = xmld.encode(pyxb._InputEncoding)
saxer.parse(io.BytesIO(xmld))
instance = handler.rootObject()
return instance
def CreateFromDOM (node, default_namespace=None):
"""Create a Python instance from the given DOM node.
The node tag must correspond to an element declaration in this module.
@deprecated: Forcing use of DOM interface is unnecessary; use L{CreateFromDocument}."""
if default_namespace is None:
default_namespace = Namespace.fallbackNamespace()
return pyxb.binding.basis.element.AnyCreateFromDOM(node, default_namespace)
# Atomic simple type: {systemc}SystemCDataTypeEnum
class SystemCDataTypeEnum (pyxb.binding.datatypes.string, pyxb.binding.basis.enumeration_mixin):
"""An atomic simple type."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'SystemCDataTypeEnum')
_XSDLocation = pyxb.utils.utility.Location(u'avm.systemc.xsd', 39, 2)
_Documentation = None
SystemCDataTypeEnum._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=SystemCDataTypeEnum, enum_prefix=None)
SystemCDataTypeEnum.bool = SystemCDataTypeEnum._CF_enumeration.addEnumeration(unicode_value=u'bool', tag=u'bool')
SystemCDataTypeEnum.sc_int = SystemCDataTypeEnum._CF_enumeration.addEnumeration(unicode_value=u'sc_int', tag=u'sc_int')
SystemCDataTypeEnum.sc_uint = SystemCDataTypeEnum._CF_enumeration.addEnumeration(unicode_value=u'sc_uint', tag=u'sc_uint')
SystemCDataTypeEnum.sc_logic = SystemCDataTypeEnum._CF_enumeration.addEnumeration(unicode_value=u'sc_logic', tag=u'sc_logic')
SystemCDataTypeEnum.sc_bit = SystemCDataTypeEnum._CF_enumeration.addEnumeration(unicode_value=u'sc_bit', tag=u'sc_bit')
SystemCDataTypeEnum._InitializeFacetMap(SystemCDataTypeEnum._CF_enumeration)
Namespace.addCategoryObject('typeBinding', u'SystemCDataTypeEnum', SystemCDataTypeEnum)
# Atomic simple type: {systemc}DirectionalityEnum
class DirectionalityEnum (pyxb.binding.datatypes.string, pyxb.binding.basis.enumeration_mixin):
    """PyXB-generated enumeration: the direction of a SystemC port."""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'DirectionalityEnum')
    _XSDLocation = pyxb.utils.utility.Location(u'avm.systemc.xsd', 48, 2)
    _Documentation = None
# Generated enumeration facet. The XML value 'in' maps to the Python tag
# 'in_' because ``in`` is a Python keyword.
DirectionalityEnum._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=DirectionalityEnum, enum_prefix=None)
DirectionalityEnum.in_ = DirectionalityEnum._CF_enumeration.addEnumeration(unicode_value=u'in', tag=u'in_')
DirectionalityEnum.out = DirectionalityEnum._CF_enumeration.addEnumeration(unicode_value=u'out', tag=u'out')
DirectionalityEnum.inout = DirectionalityEnum._CF_enumeration.addEnumeration(unicode_value=u'inout', tag=u'inout')
DirectionalityEnum.not_applicable = DirectionalityEnum._CF_enumeration.addEnumeration(unicode_value=u'not_applicable', tag=u'not_applicable')
DirectionalityEnum._InitializeFacetMap(DirectionalityEnum._CF_enumeration)
Namespace.addCategoryObject('typeBinding', u'DirectionalityEnum', DirectionalityEnum)
# Atomic simple type: {systemc}FunctionEnum
class FunctionEnum (pyxb.binding.datatypes.string, pyxb.binding.basis.enumeration_mixin):
    """PyXB-generated enumeration: the functional role of a SystemC port."""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'FunctionEnum')
    _XSDLocation = pyxb.utils.utility.Location(u'avm.systemc.xsd', 56, 2)
    _Documentation = None
# Generated enumeration facet: normal data port, clock, or reset
# (asynchronous / synchronous).
FunctionEnum._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=FunctionEnum, enum_prefix=None)
FunctionEnum.normal = FunctionEnum._CF_enumeration.addEnumeration(unicode_value=u'normal', tag=u'normal')
FunctionEnum.clock = FunctionEnum._CF_enumeration.addEnumeration(unicode_value=u'clock', tag=u'clock')
FunctionEnum.reset_async = FunctionEnum._CF_enumeration.addEnumeration(unicode_value=u'reset_async', tag=u'reset_async')
FunctionEnum.reset_sync = FunctionEnum._CF_enumeration.addEnumeration(unicode_value=u'reset_sync', tag=u'reset_sync')
FunctionEnum._InitializeFacetMap(FunctionEnum._CF_enumeration)
Namespace.addCategoryObject('typeBinding', u'FunctionEnum', FunctionEnum)
# Complex type {systemc}SystemCModel with content type ELEMENT_ONLY
class SystemCModel_ (_ImportedBinding__avm.DomainModel_):
    """PyXB binding for {systemc}SystemCModel: a SystemC module description
    holding SystemCPort and Parameter children plus a ModuleName attribute."""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'SystemCModel')
    _XSDLocation = pyxb.utils.utility.Location(u'avm.systemc.xsd', 7, 2)
    # Inherit element/attribute maps from the base type, then extend below.
    _ElementMap = _ImportedBinding__avm.DomainModel_._ElementMap.copy()
    _AttributeMap = _ImportedBinding__avm.DomainModel_._AttributeMap.copy()
    # Base type is _ImportedBinding__avm.DomainModel_
    # Element SystemCPort uses Python identifier SystemCPort
    __SystemCPort = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'SystemCPort'), 'SystemCPort', '__systemc_SystemCModel__SystemCPort', True, pyxb.utils.utility.Location(u'avm.systemc.xsd', 11, 10), )
    SystemCPort = property(__SystemCPort.value, __SystemCPort.set, None, None)
    # Element Parameter uses Python identifier Parameter
    __Parameter = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'Parameter'), 'Parameter', '__systemc_SystemCModel__Parameter', True, pyxb.utils.utility.Location(u'avm.systemc.xsd', 12, 10), )
    Parameter = property(__Parameter.value, __Parameter.set, None, None)
    # Attribute UsesResource inherited from {avm}DomainModel
    # Attribute Author inherited from {avm}DomainModel
    # Attribute Notes inherited from {avm}DomainModel
    # Attribute XPosition inherited from {avm}DomainModel
    # Attribute YPosition inherited from {avm}DomainModel
    # Attribute Name inherited from {avm}DomainModel
    # Attribute ID inherited from {avm}DomainModel
    # Attribute ModuleName uses Python identifier ModuleName (required)
    __ModuleName = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'ModuleName'), 'ModuleName', '__systemc_SystemCModel__ModuleName', pyxb.binding.datatypes.string, required=True)
    __ModuleName._DeclarationLocation = pyxb.utils.utility.Location(u'avm.systemc.xsd', 14, 8)
    __ModuleName._UseLocation = pyxb.utils.utility.Location(u'avm.systemc.xsd', 14, 8)
    ModuleName = property(__ModuleName.value, __ModuleName.set, None, None)
    _ElementMap.update({
        __SystemCPort.name() : __SystemCPort,
        __Parameter.name() : __Parameter
    })
    _AttributeMap.update({
        __ModuleName.name() : __ModuleName
    })
Namespace.addCategoryObject('typeBinding', u'SystemCModel', SystemCModel_)
# Complex type {systemc}Parameter with content type ELEMENT_ONLY
class Parameter_ (_ImportedBinding__avm.DomainModelParameter_):
    """PyXB binding for {systemc}Parameter: a named, positional module
    parameter with a single Value child element."""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'Parameter')
    _XSDLocation = pyxb.utils.utility.Location(u'avm.systemc.xsd', 18, 2)
    # Inherit element/attribute maps from the base type, then extend below.
    _ElementMap = _ImportedBinding__avm.DomainModelParameter_._ElementMap.copy()
    _AttributeMap = _ImportedBinding__avm.DomainModelParameter_._AttributeMap.copy()
    # Base type is _ImportedBinding__avm.DomainModelParameter_
    # Element Value uses Python identifier Value
    __Value = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'Value'), 'Value', '__systemc_Parameter__Value', False, pyxb.utils.utility.Location(u'avm.systemc.xsd', 22, 10), )
    Value = property(__Value.value, __Value.set, None, None)
    # Attribute Notes inherited from {avm}DomainModelParameter
    # Attribute XPosition inherited from {avm}DomainModelParameter
    # Attribute YPosition inherited from {avm}DomainModelParameter
    # Attribute ParamName uses Python identifier ParamName
    __ParamName = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'ParamName'), 'ParamName', '__systemc_Parameter__ParamName', pyxb.binding.datatypes.string)
    __ParamName._DeclarationLocation = pyxb.utils.utility.Location(u'avm.systemc.xsd', 24, 8)
    __ParamName._UseLocation = pyxb.utils.utility.Location(u'avm.systemc.xsd', 24, 8)
    ParamName = property(__ParamName.value, __ParamName.set, None, None)
    # Attribute ParamPosition uses Python identifier ParamPosition
    __ParamPosition = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'ParamPosition'), 'ParamPosition', '__systemc_Parameter__ParamPosition', pyxb.binding.datatypes.int)
    __ParamPosition._DeclarationLocation = pyxb.utils.utility.Location(u'avm.systemc.xsd', 25, 8)
    __ParamPosition._UseLocation = pyxb.utils.utility.Location(u'avm.systemc.xsd', 25, 8)
    ParamPosition = property(__ParamPosition.value, __ParamPosition.set, None, None)
    _ElementMap.update({
        __Value.name() : __Value
    })
    _AttributeMap.update({
        __ParamName.name() : __ParamName,
        __ParamPosition.name() : __ParamPosition
    })
Namespace.addCategoryObject('typeBinding', u'Parameter', Parameter_)
# Complex type {systemc}SystemCPort with content type EMPTY
class SystemCPort_ (_ImportedBinding__avm.DomainModelPort_):
    """PyXB binding for {systemc}SystemCPort: an attribute-only port element
    describing data type, dimension, direction and function of a port."""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_EMPTY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'SystemCPort')
    _XSDLocation = pyxb.utils.utility.Location(u'avm.systemc.xsd', 29, 2)
    # Inherit element/attribute maps from the base type, then extend below.
    _ElementMap = _ImportedBinding__avm.DomainModelPort_._ElementMap.copy()
    _AttributeMap = _ImportedBinding__avm.DomainModelPort_._AttributeMap.copy()
    # Base type is _ImportedBinding__avm.DomainModelPort_
    # Attribute Notes inherited from {avm}Port
    # Attribute XPosition inherited from {avm}Port
    # Attribute Definition inherited from {avm}Port
    # Attribute YPosition inherited from {avm}Port
    # Attribute Name inherited from {avm}Port
    # Attribute ID inherited from {avm}PortMapTarget
    # Attribute PortMap inherited from {avm}PortMapTarget
    # Attribute DataType uses Python identifier DataType
    __DataType = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'DataType'), 'DataType', '__systemc_SystemCPort__DataType', SystemCDataTypeEnum)
    __DataType._DeclarationLocation = pyxb.utils.utility.Location(u'avm.systemc.xsd', 32, 8)
    __DataType._UseLocation = pyxb.utils.utility.Location(u'avm.systemc.xsd', 32, 8)
    DataType = property(__DataType.value, __DataType.set, None, None)
    # Attribute DataTypeDimension uses Python identifier DataTypeDimension
    __DataTypeDimension = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'DataTypeDimension'), 'DataTypeDimension', '__systemc_SystemCPort__DataTypeDimension', pyxb.binding.datatypes.int)
    __DataTypeDimension._DeclarationLocation = pyxb.utils.utility.Location(u'avm.systemc.xsd', 33, 8)
    __DataTypeDimension._UseLocation = pyxb.utils.utility.Location(u'avm.systemc.xsd', 33, 8)
    DataTypeDimension = property(__DataTypeDimension.value, __DataTypeDimension.set, None, None)
    # Attribute Directionality uses Python identifier Directionality
    __Directionality = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'Directionality'), 'Directionality', '__systemc_SystemCPort__Directionality', DirectionalityEnum)
    __Directionality._DeclarationLocation = pyxb.utils.utility.Location(u'avm.systemc.xsd', 34, 8)
    __Directionality._UseLocation = pyxb.utils.utility.Location(u'avm.systemc.xsd', 34, 8)
    Directionality = property(__Directionality.value, __Directionality.set, None, None)
    # Attribute Function uses Python identifier Function
    __Function = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'Function'), 'Function', '__systemc_SystemCPort__Function', FunctionEnum)
    __Function._DeclarationLocation = pyxb.utils.utility.Location(u'avm.systemc.xsd', 35, 8)
    __Function._UseLocation = pyxb.utils.utility.Location(u'avm.systemc.xsd', 35, 8)
    Function = property(__Function.value, __Function.set, None, None)
    # Content type is EMPTY: no child elements.
    _ElementMap.update({
    })
    _AttributeMap.update({
        __DataType.name() : __DataType,
        __DataTypeDimension.name() : __DataTypeDimension,
        __Directionality.name() : __Directionality,
        __Function.name() : __Function
    })
Namespace.addCategoryObject('typeBinding', u'SystemCPort', SystemCPort_)
# Top-level element declarations: bind the global XSD elements to their
# binding classes and register each with the namespace.
SystemCModel = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, u'SystemCModel'), SystemCModel_, location=pyxb.utils.utility.Location(u'avm.systemc.xsd', 4, 2))
Namespace.addCategoryObject('elementBinding', SystemCModel.name().localName(), SystemCModel)
Parameter = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, u'Parameter'), Parameter_, location=pyxb.utils.utility.Location(u'avm.systemc.xsd', 5, 2))
Namespace.addCategoryObject('elementBinding', Parameter.name().localName(), Parameter)
SystemCPort = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, u'SystemCPort'), SystemCPort_, location=pyxb.utils.utility.Location(u'avm.systemc.xsd', 6, 2))
Namespace.addCategoryObject('elementBinding', SystemCPort.name().localName(), SystemCPort)
# Register SystemCModel's local child elements in its scope.
SystemCModel_._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'SystemCPort'), SystemCPort_, scope=SystemCModel_, location=pyxb.utils.utility.Location(u'avm.systemc.xsd', 11, 10)))
SystemCModel_._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'Parameter'), Parameter_, scope=SystemCModel_, location=pyxb.utils.utility.Location(u'avm.systemc.xsd', 12, 10)))
def _BuildAutomaton ():
    # Build the finite-automaton content model for SystemCModel_:
    # any number of SystemCPort elements followed by any number of
    # Parameter elements. (Note: ``0L`` long literals make this Python 2.)
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton
    del _BuildAutomaton
    import pyxb.utils.fac as fac
    counters = set()
    cc_0 = fac.CounterCondition(min=0L, max=None, metadata=pyxb.utils.utility.Location(u'avm.systemc.xsd', 11, 10))
    counters.add(cc_0)
    cc_1 = fac.CounterCondition(min=0L, max=None, metadata=pyxb.utils.utility.Location(u'avm.systemc.xsd', 12, 10))
    counters.add(cc_1)
    states = []
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_0, False))
    symbol = pyxb.binding.content.ElementUse(SystemCModel_._UseForTag(pyxb.namespace.ExpandedName(None, u'SystemCPort')), pyxb.utils.utility.Location(u'avm.systemc.xsd', 11, 10))
    st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_0)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_1, False))
    symbol = pyxb.binding.content.ElementUse(SystemCModel_._UseForTag(pyxb.namespace.ExpandedName(None, u'Parameter')), pyxb.utils.utility.Location(u'avm.systemc.xsd', 12, 10))
    st_1 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_1)
    # SystemCPort may repeat (cc_0) or hand over to Parameter.
    transitions = []
    transitions.append(fac.Transition(st_0, [
        fac.UpdateInstruction(cc_0, True) ]))
    transitions.append(fac.Transition(st_1, [
        fac.UpdateInstruction(cc_0, False) ]))
    st_0._set_transitionSet(transitions)
    # Parameter may only repeat (cc_1).
    transitions = []
    transitions.append(fac.Transition(st_1, [
        fac.UpdateInstruction(cc_1, True) ]))
    st_1._set_transitionSet(transitions)
    return fac.Automaton(states, counters, True, containing_state=None)
SystemCModel_._Automaton = _BuildAutomaton()
# Register Parameter's local Value child element in its scope.
Parameter_._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'Value'), _ImportedBinding__avm.Value_, scope=Parameter_, location=pyxb.utils.utility.Location(u'avm.systemc.xsd', 22, 10)))
def _BuildAutomaton_ ():
    # Build the content-model automaton for Parameter_: at most one
    # optional Value element. (``0L`` long literal: Python 2 code.)
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_
    del _BuildAutomaton_
    import pyxb.utils.fac as fac
    counters = set()
    cc_0 = fac.CounterCondition(min=0L, max=1, metadata=pyxb.utils.utility.Location(u'avm.systemc.xsd', 22, 10))
    counters.add(cc_0)
    states = []
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_0, False))
    symbol = pyxb.binding.content.ElementUse(Parameter_._UseForTag(pyxb.namespace.ExpandedName(None, u'Value')), pyxb.utils.utility.Location(u'avm.systemc.xsd', 22, 10))
    st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_0)
    transitions = []
    transitions.append(fac.Transition(st_0, [
        fac.UpdateInstruction(cc_0, True) ]))
    st_0._set_transitionSet(transitions)
    return fac.Automaton(states, counters, True, containing_state=None)
Parameter_._Automaton = _BuildAutomaton_()
| [
"kevin.m.smyth@gmail.com"
] | kevin.m.smyth@gmail.com |
28bc2cd20ad54c1510a2240542dcab62c23db357 | 8997a0bf1e3b6efe5dd9d5f307e1459f15501f5a | /Decorators__examples/decorator__args_as_funcs.py | 93655d075ac3c19fdba95b7b26163d49ff67f6ef | [
"CC-BY-4.0"
] | permissive | stepik/SimplePyScripts | 01092eb1b2c1c33756427abb2debbd0c0abf533f | 3259d88cb58b650549080d6f63b15910ae7e4779 | refs/heads/master | 2023-05-15T17:35:55.743164 | 2021-06-11T22:59:07 | 2021-06-11T22:59:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,041 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = 'ipetrash'
class TextBuilder:
    """Accumulate text fragments, framing each one with horizontal rules.

    ``append`` records a fragment surrounded by ``+----------+`` lines plus a
    trailing blank line; ``build`` joins everything with newlines.
    """

    def __init__(self):
        # Lines collected so far; build() joins them with '\n'.
        self.result = []

    # Decorator factory: invoke each hook (called with self) BEFORE the
    # wrapped method runs.
    def _call_before(*hooks):
        def decorate(method):
            def inner(self, *args, **kwargs):
                for hook in hooks:
                    hook(self)
                return method(self, *args, **kwargs)
            return inner
        return decorate

    # Decorator factory: invoke each hook (called with self) AFTER the
    # wrapped method runs, preserving the method's return value.
    def _call_after(*hooks):
        def decorate(method):
            def inner(self, *args, **kwargs):
                outcome = method(self, *args, **kwargs)
                for hook in hooks:
                    hook(self)
                return outcome
            return inner
        return decorate

    @_call_before(lambda self: self.result.append('+' + '-' * 10 + '+'))
    @_call_after(lambda self: self.result.append('+' + '-' * 10 + '+'),
                 lambda self: self.result.append('\n'))
    def append(self, text: str) -> 'TextBuilder':
        """Record *text* (framed by the hooks above); return self for chaining."""
        self.result.append(text)
        return self

    def build(self):
        """Return all collected lines joined with newlines."""
        return '\n'.join(self.result)
# Demo: chain several appends and print the framed result.
builder = TextBuilder()
builder.append("Foo").append("Bar").append("Hello World!")
print(builder.build())
# Expected output:
# +----------+
# Foo
# +----------+
#
#
# +----------+
# Bar
# +----------+
#
#
# +----------+
# Hello World!
# +----------+
| [
"ilya.petrash@inbox.ru"
] | ilya.petrash@inbox.ru |
a6c80e22b97fbe9dfcdc635ef03d12eea5b06884 | 9f951479d5eda96e7fecbbbd0b3b7e4f5e83360d | /webtest/全栈课程代码学员版/Level3课程代码/Level3Code/lesson8/LessonCode/OriginalVersion/tenmins/urls.py | c0720b4831408ae6c32907c61dc3b51f8fa8d8b8 | [] | no_license | lianbo2006/Project | 44c5b6fcab4fe31b80bfff467b3e0e31fd2da8ba | 5d13923817a1d4cffe7d4abbb5873277ce28bb87 | refs/heads/master | 2021-01-11T18:24:47.597849 | 2017-04-25T03:44:47 | 2017-04-25T03:44:47 | 79,539,300 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,804 | py | """tenmins URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
from website.views import listing, index_login, index_register, detail, detail_vote
from django.conf import settings
from django.conf.urls.static import static
from django.contrib.auth.views import logout
from website.mobile_views import video_list
from website.api import video
# URL routing table: each entry maps a request-path regex to a view.
urlpatterns = [
    url(r'^admin/', admin.site.urls),
    url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
    url(r'^list/$', listing, name='list'),
    # Same view with an optional category segment, e.g. /list/Music
    url(r'^list/(?P<cate>[A-Za-z]+)$', listing, name='list'),
    url(r'^detail/(?P<id>\d+)$', detail, name='detail'),
    url(r'^detail/vote/(?P<id>\d+)$', detail_vote, name='vote'),
    url(r'^login/$', index_login, name='login'),
    url(r'^register/$', index_register, name='register'),
    # Built-in logout view; redirect to the register page afterwards.
    url(r'^logout/$', logout, {'next_page': '/register'}, name='logout'),
    url(r'^api/video/', video),
    url(r'^m/videolist/', video_list),
]
# Serve static and media files from Django itself only in development;
# a real web server should handle these in production.
if settings.DEBUG:
    urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
    urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| [
"513748889@qq.com"
] | 513748889@qq.com |
7f3d9b3fc7a7807540e8318fc33426af29965e20 | cb1fb3bf87b3f7006b564a0f2acd2d68e5d5ffaa | /pyram/tree/treemodule.py | e1ed25bba91eb8e5f5f36a320ce2c54625637cd1 | [
"MIT"
] | permissive | Hoseung/pyRamAn | 2778f8b12ca966e7586ebf077a964aecd1654223 | f9386fa5a9f045f98590039988d3cd50bc488dc2 | refs/heads/master | 2021-06-22T18:35:06.478492 | 2021-06-05T03:26:31 | 2021-06-05T03:26:31 | 227,741,934 | 1 | 1 | MIT | 2020-03-04T12:39:06 | 2019-12-13T02:49:30 | Jupyter Notebook | UTF-8 | Python | false | false | 7,126 | py | # -*- coding: utf-8 -*-
"""
treemodule
Created on Sun Jun 14 06:35:45 2015
@author: hoseung
"""
import numpy as np
def _is_ascii(filename):
return filename.split(".")[-1] == "dat"
class CTree(object):
    """
    Container for a ConsistentTrees merger-tree catalog.

    Compatible with ConsistentTrees 1.01. Data are held in ``self.data``
    as a structured numpy array; loading dispatches on file extension
    (``.dat`` -> ASCII parse, otherwise pickle).
    """
    def __init__(self, filename=None):
        # Optionally load immediately; otherwise call load() later.
        if filename is not None:
            self.load(filename=filename)
    def _add_info(self):
        # Physical box size in Mpc/h. The commented value is the exact
        # simulation box; 200.0 is used here.
        #self.pboxsize = 199.632011
        self.pboxsize = 200.0
    def _load_ascii(self, filename):
        """Parse a ConsistentTrees ASCII dump into ``self.data``."""
        cnt_header = 0
        # Per-column dtypes for the 37 ConsistentTrees columns.
        datatype =[ 'f8','i8','i8','i8','i8','i8','i8','i8','i8','f8'\
            ,'f8','f8','i8','f8','f8','f8','i8','f8','f8','f8'\
            ,'f8','f8','f8','f8','f8','f8','f8','i8','i8','i8'\
            ,'i8','i8','i8','i8','i8','f8','i8']#,\
        with open(filename, 'rb') as f:
            # Scan past the '#' comment header (assumed < 180 lines).
            for i in range(180):
                line = f.readline()
                line = line.decode('utf-8')
                if line[0] != '#':
                    self.ntrees = int(line) # The first line after the header is the number of trees.
                    cnt_header = f.tell()
                    break
            # Rewind to just after the tree count and parse the table.
            f.seek(cnt_header)
            self.data = np.genfromtxt(f,dtype=datatype)
        # Name the columns following the ConsistentTrees column order.
        self.data.dtype.names=(\
            'aexp','id','desc_aexp','desc_id','nprog','pid','upid','desc_pid','phantom','sam_mvir'\
            ,'mvir','rvir','rs','vrms','mmp','aexp_last_MM','vmax','x','y','z'\
            ,'vx','vy','vz','jx','jy','jz','spin','b_id','d_id','tree_root_id'\
            ,'Orig_halo_id','nout','next_coprogenitor_d_id','last_progenitor_d_id'\
            ,'last_mainleaf_depthfirst_id', 'tidal_force', 'tidal_id')
        print("Loading Consistent Tree data from ASCII is done")
    def _load_pickle(self, filename):
        """Load a previously pickled tree array into ``self.data``."""
        import pickle
        try:
            with open(filename, "rb") as f:
                self.data = pickle.load(f)
            # NOTE(review): message says "ASCII" but this is the pickle path.
            print("Loading Consistent Tree data from ASCII is done")
        except IOError:
            print("Error, No such file.", filename)
    def load(self, filename=None):
        """Load tree data; pop a file-open dialog when no filename given."""
        if filename is None:
            # from tkinter import tk
            # tk().withdraw() # we don't want a full GUI, so keep the root window from appearing
            from tkinter.filedialog import askopenfilename
            filename = askopenfilename() # show an "Open" dialog box and return the path to the selected file
        # Dispatch on extension: .dat -> ASCII parser, else pickle.
        if _is_ascii(filename):
            self._load_ascii(filename)
        else:
            self._load_pickle(filename)
        # The tree output file is written in 'bytes' string rather than string - this is the modern concept of text.
        # So the b' is at the very begining of lines. Python3 now distinguishes between string and byte string.
        #
        # Python3 strings are unicode by default.
        # You need to specify the encoding of the text file.
        # Of course you can there are built-in methods to detect the encoding of a text file.
        # The output of Consistent tree is 'utf-8'
        # Additional complexity is that
        # numpy genfromtxt always want it to be byte strings.
        # So you need 'rb' rather than 'r'
        self._tree_ids()
    def _tree_ids(self):
        # Roots are the halos present at the latest snapshot; cache both
        # their tree ids and their original halo-finder ids.
        i = np.where(self.data['nout'] == max(self.data['nout']))[0]
        self.trees_idx = self.data['id'][i]
        self.trees_id = self.data['Orig_halo_id'][i]
    # NOTE(review): the following bare string is dead code kept as
    # documentation; the working implementation lives in treeutils.
    """ get_main_prg in treeutils is a working version.
    Use that instead.
    def get_main_prg(trees, haloid=None, haloind=None, unique_id=True):
    def get_main_prg(self, ids=None, original=True):
        if ids is None:
            if original:
                ids = self.trees_id
            else:
                ids = self.trees_idx
        if type(ids) is not list: ids = [ ids ]
        for thisid in ids:
            tree = self.data[np.where(self.data['tree_root_id'] == thisid)]
            prgs=[]
            next_h = tree[0]['id']
            while next_h != -1:
                print(next_h)
                i_hal = np.where(tree['id'] == next_h)
                halo = tree[i_hal]
                next_h = halo['last_progenitor_d_id']
                prgs.append(next_h)
    """
    # Now, what are the main progenitors?
    def show_data(self, data, ind):
        '''
        Print field names and values of one record of a structured numpy
        array in two-column format. Useful when there are too many fields
        to match names with values when printed in a single row.
        Example)
        >>>
        '''
        for ii,jj in zip(self.data.dtype.names,data[ind]):
            print("%s : %f" % (ii,jj))
def load_tree(wdir, is_gal=False, no_dump=False, load_ascii=False):
    """Load a ConsistentTrees merger tree for the given working directory.

    Tries the pre-built extended-tree pickle first (unless ``load_ascii``
    is True); on any failure it falls back to parsing the ASCII tree file,
    fixing the snapshot numbering, augmenting the tree data, and — unless
    ``no_dump`` is set — caching the result as a pickle for next time.

    Parameters
    ----------
    wdir : str
        Simulation working directory (trailing slash expected).
    is_gal : bool
        Load the galaxy tree instead of the halo tree.
    no_dump : bool
        Skip writing the extended-tree pickle after an ASCII load.
    load_ascii : bool
        Force a fresh parse of the ASCII tree file.

    Returns
    -------
    CTree
        The loaded (and possibly augmented) tree.
    """
    import pickle
    import tree.ctutils as ctu
    from general import defaults

    df = defaults.Default()
    tree_path = df.tree_path(is_gal=is_gal)
    pickle_fn = wdir + df.ext_tree_pickle(is_gal=is_gal)

    alltrees = None
    if not load_ascii:
        # Previously a bare ``except:`` around an unclosed open(); narrow
        # the handler and close the file deterministically.
        try:
            with open(pickle_fn, "rb") as f:
                alltrees = pickle.load(f)
            print("Loaded an extended tree")
        except Exception:
            alltrees = None

    if alltrees is None:
        # ASCII fallback (this block was duplicated verbatim in both the
        # load_ascii branch and the pickle-failure branch).
        alltrees = CTree()
        alltrees.load(filename=wdir + tree_path + 'tree_0_0_0.dat')
        # Fix nout: ConsistentTrees numbers snapshots from 0; shift so the
        # final snapshot is 187.
        nout_max = alltrees.data['nout'].max()
        alltrees.data['nout'] += 187 - nout_max
        print("------ NOUT fixed")
        alltrees.data = ctu.augment_tree(alltrees.data, wdir, is_gal=is_gal)
        print("------ tree data extended")
        if not no_dump:
            with open(pickle_fn, "wb") as f:
                pickle.dump(alltrees, f)

    return alltrees
def rs2codeunit(rst):
    """
    Check and clean up.
    nout in Consistent Tree by default starts from 0 regardless of
    the original simulation snapshot number.
    This function assumes the nouts are already fixed.
    In practice, it should be fixed when reading from ASCII and pickling it.
    """
    # NOTE(review): this function looks unfinished/broken as written:
    #   * `base` is never defined in this scope (NameError at runtime),
    #   * `pickle` is not imported here,
    #   * the loop variable `nout` is never used inside the body, so the
    #     same file is re-read for every snapshot.
    # Kept as-is pending clarification of the intended behavior.
    import numpy as np
    nouts = np.unique(rst['nout'])
    for nout in nouts:
        dir_halo = base + "rhalo/rockstar_halos/"
        f_tree = base + "rhalo/tree.pickle"
        with open(f_tree, "rb") as ft:
            rstree = pickle.load(ft)
        # Convert Rockstar positions to code units (box-size rescale).
        rstree['x'] *= 200/199.632011
        rstree['y'] *= 200/199.632011
        rstree['z'] *= 200/199.632011
"hopung@gmail.com"
] | hopung@gmail.com |
36f0354909d9d144b680d363b46210367d9d65f2 | 9e549ee54faa8b037f90eac8ecb36f853e460e5e | /quiz.py | 487aec0b5842d9ac80fc5c7efaacddddf7c41cd2 | [
"MIT"
] | permissive | aitoehigie/britecore_flask | e8df68e71dd0eac980a7de8c0f20b5a5a16979fe | eef1873dbe6b2cc21f770bc6dec783007ae4493b | refs/heads/master | 2022-12-09T22:07:45.930238 | 2019-05-15T04:10:37 | 2019-05-15T04:10:37 | 177,354,667 | 0 | 0 | MIT | 2022-12-08T04:54:09 | 2019-03-24T00:38:20 | Python | UTF-8 | Python | false | false | 483 | py | from cryptography.fernet import Fernet
key = 'TluxwB3fV_GWuLkR1_BzGs1Zk90TYAuhNMZP_0q4WyM='
# Oh no! The code is going over the edge! What are you going to do?
message = b'gAAAAABcmZNjB1CqmvCWcO8ByvVTKas3rKh5Py67Al5tOma1TtHeQBQJi55SmpN86uNXNFX7_clMNLWCB5HzqkcOkkz3V7KzxeFo7q4ZNIGsa4tb82l0sVTcS2zCW7-Rk7kcnkLl_Jsw2F98JMpnnLa4ZrlZqPoteBSgjBr7vmk4Z5GXIpAqPU5TBk2K51m2RE47HogruLsO'
def main():
    """Decrypt the module-level ``message`` with ``key`` and print it."""
    cipher = Fernet(key)
    print(cipher.decrypt(message))
if __name__ == "__main__":
main()
| [
"aitoehigie@gmail.com"
] | aitoehigie@gmail.com |
0ef156a76ca296a030eba1c782df633bae7c1ba4 | 7bd3c35070d40724ab21e83b4d3f5ba39e455818 | /E2 - Second/eshop/admin.py | 10f96f6813b97567fbb8ce70c6afe590ef33d481 | [] | no_license | shatishdesai202/Django-Project-Practice | 9433004de6fd72dd0cd56cb4ff7770ecded6a054 | f66ee507fcf959d148627c1c2f5d587b10adc996 | refs/heads/main | 2023-03-12T17:14:15.823285 | 2021-03-07T05:32:07 | 2021-03-07T05:32:07 | 345,263,312 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 653 | py | from django.contrib import admin
from .models import Category, Product, Placeorder, Comment
# Register your models here.
@admin.register(Category)
class AdminCategory(admin.ModelAdmin):
    # Columns shown on the Category changelist page.
    list_display = ['id', 'c_name']
@admin.register(Product)
class AdminProduct(admin.ModelAdmin):
    # Columns shown on the Product changelist page.
    list_display = ['id','category', 'p_name', 'price', 'desc', 'timestamp']
@admin.register(Placeorder)
class AdminPlaceorder(admin.ModelAdmin):
    # Columns shown on the Placeorder changelist page.
    list_display = ['id','firstname', 'last_name', 'email', 'address', 'city', 'state', 'pin', 'item', 'qty', 'customer', ]
@admin.register(Comment)
class AdminComment(admin.ModelAdmin):
    # Only the comment text is listed on the changelist.
    list_display = ['comment']
"sdondjango@gmail.com"
] | sdondjango@gmail.com |
c520a22448096a9aec2f2b0a15c922d05bc96916 | 77932fb1dde97eafe995b960d62036e2511465d2 | /shoppingsite.py | cab310f0cc96165443d84cfc2f906044fadb1b0b | [] | no_license | lsylk/shopping-site | f2dc859a3f07ca5855246f3573c7e13c1885c3ba | f29020824bec51e71c3ba6197f8420d149a4bd16 | refs/heads/master | 2020-12-24T18:42:20.858411 | 2016-04-22T00:14:44 | 2016-04-22T00:14:44 | 56,802,374 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,814 | py | """Ubermelon shopping application Flask server.
Provides web interface for browsing melons, seeing detail about a melon, and
put melons in a shopping cart.
Authors: Joel Burton, Christian Fernandez, Meggie Mahnken.
"""
from flask import Flask, render_template, redirect, flash, session, request
import jinja2
import melons
app = Flask(__name__)

# Need to use Flask sessioning features
# NOTE(review): hard-coded secret key; load from config/environment before
# deploying anywhere real.
app.secret_key = 'this-should-be-something-unguessable'

# Normally, if you refer to an undefined variable in a Jinja template,
# Jinja silently ignores this. This makes debugging difficult, so we'll
# set an attribute of the Jinja environment that says to make this an
# error.
app.jinja_env.undefined = jinja2.StrictUndefined
@app.route("/")
def index():
"""Return homepage."""
return render_template("homepage.html")
@app.route("/melons")
def list_melons():
    """Return page showing all the melons ubermelon has to offer"""
    # Fetch every melon type from the data layer and hand the list to
    # the template for display.
    melon_list = melons.get_all()
    return render_template("all_melons.html",
                           melon_list=melon_list)
@app.route("/melon/<int:melon_id>")
def show_melon(melon_id):
    """Return page showing the details of a given melon.

    Show all info about a melon. Also, provide a button to buy that melon.
    """
    melon = melons.get_by_id(melon_id)
    # (Removed a leftover Python-2 debug ``print melon`` statement, which
    # was a syntax error under Python 3 and noise on stdout under Python 2.)
    return render_template("melon_details.html",
                           display_melon=melon)
@app.route("/cart")
def shopping_cart():
    """Display content of shopping cart."""
    # Iterate only the melon ids stored under the session "cart" key.
    # The previous code looped over session.values(), which also yields
    # unrelated session entries (e.g. the '_flashes' message list) and
    # shadowed the builtin `id` — melons.get_by_id() would blow up on
    # those values. Ids are stored as strings by add_to_cart, so convert
    # back to int for lookup (matching show_melon's usage).
    melons_in_cart = [melons.get_by_id(int(melon_id))
                      for melon_id in session.get("cart", [])]
    # TODO: tally per-melon-type quantities and an overall order total,
    # and hand both to the template once cart.html is wired to show them.
    return render_template("cart.html")
@app.route("/add_to_cart/<int:id>")
def add_to_cart(id):
    """Add a melon to cart and redirect to shopping cart page.

    When a melon is added to the cart, redirect browser to the shopping cart
    page and display a confirmation message: 'Successfully added to cart'.
    """
    # (Parameter must stay named `id` to match the <int:id> route capture,
    # even though it shadows the builtin.)
    # If "cart" is not yet a key in the session, create it as an empty
    # list; then record this melon's id (stored as a string).
    session.setdefault("cart", []).append(str(id))
    # Flask does not detect in-place mutation of a list stored in the
    # session, so mark the session dirty explicitly or the append may not
    # persist across requests.
    session.modified = True
    # (Removed the dead `orders = ", ".join(session["cart"])` variable,
    # which was computed and never used.)
    flash("Successfully added to cart!")
    return redirect("/cart")
@app.route("/login", methods=["GET"])
def show_login():
    """Show login form (GET half of the /login route)."""
    return render_template("login.html")
@app.route("/login", methods=["POST"])
def process_login():
    """Log user into site.

    Find the user's login credentials located in the 'request.form'
    dictionary, look up the user, and store them in the session.
    """
    # TODO: Need to implement this!
    # Placeholder response until authentication is implemented.
    return "Oops! This needs to be implemented"
@app.route("/checkout")
def checkout():
    """Checkout customer, process payment, and ship melons."""
    # For now, we'll just provide a warning. Completing this is beyond the
    # scope of this exercise.
    flash("Sorry! Checkout will be implemented in a future version.")
    return redirect("/melons")
if __name__ == "__main__":
    # debug=True enables the interactive debugger and reloader —
    # development use only.
    app.run(debug=True)
| [
"info@hackbrightacademy.com"
] | info@hackbrightacademy.com |
4e93574da1da48b3fd89023b214a51f62f586e7a | b1605b9fcc04adb8d8b095104b1e0cb5d7e39fbf | /scienceApp/views.py | 5a2e37086c344767947a9baee88500bf8ae29eef | [] | no_license | ZLS1803/hengda | 4f022c868590d0b2cd2cc644d9cabd7a7aa4a1f5 | 8f3b801353932528c499a0ae2d40df08fdd44214 | refs/heads/main | 2023-07-19T11:37:40.898445 | 2021-09-12T02:41:18 | 2021-09-12T02:41:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 141 | py | from django.shortcuts import render
# Create your views here.
def science(request):
    """Render the science landing page for scienceApp."""
    return render(request, 'scienceApp/science.html')
| [
"email"
] | email |
b05937c4e6edd1ec39ddc284c0ffb6a77f4e3951 | aa52a6a6a9db8ccc35a36d0ec42d06dc8070d1f6 | /migrations/versions/e716104ed757_init.py | 7449cda2864b787936d10bc387c0578b6f83d19f | [
"MIT"
] | permissive | Kel0/aiogram_bot_template | 38a9e64b7010eec61e017442cceecb4adb1020c1 | c5cd2129c99824efe122a951ada27936f3a35b0a | refs/heads/main | 2023-04-12T05:56:58.086964 | 2021-05-19T10:48:03 | 2021-05-19T10:48:03 | 368,808,317 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,111 | py | """init
Revision ID: e716104ed757
Revises:
Create Date: 2021-05-19 15:59:22.801097
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'e716104ed757'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('users',
sa.Column('created_at', sa.TIMESTAMP(), nullable=False),
sa.Column('updated_at', sa.TIMESTAMP(), nullable=False),
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('telegram_id', sa.Integer(), nullable=True),
sa.Column('username', sa.String(length=255), nullable=True),
sa.Column('role', sa.String(length=30), nullable=True),
sa.Column('active', sa.Boolean(), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('telegram_id'),
sa.UniqueConstraint('telegram_id')
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('users')
# ### end Alembic commands ###
| [
"rozovdima123@gmail.com"
] | rozovdima123@gmail.com |
b9e13cfd823bb8d0a72c4ba68afa6e6ccf12dda8 | f82757475ea13965581c2147ff57123b361c5d62 | /gi-stubs/repository/ICalGLib/_Geo.py | b67491959d92d351635e23d3c9e4cdfcea4d5f81 | [] | no_license | ttys3/pygobject-stubs | 9b15d1b473db06f47e5ffba5ad0a31d6d1becb57 | d0e6e93399212aada4386d2ce80344eb9a31db48 | refs/heads/master | 2022-09-23T12:58:44.526554 | 2020-06-06T04:15:00 | 2020-06-06T04:15:00 | 269,693,287 | 8 | 2 | null | 2020-06-05T15:57:54 | 2020-06-05T15:57:54 | null | UTF-8 | Python | false | false | 4,443 | py | # encoding: utf-8
# module gi.repository.ICalGLib
# from /usr/lib64/girepository-1.0/ICalGLib-3.0.typelib
# by generator 1.147
"""
An object which wraps an introspection typelib.
This wrapping creates a python module like representation of the typelib
using gi repository as a foundation. Accessing attributes of the module
will dynamically pull them in and create wrappers for the members.
These members are then cached on this introspection module.
"""
# imports
import gi as __gi
import gi.overrides.GObject as __gi_overrides_GObject
import gobject as __gobject
class _Geo(__gi.Struct):
"""
:Constructors:
::
_Geo()
"""
def __delattr__(self, *args, **kwargs): # real signature unknown
""" Implement delattr(self, name). """
pass
def __dir__(self, *args, **kwargs): # real signature unknown
""" Default dir() implementation. """
pass
def __eq__(self, *args, **kwargs): # real signature unknown
""" Return self==value. """
pass
def __format__(self, *args, **kwargs): # real signature unknown
""" Default object formatter. """
pass
def __getattribute__(self, *args, **kwargs): # real signature unknown
""" Return getattr(self, name). """
pass
def __ge__(self, *args, **kwargs): # real signature unknown
""" Return self>=value. """
pass
def __gt__(self, *args, **kwargs): # real signature unknown
""" Return self>value. """
pass
def __hash__(self, *args, **kwargs): # real signature unknown
""" Return hash(self). """
pass
def __init_subclass__(self, *args, **kwargs): # real signature unknown
"""
This method is called when a class is subclassed.
The default implementation does nothing. It may be
overridden to extend subclasses.
"""
pass
def __init__(self): # real signature unknown; restored from __doc__
pass
def __le__(self, *args, **kwargs): # real signature unknown
""" Return self<=value. """
pass
def __lt__(self, *args, **kwargs): # real signature unknown
""" Return self<value. """
pass
@staticmethod # known case of __new__
def __new__(*args, **kwargs): # real signature unknown
""" Create and return a new object. See help(type) for accurate signature. """
pass
def __ne__(self, *args, **kwargs): # real signature unknown
""" Return self!=value. """
pass
def __reduce_ex__(self, *args, **kwargs): # real signature unknown
""" Helper for pickle. """
pass
def __reduce__(self, *args, **kwargs): # real signature unknown
""" Helper for pickle. """
pass
def __repr__(self, *args, **kwargs): # real signature unknown
""" Return repr(self). """
pass
def __setattr__(self, *args, **kwargs): # real signature unknown
""" Implement setattr(self, name, value). """
pass
def __sizeof__(self, *args, **kwargs): # real signature unknown
""" Size of object in memory, in bytes. """
pass
def __str__(self, *args, **kwargs): # real signature unknown
""" Return str(self). """
pass
def __subclasshook__(self, *args, **kwargs): # real signature unknown
"""
Abstract classes can override this to customize issubclass().
This is invoked early on by abc.ABCMeta.__subclasscheck__().
It should return True, False or NotImplemented. If it returns
NotImplemented, the normal algorithm is used. Otherwise, it
overrides the normal algorithm (and the outcome is cached).
"""
pass
def __weakref__(self, *args, **kwargs): # real signature unknown
pass
parent = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
__class__ = None # (!) real value is "<class 'gi.types.StructMeta'>"
__dict__ = None # (!) real value is "mappingproxy({'__info__': StructInfo(_Geo), '__module__': 'gi.repository.ICalGLib', '__gtype__': <GType void (4)>, '__dict__': <attribute '__dict__' of '_Geo' objects>, '__weakref__': <attribute '__weakref__' of '_Geo' objects>, '__doc__': None, 'parent': <property object at 0x7f1352075ae0>})"
__gtype__ = None # (!) real value is '<GType void (4)>'
__info__ = StructInfo(_Geo)
| [
"ttys3@outlook.com"
] | ttys3@outlook.com |
cd071f5e54a2a2b4f3822a91157091fd742e6b2c | 42674d7355d852e6ec7071830bb87d781ab63ad3 | /bitmovin/services/inputs/__init__.py | 368733a01bf8e226d3145dce78bd979929481b5f | [
"Unlicense"
] | permissive | bitmovin/bitmovin-python | 57b1eb5deb7e38f3079e0ded546ec762753c3132 | d183718d640117dd75141da261901dc2f60433b0 | refs/heads/master | 2023-07-11T02:40:59.277881 | 2020-01-28T14:49:15 | 2020-01-28T14:49:15 | 72,857,798 | 46 | 27 | Unlicense | 2019-12-17T13:59:51 | 2016-11-04T15:01:56 | Python | UTF-8 | Python | false | false | 40 | py | from .input_service import InputService
| [
"dominic.miglar@netunix.at"
] | dominic.miglar@netunix.at |
af898e4d962f53afbb60cce12d3671b58739fb41 | 50ec7dde190bfa4671a1a6db307375efb607b05b | /day04/part2.py | 9c73c10ae2a100e8a9793690047044d10e4324ed | [
"MIT"
] | permissive | mtn/advent15 | 10a07f1bbc27c90405504cfaf0ac82f7731101e4 | b23bcf5761363596336d5361218c52db0b078793 | refs/heads/master | 2021-07-05T12:47:41.302328 | 2019-01-03T03:28:31 | 2019-01-03T03:28:31 | 163,866,552 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 226 | py | #!/usr/bin/env python3
import hashlib
inp = "yzbqklnj"
i = 0
while True:
m = hashlib.md5()
m.update(str.encode("{}{}".format(inp, i)))
if m.hexdigest()[:6] == "000000":
print(i)
break
i += 1
| [
"michaeltnoronha@gmail.com"
] | michaeltnoronha@gmail.com |
a1463bc23152746dc7b8f4ce3c9f84b5aad9a31e | 96dcea595e7c16cec07b3f649afd65f3660a0bad | /tests/components/devolo_home_network/test_sensor.py | 230457f56173898a1cd34f6f92a7ead81b8fc354 | [
"Apache-2.0"
] | permissive | home-assistant/core | 3455eac2e9d925c92d30178643b1aaccf3a6484f | 80caeafcb5b6e2f9da192d0ea6dd1a5b8244b743 | refs/heads/dev | 2023-08-31T15:41:06.299469 | 2023-08-31T14:50:53 | 2023-08-31T14:50:53 | 12,888,993 | 35,501 | 20,617 | Apache-2.0 | 2023-09-14T21:50:15 | 2013-09-17T07:29:48 | Python | UTF-8 | Python | false | false | 3,353 | py | """Tests for the devolo Home Network sensors."""
from datetime import timedelta
from unittest.mock import AsyncMock
from devolo_plc_api.exceptions.device import DeviceUnavailable
from freezegun.api import FrozenDateTimeFactory
import pytest
from syrupy.assertion import SnapshotAssertion
from homeassistant.components.devolo_home_network.const import (
LONG_UPDATE_INTERVAL,
SHORT_UPDATE_INTERVAL,
)
from homeassistant.components.sensor import DOMAIN
from homeassistant.const import STATE_UNAVAILABLE
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from . import configure_integration
from .mock import MockDevice
from tests.common import async_fire_time_changed
@pytest.mark.usefixtures("mock_device")
async def test_sensor_setup(hass: HomeAssistant) -> None:
"""Test default setup of the sensor component."""
entry = configure_integration(hass)
device_name = entry.title.replace(" ", "_").lower()
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
assert hass.states.get(f"{DOMAIN}.{device_name}_connected_wifi_clients") is not None
assert hass.states.get(f"{DOMAIN}.{device_name}_connected_plc_devices") is None
assert hass.states.get(f"{DOMAIN}.{device_name}_neighboring_wifi_networks") is None
await hass.config_entries.async_unload(entry.entry_id)
@pytest.mark.parametrize(
("name", "get_method", "interval"),
[
[
"connected_wifi_clients",
"async_get_wifi_connected_station",
SHORT_UPDATE_INTERVAL,
],
[
"neighboring_wifi_networks",
"async_get_wifi_neighbor_access_points",
LONG_UPDATE_INTERVAL,
],
[
"connected_plc_devices",
"async_get_network_overview",
LONG_UPDATE_INTERVAL,
],
],
)
@pytest.mark.usefixtures("entity_registry_enabled_by_default")
async def test_sensor(
hass: HomeAssistant,
mock_device: MockDevice,
entity_registry: er.EntityRegistry,
freezer: FrozenDateTimeFactory,
snapshot: SnapshotAssertion,
name: str,
get_method: str,
interval: timedelta,
) -> None:
"""Test state change of a sensor device."""
entry = configure_integration(hass)
device_name = entry.title.replace(" ", "_").lower()
state_key = f"{DOMAIN}.{device_name}_{name}"
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
assert hass.states.get(state_key) == snapshot
assert entity_registry.async_get(state_key) == snapshot
# Emulate device failure
setattr(mock_device.device, get_method, AsyncMock(side_effect=DeviceUnavailable))
setattr(mock_device.plcnet, get_method, AsyncMock(side_effect=DeviceUnavailable))
freezer.tick(interval)
async_fire_time_changed(hass)
await hass.async_block_till_done()
state = hass.states.get(state_key)
assert state is not None
assert state.state == STATE_UNAVAILABLE
# Emulate state change
mock_device.reset()
freezer.tick(interval)
async_fire_time_changed(hass)
await hass.async_block_till_done()
state = hass.states.get(state_key)
assert state is not None
assert state.state == "1"
await hass.config_entries.async_unload(entry.entry_id)
| [
"noreply@github.com"
] | home-assistant.noreply@github.com |
e0deffcdb6731bbc6ad4f3559137ab9268a241e2 | 27e890f900bd4bfb2e66f4eab85bc381cf4d5d3f | /tests/unit/modules/storage/hpe3par/test_ss_3par_cpg.py | b94850aa094183ae5e2506e2174e24efe8423f3b | [] | no_license | coll-test/notstdlib.moveitallout | eb33a560070bbded5032385d0aea2f3cf60e690b | 0987f099b783c6cf977db9233e1c3d9efcbcb3c7 | refs/heads/master | 2020-12-19T22:28:33.369557 | 2020-01-23T18:51:26 | 2020-01-23T18:51:26 | 235,865,139 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,684 | py | # Copyright: (c) 2018, Hewlett Packard Enterprise Development LP
# GNU General Public License v3.0+
# (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
import mock
import sys
sys.modules['hpe3par_sdk'] = mock.Mock()
sys.modules['hpe3par_sdk.client'] = mock.Mock()
sys.modules['hpe3parclient'] = mock.Mock()
sys.modules['hpe3parclient.exceptions'] = mock.Mock()
from ansible_collections.notstdlib.moveitallout.plugins.modules import ss_3par_cpg
from ansible_collections.notstdlib.moveitallout.plugins.module_utils.storage.hpe3par import hpe3par
@mock.patch('ansible_collections.notstdlib.moveitallout.plugins.modules.ss_3par_cpg.client')
@mock.patch('ansible_collections.notstdlib.moveitallout.plugins.modules.ss_3par_cpg.AnsibleModule')
@mock.patch('ansible_collections.notstdlib.moveitallout.plugins.modules.ss_3par_cpg.create_cpg')
def test_module_args(mock_create_cpg, mock_module, mock_client):
"""
hpe3par CPG - test module arguments
"""
PARAMS_FOR_PRESENT = {
'storage_system_ip': '192.168.0.1',
'storage_system_username': 'USER',
'storage_system_password': 'PASS',
'cpg_name': 'test_cpg',
'domain': 'test_domain',
'growth_increment': 32768,
'growth_increment_unit': 'MiB',
'growth_limit': 32768,
'growth_limit_unit': 'MiB',
'growth_warning': 32768,
'growth_warning_unit': 'MiB',
'raid_type': 'R6',
'set_size': 8,
'high_availability': 'MAG',
'disk_type': 'FC',
'state': 'present',
'secure': False
}
mock_module.params = PARAMS_FOR_PRESENT
mock_module.return_value = mock_module
mock_client.HPE3ParClient.login.return_value = True
mock_create_cpg.return_value = (True, True, "Created CPG successfully.")
ss_3par_cpg.main()
mock_module.assert_called_with(
argument_spec=hpe3par.cpg_argument_spec(),
required_together=[['raid_type', 'set_size']])
@mock.patch('ansible_collections.notstdlib.moveitallout.plugins.modules.ss_3par_cpg.client')
@mock.patch('ansible_collections.notstdlib.moveitallout.plugins.modules.ss_3par_cpg.AnsibleModule')
@mock.patch('ansible_collections.notstdlib.moveitallout.plugins.modules.ss_3par_cpg.create_cpg')
def test_main_exit_functionality_present_success_without_issue_attr_dict(mock_create_cpg, mock_module, mock_client):
"""
hpe3par flash cache - success check
"""
PARAMS_FOR_PRESENT = {
'storage_system_ip': '192.168.0.1',
'storage_system_name': '3PAR',
'storage_system_username': 'USER',
'storage_system_password': 'PASS',
'cpg_name': 'test_cpg',
'domain': 'test_domain',
'growth_increment': 32768,
'growth_increment_unit': 'MiB',
'growth_limit': 32768,
'growth_limit_unit': 'MiB',
'growth_warning': 32768,
'growth_warning_unit': 'MiB',
'raid_type': 'R6',
'set_size': 8,
'high_availability': 'MAG',
'disk_type': 'FC',
'state': 'present',
'secure': False
}
# This creates a instance of the AnsibleModule mock.
mock_module.params = PARAMS_FOR_PRESENT
mock_module.return_value = mock_module
instance = mock_module.return_value
mock_client.HPE3ParClient.login.return_value = True
mock_create_cpg.return_value = (
True, True, "Created CPG successfully.")
ss_3par_cpg.main()
# AnsibleModule.exit_json should be called
instance.exit_json.assert_called_with(
changed=True, msg="Created CPG successfully.")
# AnsibleModule.fail_json should not be called
assert instance.fail_json.call_count == 0
@mock.patch('ansible_collections.notstdlib.moveitallout.plugins.modules.ss_3par_cpg.client')
@mock.patch('ansible_collections.notstdlib.moveitallout.plugins.modules.ss_3par_cpg.AnsibleModule')
@mock.patch('ansible_collections.notstdlib.moveitallout.plugins.modules.ss_3par_cpg.delete_cpg')
def test_main_exit_functionality_absent_success_without_issue_attr_dict(mock_delete_cpg, mock_module, mock_client):
"""
hpe3par flash cache - success check
"""
PARAMS_FOR_DELETE = {
'storage_system_ip': '192.168.0.1',
'storage_system_name': '3PAR',
'storage_system_username': 'USER',
'storage_system_password': 'PASS',
'cpg_name': 'test_cpg',
'domain': None,
'growth_increment': None,
'growth_increment_unit': None,
'growth_limit': None,
'growth_limit_unit': None,
'growth_warning': None,
'growth_warning_unit': None,
'raid_type': None,
'set_size': None,
'high_availability': None,
'disk_type': None,
'state': 'absent',
'secure': False
}
# This creates a instance of the AnsibleModule mock.
mock_module.params = PARAMS_FOR_DELETE
mock_module.return_value = mock_module
instance = mock_module.return_value
mock_delete_cpg.return_value = (
True, True, "Deleted CPG test_cpg successfully.")
mock_client.HPE3ParClient.login.return_value = True
ss_3par_cpg.main()
# AnsibleModule.exit_json should be called
instance.exit_json.assert_called_with(
changed=True, msg="Deleted CPG test_cpg successfully.")
# AnsibleModule.fail_json should not be called
assert instance.fail_json.call_count == 0
def test_convert_to_binary_multiple():
assert hpe3par.convert_to_binary_multiple(None) == -1
assert hpe3par.convert_to_binary_multiple('-1.0 MiB') == -1
assert hpe3par.convert_to_binary_multiple('-1.0GiB') == -1
assert hpe3par.convert_to_binary_multiple('1.0 MiB') == 1
assert hpe3par.convert_to_binary_multiple('1.5GiB') == 1.5 * 1024
assert hpe3par.convert_to_binary_multiple('1.5 TiB') == 1.5 * 1024 * 1024
assert hpe3par.convert_to_binary_multiple(' 1.5 TiB ') == 1.5 * 1024 * 1024
@mock.patch('ansible_collections.notstdlib.moveitallout.plugins.modules.ss_3par_cpg.client')
def test_validate_set_size(mock_client):
mock_client.HPE3ParClient.RAID_MAP = {'R0': {'raid_value': 1, 'set_sizes': [1]},
'R1': {'raid_value': 2, 'set_sizes': [2, 3, 4]},
'R5': {'raid_value': 3, 'set_sizes': [3, 4, 5, 6, 7, 8, 9]},
'R6': {'raid_value': 4, 'set_sizes': [6, 8, 10, 12, 16]}
}
raid_type = 'R0'
set_size = 1
assert ss_3par_cpg.validate_set_size(raid_type, set_size)
set_size = 2
assert not ss_3par_cpg.validate_set_size(raid_type, set_size)
raid_type = None
assert not ss_3par_cpg.validate_set_size(raid_type, set_size)
@mock.patch('ansible_collections.notstdlib.moveitallout.plugins.modules.ss_3par_cpg.client')
def test_cpg_ldlayout_map(mock_client):
mock_client.HPE3ParClient.PORT = 1
mock_client.HPE3ParClient.RAID_MAP = {'R0': {'raid_value': 1, 'set_sizes': [1]},
'R1': {'raid_value': 2, 'set_sizes': [2, 3, 4]},
'R5': {'raid_value': 3, 'set_sizes': [3, 4, 5, 6, 7, 8, 9]},
'R6': {'raid_value': 4, 'set_sizes': [6, 8, 10, 12, 16]}
}
ldlayout_dict = {'RAIDType': 'R6', 'HA': 'PORT'}
assert ss_3par_cpg.cpg_ldlayout_map(ldlayout_dict) == {
'RAIDType': 4, 'HA': 1}
@mock.patch('ansible_collections.notstdlib.moveitallout.plugins.modules.ss_3par_cpg.client')
def test_create_cpg(mock_client):
ss_3par_cpg.validate_set_size = mock.Mock(return_value=True)
ss_3par_cpg.cpg_ldlayout_map = mock.Mock(
return_value={'RAIDType': 4, 'HA': 1})
mock_client.HPE3ParClient.login.return_value = True
mock_client.HPE3ParClient.cpgExists.return_value = False
mock_client.HPE3ParClient.FC = 1
mock_client.HPE3ParClient.createCPG.return_value = True
assert ss_3par_cpg.create_cpg(mock_client.HPE3ParClient,
'test_cpg',
'test_domain',
'32768 MiB',
'32768 MiB',
'32768 MiB',
'R6',
8,
'MAG',
'FC'
) == (True, True, "Created CPG %s successfully." % 'test_cpg')
mock_client.HPE3ParClient.cpgExists.return_value = True
assert ss_3par_cpg.create_cpg(mock_client.HPE3ParClient,
'test_cpg',
'test_domain',
'32768.0 MiB',
'32768.0 MiB',
'32768.0 MiB',
'R6',
8,
'MAG',
'FC'
) == (True, False, 'CPG already present')
ss_3par_cpg.validate_set_size = mock.Mock(return_value=False)
assert ss_3par_cpg.create_cpg(mock_client.HPE3ParClient,
'test_cpg',
'test_domain',
'32768.0 MiB',
'32768 MiB',
'32768.0 MiB',
'R6',
3,
'MAG',
'FC'
) == (False, False, 'Set size 3 not part of RAID set R6')
@mock.patch('ansible_collections.notstdlib.moveitallout.plugins.modules.ss_3par_cpg.client')
def test_delete_cpg(mock_client):
mock_client.HPE3ParClient.login.return_value = True
mock_client.HPE3ParClient.cpgExists.return_value = True
mock_client.HPE3ParClient.FC = 1
mock_client.HPE3ParClient.deleteCPG.return_value = True
assert ss_3par_cpg.delete_cpg(mock_client.HPE3ParClient,
'test_cpg'
) == (True, True, "Deleted CPG %s successfully." % 'test_cpg')
mock_client.HPE3ParClient.cpgExists.return_value = False
assert ss_3par_cpg.delete_cpg(mock_client.HPE3ParClient,
'test_cpg'
) == (True, False, "CPG does not exist")
assert ss_3par_cpg.delete_cpg(mock_client.HPE3ParClient,
None
) == (True, False, "CPG does not exist")
| [
"wk@sydorenko.org.ua"
] | wk@sydorenko.org.ua |
8919c999f822111499f7a48ea5970e4e66bdf2d6 | 13cf11440998376d3b52a49f1e4fb8936c360ac4 | /chainer_chemistry/utils/extend.py | bc1fd94d7cbab82a4c46b3f94174cba64feebd46 | [
"MIT"
] | permissive | k-ishiguro/chainer-chemistry | 87e3db724de0e99042d9585cd4bd5fff38169339 | aec33496def16e76bdfbefa508ba01ab9f79a592 | refs/heads/master | 2021-07-06T22:58:20.127907 | 2019-02-04T02:51:34 | 2019-02-04T02:51:34 | 169,345,375 | 1 | 1 | MIT | 2020-07-30T06:04:13 | 2019-02-06T02:27:39 | Python | UTF-8 | Python | false | false | 3,569 | py | from collections import Iterable
from logging import getLogger
import six
from chainer import cuda
def _to_list(a):
if isinstance(a, Iterable):
a = list(a)
else:
a = [a]
return a
def extend_node(node, out_size, axis=-1, value=0):
"""Extend size of `node` array
For now, this function works same with `extend_array` method,
this is just an alias function.
Args:
node (numpy.ndarray): the array whose `axis` to be extended.
first axis is considered as "batch" axis.
out_size (int): target output size for specified `axis`.
axis (int): node feature axis to be extended.
Default is `axis=-1`, which extends only last axis.
value (int or float): value to be filled for extended place.
Returns (numpy.ndarray): extended `node` array, extended place is filled
with `value`
"""
return extend_arrays_to_size(
node, out_size=out_size, axis=axis, value=value)
def extend_adj(adj, out_size, axis=None, value=0):
"""Extend size of `adj` array
For now, this function only differs default `axis` value from
`extend_array` method, this is an alias function.
Args:
adj (numpy.ndarray): the array whose `axis` to be extended.
first axis is considered as "batch" axis.
out_size (int): target output size for specified `axis`.
axis (list or None): node feature axis to be extended. Default is None,
in this case `axis=[-1, -2]` is used to extend last 2 axes.
value (int or float): value to be filled for extended place.
Returns (numpy.ndarray): extended `adj` array, extended place is filled
with `value`
"""
axis = axis or [-1, -2]
return extend_arrays_to_size(
adj, out_size=out_size, axis=axis, value=value)
def extend_arrays_to_size(arrays, out_size, axis=-1, value=0):
"""Extend size of `arrays` array
Args:
arrays (numpy.ndarray): the array whose `axis` to be extended.
first axis is considered as "batch" axis.
out_size (int): target output size for specified `axis`.
axis (int or list): node feature axis to be extended.
value (int or float): value to be filled for extended place.
Returns (numpy.ndarray): extended array, extended place is filled
with `value`
"""
batch_size = len(arrays)
in_shape = _to_list(arrays[0].shape)
out_shape = [batch_size] + in_shape
axis = _to_list(axis)
for ax in axis:
if ax == 0:
logger = getLogger(__name__)
logger.warning('axis 0 detected, but axis=0 is expected to be '
'batch size dimension.')
if out_shape[ax] > out_size:
raise ValueError(
'current size={} is larger than out_size={} at axis={}'
.format(out_shape[ax], out_size, ax))
out_shape[ax] = out_size
return extend_arrays_to_shape(arrays, out_shape, value=value)
def extend_arrays_to_shape(arrays, out_shape, value=0):
# Ref: `_concat_arrays_with_padding` method in chainer convert.py
# https://github.com/chainer/chainer/blob/master/chainer/dataset/convert.py
xp = cuda.get_array_module(arrays[0])
with cuda.get_device_from_array(arrays[0]):
result = xp.full(out_shape, value, dtype=arrays[0].dtype)
for i in six.moves.range(len(arrays)):
src = arrays[i]
slices = tuple(slice(dim) for dim in src.shape)
result[(i,) + slices] = src
return result
| [
"acc1ssnn9terias@gmail.com"
] | acc1ssnn9terias@gmail.com |
a6e55eee3afeb7a80c9617ab43346f05e6f89ec6 | c6ed9aa97166d4778b89321b580af80c543bacc9 | /algorithms/hashmap.py | 6458bd36a8ddddfd8b2f20c0744f16ae71b00198 | [] | no_license | bradyz/sandbox | 381bcaf2f3719dee142a00858f7062aeff98d1ab | ff90335b918886d5b5956c6c6546dbfde5e7f5b3 | refs/heads/master | 2021-01-23T09:03:54.697325 | 2018-02-27T20:47:48 | 2018-02-27T20:47:48 | 21,292,856 | 10 | 0 | null | 2015-09-03T16:53:15 | 2014-06-28T00:29:18 | Python | UTF-8 | Python | false | false | 43 | py | import collections
def enterDict:
:q
| [
"brady.zhou@utexas.edu"
] | brady.zhou@utexas.edu |
fc20c2db8fe61bc1e4f6569081b063d2bf20d899 | 046df94b4f437b2e30b80d24193fcd5380ee7b54 | /practice/8class_and_object_orinented_programming/test_override.py | ed8de443ce1da2e391a7ce408dd459953a793b13 | [] | no_license | LordBao666/MITLecture6.0001_Introduction_To_CS_Programing_In_Python | 570565a3a931269f47fe15fd83527567a24fc134 | e9fca10ad0226c8620ae36d063c2bc49da114ca4 | refs/heads/master | 2023-04-02T10:40:48.564479 | 2021-04-06T15:19:47 | 2021-04-06T15:19:47 | 344,118,089 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 369 | py | """
@Author : Lord_Bao
@Date : 2021/3/15
"""
class MySupClass(object):
def __init__(self):
pass
def method_to_be_implemented(self):
raise NotImplementedError
class Offspring(MySupClass):
def method_to_be_implemented(self):
pass
def special_method(self):
pass
if __name__ == '__main__':
son = Offspring()
| [
"916900021@qq.com"
] | 916900021@qq.com |
4869c36593cd8c01de13e24e36e3cceefb50893a | 350ecc8259bcad075bd376423335bb41cc8a533e | /edit2.cgi | 8313bbb14d29515c7333953d5ce3cee907eeb7e9 | [] | no_license | CodedQuen/python_begin | 39da66ecc4a77b94a5afbbf0900727c8156b85e1 | 1433c319b5d85520c50aee00dd4b6f21a7e6366a | refs/heads/master | 2022-06-10T10:30:28.807874 | 2020-04-25T03:34:03 | 2020-04-25T03:34:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,170 | cgi | #!/usr/bin/python
print ('Content-type: text/html\n')
import cgitb; cgitb.enable()
import psycopg
conn = psycopg.connect('dbname=foo user=bar')
curs = conn.cursor()
import cgi, sys
form = cgi.FieldStorage()
reply_to = form.getvalue('reply_to')
print ("""
<html>
<head>
<title>Compose Message</title>
</head>
<body>
<h1>Compose Message</h1>
<form action='save.cgi' method='POST'>
""")
subject = ''
if reply_to is not None:
print '<input type="hidden" name="reply_to" value="%s"/>' % reply_to
curs.execute('SELECT subject FROM messages WHERE id = %s' % reply_to)
subject = curs.fetchone()[0]
if not subject.startswith('Re: '):
subject = 'Re: ' + subject
print ()"""
<b>Subject:</b><br />
<input type='text' size='40' name='subject' value='%s' /><br />
<b>Sender:</b><br />
<input type='text' size='40' name='sender' /><br />
<b>Message:</b><br />
<textarea name='text' cols='40' rows='20'></textarea><br />
<input type='submit' value='Save'/>
</form>
<hr />
<a href='main.cgi'>Back to the main page</a>'
</body>
</html>
""" % subject)
| [
"noreply@github.com"
] | CodedQuen.noreply@github.com |
3ea8e542055d9af415f0df4954230bc1069c1696 | 45c142c3e3dc8d3211a86c77385ecfdd10d28fb9 | /dstore/engine/procedures/mi_GetInformationTypes_Ad_pb2.py | d044b357e309012a064a921161ed1a2bf83e91db | [] | no_license | dstore-io/dstore-sdk-python | 945d64995c8892af18fab26c90117245abec64a4 | 8494d12ac77c3c3cc6dd59026407ef514ad179fc | refs/heads/master | 2020-06-14T13:07:08.181547 | 2017-01-26T11:19:39 | 2017-01-26T11:19:39 | 75,177,794 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | true | 7,421 | py | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: dstore/engine/procedures/mi_GetInformationTypes_Ad.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from dstore import values_pb2 as dstore_dot_values__pb2
from dstore.engine import engine_pb2 as dstore_dot_engine_dot_engine__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='dstore/engine/procedures/mi_GetInformationTypes_Ad.proto',
package='dstore.engine.mi_GetInformationTypes_Ad',
syntax='proto3',
serialized_pb=_b('\n8dstore/engine/procedures/mi_GetInformationTypes_Ad.proto\x12\'dstore.engine.mi_GetInformationTypes_Ad\x1a\x13\x64store/values.proto\x1a\x1a\x64store/engine/engine.proto\"\x0c\n\nParameters\"\xbc\x02\n\x08Response\x12\x38\n\x10meta_information\x18\x02 \x03(\x0b\x32\x1e.dstore.engine.MetaInformation\x12\'\n\x07message\x18\x03 \x03(\x0b\x32\x16.dstore.engine.Message\x12\x42\n\x03row\x18\x04 \x03(\x0b\x32\x35.dstore.engine.mi_GetInformationTypes_Ad.Response.Row\x1a\x88\x01\n\x03Row\x12\x0f\n\x06row_id\x18\x90N \x01(\x05\x12\x39\n\x13information_type_id\x18\x91N \x01(\x0b\x32\x1b.dstore.values.IntegerValue\x12\x35\n\x10information_type\x18\x92N \x01(\x0b\x32\x1a.dstore.values.StringValueBZ\n\x1bio.dstore.engine.proceduresZ;gosdk.dstore.de/engine/procedures/mi_GetInformationTypes_Adb\x06proto3')
,
dependencies=[dstore_dot_values__pb2.DESCRIPTOR,dstore_dot_engine_dot_engine__pb2.DESCRIPTOR,])
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_PARAMETERS = _descriptor.Descriptor(
name='Parameters',
full_name='dstore.engine.mi_GetInformationTypes_Ad.Parameters',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=150,
serialized_end=162,
)
_RESPONSE_ROW = _descriptor.Descriptor(
name='Row',
full_name='dstore.engine.mi_GetInformationTypes_Ad.Response.Row',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='row_id', full_name='dstore.engine.mi_GetInformationTypes_Ad.Response.Row.row_id', index=0,
number=10000, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='information_type_id', full_name='dstore.engine.mi_GetInformationTypes_Ad.Response.Row.information_type_id', index=1,
number=10001, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='information_type', full_name='dstore.engine.mi_GetInformationTypes_Ad.Response.Row.information_type', index=2,
number=10002, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=345,
serialized_end=481,
)
_RESPONSE = _descriptor.Descriptor(
name='Response',
full_name='dstore.engine.mi_GetInformationTypes_Ad.Response',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='meta_information', full_name='dstore.engine.mi_GetInformationTypes_Ad.Response.meta_information', index=0,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='message', full_name='dstore.engine.mi_GetInformationTypes_Ad.Response.message', index=1,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='row', full_name='dstore.engine.mi_GetInformationTypes_Ad.Response.row', index=2,
number=4, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[_RESPONSE_ROW, ],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=165,
serialized_end=481,
)
# ---- protoc-generated wiring below: do not edit by hand ----
# Resolve message-type references that could not be set at Descriptor
# construction time (cross-file and nested types).
_RESPONSE_ROW.fields_by_name['information_type_id'].message_type = dstore_dot_values__pb2._INTEGERVALUE
_RESPONSE_ROW.fields_by_name['information_type'].message_type = dstore_dot_values__pb2._STRINGVALUE
_RESPONSE_ROW.containing_type = _RESPONSE
_RESPONSE.fields_by_name['meta_information'].message_type = dstore_dot_engine_dot_engine__pb2._METAINFORMATION
_RESPONSE.fields_by_name['message'].message_type = dstore_dot_engine_dot_engine__pb2._MESSAGE
_RESPONSE.fields_by_name['row'].message_type = _RESPONSE_ROW
# Register the top-level message descriptors with this file's DESCRIPTOR.
DESCRIPTOR.message_types_by_name['Parameters'] = _PARAMETERS
DESCRIPTOR.message_types_by_name['Response'] = _RESPONSE
# Materialize concrete Python message classes from the descriptors.
Parameters = _reflection.GeneratedProtocolMessageType('Parameters', (_message.Message,), dict(
  DESCRIPTOR = _PARAMETERS,
  __module__ = 'dstore.engine.procedures.mi_GetInformationTypes_Ad_pb2'
  # @@protoc_insertion_point(class_scope:dstore.engine.mi_GetInformationTypes_Ad.Parameters)
  ))
_sym_db.RegisterMessage(Parameters)
# Response carries a nested Row message class (Response.Row).
Response = _reflection.GeneratedProtocolMessageType('Response', (_message.Message,), dict(
  Row = _reflection.GeneratedProtocolMessageType('Row', (_message.Message,), dict(
    DESCRIPTOR = _RESPONSE_ROW,
    __module__ = 'dstore.engine.procedures.mi_GetInformationTypes_Ad_pb2'
    # @@protoc_insertion_point(class_scope:dstore.engine.mi_GetInformationTypes_Ad.Response.Row)
    ))
  ,
  DESCRIPTOR = _RESPONSE,
  __module__ = 'dstore.engine.procedures.mi_GetInformationTypes_Ad_pb2'
  # @@protoc_insertion_point(class_scope:dstore.engine.mi_GetInformationTypes_Ad.Response)
  ))
_sym_db.RegisterMessage(Response)
_sym_db.RegisterMessage(Response.Row)
# File-level options: Java package and Go import path for generated code.
DESCRIPTOR.has_options = True
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\033io.dstore.engine.proceduresZ;gosdk.dstore.de/engine/procedures/mi_GetInformationTypes_Ad'))
# gRPC stub imports appended by the protoc gRPC plugin.
import grpc
from grpc.beta import implementations as beta_implementations
from grpc.beta import interfaces as beta_interfaces
from grpc.framework.common import cardinality
from grpc.framework.interfaces.face import utilities as face_utilities
# @@protoc_insertion_point(module_scope)
| [
"b.dolkemeier@dbap.de"
] | b.dolkemeier@dbap.de |
6b4038c50a12b01cae8895691a41ee3c55405e9e | 6e8f2e28479566dbaa338300b2d61f784ff83f97 | /.history/code/datasetup_20210414101740.py | 1b100c5fafb593a7c48ab039e9ecf27287a49d16 | [] | no_license | eeng5/CV-final-project | 55a7d736f75602858233ebc380c4e1d67ab2b866 | 580e28819560b86f6974959efb1d31ef138198fc | refs/heads/main | 2023-04-09T21:28:21.531293 | 2021-04-21T19:57:22 | 2021-04-21T19:57:22 | 352,703,734 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,783 | py | from PIL import Image
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
import pandas as pd
import cv2
import os
import glob
from pathlib import Path
def cleanTestDirs():
    """Delete every generated .jpg under data/test/angry.

    NOTE(review): only the 'angry' subdirectory is cleaned here, despite the
    plural name -- the other emotion folders are untouched; confirm whether
    that is intentional.

    Fixes: removed the unused local ``emotions = []`` and the dead
    commented-out ``f.unlink()`` line.
    """
    for f in Path('/Users/Natalie/Desktop/cs1430/CV-final-project/data/test/angry').glob('*.jpg'):
        try:
            os.remove(f)
        except OSError as e:
            # Best-effort cleanup: report the failure and keep going.
            print("Error: %s : %s" % (f, e.strerror))
def cleanTrainDirs():
    """Delete every generated .jpg under data/train/angry, best-effort."""
    train_dir = Path('/Users/Natalie/Desktop/cs1430/CV-final-project/data/train/angry')
    for jpg_path in train_dir.glob('*.jpg'):
        try:
            os.remove(jpg_path)
        except OSError as e:
            # Report and continue; a single failed delete should not abort cleanup.
            print("Error: %s : %s" % (jpg_path, e.strerror))
def cleanAll():
    """Wipe both the test and train output directories (in that order)."""
    cleanTestDirs()
    cleanTrainDirs()
def createPixelArray(arr):
    """Convert a space-separated pixel string into a 48x48 uint8 image array.

    The FER CSV stores each face as 2304 whitespace-separated integers.
    """
    values = [int(token) for token in arr.split()]
    return np.array(values, dtype=np.uint8).reshape((48, 48))
def equalize_hist(img):
    """Histogram-equalize a grayscale image via OpenCV."""
    return cv2.equalizeHist(img)
def showImages(imgs):
    """Display the given images side by side in grayscale."""
    _, axes = plt.subplots(1, len(imgs), figsize=(20, 20))
    axes = axes.flatten()
    for image, axis in zip(imgs, axes):
        axis.imshow(image, cmap=plt.get_cmap('gray'))
    plt.show()
def augmentIMG(img, task):
    """Return a list of augmented variants of img.

    Always includes: the original, a histogram-equalized copy, and a
    horizontal flip of the original. When task == 3, also includes a
    bilateral-filtered copy of the equalized image.
    """
    equalized = equalize_hist(img)
    augmented = [img, equalized]
    if task == 3:
        # Edge-preserving smoothing on the equalized image.
        smoothed = cv2.bilateralFilter(equalized, d=9, sigmaColor=75, sigmaSpace=75)
        augmented.append(smoothed)
    augmented.append(cv2.flip(img, 1))  # mirror horizontally
    return augmented
def saveIMG(arr, num, folderLoc):
    """Save a pixel array as JPEG named image_<num>.jpg inside folderLoc.

    folderLoc is expected to end with a path separator (see callers).
    """
    filename = folderLoc + "image_" + num + ".jpg"
    Image.fromarray(arr).save(filename)
def _export_split(usage, emotion_dict, task, out_dir):
    """Write augmented images for every CSV row whose Usage column matches.

    Shared implementation for createTrain/createTest, which previously
    duplicated this loop verbatim except for the filter value and output
    directory.

    :param usage: value of the CSV ' Usage' column to select ("Training"
        or "PublicTest").
    :param emotion_dict: maps emotion label (int) -> subdirectory name
        (see createEmotionDict).
    :param task: augmentation level forwarded to augmentIMG.
    :param out_dir: base output directory, ending with a path separator.
    """
    df = pd.read_csv('/Users/Natalie/Desktop/cs1430/CV-final-project/data/icml_face_data.csv')  # CHANGE ME
    for index, row in df.iterrows():
        # NOTE: ' Usage' and ' pixels' carry a leading space in this CSV.
        if (row[' Usage'] == usage):
            px = row[' pixels']
            emot = int(row['emotion'])
            emot_loc = emotion_dict[emot]
            filename = out_dir + emot_loc
            img = createPixelArray(px)
            img_arr = augmentIMG(img, task)
            # One file per augmented variant: image_<row>_<variant>.jpg
            for idx, i in enumerate(img_arr):
                num = str(index) + "_" + str(idx)
                saveIMG(i, num, filename)

def createTrain(emotion_dict, task):
    """Generate augmented training images under data/train/."""
    _export_split("Training", emotion_dict, task,
                  "/Users/Natalie/Desktop/cs1430/CV-final-project/data/train/")  # CHANGE ME

def createTest(emotion_dict, task):
    """Generate augmented public-test images under data/test/."""
    _export_split("PublicTest", emotion_dict, task,
                  "/Users/Natalie/Desktop/cs1430/CV-final-project/data/test/")  # CHANGE ME
def createEmotionDict():
    """Map FER-2013 emotion labels (0-6) to output subdirectory names.

    :return: dict of int label -> folder name ending in '/'.
    """
    # Dict literal instead of repeated item assignment (idiomatic, one place
    # to read the whole mapping).
    return {
        0: "angry/",
        1: "disgust/",
        2: "fear/",
        3: "happy/",
        4: "sad/",
        5: "surprise/",
        6: "neutral/",
    }
def createSimpleData():
    """Rebuild the dataset using the basic (task 1) augmentation pipeline."""
    cleanAll()
    print("Cleaning done")
    emotion_dict = createEmotionDict()
    createTrain(emotion_dict, 1)
    print("Training done")
    createTest(emotion_dict, 1)
    print("Testing done")
def createComplexData():
    """Rebuild the dataset using the extended (task 3) augmentation pipeline.

    Consistency fix: emits the same progress messages as createSimpleData,
    which this function previously omitted.
    """
    cleanAll()
    print("Cleaning done")
    emot_dict = createEmotionDict()
    createTrain(emot_dict, 3)
    print("Training done")
    createTest(emot_dict, 3)
    print("Testing done")
def main():
    # NOTE(review): builds the emotion mapping but never uses it -- presumably
    # this was meant to call createSimpleData() or createComplexData();
    # confirm intent before wiring it up.
    emot_dict = createEmotionDict()
if __name__ == '__main__':
    main()
"natalie_rshaidat@brown.edu"
] | natalie_rshaidat@brown.edu |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.