hexsha stringlengths 40 40 | size int64 1 1.03M | ext stringclasses 10 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 3 239 | max_stars_repo_name stringlengths 5 130 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 239 | max_issues_repo_name stringlengths 5 130 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 239 | max_forks_repo_name stringlengths 5 130 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 1 1.03M | avg_line_length float64 1 958k | max_line_length int64 1 1.03M | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
ace3316bca0f7ec22af0c6b706b19979e2197fe3 | 1,450 | py | Python | github/methods/activity/events/list_user_received_events.py | appheap/PyGithub | 2442a2512a708c6463f81d8398fd91090e134df1 | [
"MIT"
] | 1 | 2022-03-16T21:33:37.000Z | 2022-03-16T21:33:37.000Z | github/methods/activity/events/list_user_received_events.py | appheap/PyGithub | 2442a2512a708c6463f81d8398fd91090e134df1 | [
"MIT"
] | null | null | null | github/methods/activity/events/list_user_received_events.py | appheap/PyGithub | 2442a2512a708c6463f81d8398fd91090e134df1 | [
"MIT"
] | 1 | 2022-03-16T21:28:06.000Z | 2022-03-16T21:28:06.000Z | from github.scaffold import Scaffold
from github.types import Response
from github.utils import utils
class ListUserReceivedEvents(Scaffold):
    """
    List events received by the authenticated user
    """
    def list_user_received_events(
            self,
            *,
            username: str,
            per_page: int = 100,
            page: int = 1,
    ) -> 'Response':
        """
        These are events that you've received by watching repos and following users. If you are authenticated as the given user, you will see private events. Otherwise, you'll only see public events.

        :param username: Username of the user
        :param per_page:
            Results per page (max "100")
            Default: "30"
        :param page:
            Page number of the results to fetch.
            Default: "1"
        :return: 'Response'
        """
        # BUGFIX: the GitHub endpoint for events *received* by a user is
        # `/users/{username}/received_events`. The previous
        # `/users/{username}/events/public` listed public events *performed*
        # by the user instead, contradicting this method's documented purpose.
        response = self.get_with_token(
            url=f'https://api.github.com/users/{username}/received_events',
            params={
                'per_page': per_page,
                'page': page,
            }
        )
        if response.status_code == 200:
            # Success: parse the JSON payload into event objects.
            return Response._parse(
                response=response,
                success=True,
                result=utils.parse_events(response.json()),
            )
        else:
            # Any non-200 status is reported as a failed response.
            return Response._parse(
                response=response,
                success=False,
            )
| 27.884615 | 199 | 0.538621 |
ace33519e12feb905b5adc588480f674cb07c6d1 | 3,794 | py | Python | applications/RANSApplication/tests/test_RANSApplication.py | HayoungChung/Kratos | 8a996625261ecf2e33923df95e449c0874d6f4f5 | [
"BSD-4-Clause"
] | null | null | null | applications/RANSApplication/tests/test_RANSApplication.py | HayoungChung/Kratos | 8a996625261ecf2e33923df95e449c0874d6f4f5 | [
"BSD-4-Clause"
] | 1 | 2019-12-05T05:33:34.000Z | 2019-12-05T05:33:34.000Z | applications/RANSApplication/tests/test_RANSApplication.py | HayoungChung/Kratos | 8a996625261ecf2e33923df95e449c0874d6f4f5 | [
"BSD-4-Clause"
] | null | null | null | import subprocess
import os.path
# import Kratos
import KratosMultiphysics
import KratosMultiphysics.FluidDynamicsApplication
import KratosMultiphysics.RANSApplication
# Import Kratos "wrapper" for unittests
import KratosMultiphysics.KratosUnittest as KratosUnittest
import KratosMultiphysics.kratos_utilities as kratos_utilities
# Import the tests of test_classes to create the suites
from evm_k_epsilon_tests import EvmKEpsilonTest
from custom_process_tests import CustomProcessTest
import run_cpp_unit_tests
def AssembleTestSuites():
    ''' Populates the test suites to run.

    At least the "small", "nightly" and "all" suites are populated.

    Return
    ------
    suites: A dictionary of suites
        The set of suites with its test_cases added.
    '''
    suites = KratosUnittest.KratosSuites

    # The (currently empty) small suite feeds into the nightly suite.
    smallSuite = suites['small']
    nightSuite = suites['nightly']
    nightSuite.addTests(smallSuite)

    # Custom process checks scheduled nightly.
    custom_process_cases = [
        'testCheckScalarBoundsProcess',
        'testCheckVectorBoundsProcess',
        'testClipScalarVariableProcess',
        'testApplyFlagProcess',
        'testScalarCellCenterAveragingProcess',
        'testVectorCellCenterAveragingProcess',
        'testVectorAlignProcessTangential',
        'testVectorAlignProcessNormal',
        'testWallDistanceCalculationProcess',
        'testLogarithmicYPlusCalculationProcess',
        'testNutKEpsilonHighReCalculationProcess',
    ]
    for case_name in custom_process_cases:
        nightSuite.addTest(CustomProcessTest(case_name))

    # k-epsilon turbulence model checks scheduled nightly.
    k_epsilon_cases = [
        'testBackwardFacingStepKEpsilonTransient',
        'testChannelFlowKEpsilonSteady',
        'testChannelFlowKEpsilonSteadyPeriodic',
        'testOneElementKEpsilonSteady',
    ]
    for case_name in k_epsilon_cases:
        nightSuite.addTest(EvmKEpsilonTest(case_name))

    # Very long validation tests would be registered here:
    # validationSuite = suites['validation']

    # The "all" suite aggregates everything scheduled above.
    allSuite = suites['all']
    allSuite.addTests(nightSuite)
    # allSuite.addTests(validationSuite)
    return suites
if __name__ == '__main__':
    # Silence Kratos output below warning level while tests run.
    KratosMultiphysics.Logger.GetDefaultOutput().SetSeverity(
        KratosMultiphysics.Logger.Severity.WARNING)
    # C++ unit tests run first.
    KratosMultiphysics.Logger.PrintInfo("Unittests", "\nRunning cpp unit tests ...")
    run_cpp_unit_tests.run()
    KratosMultiphysics.Logger.PrintInfo("Unittests", "Finished running cpp unit tests!")
    # MPI python tests require MPI plus the Metis and Trilinos applications.
    if kratos_utilities.IsMPIAvailable() and kratos_utilities.CheckIfApplicationsAvailable("MetisApplication", "TrilinosApplication"):
        KratosMultiphysics.Logger.PrintInfo("Unittests", "\nRunning mpi python tests ...")
        # Launch the MPI test script as a separate 2-rank mpiexec process,
        # working from this file's directory.
        p = subprocess.Popen(
            ["mpiexec", "-np", "2", "python3", "test_RANSApplication_mpi.py"],
            stdout=subprocess.PIPE,
            cwd=os.path.dirname(os.path.abspath(__file__)))
        p.wait()
        KratosMultiphysics.Logger.PrintInfo("Unittests", "Finished mpi python tests!")
    else:
        KratosMultiphysics.Logger.PrintInfo("Unittests", "\nSkipping mpi python tests due to missing dependencies")
    # Finally the serial python suites assembled above.
    KratosMultiphysics.Logger.PrintInfo("Unittests", "\nRunning python tests ...")
    KratosUnittest.runTests(AssembleTestSuites())
KratosMultiphysics.Logger.PrintInfo("Unittests", "Finished python tests!") | 42.155556 | 134 | 0.771218 |
ace3360cbe278b71098bbb020f6e4a1958571466 | 2,244 | py | Python | chat/tests/test_consumers.py | nseetim/Django-chat-app | 064b4bfeae3e525f29af40c19b9b0b04947a599a | [
"BSD-3-Clause"
] | 773 | 2016-03-04T22:00:02.000Z | 2022-02-08T11:35:36.000Z | chat/tests/test_consumers.py | nseetim/Django-chat-app | 064b4bfeae3e525f29af40c19b9b0b04947a599a | [
"BSD-3-Clause"
] | 26 | 2016-03-06T14:45:20.000Z | 2017-07-01T04:06:10.000Z | chat/tests/test_consumers.py | nseetim/Django-chat-app | 064b4bfeae3e525f29af40c19b9b0b04947a599a | [
"BSD-3-Clause"
] | 448 | 2016-03-15T14:18:50.000Z | 2022-03-27T18:12:47.000Z | import json
import pytest
from asgiref.inmemory import ChannelLayer as InMemoryChannelLayer
from channels import Group
from channels.handler import AsgiRequest
from channels.message import Message
from django.contrib.sessions.backends.file import SessionStore as FileSessionStore
from chat.consumers import ws_connect, ws_receive, ws_disconnect
from chat.models import Room
@pytest.fixture
def message_factory(settings, tmpdir):
    """Fixture returning a factory that builds channel ``Message`` objects
    backed by an in-memory channel layer and a file-based session store
    rooted in the pytest temp directory."""
    def factory(name, **content):
        layer = InMemoryChannelLayer()
        msg = Message(content, name, layer)
        # Point the file session backend at the per-test temp directory.
        settings.SESSION_FILE_PATH = str(tmpdir)
        msg.channel_session = FileSessionStore()
        return msg
    return factory
@pytest.mark.django_db
def test_ws_connect(message_factory):
    # Connecting to an existing room subscribes the reply channel to the
    # room's group and stores the room label in the channel session.
    r = Room.objects.create(label='room1')
    message = message_factory('test',
        path = b'/chat/room1',
        client = ['10.0.0.1', 12345],
        reply_channel = u'test-reply',
    )
    ws_connect(message)
    assert 'test-reply' in message.channel_layer._groups['chat-room1']
    assert message.channel_session['room'] == 'room1'
@pytest.mark.django_db
def test_ws_receive(message_factory):
    # A received chat message is re-broadcast to the room's group as JSON.
    r = Room.objects.create(label='room1')
    message = message_factory('test',
        text = json.dumps({'handle': 'H', 'message': 'M'})
    )
    # Normally this would happen when the person joins the room, but mock
    # it up manually here.
    message.channel_session['room'] = 'room1'
    Group('chat-room1', channel_layer=message.channel_layer).add(u'test-reply')
    ws_receive(message)
    # The broadcast should arrive on the subscribed reply channel.
    _, reply = message.channel_layer.receive_many([u'test-reply'])
    reply = json.loads(reply['text'])
    assert reply['message'] == 'M'
    assert reply['handle'] == 'H'
@pytest.mark.django_db
def test_ws_disconnect(message_factory):
    # Disconnecting removes the disconnecting reply channel from the
    # room's group.
    r = Room.objects.create(label='room1')
    message = message_factory('test', reply_channel=u'test-reply1')
    Group('chat-room1', channel_layer=message.channel_layer).add(u'test-reply1')
    Group('chat-room1', channel_layer=message.channel_layer).add(u'test-reply2')
    message.channel_session['room'] = 'room1'
    ws_disconnect(message)
    assert 'test-reply1' not in message.channel_layer._groups['chat-room1']
| 35.0625 | 82 | 0.717469 |
ace336defbebba22401dc0af06f4f1cec3478944 | 1,321 | py | Python | app/core/tests/test_setup.py | OpenLXP/openlxp-xia-dau | 924475761265dc7c580b8889adf5acd7cc871719 | [
"Apache-2.0"
] | null | null | null | app/core/tests/test_setup.py | OpenLXP/openlxp-xia-dau | 924475761265dc7c580b8889adf5acd7cc871719 | [
"Apache-2.0"
] | 10 | 2021-01-05T18:10:47.000Z | 2022-03-23T17:03:57.000Z | app/core/tests/test_setup.py | OpenLXP/openlxp-xia-dau | 924475761265dc7c580b8889adf5acd7cc871719 | [
"Apache-2.0"
] | null | null | null | import pandas as pd
from django.test import TestCase
class TestSetUp(TestCase):
    """Class with setup and teardown for tests in XIS"""

    def setUp(self):
        """Function to set up necessary data for testing"""

        # globally accessible data sets

        # Sample source metadata record shared by the tests.
        self.source_metadata = {
            "Test": "0",
            "Test_id": "2146",
            "Test_url": "https://example.test.com/",
            "End_date": "9999-12-31T00:00:00-05:00",
            "test_name": "test name",
            "Start_date": "2017-03-28T00:00:00-04:00",
            "crs_header": "TestData 123",
            "SOURCESYSTEM": "DAU",
            "test_description": "test description",
        }

        # Composite key (crs_header + "_" + SOURCESYSTEM) and precomputed
        # hash strings for the record above.
        self.key_value = "TestData 123_DAU"
        self.key_value_hash = "6acf7689ea81a1f792e7668a23b1acf5"
        self.hash_value = "29f781517cba9c121d6b677803069beb"

        # Minimal generic payload used by tests.
        self.test_data = {
            "key1": ["val1"],
            "key2": ["val2"],
            "key3": ["val3"]}

        # Single-row DataFrame built from the sample metadata (row index 1).
        self.metadata_df = pd.DataFrame.from_dict({1: self.source_metadata},
                                                  orient='index')

        # Placeholder API endpoints.
        self.xis_api_endpoint_url = 'http://example'
        self.xsr_api_endpoint_url = 'http://example'
        return super().setUp()

    def tearDown(self):
        return super().tearDown()
| 30.022727 | 76 | 0.551855 |
ace3386ac6dfc91ef30aff27f1c5208c6c4e4cee | 2,876 | py | Python | precedent/tests/test_queue.py | lewisjared/precedent | 6447634c6961f44b058a0379b782c8d46a21560e | [
"MIT"
] | null | null | null | precedent/tests/test_queue.py | lewisjared/precedent | 6447634c6961f44b058a0379b782c8d46a21560e | [
"MIT"
] | null | null | null | precedent/tests/test_queue.py | lewisjared/precedent | 6447634c6961f44b058a0379b782c8d46a21560e | [
"MIT"
] | null | null | null | from unittest import TestCase, mock
from precedent.queues import ProcessingQueue, py_to_redis, redis_to_py
class TestPyToRedis(TestCase):
    """Tests serializing python values into redis-storable strings."""

    def test_str(self):
        # Plain strings pass through unchanged.
        self.assertEqual(py_to_redis('normal_string'), 'normal_string')

    def test_dict(self):
        # Dicts are serialized to JSON text.
        self.assertEqual(py_to_redis({'test': 'item'}), '{"test": "item"}')
class TestRedisToPy(TestCase):
    """Tests deserializing redis byte payloads back into python values."""

    def test_str(self):
        # Plain byte strings are decoded to str.
        self.assertEqual(redis_to_py(b'normal_string'), 'normal_string')

    def test_dict(self):
        # JSON byte payloads are parsed back into dicts.
        self.assertEqual(redis_to_py(b'{"test": "item"}'), {'test': 'item'})
class TestProcessingQueue(TestCase):
    """Tests for ProcessingQueue, exercised against its redis backend."""

    def setUp(self):
        self.queue = ProcessingQueue('test_queue')
        # Raw redis client, used to inspect the underlying lists directly.
        self.r = self.queue._r

    def tearDown(self):
        self.queue.clean_up()

    def test_add_item(self):
        # Each queued item grows the pending list by one.
        self.queue.queue_item('test_item')
        self.assertEqual(self.r.llen(self.queue.pending_list), 1)
        self.queue.queue_item('another_item')
        self.assertEqual(self.r.llen(self.queue.pending_list), 2)

    def test_add_item_head(self):
        # head=True queues the item to be processed first.
        self.queue.queue_item('test_item')
        self.queue.queue_item('another_item', head=True)
        self.assertEqual(self.queue.get_next_item(), 'another_item')

    def test_get_item(self):
        # Items come back in FIFO order and move to the processing list.
        self.queue.queue_item('test_item')
        self.queue.queue_item('another_item')
        self.assertEqual(self.queue.get_next_item(), 'test_item')
        self.assertEqual(self.r.llen(self.queue.processing_list), 1)

    def test_empty_queue(self):
        # An empty queue yields None instead of raising.
        self.assertIsNone(self.queue.get_next_item())

    def test_timeout(self):
        self.queue.queue_item('test_item')
        self.queue.queue_item('another_item')
        self.assertEqual(self.queue.get_next_item(), 'test_item')
        # First sweep marks the in-flight item for deletion.
        self.queue.check_for_timeouts()
        self.assertEqual(self.r.llen(self.queue._to_delete_list), 1)
        # BUGFIX: the raw redis client returns bytes, so comparing against
        # the str 'test_item' made this assertion vacuous (always unequal);
        # compare against bytes like the final assertion below does.
        self.assertNotEqual(
            self.r.lindex(self.queue.pending_list, 0), b'test_item')
        # Second sweep re-queues the timed-out item at the pending head.
        self.queue.check_for_timeouts()
        self.assertEqual(self.r.llen(self.queue._to_delete_list), 0)
        self.assertEqual(
            self.r.lindex(self.queue.pending_list, 0), b'test_item')

    def test_multiple_items(self):
        # Identical items can be in flight simultaneously; item_complete
        # clears all matching entries from the processing list.
        self.queue.queue_item('test_item')
        self.queue.queue_item('test_item')
        self.queue.queue_item('test_item')
        self.assertEqual(self.queue.get_next_item(), 'test_item')
        self.assertEqual(self.queue.get_next_item(), 'test_item')
        self.assertEqual(self.queue.get_next_item(), 'test_item')
        self.assertEqual(self.r.llen(self.queue.processing_list), 3)
        self.queue.item_complete('test_item')
        self.assertEqual(self.r.llen(self.queue.processing_list), 0)

    def test_dict_item(self):
        # Dict items should survive the redis round-trip unchanged.
        item = {
            "item": "value"
        }
        self.queue.queue_item(item)
self.assertEqual(item, self.queue.get_next_item()) | 33.835294 | 84 | 0.675939 |
ace3389983425a595abcacdd1b191841a7fe254b | 1,146 | py | Python | FoodScan/BarcodeSync/BarcodeDecoder/cascadingBarcodeDecoder.py | danielBreitlauch/FoodScan | cf84209c4da84a8cb56deccdbde9c305eee1b8c3 | [
"MIT"
] | 1 | 2017-03-16T00:59:01.000Z | 2017-03-16T00:59:01.000Z | FoodScan/BarcodeSync/BarcodeDecoder/cascadingBarcodeDecoder.py | danielBreitlauch/FoodScan | cf84209c4da84a8cb56deccdbde9c305eee1b8c3 | [
"MIT"
] | null | null | null | FoodScan/BarcodeSync/BarcodeDecoder/cascadingBarcodeDecoder.py | danielBreitlauch/FoodScan | cf84209c4da84a8cb56deccdbde9c305eee1b8c3 | [
"MIT"
] | null | null | null | import traceback
from FoodScan.BarcodeSync.BarcodeDecoder.barcodeDecoder import BarcodeDecoder
from FoodScan.BarcodeSync.BarcodeDecoder.codecheck import CodeCheck
from FoodScan.BarcodeSync.BarcodeDecoder.digitEye import DigitEye
from FoodScan.BarcodeSync.BarcodeDecoder.eanSearch import EanSearch
from FoodScan.BarcodeSync.BarcodeDecoder.geizhalz import Geizhals
class CascadingBarcodeDecoder(BarcodeDecoder):
    """Decoder that asks several barcode lookup services in a fixed
    priority order and returns the first usable result."""

    def __init__(self):
        BarcodeDecoder.__init__(self)
        self.cc = CodeCheck()
        self.es = EanSearch()
        self.de = DigitEye()
        self.gh = Geizhals()

    @staticmethod
    def url(barcode):
        # Item URLs are always delegated to CodeCheck.
        return CodeCheck.url(barcode)

    def wrap(self, method, barcode):
        """Call one decoder; turn any failure into None (traceback printed)."""
        try:
            return method.item(barcode)
        except Exception:
            traceback.print_exc()
            return None

    def item(self, barcode):
        """Try each decoder in priority order, stopping at the first hit."""
        result = None
        for decoder in (self.cc, self.gh, self.de, self.es):
            result = self.wrap(decoder, barcode)
            if result:
                break
        return result
| 28.65 | 77 | 0.667539 |
ace339936fdec76dda6fdb12a034e41baf3fd9b1 | 713 | py | Python | src/test/tests/unit/annotation_objects.py | visit-dav/vis | c08bc6e538ecd7d30ddc6399ec3022b9e062127e | [
"BSD-3-Clause"
] | 226 | 2018-12-29T01:13:49.000Z | 2022-03-30T19:16:31.000Z | src/test/tests/unit/annotation_objects.py | visit-dav/vis | c08bc6e538ecd7d30ddc6399ec3022b9e062127e | [
"BSD-3-Clause"
] | 5,100 | 2019-01-14T18:19:25.000Z | 2022-03-31T23:08:36.000Z | src/test/tests/unit/annotation_objects.py | visit-dav/vis | c08bc6e538ecd7d30ddc6399ec3022b9e062127e | [
"BSD-3-Clause"
] | 84 | 2019-01-24T17:41:50.000Z | 2022-03-10T10:01:46.000Z | # ----------------------------------------------------------------------------
# CLASSES: nightly
#
# Test Case: annotation_objects.py
#
# Tests: AnnotationObject unit test
#
# Alister Maguire, Fri Jul 16 12:50:11 PDT 2021
#
# Modifications:
#
# ----------------------------------------------------------------------------
def CheckComparisonOperator():
    # Exercises AnnotationObject equality via a Text2D annotation object.
    annot = CreateAnnotationObject("Text2D")
    #
    # In the past, a bug caused a crash whenever we compared to a non-annotation
    # type object.
    #
    TestValueEQ("ComparisonOperator_00", annot == "", False)
    TestValueEQ("ComparisonOperator_01", annot == annot, True)
def RunMain():
    # Entry point running every annotation-object check in this file.
    CheckComparisonOperator()

RunMain()
Exit()
| 23.766667 | 80 | 0.531557 |
ace339bc5af927d07bb6501dcafe57f825f5f5de | 707 | py | Python | setup.py | lpang36/Mixel | c12fba3c81c08ba54bdce03e4aa41d8879e1c244 | [
"MIT"
] | 1 | 2019-05-17T03:10:51.000Z | 2019-05-17T03:10:51.000Z | setup.py | lpang36/Mixel | c12fba3c81c08ba54bdce03e4aa41d8879e1c244 | [
"MIT"
] | null | null | null | setup.py | lpang36/Mixel | c12fba3c81c08ba54bdce03e4aa41d8879e1c244 | [
"MIT"
] | 1 | 2020-07-12T17:57:51.000Z | 2020-07-12T17:57:51.000Z | from setuptools import setup,find_packages
from setuptools.extension import Extension
import numpy
setup(
    # Package identity and metadata.
    name='mixel',
    version='0.0.3',
    author='Lawrence Pang',
    author_email='lawrencepang36@gmail.com',
    description='a small python module for recoloring images with pixels from a palette image',
    url='https://github.com/lpang36/mixel',
    packages=find_packages(),
    # C extension 'mixel.anneal' built from mixel/anneal.c.
    ext_modules=[Extension('mixel.anneal',['mixel/anneal.c'])],
    # NumPy headers are needed to compile the extension.
    include_dirs=[numpy.get_include()],
    platforms=['any'],
    license='MIT',
    classifiers=[
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 2",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent"
    ]
) | 30.73913 | 93 | 0.707214 |
ace33ab92ad3bd671e0e96d85552f53578b6f4a8 | 41,308 | py | Python | start.py | Tilix4/OpenPype | 8909bd890170880aa7ec8b673abaa25a9bdf40f2 | [
"MIT"
] | 1 | 2022-02-08T15:40:41.000Z | 2022-02-08T15:40:41.000Z | start.py | zafrs/OpenPype | 4b8e7e1ed002fc55b31307efdea70b0feaed474f | [
"MIT"
] | null | null | null | start.py | zafrs/OpenPype | 4b8e7e1ed002fc55b31307efdea70b0feaed474f | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""Main entry point for OpenPype command.
Bootstrapping process of OpenPype is as follows:
`OPENPYPE_PATH` is checked for existence - either one from environment or
from user settings. Precedence takes the one set by environment.
On this path we try to find OpenPype in directories version string in their
names. For example: `openpype-v3.0.1-foo` is valid name, or
even `foo_3.0.2` - as long as version can be determined from its name
_AND_ file `openpype/openpype/version.py` can be found inside, it is
considered OpenPype installation.
If no OpenPype repositories are found in `OPENPYPE_PATH` (user data dir)
then **Igniter** (OpenPype setup tool) will launch its GUI.
It can be used to specify `OPENPYPE_PATH` or if it is _not_ specified, current
*"live"* repositories will be used to create zip file and copy it to
appdata dir in user home and extract it there. Version will be determined by
version specified in OpenPype module.
If OpenPype repository directories are found in default install location
(user data dir) or in `OPENPYPE_PATH`, it will get list of those dirs
there and use latest one or the one specified with optional `--use-version`
command line argument. If the one specified doesn't exist then latest
available version will be used. All repositories in that dir will be added
to `sys.path` and `PYTHONPATH`.
If OpenPype is live (not frozen) then current version of OpenPype module
will be used. All directories under `repos` will be added to `sys.path` and
`PYTHONPATH`.
OpenPype depends on connection to `MongoDB`_. You can specify MongoDB
connection string via `OPENPYPE_MONGO` set in environment or it can be set
in user settings or via **Igniter** GUI.
So, bootstrapping OpenPype looks like this::
.. code-block:: bash
┌───────────────────────────────────────────────────────┐
│ Determine MongoDB connection: │
│ Use `OPENPYPE_MONGO`, system keyring `openPypeMongo` │
└──────────────────────────┬────────────────────────────┘
┌───- Found? -─┐
YES NO
│ │
│ ┌──────┴──────────────┐
│ │ Fire up Igniter GUI ├<-────────┐
│ │ and ask User │ │
│ └─────────────────────┘ │
│ │
│ │
┌─────────────────┴─────────────────────────────────────┐ │
│ Get location of OpenPype: │ │
│ 1) Test for `OPENPYPE_PATH` environment variable │ │
│ 2) Test `openPypePath` in registry setting │ │
│ 3) Test user data directory │ │
│ ····················································· │ │
│ If running from frozen code: │ │
│ - Use latest one found in user data dir │ │
│ If running from live code: │ │
│ - Use live code and install it to user data dir │ │
│ * can be overridden with `--use-version` argument │ │
└──────────────────────────┬────────────────────────────┘ │
┌─- Is OpenPype found? -─┐ │
YES NO │
│ │ │
│ ┌─────────────────┴─────────────┐ │
│ │ Look in `OPENPYPE_PATH`, find │ │
│ │ latest version and install it │ │
│ │ to user data dir. │ │
│ └──────────────┬────────────────┘ │
│ ┌─- Is OpenPype found? -─┐ │
│ YES NO -──────┘
│ │
├<-───────┘
│
┌─────────────┴────────────┐
│ Run OpenPype │
└─────═══════════════──────┘
Todo:
Move or remove bootstrapping environments out of the code.
Attributes:
silent_commands (set): list of commands for which we won't print OpenPype
logo and info header.
.. _MongoDB:
https://www.mongodb.com/
"""
import os
import re
import sys
import platform
import traceback
import subprocess
import site
import shutil
import distutils.spawn

from pathlib import Path
# OPENPYPE_ROOT is variable pointing to build (or code) directory
# WARNING `OPENPYPE_ROOT` must be defined before igniter import
# - igniter changes cwd which cause that filepath of this script won't lead
#   to right directory
if not getattr(sys, 'frozen', False):
    # Code root defined by `start.py` directory
    OPENPYPE_ROOT = os.path.dirname(os.path.abspath(__file__))
else:
    # Frozen build: the executable location is the root.
    OPENPYPE_ROOT = os.path.dirname(sys.executable)

    # add dependencies folder to sys.path for frozen code
    frozen_libs = os.path.normpath(
        os.path.join(OPENPYPE_ROOT, "dependencies")
    )
    sys.path.append(frozen_libs)
    sys.path.insert(0, OPENPYPE_ROOT)
    # add stuff from `<frozen>/dependencies` to PYTHONPATH so spawned
    # python subprocesses see the bundled dependencies as well.
    pythonpath = os.getenv("PYTHONPATH", "")
    paths = pythonpath.split(os.pathsep)
    paths.append(frozen_libs)
    os.environ["PYTHONPATH"] = os.pathsep.join(paths)

# Vendored python modules that must not be in PYTHONPATH environment but
# are required for OpenPype processes
vendor_python_path = os.path.join(OPENPYPE_ROOT, "vendor", "python")
# Vendored modules go first so they win over site-packages.
sys.path.insert(0, vendor_python_path)
import blessed # noqa: E402
import certifi # noqa: E402
if sys.__stdout__:
    # stdout is attached to a real stream: use blessed for colored output.
    term = blessed.Terminal()

    def _print(message: str):
        # Colorize the message prefix according to its marker; the slice
        # after each prefix keeps the rest of the message intact. NOTE:
        # some markers differ only in the amount of leading whitespace.
        if message.startswith("!!! "):
            print("{}{}".format(term.orangered2("!!! "), message[4:]))
            return
        if message.startswith(">>> "):
            print("{}{}".format(term.aquamarine3(">>> "), message[4:]))
            return
        if message.startswith("--- "):
            print("{}{}".format(term.darkolivegreen3("--- "), message[4:]))
            return
        if message.startswith("*** "):
            print("{}{}".format(term.gold("*** "), message[4:]))
            return
        if message.startswith("  - "):
            print("{}{}".format(term.wheat("  - "), message[4:]))
            return
        if message.startswith("  . "):
            print("{}{}".format(term.tan("  . "), message[4:]))
            return
        if message.startswith("     - "):
            print("{}{}".format(term.seagreen3("     - "), message[7:]))
            return
        if message.startswith("     ! "):
            print("{}{}".format(term.goldenrod("     ! "), message[7:]))
            return
        if message.startswith("     * "):
            print("{}{}".format(term.aquamarine1("     * "), message[7:]))
            return
        if message.startswith("    "):
            print("{}{}".format(term.darkseagreen3("    "), message[4:]))
            return
        # No recognized prefix: print unchanged.
        print(message)
else:
    # No stdout (e.g. GUI build without a console): plain print fallback.
    def _print(message: str):
        print(message)
# if SSL_CERT_FILE is not set prior to OpenPype launch, we set it to point
# to certifi bundle to make sure we have reasonably new CA certificates.
if os.getenv("SSL_CERT_FILE") and \
        os.getenv("SSL_CERT_FILE") != certifi.where():
    # A custom bundle was configured by the user - respect it.
    _print("--- your system is set to use custom CA certificate bundle.")
else:
    # Default to the certifi-provided CA bundle.
    ssl_cert_file = certifi.where()
    os.environ["SSL_CERT_FILE"] = ssl_cert_file
# "--headless" on the command line forces headless mode for this process.
if "--headless" in sys.argv:
    os.environ["OPENPYPE_HEADLESS_MODE"] = "1"
    sys.argv.remove("--headless")
else:
    # Drop the variable unless it is exactly "1" so subprocesses do not
    # inherit a stale/invalid value.
    if os.getenv("OPENPYPE_HEADLESS_MODE") != "1":
        os.environ.pop("OPENPYPE_HEADLESS_MODE", None)
# Set explicit log level when "--verbose" is passed; enable debug mode
# when "--debug" is passed.
if "--verbose" in sys.argv:
    expected_values = (
        "Expected: notset, debug, info, warning, error, critical"
        " or integer [0-50]."
    )
    # Consume "--verbose" together with its following value argument.
    idx = sys.argv.index("--verbose")
    sys.argv.pop(idx)
    if idx < len(sys.argv):
        value = sys.argv.pop(idx)
    else:
        raise RuntimeError((
            "Expect value after \"--verbose\" argument. {}"
        ).format(expected_values))

    # Map the value (numeric string or level name) to a logging level int.
    log_level = None
    low_value = value.lower()
    if low_value.isdigit():
        log_level = int(low_value)
    elif low_value == "notset":
        log_level = 0
    elif low_value == "debug":
        log_level = 10
    elif low_value == "info":
        log_level = 20
    elif low_value == "warning":
        log_level = 30
    elif low_value == "error":
        log_level = 40
    elif low_value == "critical":
        log_level = 50

    if log_level is None:
        raise RuntimeError((
            "Unexpected value after \"--verbose\" argument \"{}\". {}"
        ).format(value, expected_values))

    # Subprocesses inherit the chosen log level through the environment.
    os.environ["OPENPYPE_LOG_LEVEL"] = str(log_level)

# Enable debug mode, may affect log level if log level is not defined
if "--debug" in sys.argv:
    sys.argv.remove("--debug")
    os.environ["OPENPYPE_DEBUG"] = "1"
import igniter # noqa: E402
from igniter import BootstrapRepos # noqa: E402
from igniter.tools import (
get_openpype_global_settings,
get_openpype_path_from_settings,
validate_mongo_connection,
OpenPypeVersionNotFound
) # noqa
from igniter.bootstrap_repos import OpenPypeVersion # noqa: E402
# Single bootstrap helper used by the version-resolution code below.
bootstrap = BootstrapRepos()
# Commands for which the OpenPype logo and info header are not printed.
silent_commands = {"run", "igniter", "standalonepublisher",
                   "extractenvironments"}
def list_versions(openpype_versions: list, local_version=None) -> None:
    """Print all detected OpenPype versions and, optionally, the local one."""
    lines = ["  - Detected versions:"]
    for detected in sorted(openpype_versions):
        lines.append(f"     - {detected}: {detected.path}")
    if not openpype_versions:
        lines.append("  ! none in repository detected")
    if local_version:
        lines.append(f"  * local version {local_version}")
    for line in lines:
        _print(line)
def set_openpype_global_environments() -> None:
    """Set global OpenPype's environments.

    Merges the general environment from settings over the current process
    environment (via ``acre``) and replaces ``os.environ`` with the result.
    """
    import acre

    from openpype.settings import get_general_environments

    general_env = get_general_environments()

    # Merge settings-defined environment over the current process env and
    # compute the final values without cleaning unresolved keys.
    merged_env = acre.merge(
        acre.parse(general_env),
        dict(os.environ)
    )
    env = acre.compute(
        merged_env,
        cleanup=False
    )
    os.environ.clear()
    os.environ.update(env)

    # Hardcoded default values
    os.environ["PYBLISH_GUI"] = "pyblish_pype"
    # Change scale factor only if is not set
    if "QT_AUTO_SCREEN_SCALE_FACTOR" not in os.environ:
        os.environ["QT_AUTO_SCREEN_SCALE_FACTOR"] = "1"
def run(arguments: list, env: dict = None) -> int:
    """Use correct executable to run stuff.

    Passes arguments to the correct OpenPype executable: a frozen build
    re-invokes its own executable, while live code re-runs this script
    with the current Python interpreter.

    Args:
        arguments (list): Argument list to pass OpenPype.
        env (dict, optional): Dictionary containing environment.

    Returns:
        int: Process return code.
    """
    is_frozen = getattr(sys, 'frozen', False)
    cmd = [sys.executable] if is_frozen else [sys.executable, __file__]
    cmd += arguments

    process = subprocess.Popen(cmd, env=env)
    process.wait()
    _print(f">>> done [{process.returncode}]")
    return process.returncode
def run_disk_mapping_commands(settings):
    """Run disk mapping command.

    Used to map shared disk for OP to pull codebase.

    Args:
        settings (dict): Global settings; the optional "disk_mapping" key
            holds per-platform lists of [source, destination] pairs.

    Raises:
        RuntimeError: If the mapping command finishes with a non-zero code.
    """
    low_platform = platform.system().lower()
    disk_mapping = settings.get("disk_mapping")
    if not disk_mapping:
        return

    mappings = disk_mapping.get(low_platform) or []
    for source, destination in mappings:
        destination = destination.rstrip('/')
        source = source.rstrip('/')

        # Pick the platform-specific mapping command.
        if low_platform == "windows":
            args = ["subst", destination, source]
        elif low_platform == "darwin":
            scr = "do shell script \"ln -s {} {}\" with administrator privileges".format(source, destination)  # noqa: E501

            args = ["osascript", "-e", scr]
        else:
            args = ["sudo", "ln", "-s", source, destination]

        _print("disk mapping args:: {}".format(args))
        try:
            # Only create the mapping when the destination is missing.
            if not os.path.exists(destination):
                output = subprocess.Popen(args)
                # BUGFIX: ``returncode`` is ``None`` until the process has
                # finished, so without ``wait()`` the failure check below
                # could never trigger.
                output.wait()
                if output.returncode and output.returncode != 0:
                    exc_msg = "Executing was not successful: \"{}\"".format(
                        args)
                    raise RuntimeError(exc_msg)
        except TypeError as exc:
            _print("Error {} in mapping drive {}, {}".format(str(exc),
                                                             source,
                                                             destination))
            raise
def set_avalon_environments():
    """Set avalon specific environments.

    These are non modifiable environments for the avalon workflow that must
    be set before the avalon module is imported, because avalon reads them
    into globals via environment variables.
    """
    # Keep an already configured database name (used e.g. by tests),
    # otherwise fall back to the default "avalon".
    database_name = os.environ.get("AVALON_DB") or "avalon"
    avalon_env = {
        # Mongo DB name where avalon docs are stored
        "AVALON_DB": database_name,
        # Name of config
        "AVALON_LABEL": "OpenPype",
    }
    os.environ.update(avalon_env)
def set_modules_environments():
    """Set global environments for OpenPype modules.

    This requires to have OpenPype in `sys.path`.
    """
    from openpype.modules import ModulesManager
    import acre

    modules_manager = ModulesManager()

    # Environment variables contributed by all available modules.
    module_envs = modules_manager.collect_global_environments()

    # Merge environments with current environments and update values
    if module_envs:
        parsed_envs = acre.parse(module_envs)
        env = acre.merge(parsed_envs, dict(os.environ))
        os.environ.clear()
        os.environ.update(env)
def _startup_validations():
    """Validations before OpenPype starts.

    Currently validates the bundled third-party binaries. On failure in
    headless mode the exception propagates; otherwise a minimal tkinter
    error dialog is shown and the process exits with code 1.
    """
    try:
        _validate_thirdparty_binaries()
    except Exception as exc:
        if os.environ.get("OPENPYPE_HEADLESS_MODE"):
            raise

        import tkinter
        from tkinter.messagebox import showerror

        # Create an (almost) invisible root window so the message box can
        # be shown without a visible main window.
        root = tkinter.Tk()
        root.attributes("-alpha", 0.0)
        root.wm_state("iconic")
        if platform.system().lower() != "windows":
            root.withdraw()

        showerror(
            "Startup validations didn't pass",
            str(exc)
        )
        root.withdraw()
        sys.exit(1)
def _validate_thirdparty_binaries():
"""Check existence of thirdpart executables."""
low_platform = platform.system().lower()
binary_vendors_dir = os.path.join(
os.environ["OPENPYPE_ROOT"],
"vendor",
"bin"
)
error_msg = (
"Missing binary dependency {}. Please fetch thirdparty dependencies."
)
# Validate existence of FFmpeg
ffmpeg_dir = os.path.join(binary_vendors_dir, "ffmpeg", low_platform)
if low_platform == "windows":
ffmpeg_dir = os.path.join(ffmpeg_dir, "bin")
ffmpeg_executable = os.path.join(ffmpeg_dir, "ffmpeg")
ffmpeg_result = distutils.spawn.find_executable(ffmpeg_executable)
if ffmpeg_result is None:
raise RuntimeError(error_msg.format("FFmpeg"))
# Validate existence of OpenImageIO (not on MacOs)
oiio_tool_path = None
if low_platform == "linux":
oiio_tool_path = os.path.join(
binary_vendors_dir,
"oiio",
low_platform,
"bin",
"oiiotool"
)
elif low_platform == "windows":
oiio_tool_path = os.path.join(
binary_vendors_dir,
"oiio",
low_platform,
"oiiotool"
)
oiio_result = None
if oiio_tool_path is not None:
oiio_result = distutils.spawn.find_executable(oiio_tool_path)
if oiio_result is None:
raise RuntimeError(error_msg.format("OpenImageIO"))
def _process_arguments() -> tuple:
    """Process command line arguments.

    Consumed arguments are removed from ``sys.argv`` so they do not leak
    into the OpenPype CLI parsing later.

    Returns:
        tuple: ``(use_version, use_staging, commands)`` - specific version
            to use (if any), flag to prioritize staging (if set) and list
            of special commands ("validate", "print_versions") collected
            from arguments.
    """
    # check for `--use-version=3.0.0` argument and `--use-staging`
    use_version = None
    use_staging = False
    commands = []

    # OpenPype version specification through arguments
    use_version_arg = "--use-version"

    for arg in sys.argv:
        if arg.startswith(use_version_arg):
            # Remove arg from sys argv
            sys.argv.remove(arg)
            # Extract string after use version arg
            use_version_value = arg[len(use_version_arg):]

            if (
                not use_version_value
                or not use_version_value.startswith("=")
            ):
                _print("!!! Please use option --use-version like:")
                _print("    --use-version=3.0.0")
                sys.exit(1)

            version_str = use_version_value[1:]
            use_version = None
            if version_str.lower() == "latest":
                use_version = "latest"
            else:
                # Accept a "major.minor.patch" version, optionally with a
                # suffix (e.g. "+staging").
                m = re.search(
                    r"(?P<version>\d+\.\d+\.\d+(?:\S*)?)", version_str
                )
                if m and m.group('version'):
                    use_version = m.group('version')
                    _print(">>> Requested version [ {} ]".format(use_version))
                    if "+staging" in use_version:
                        use_staging = True
                    break

            if use_version is None:
                _print("!!! Requested version isn't in correct format.")
                _print(("    Use --list-versions to find out"
                        " proper version string."))
                sys.exit(1)

        if arg == "--validate-version":
            # '--validate-version' without '=<version>' is an error.
            _print("!!! Please use option --validate-version like:")
            _print("    --validate-version=3.0.0")
            sys.exit(1)

        if arg.startswith("--validate-version="):
            m = re.search(
                r"--validate-version=(?P<version>\d+\.\d+\.\d+(?:\S*)?)", arg)
            if m and m.group('version'):
                use_version = m.group('version')
                sys.argv.remove(arg)
                commands.append("validate")
            else:
                _print("!!! Requested version isn't in correct format.")
                _print(("    Use --list-versions to find out"
                        " proper version string."))
                sys.exit(1)

    if "--use-staging" in sys.argv:
        use_staging = True
        sys.argv.remove("--use-staging")

    if "--list-versions" in sys.argv:
        commands.append("print_versions")
        sys.argv.remove("--list-versions")

    # handle igniter
    # this is helper to run igniter before anything else
    if "igniter" in sys.argv:
        if os.getenv("OPENPYPE_HEADLESS_MODE") == "1":
            _print("!!! Cannot open Igniter dialog in headless mode.")
            sys.exit(1)
        return_code = igniter.open_dialog()

        # this is when we want to run OpenPype without installing anything.
        # or we are ready to run.
        if return_code not in [2, 3]:
            sys.exit(return_code)

        # Replace the consumed "igniter" argument with "tray" so the CLI
        # continues by launching the tray application.
        idx = sys.argv.index("igniter")
        sys.argv.pop(idx)
        sys.argv.insert(idx, "tray")

    return use_version, use_staging, commands
def _determine_mongodb() -> str:
    """Determine mongodb connection string.

    First use ``OPENPYPE_MONGO`` environment variable, then system keyring.
    Then try to run **Igniter UI** to let user specify it.

    Returns:
        str: mongodb connection URL

    Raises:
        RuntimeError: if mongodb connection url cannot be determined.
    """
    openpype_mongo = os.getenv("OPENPYPE_MONGO", None)
    if not openpype_mongo:
        # try system keyring
        try:
            openpype_mongo = bootstrap.secure_registry.get_item(
                "openPypeMongo"
            )
        except ValueError:
            # Nothing stored in keyring yet.
            pass

    if openpype_mongo:
        # Validate the connection; discard the URL when unreachable so the
        # Igniter UI is offered below.
        result, msg = validate_mongo_connection(openpype_mongo)
        if not result:
            _print(msg)
            openpype_mongo = None

    if not openpype_mongo:
        _print("*** No DB connection string specified.")
        if os.getenv("OPENPYPE_HEADLESS_MODE") == "1":
            _print("!!! Cannot open Igniter dialog in headless mode.")
            _print(
                "!!! Please use `OPENPYPE_MONGO` to specify server address.")
            sys.exit(1)
        _print("--- launching setup UI ...")

        result = igniter.open_dialog()
        if result == 0:
            raise RuntimeError("MongoDB URL was not defined")

        # The dialog stores the URL either to the environment or keyring.
        openpype_mongo = os.getenv("OPENPYPE_MONGO")
        if not openpype_mongo:
            try:
                openpype_mongo = bootstrap.secure_registry.get_item(
                    "openPypeMongo")
            except ValueError:
                raise RuntimeError("Missing MongoDB url")

    return openpype_mongo
def _initialize_environment(openpype_version: OpenPypeVersion) -> None:
    """Activate given OpenPype version in the current process.

    Sets ``OPENPYPE_VERSION`` and ``OPENPYPE_REPOS_ROOT`` environment
    variables and injects the version's directories into ``sys.path``
    and ``PYTHONPATH``.

    Args:
        openpype_version (OpenPypeVersion): Version to activate; its
            ``path`` attribute must point to the version directory.
    """
    version_path = openpype_version.path
    os.environ["OPENPYPE_VERSION"] = str(openpype_version)
    # set OPENPYPE_REPOS_ROOT to point to currently used OpenPype version.
    os.environ["OPENPYPE_REPOS_ROOT"] = os.path.normpath(
        version_path.as_posix()
    )
    # inject version to Python environment (sys.path, ...)
    _print(">>> Injecting OpenPype version to running environment  ...")
    bootstrap.add_paths_from_directory(version_path)

    # Additional sys paths related to OPENPYPE_REPOS_ROOT directory
    # TODO move additional paths to `boot` part when OPENPYPE_REPOS_ROOT will
    #   point to same hierarchy from code and from frozen OpenPype
    additional_paths = [
        os.environ["OPENPYPE_REPOS_ROOT"],
        # add OpenPype tools
        os.path.join(os.environ["OPENPYPE_REPOS_ROOT"], "openpype", "tools"),
        # add common OpenPype vendor
        # (common for multiple Python interpreter versions)
        os.path.join(
            os.environ["OPENPYPE_REPOS_ROOT"],
            "openpype",
            "vendor",
            "python",
            "common"
        )
    ]

    # Prepend the additional paths both to this process' sys.path and to
    # PYTHONPATH so subprocesses inherit them.
    split_paths = os.getenv("PYTHONPATH", "").split(os.pathsep)
    for path in additional_paths:
        split_paths.insert(0, path)
        sys.path.insert(0, path)
    os.environ["PYTHONPATH"] = os.pathsep.join(split_paths)
def _find_frozen_openpype(use_version: str = None,
                          use_staging: bool = False) -> Path:
    """Find OpenPype to run from frozen code.

    This will process and modify environment variables:
    ``PYTHONPATH``, ``OPENPYPE_VERSION``, ``OPENPYPE_REPOS_ROOT``

    Version resolution priority: explicit ``use_version`` argument, then
    studio-wide version from settings, then the latest detected version.

    Args:
        use_version (str, optional): Try to use specified version.
        use_staging (bool, optional): Prefer *staging* flavor over production.

    Returns:
        Path: Path to version to be used.

    Raises:
        OpenPypeVersionNotFound: If no OpenPype version is found or no
            staging version (if requested).
    """
    # Collect OpenPype versions
    installed_version = OpenPypeVersion.get_installed_version()
    # Expected version that should be used by studio settings
    # - this option is used only if version is not explicitly set and if
    #   studio has set explicit version in settings
    studio_version = OpenPypeVersion.get_expected_studio_version(use_staging)

    if use_version is not None:
        # Specific version is defined
        if use_version.lower() == "latest":
            # Version says to use latest version
            _print("Finding latest version defined by use version")
            openpype_version = bootstrap.find_latest_openpype_version(
                use_staging
            )
        else:
            _print("Finding specified version \"{}\"".format(use_version))
            openpype_version = bootstrap.find_openpype_version(
                use_version, use_staging
            )

        if openpype_version is None:
            raise OpenPypeVersionNotFound(
                "Requested version \"{}\" was not found.".format(
                    use_version
                )
            )

    elif studio_version is not None:
        # Studio has defined a version to use
        _print("Finding studio version \"{}\"".format(studio_version))
        openpype_version = bootstrap.find_openpype_version(
            studio_version, use_staging
        )
        if openpype_version is None:
            raise OpenPypeVersionNotFound((
                "Requested OpenPype version \"{}\" defined by settings"
                " was not found."
            ).format(studio_version))

    else:
        # Default behavior to use latest version
        _print("Finding latest version")
        openpype_version = bootstrap.find_latest_openpype_version(
            use_staging
        )
        if openpype_version is None:
            if use_staging:
                reason = "Didn't find any staging versions."
            else:
                reason = "Didn't find any versions."
            raise OpenPypeVersionNotFound(reason)

    # get local frozen version and add it to detected version so if it is
    # newer it will be used instead.
    if installed_version == openpype_version:
        # Resolved version equals the build itself - boot from code.
        version_path = _bootstrap_from_code(use_version, use_staging)
        openpype_version = OpenPypeVersion(
            version=BootstrapRepos.get_version(version_path),
            path=version_path)
        _initialize_environment(openpype_version)
        return version_path

    # test if latest detected is installed (in user data dir)
    is_inside = False
    try:
        is_inside = openpype_version.path.resolve().relative_to(
            bootstrap.data_dir)
    except ValueError:
        # if relative path cannot be calculated, openpype version is not
        # inside user data dir
        pass

    if not is_inside:
        # install latest version to user data dir
        if os.getenv("OPENPYPE_HEADLESS_MODE") == "1":
            version_path = bootstrap.install_version(
                openpype_version, force=True
            )
        else:
            # Show installation progress dialog to the user.
            version_path = igniter.open_update_window(openpype_version)

        openpype_version.path = version_path
        _initialize_environment(openpype_version)
        return openpype_version.path

    if openpype_version.path.is_file():
        # Version is a zip inside user data dir - extract it first.
        _print(">>> Extracting zip file ...")
        try:
            version_path = bootstrap.extract_openpype(openpype_version)
        except OSError as e:
            _print("!!! failed: {}".format(str(e)))
            sys.exit(1)
        else:
            # cleanup zip after extraction
            os.unlink(openpype_version.path)
            openpype_version.path = version_path

    _initialize_environment(openpype_version)
    return openpype_version.path
def _bootstrap_from_code(use_version, use_staging):
    """Bootstrap live code (or the one coming with frozen OpenPype).

    Adds the resolved version's repositories to ``sys.path`` and
    ``PYTHONPATH`` and sets ``OPENPYPE_VERSION`` and
    ``OPENPYPE_REPOS_ROOT`` environment variables.

    Args:
        use_version (str): specific version to use (``None`` or "latest"
            to use the local/latest one).
        use_staging (bool): prefer *staging* flavor over production.

    Returns:
        Path: path to sourced version.

    Raises:
        OpenPypeVersionNotFound: when the requested version (or any
            staging version when ``use_staging`` is set) is not available.
    """
    # run through repos and add them to `sys.path` and `PYTHONPATH`
    # set root
    _openpype_root = OPENPYPE_ROOT

    # Unset use version if latest should be used
    # - when executed from code then code is expected as latest
    # - when executed from build then build is already marked as latest
    #   in '_find_frozen_openpype'
    if use_version and use_version.lower() == "latest":
        use_version = None

    if getattr(sys, 'frozen', False):
        local_version = bootstrap.get_version(Path(_openpype_root))
        switch_str = f" - will switch to {use_version}" if use_version else ""
        _print(f"  - booting version: {local_version}{switch_str}")
        assert local_version
    else:
        # get current version of OpenPype
        local_version = OpenPypeVersion.get_installed_version_str()

    # All cases when should be used different version than build
    if (use_version and use_version != local_version) or use_staging:
        if use_version:
            # Explicit version should be used
            version_to_use = bootstrap.find_openpype_version(
                use_version, use_staging
            )
            if version_to_use is None:
                raise OpenPypeVersionNotFound(
                    "Requested version \"{}\" was not found.".format(
                        use_version
                    )
                )
        else:
            # Staging version should be used
            version_to_use = bootstrap.find_latest_openpype_version(
                use_staging
            )
            if version_to_use is None:
                if use_staging:
                    reason = "Didn't find any staging versions."
                else:
                    # This reason is backup for possible bug in code
                    reason = "Didn't find any versions."
                raise OpenPypeVersionNotFound(reason)

        # Start extraction of version if needed
        if version_to_use.path.is_file():
            version_to_use.path = bootstrap.extract_openpype(version_to_use)
        bootstrap.add_paths_from_directory(version_to_use.path)
        # BUGFIX: 'use_version' may be None at this point (when only
        # '--use-staging' was passed); environment values must be strings,
        # so use the resolved version object instead (same pattern as in
        # '_initialize_environment').
        os.environ["OPENPYPE_VERSION"] = str(version_to_use)
        version_path = version_to_use.path
        os.environ["OPENPYPE_REPOS_ROOT"] = (
            version_path / "openpype"
        ).as_posix()
        _openpype_root = version_to_use.path.as_posix()
    else:
        os.environ["OPENPYPE_VERSION"] = local_version
        version_path = Path(_openpype_root)
        os.environ["OPENPYPE_REPOS_ROOT"] = _openpype_root

    # add self to sys.path of current process
    # NOTE: this seems to be duplicate of 'add_paths_from_directory'
    sys.path.insert(0, _openpype_root)

    # add venv 'site-packages' to PYTHONPATH
    python_path = os.getenv("PYTHONPATH", "")
    split_paths = python_path.split(os.pathsep)
    # add self to python paths
    split_paths.insert(0, _openpype_root)

    # last one should be venv site-packages
    # this is slightly convoluted as we can get here from frozen code too
    # in case when we are running without any version installed.
    if not getattr(sys, 'frozen', False):
        split_paths.append(site.getsitepackages()[-1])

    # TODO move additional paths to `boot` part when OPENPYPE_ROOT will
    #   point to same hierarchy from code and from frozen OpenPype
    additional_paths = [
        # add OpenPype tools
        os.path.join(_openpype_root, "openpype", "tools"),
        # add common OpenPype vendor
        # (common for multiple Python interpreter versions)
        os.path.join(
            _openpype_root,
            "openpype",
            "vendor",
            "python",
            "common"
        )
    ]
    for path in additional_paths:
        split_paths.insert(0, path)
        sys.path.insert(0, path)
    os.environ["PYTHONPATH"] = os.pathsep.join(split_paths)

    return version_path
def _boot_validate_versions(use_version, local_version):
    """Run integrity validation of a single requested OpenPype version.

    Lists known versions and exits the process with code 1 when the
    requested version cannot be found; otherwise prints the validation
    result for it.
    """
    _print(f">>> Validating version [ {use_version} ]")
    # Gather every known version - staging first, then production,
    # zip archives included.
    all_versions = []
    for staging_flag in (True, False):
        all_versions += bootstrap.find_openpype(
            include_zips=True, staging=staging_flag)

    if not any(str(version) == use_version for version in all_versions):
        _print(f"!!! Version [ {use_version} ] not found.")
        list_versions(all_versions, local_version)
        sys.exit(1)

    # Resolve the path of the requested version and validate its content.
    version_path = bootstrap.get_version_path_from_list(
        use_version, all_versions)
    valid, message = bootstrap.validate_openpype_version(version_path)
    prefix = ">>> " if valid else "!!! "
    _print("{}{}".format(prefix, message))
def _boot_print_versions(use_staging, local_version, openpype_root):
    """Print all detected OpenPype versions to the console.

    Note:
        The ``local_version`` parameter is shadowed - it is recomputed
        below before first use, so the passed-in value is never read.
    """
    if not use_staging:
        _print("--- This will list only non-staging versions detected.")
        _print("    To see staging versions, use --use-staging argument.")
    else:
        _print("--- This will list only staging versions detected.")
        _print("    To see other version, omit --use-staging argument.")

    openpype_versions = bootstrap.find_openpype(include_zips=True,
                                                staging=use_staging)
    if getattr(sys, 'frozen', False):
        local_version = bootstrap.get_version(Path(openpype_root))
    else:
        local_version = OpenPypeVersion.get_installed_version_str()

    list_versions(openpype_versions, local_version)
def _boot_handle_missing_version(local_version, use_staging, message):
    """Report that no usable OpenPype version was found.

    In headless mode the known versions are listed to the console;
    otherwise a message dialog is shown to the user.
    """
    _print(message)
    headless = os.environ.get("OPENPYPE_HEADLESS_MODE") == "1"
    if not headless:
        igniter.show_message_dialog("Version not found", message)
        return
    found_versions = bootstrap.find_openpype(
        include_zips=True, staging=use_staging
    )
    list_versions(found_versions, local_version)
def boot():
    """Bootstrap OpenPype.

    Full startup sequence: validate environment, process CLI arguments,
    resolve the MongoDB connection, pick the OpenPype version to run,
    prepare environments and finally hand control to ``openpype.cli.main``.
    """
    # ------------------------------------------------------------------------
    # Set environment to OpenPype root path
    # ------------------------------------------------------------------------
    os.environ["OPENPYPE_ROOT"] = OPENPYPE_ROOT

    # ------------------------------------------------------------------------
    # Do necessary startup validations
    # ------------------------------------------------------------------------
    _startup_validations()

    # ------------------------------------------------------------------------
    # Process arguments
    # ------------------------------------------------------------------------
    use_version, use_staging, commands = _process_arguments()

    if os.getenv("OPENPYPE_VERSION"):
        if use_version:
            # NOTE(review): there is a missing space between "VERSION" and
            # "is" in the implicitly concatenated message below.
            _print(("*** environment variable OPENPYPE_VERSION"
                    "is overridden by command line argument."))
        else:
            _print(">>> version set by environment variable")
            use_staging = "staging" in os.getenv("OPENPYPE_VERSION")
            use_version = os.getenv("OPENPYPE_VERSION")

    # ------------------------------------------------------------------------
    # Determine mongodb connection
    # ------------------------------------------------------------------------
    try:
        openpype_mongo = _determine_mongodb()
    except RuntimeError as e:
        # without mongodb url we are done for.
        _print(f"!!! {e}")
        sys.exit(1)

    os.environ["OPENPYPE_MONGO"] = openpype_mongo
    # name of Pype database
    os.environ["OPENPYPE_DATABASE_NAME"] = \
        os.environ.get("OPENPYPE_DATABASE_NAME") or "openpype"

    global_settings = get_openpype_global_settings(openpype_mongo)

    _print(">>> run disk mapping command ...")
    run_disk_mapping_commands(global_settings)

    # Logging to server enabled/disabled
    log_to_server = global_settings.get("log_to_server", True)
    if log_to_server:
        os.environ["OPENPYPE_LOG_TO_SERVER"] = "1"
        log_to_server_msg = "ON"
    else:
        os.environ.pop("OPENPYPE_LOG_TO_SERVER", None)
        log_to_server_msg = "OFF"
    _print(f">>> Logging to server is turned {log_to_server_msg}")

    # Get openpype path from database and set it to environment so openpype can
    # find its versions there and bootstrap them.
    openpype_path = get_openpype_path_from_settings(global_settings)

    if getattr(sys, 'frozen', False):
        local_version = bootstrap.get_version(Path(OPENPYPE_ROOT))
    else:
        local_version = OpenPypeVersion.get_installed_version_str()

    if "validate" in commands:
        _boot_validate_versions(use_version, local_version)
        # NOTE(review): exits with code 1 even when validation succeeded.
        sys.exit(1)

    if not openpype_path:
        _print("*** Cannot get OpenPype path from database.")

    if not os.getenv("OPENPYPE_PATH") and openpype_path:
        os.environ["OPENPYPE_PATH"] = openpype_path

    if "print_versions" in commands:
        _boot_print_versions(use_staging, local_version, OPENPYPE_ROOT)
        sys.exit(1)

    # ------------------------------------------------------------------------
    # Find OpenPype versions
    # ------------------------------------------------------------------------
    # WARNING: Environment OPENPYPE_REPOS_ROOT may change if frozen OpenPype
    # is executed
    if getattr(sys, 'frozen', False):
        # find versions of OpenPype to be used with frozen code
        try:
            version_path = _find_frozen_openpype(use_version, use_staging)
        except OpenPypeVersionNotFound as exc:
            _boot_handle_missing_version(local_version, use_staging, str(exc))
            sys.exit(1)
        except RuntimeError as e:
            # no version to run
            _print(f"!!! {e}")
            sys.exit(1)
        # validate version
        _print(f">>> Validating version [ {str(version_path)} ]")
        result = bootstrap.validate_openpype_version(version_path)
        if not result[0]:
            _print(f"!!! Invalid version: {result[1]}")
            sys.exit(1)
        _print(f"--- version is valid")
    else:
        try:
            version_path = _bootstrap_from_code(use_version, use_staging)
        except OpenPypeVersionNotFound as exc:
            _boot_handle_missing_version(local_version, use_staging, str(exc))
            sys.exit(1)

    # set this to point either to `python` from venv in case of live code
    # or to `openpype` or `openpype_console` in case of frozen code
    os.environ["OPENPYPE_EXECUTABLE"] = sys.executable

    # delete OpenPype module and it's submodules from cache so it is used from
    # specific version
    # NOTE(review): the comprehension below already pops the modules from
    # 'sys.modules'; 'modules_to_del' then holds module objects, so the
    # following 'del' loop is effectively a no-op silenced by the excepts.
    modules_to_del = [
        sys.modules.pop(module_name)
        for module_name in tuple(sys.modules)
        if module_name == "openpype" or module_name.startswith("openpype.")
    ]

    try:
        for module_name in modules_to_del:
            del sys.modules[module_name]
    except AttributeError:
        pass
    except KeyError:
        pass

    _print(">>> loading environments ...")
    # Avalon environments must be set before avalon module is imported
    _print("  - for Avalon ...")
    set_avalon_environments()
    _print("  - global OpenPype ...")
    set_openpype_global_environments()
    _print("  - for modules ...")
    set_modules_environments()

    assert version_path, "Version path not defined."

    # print info when not running scripts defined in 'silent commands'
    if all(arg not in silent_commands for arg in sys.argv):
        from openpype.lib import terminal as t
        from openpype.version import __version__
        info = get_info(use_staging)
        info.insert(0, f">>> Using OpenPype from [ {version_path} ]")

        t_width = 20
        try:
            t_width = os.get_terminal_size().columns - 2
        except (ValueError, OSError):
            # running without terminal
            pass

        _header = f"*** OpenPype [{__version__}] "
        info.insert(0, _header + "-" * (t_width - len(_header)))

        for i in info:
            t.echo(i)

    from openpype import cli
    try:
        cli.main(obj={}, prog_name="openpype")
    except Exception:  # noqa
        exc_info = sys.exc_info()
        _print("!!! OpenPype crashed:")
        traceback.print_exception(*exc_info)
        sys.exit(1)
def get_info(use_staging=None) -> list:
    """Collect additional runtime information formatted for console output.

    Args:
        use_staging (bool, optional): Whether staging variant is in use.

    Returns:
        list: Formatted lines describing variant, repos root, mongo,
            render-farm services and logging configuration.
    """
    from openpype.lib.mongo import get_default_components
    from openpype.lib.log import PypeLogger

    components = get_default_components()

    inf = []
    if use_staging:
        inf.append(("OpenPype variant", "staging"))
    else:
        inf.append(("OpenPype variant", "production"))
    inf.append(
        ("Running OpenPype from", os.environ.get('OPENPYPE_REPOS_ROOT'))
    )
    inf.append(("Using mongodb", components["host"]))

    if os.environ.get("FTRACK_SERVER"):
        inf.append(("Using FTrack at",
                    os.environ.get("FTRACK_SERVER")))

    if os.environ.get('DEADLINE_REST_URL'):
        inf.append(("Using Deadline webservice at",
                    os.environ.get("DEADLINE_REST_URL")))

    if os.environ.get('MUSTER_REST_URL'):
        inf.append(("Using Muster at",
                    os.environ.get("MUSTER_REST_URL")))

    # Reinitialize logger so it picks up the current mongo configuration.
    PypeLogger.initialize()
    mongo_components = get_default_components()
    if mongo_components["host"]:
        inf.append(("Logging to MongoDB", mongo_components["host"]))
        inf.append(("  - port", mongo_components["port"] or "<N/A>"))
        inf.append(("  - database", PypeLogger.log_database_name))
        inf.append(("  - collection", PypeLogger.log_collection_name))
        inf.append(("  - user", mongo_components["username"] or "<N/A>"))
        if mongo_components["auth_db"]:
            inf.append(("  - auth source", mongo_components["auth_db"]))

    # Align all values into "... <label>:  [ <value> ]" columns.
    maximum = max(len(i[0]) for i in inf)
    formatted = []
    for info in inf:
        padding = (maximum - len(info[0])) + 1
        formatted.append(
            "... {}:{}[ {} ]".format(info[0], " " * padding, info[1]))
    return formatted
if __name__ == "__main__":
    # Entry point: bootstrap and run OpenPype.
    boot()
| 35.396744 | 123 | 0.591072 |
ace33c1aa660a31a9c1ffc6d39738233aee6cb07 | 7,241 | py | Python | src/jose/jwa/aes.py | hdknr/jose | d872407e9f3b3a0262e6bb1cdb599b5c4c1d9ee4 | [
"BSD-2-Clause-FreeBSD"
] | null | null | null | src/jose/jwa/aes.py | hdknr/jose | d872407e9f3b3a0262e6bb1cdb599b5c4c1d9ee4 | [
"BSD-2-Clause-FreeBSD"
] | 1 | 2015-01-23T09:37:17.000Z | 2015-01-23T09:37:17.000Z | src/jose/jwa/aes.py | hdknr/jose | d872407e9f3b3a0262e6bb1cdb599b5c4c1d9ee4 | [
"BSD-2-Clause-FreeBSD"
] | null | null | null | from __future__ import print_function
from Crypto.Cipher import AES
from Crypto.Hash import HMAC, SHA256, SHA384, SHA512
from Crypto.Util.strxor import strxor
from struct import pack
from jose.base import BaseContentEncryptor
def slice(s, n):
    """Split *s* into consecutive chunks of length *n* (last may be shorter).

    NOTE: intentionally shadows the builtin ``slice`` within this module.
    """
    chunk_starts = range(0, len(s), n)
    return [s[start:start + n] for start in chunk_starts]
AES_IV = b'\xA6\xA6\xA6\xA6\xA6\xA6\xA6\xA6'
def aes_key_wrap(K, P):
    """
    aes key wrap : :rfc:`3394` 2.2.1

    :param str K: key encryption key
    :param str P: plaintext (the key material to wrap; multiple of 8 octets)
    :return: ciphertext - IV block followed by n wrapped 64bit blocks

    NOTE(review): Python 2 only - relies on integer '/' division and on
    str/bytes being interchangeable.
    """
    assert len(K) * 8 in [128, 192, 256]    # key bits
    assert len(P) % 8 == 0                  # 64 bit block
    n = len(P) / 8                          # number of 64 bit blocks
    A = AES_IV                              # Set A = IV
    R = [b'\0\0\0\0\0\0\0\0'] + slice(P, 8)
    # copy of slice every 8 octets
    # For i = 1 to n ; R[i] = P[i]  (R[0] is a placeholder for A)
    _AES = AES.AESCipher(K)
    for j in range(0, 6):                   # For j=0 to 5
        for i in range(1, n + 1):           # For i=1 to n
            B = _AES.encrypt(A + R[i])      # B = AES(K, A | R[i])
            R[i] = B[8:]                    # R[i] = LSB(64, B)
            t = pack("!q", (n * j) + i)
            A = strxor(B[:8], t)
            # A = MSB(64, B) ^ t where t = (n*j)+i
    R[0] = A                                # Set C[0] = A
    return "".join(R)                       # For i = 1 to n C[i] = R[i]
def aes_key_unwrap(K, C):
    """
    aes key unwrap : :rfc:`3394` 2.2.2

    :param str K: key encryption key
    :param str C: ciphertext produced by :func:`aes_key_wrap`
    :return: the unwrapped plaintext key material
    :raises Exception: when the recovered IV does not match ``AES_IV``
        (integrity check failed)

    NOTE(review): Python 2 only - relies on integer '/' division and on
    str/bytes being interchangeable.
    """
    assert len(K) * 8 in [128, 192, 256]    # key bits
    assert len(C) % 8 == 0                  # 64 bit block
    n = len(C) / 8 - 1                      # 64bit blocks (minus IV block)
    R = slice(C, 8)
    A = R[0]                                # Set A = C[0] (=R[0])
    R[0] = [b'\0\0\0\0\0\0\0\0']
    # init R[0]
    # NOTE(review): assigns a one-element *list*, not bytes; the value is
    # never read afterwards so this quirk is harmless.
    # For i = 1 to n ; R[i] = C[i]
    _AES = AES.AESCipher(K)
    for j in range(5, -1, -1):              # For j = 5 to 0
        for i in range(n, 0, -1):           # For i = n to 1
            t = pack("!q", (n * j) + i)     # t = n * j + i
            src = strxor(A, t) + R[i]       # A ^ t
            B = _AES.decrypt(src)
            # B = AES-1(K, (A ^ t) | R[i]) where t = n*j+i
            A = B[:8]                       # A = MSB(64, B)
            R[i] = B[8:]                    # R[i] = LSB(64, B)
    if A == AES_IV:
        return "".join(R[1:])               # For i = 1 to n; P[i] = R[i]
    else:
        raise Exception("unwrap failed: Invalid IV")
# Key Encryption
from jose.base import BaseKeyEncryptor
class AesKeyEncryptor(BaseKeyEncryptor):
    """Base for JWA "AxxxKW" key management algorithms.

    Wraps/unwraps the content encryption key (CEK) with a shared
    symmetric key encryption key (KEK) using RFC 3394 AES Key Wrap.
    Subclasses only define ``_KEY_LEN`` and ``_IV_LEN``.
    """
    @classmethod
    def key_length(cls):
        # KEK length in bytes.
        return cls._KEY_LEN

    @classmethod
    def iv_length(cls):
        return cls._IV_LEN

    @classmethod
    def encrypt(cls, jwk, cek, *args, **kwargs):
        # Use the first _KEY_LEN bytes of the JWK shared key as KEK.
        key = jwk.key.shared_key[:cls._KEY_LEN]
        return cls.kek_encrypt(key, cek)

    @classmethod
    def decrypt(cls, jwk, cek_ci, *args, **kwargs):
        key = jwk.key.shared_key[:cls._KEY_LEN]
        return cls.kek_decrypt(key, cek_ci)

    @classmethod
    def kek_encrypt(cls, kek, cek, *args, **kwargs):
        # RFC 3394 key wrap of the CEK.
        return aes_key_wrap(kek, cek)

    @classmethod
    def kek_decrypt(cls, kek, cek_ci, *args, **kwargs):
        return aes_key_unwrap(kek, cek_ci)

    @classmethod
    def provide(cls, enc, jwk, jwe, cek=None, iv=None, *args, **kwargs):
        """Provide ``(cek, iv, wrapped_cek, kek)`` for JWE encryption.

        Generates a fresh CEK/IV pair when none is supplied.
        """
        _enc = enc.encryptor
        if cek:
            # TODO check iv lenth and validity
            pass
        else:
            cek, iv = _enc.create_key_iv()
        cek_ci = cls.encrypt(jwk, cek, iv, "")
        return (cek, iv, cek_ci, None)

    @classmethod
    def agree(cls, enc, jwk, jwe, cek_ci, *args, **kwargs):
        """Recover the CEK by unwrapping its ciphertext with the JWK."""
        cek = cls.decrypt(jwk, cek_ci)
        return cek
class A128KW(AesKeyEncryptor):
    # JWA "A128KW": AES Key Wrap with a 128bit key encryption key.
    _KEY_LEN = 16
    _IV_LEN = 16


class A192KW(AesKeyEncryptor):
    # JWA "A192KW": AES Key Wrap with a 192bit key encryption key.
    _KEY_LEN = 24
    _IV_LEN = 16


class A256KW(AesKeyEncryptor):
    # JWA "A256KW": AES Key Wrap with a 256bit key encryption key.
    _KEY_LEN = 32
    _IV_LEN = 16
# Content Encryption
# AES block size in bytes; PKCS#5 padding always pads up to a multiple
# of this value.
_BS = 16


def pkcs5_pad(s):
    """Append PKCS#5 padding so the length becomes a multiple of ``_BS``."""
    pad_len = _BS - len(s) % _BS
    return s + chr(pad_len) * pad_len


def pkcs5_unpad(s):
    """Strip PKCS#5 padding (the last byte encodes the padding length)."""
    return s[0:-ord(s[-1])]


def to_al(x):
    """Return AL: the bit length of *x* as a 64-bit big-endian integer."""
    bit_length = 8 * len(x)
    return pack("!Q", bit_length)
class AesContentEncrypor(BaseContentEncryptor):
    ''' AES_CBC_HMAC_SHA2 (Jwa 5.2)

    Composite content encryption: AES-CBC for confidentiality plus an
    HMAC truncated to ``_TAG_LEN`` bytes for authentication.
    '''
    @classmethod
    def unpack_key(cls, cek):
        # The CEK is the MAC key followed by the encryption key.
        assert cek
        assert len(cek) == cls._MAC_KEY_LEN + cls._ENC_KEY_LEN
        return (
            cek[:cls._MAC_KEY_LEN],
            cek[-1 * cls._ENC_KEY_LEN:]
        )

    @classmethod
    def mac_input(cls, ciphert, iv, assoc):
        # HMAC input is AAD || IV || ciphertext || AL, where AL is the
        # bit length of the AAD as a 64bit big-endian integer.
        al = to_al(assoc)
        return b"".join([assoc, iv, ciphert, al])

    @classmethod
    def make_tag(cls, mac_k, ciphert, iv, aad):
        # Authentication tag is the truncated HMAC digest.
        mac_i = cls.mac_input(ciphert, iv, aad)
        hmac = HMAC.new(mac_k, digestmod=cls._HASH)
        hmac.update(mac_i)
        return hmac.digest()[:cls._TAG_LEN]

    @classmethod
    def encrypt(cls, cek, plaint, iv, aad):
        """Encrypt *plaint*; returns ``(ciphertext, tag)``."""
        mac_k, enc_k = cls.unpack_key(cek)
        ci = AES.new(enc_k, AES.MODE_CBC, iv)
        ciphert = ci.encrypt(pkcs5_pad(plaint))
        tag = cls.make_tag(mac_k, ciphert, iv, aad)
        return (ciphert, tag)

    @classmethod
    def decrypt(cls, cek, ciphert, iv, aad, tag):
        """Verify tag and decrypt; returns ``(plaintext or None, valid)``."""
        mac_k, enc_k = cls.unpack_key(cek)
        # NOTE(review): '!=' is not a constant-time comparison; consider
        # hmac.compare_digest for tag verification.
        if tag != cls.make_tag(mac_k, ciphert, iv, aad):
            return (None, False)
        ci = AES.new(enc_k, AES.MODE_CBC, iv)
        plaint = pkcs5_unpad(ci.decrypt(ciphert))
        return (plaint, True)
class A128CBC_HS256(AesContentEncrypor):
    ''' AES_128_CBC_HMAC_SHA_256 (Jwa 5.2.3)
    '''
    _KEY_LEN = 32       # CEK length = MAC key + ENC key
    _IV_LEN = 16        # AES block size
    _ENC_KEY_LEN = 16
    _MAC_KEY_LEN = 16
    _HASH = SHA256
    _TAG_LEN = 16       # truncated HMAC length


class A192CBC_HS384(AesContentEncrypor):
    ''' AES_192_CBC_HMAC_SHA_384 (Jwa 5.2.4)
    '''
    _KEY_LEN = 48
    _IV_LEN = 16
    _ENC_KEY_LEN = 24
    _MAC_KEY_LEN = 24
    _TAG_LEN = 24  # Authentication Tag Length
    _HASH = SHA384


class A256CBC_HS512(AesContentEncrypor):
    ''' AES_256_CBC_HMAC_SHA_512 (Jwa 5.2.5)
    '''
    _KEY_LEN = 64
    _IV_LEN = 16
    _ENC_KEY_LEN = 32
    _MAC_KEY_LEN = 32
    _HASH = SHA512
    _TAG_LEN = 32
if __name__ == '__main__':
    # Self-test / demo: exercise CEK/IV generation for every content
    # encryption and key-wrap round trips for every alg/enc combination.
    from jose.jwa.encs import KeyEncEnum, EncEnum
    encs = ['A128CBC-HS256', 'A192CBC-HS384', 'A256CBC-HS512']
    algs = ['A128KW', 'A192KW', 'A256KW']

    from jose.utils import base64
    for e in encs:
        enc = EncEnum.create(e).encryptor
        cek, iv = enc.create_key_iv()
        # Generated key/IV must match the algorithm's declared sizes.
        assert len(cek) == enc._KEY_LEN
        assert len(iv) == enc._IV_LEN
        print(enc.__name__)
        print("CEK =", base64.urlsafe_b64encode(cek))
        print("IV=", base64.urlsafe_b64encode(iv))

    import itertools
    from jose.jwk import Jwk
    from jose.jwe import Jwe

    jwk = Jwk.generate(kty="oct")
    for a, e in list(itertools.product(algs, encs)):
        jwe = Jwe(
            alg=KeyEncEnum.create(a),
            enc=EncEnum.create(e),
        )
        # Wrap a fresh CEK with the JWK, then unwrap it again.
        cek, iv, cek_ci, kek = jwe.provide_key(jwk)
        print("alg=", a, "enc=", e)
        print("CEK=", base64.base64url_encode(cek))
        print("IV=", base64.base64url_encode(iv))
        print("CEK_CI=", base64.base64url_encode(cek_ci))
        print("Jwe.iv=", jwe.iv)
        print("Jwe.tag=", jwe.tag)

        cek2 = jwe.agree_key(jwk, cek_ci)
        print("CEK AGREED=", base64.base64url_encode(cek2))
ace33ddbf634b62f6a7c710869b788fcf27089ff | 8,156 | py | Python | contrib/devtools/update-translations.py | NewBlockchainDev/charitycoin | a8f3c09ec5932e606b69fde50d5e291677f81d03 | [
"MIT"
] | null | null | null | contrib/devtools/update-translations.py | NewBlockchainDev/charitycoin | a8f3c09ec5932e606b69fde50d5e291677f81d03 | [
"MIT"
] | null | null | null | contrib/devtools/update-translations.py | NewBlockchainDev/charitycoin | a8f3c09ec5932e606b69fde50d5e291677f81d03 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# Copyright (c) 2014 Wladimir J. van der Laan
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
Run this script from the root of the repository to update all translations from
transifex.
It will do the following automatically:
- fetch all translations using the tx tool
- post-process them into valid and committable format
- remove invalid control characters
- remove location tags (makes diffs less noisy)
TODO:
- auto-add new translations to the build system according to the translation process
'''
from __future__ import division, print_function
import subprocess
import re
import sys
import os
import io
import xml.etree.ElementTree as ET
# Name of the transifex command line tool used to pull translations
TX = 'tx'
# Name of the source language file (excluded from processing)
SOURCE_LANG = 'charitycoin_en.ts'
# Directory with locale files, relative to the repository root
LOCALE_DIR = 'src/qt/locale'
# Minimum number of messages for translation to be considered at all
MIN_NUM_MESSAGES = 10
def check_at_repository_root():
    """Exit with an error unless the CWD is the repository root.

    The repository root is detected by the presence of a '.git' entry.
    """
    if not os.path.exists('.git'):
        # Both diagnostic lines go to stderr (the first one previously went
        # to stdout) and 'sys.exit' is used instead of the interactive
        # 'exit' builtin.
        print('No .git directory found', file=sys.stderr)
        print('Execute this script at the root of the repository',
              file=sys.stderr)
        sys.exit(1)
def fetch_all_translations():
    """Pull all translation files from transifex; exit on failure."""
    return_code = subprocess.call([TX, 'pull', '-f', '-a'])
    if return_code != 0:
        print('Error while fetching translations', file=sys.stderr)
        exit(1)
def find_format_specifiers(s):
    '''Find all format specifiers in a string.

    Returns the single character following each '%' (e.g. 's', 'd', '1').
    A lone '%' at the very end of the string contributes nothing.
    '''
    pos = 0
    specifiers = []
    while True:
        percent = s.find('%', pos)
        if percent < 0:
            break
        try:
            specifiers.append(s[percent+1])
        except IndexError:
            # '%' was the last character; nothing follows it. (Previously a
            # bare 'except' that would also swallow unrelated errors.)
            print('Failed to get specifier')
        pos = percent+2
    return specifiers
def split_format_specifiers(specifiers):
    '''Split format specifiers between numeric (Qt) and others (strprintf).

    Returns a tuple ``(numeric_set, other_list)``.
    '''
    digits = {'1', '2', '3', '4', '5', '6', '7', '8', '9'}
    numeric = {spec for spec in specifiers if spec in digits}
    other = [spec for spec in specifiers if spec not in digits]
    # If both numeric format specifiers and "others" are used, assume we're
    # dealing with a Qt-formatted message. In the case of Qt formatting
    # (see https://doc.qt.io/qt-5/qstring.html#arg) only numeric formats are
    # replaced at all. This means "(percentage: %1%)" is valid without any
    # escaping that would be necessary for strprintf. Without this, '%)'
    # would wrongly be detected as a printf format specifier.
    if numeric:
        other = []
    # numeric (Qt) can be present in any order, others (strprintf) must keep
    # their original order.
    return numeric, other
def sanitize_string(s):
    '''Return *s* with newlines flattened to spaces for safe printing.'''
    return ' '.join(s.split('\n'))
def check_format_specifiers(source, translation, errors, numerus):
    """Verify *translation* uses the same format specifiers as *source*.

    Appends a human-readable description to *errors* on mismatch or on a
    parse failure of the translation.

    Returns:
        bool: True when the translation's specifiers are compatible.
    """
    source_f = split_format_specifiers(find_format_specifiers(source))
    # assert that no source messages contain both Qt and strprintf format
    # specifiers - if this fails, go change the source as this is hacky
    # and confusing!
    assert(not(source_f[0] and source_f[1]))
    try:
        translation_f = split_format_specifiers(find_format_specifiers(translation))
    except IndexError:
        errors.append("Parse error in translation for '%s': '%s'" % (sanitize_string(source), sanitize_string(translation)))
        return False
    else:
        if source_f != translation_f:
            if numerus and source_f == (set(), ['n']) and translation_f == (set(), []) and translation.find('%') == -1:
                # Allow numerus translations to omit %n specifier (usually when it only has one possible value)
                return True
            errors.append("Mismatch between '%s' and '%s'" % (sanitize_string(source), sanitize_string(translation)))
            return False
    return True
def all_ts_files(suffix=''):
    """Yield ``(filename, filepath)`` for every language .ts file.

    Skips the source language file. When *suffix* is given, only files
    ending with ``.ts<suffix>`` are considered and the suffix is stripped
    from both yielded values.
    """
    for filename in os.listdir(LOCALE_DIR):
        # process only language files, and do not process source language
        if not filename.endswith('.ts'+suffix) or filename == SOURCE_LANG+suffix:
            continue
        if suffix: # remove provided suffix
            filename = filename[0:-len(suffix)]
        filepath = os.path.join(LOCALE_DIR, filename)
        yield(filename, filepath)
# Control bytes that must not appear in the .ts XML output: every C0
# control character except LF (\x0a) and CR (\x0d), which are kept.
FIX_RE = re.compile(b'[\x00-\x09\x0b\x0c\x0e-\x1f]')
# Same byte set, expressed for bytes.translate-based removal.
_INVALID_CONTROL_BYTES = bytes(
    b for b in range(0x20) if b not in (0x0a, 0x0d)
)


def remove_invalid_characters(s):
    '''Remove invalid characters from translation string'''
    return s.translate(None, _INVALID_CONTROL_BYTES)
# Override cdata escape function to make our output match Qt's (optional,
# just for cleaner diffs for comparison, disable by default).
# Holds the original ElementTree escape function; it is saved here before
# the override is installed.
_orig_escape_cdata = None
def escape_cdata(text):
    """Escape character data like Qt does (additionally escapes quotes)."""
    text = _orig_escape_cdata(text)
    text = text.replace("'", '&apos;')
    text = text.replace('"', '&quot;')
    return text
def postprocess_translations(reduce_diff_hacks=False):
    '''Validate and clean up all fetched .ts translation files.

    Each file is renamed to '<file>.orig', parsed (after stripping invalid
    control characters), checked message-by-message for format-specifier
    mismatches (invalid translations are cleared and marked unfinished),
    stripped of location tags and unfinished messages, and then written
    back under its original name.  Files with fewer than MIN_NUM_MESSAGES
    are not written back at all -- they stay renamed as '.orig', which is
    how "removal" is effected.  Returns True if any errors were found.
    '''
    print('Checking and postprocessing...')
    if reduce_diff_hacks:
        global _orig_escape_cdata
        _orig_escape_cdata = ET._escape_cdata
        ET._escape_cdata = escape_cdata
    for (filename,filepath) in all_ts_files():
        os.rename(filepath, filepath+'.orig')
    have_errors = False
    for (filename,filepath) in all_ts_files('.orig'):
        # pre-fixups to cope with transifex output
        parser = ET.XMLParser(encoding='utf-8') # need to override encoding because 'utf8' is not understood only 'utf-8'
        with open(filepath + '.orig', 'rb') as f:
            data = f.read()
        # remove control characters; this must be done over the entire file otherwise the XML parser will fail
        data = remove_invalid_characters(data)
        tree = ET.parse(io.BytesIO(data), parser=parser)
        # iterate over all messages in file
        root = tree.getroot()
        for context in root.findall('context'):
            for message in context.findall('message'):
                numerus = message.get('numerus') == 'yes'
                source = message.find('source').text
                translation_node = message.find('translation')
                # pick all numerusforms
                if numerus:
                    translations = [i.text for i in translation_node.findall('numerusform')]
                else:
                    translations = [translation_node.text]
                for translation in translations:
                    if translation is None:
                        continue
                    errors = []
                    valid = check_format_specifiers(source, translation, errors, numerus)
                    for error in errors:
                        print('%s: %s' % (filename, error))
                    if not valid: # set type to unfinished and clear string if invalid
                        translation_node.clear()
                        translation_node.set('type', 'unfinished')
                        have_errors = True
                # Remove location tags
                for location in message.findall('location'):
                    message.remove(location)
                # Remove entire message if it is an unfinished translation
                if translation_node.get('type') == 'unfinished':
                    context.remove(message)
        # check if document is (virtually) empty, and remove it if so
        num_messages = 0
        for context in root.findall('context'):
            for message in context.findall('message'):
                num_messages += 1
        if num_messages < MIN_NUM_MESSAGES:
            # NOTE: skipping the write leaves only the '.orig' copy on disk,
            # so the translation file is effectively removed.
            print('Removing %s, as it contains only %i messages' % (filepath, num_messages))
            continue
        # write fixed-up tree
        # if diff reduction requested, replace some XML to 'sanitize' to qt formatting
        if reduce_diff_hacks:
            out = io.BytesIO()
            tree.write(out, encoding='utf-8')
            out = out.getvalue()
            out = out.replace(b' />', b'/>')
            with open(filepath, 'wb') as f:
                f.write(out)
        else:
            tree.write(filepath, encoding='utf-8')
    return have_errors
if __name__ == '__main__':
    # Entry point: verify the working directory, fetch the latest
    # translations, then validate and clean them up.
    # (check_at_repository_root and fetch_all_translations are defined
    # elsewhere in this script.)
    check_at_repository_root()
    fetch_all_translations()
    postprocess_translations()
| 38.654028 | 124 | 0.634134 |
ace33f4994226d11b1d235a24fc49bd6f5ad21a3 | 783 | py | Python | neurokit2/stats/mad.py | raimonpv/NeuroKit | cb37d83ee20d6a13a91c4848aa435f41e979e203 | [
"MIT"
] | 1 | 2021-11-14T21:18:43.000Z | 2021-11-14T21:18:43.000Z | neurokit2/stats/mad.py | raimonpv/NeuroKit | cb37d83ee20d6a13a91c4848aa435f41e979e203 | [
"MIT"
] | null | null | null | neurokit2/stats/mad.py | raimonpv/NeuroKit | cb37d83ee20d6a13a91c4848aa435f41e979e203 | [
"MIT"
] | 1 | 2021-11-14T21:18:48.000Z | 2021-11-14T21:18:48.000Z | # -*- coding: utf-8 -*-
import numpy as np
def mad(x, constant=1.4826):
    """Median Absolute Deviation: a "robust" version of standard deviation.

    Parameters
    ----------
    x : Union[list, np.array, pd.Series]
        A vector of values.
    constant : float
        Scale factor. Use 1.4826 for results similar to default R.

    Returns
    ----------
    float
        The MAD.

    Examples
    ----------
    >>> import neurokit2 as nk
    >>> nk.mad([2, 8, 7, 5, 4, 12, 5, 1])
    3.7064999999999997

    References
    -----------
    - https://en.wikipedia.org/wiki/Median_absolute_deviation
    """
    # Median of the unmasked values, ignoring NaNs.
    center = np.nanmedian(np.ma.array(x).compressed())
    # Median distance from that center, scaled by the consistency constant.
    return np.nanmedian(np.abs(x - center)) * constant
| 22.371429 | 75 | 0.579821 |
ace33fa6d7a980ed6b21736b8187a09c5cbd8f00 | 1,696 | py | Python | dashboard/migrations/0061_update_user_feedback.py | eric-scott-owens/loopla | 1fd5e6e7e9907198ff904111010b362a129d5e39 | [
"MIT"
] | null | null | null | dashboard/migrations/0061_update_user_feedback.py | eric-scott-owens/loopla | 1fd5e6e7e9907198ff904111010b362a129d5e39 | [
"MIT"
] | 6 | 2020-06-05T22:27:20.000Z | 2022-03-24T10:25:50.000Z | dashboard/migrations/0061_update_user_feedback.py | eric-scott-owens/loopla | 1fd5e6e7e9907198ff904111010b362a129d5e39 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by Django 1.11.1 on 2019-03-05 17:50
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import imagekit.models.fields
class Migration(migrations.Migration):
    """Reshape UserFeedback: drop the copy/visibility/original-* linkage
    fields and replace them with direct owner, photo and text fields."""
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('dashboard', '0060_add_post_indexes_for_newest_update'),
    ]
    operations = [
        # Remove the old copy/visibility bookkeeping fields.
        migrations.RemoveField(
            model_name='userfeedback',
            name='internal_copy',
        ),
        migrations.RemoveField(
            model_name='userfeedback',
            name='is_visible_to_group',
        ),
        # Remove the old links back to an "original" object/group/owner.
        migrations.RemoveField(
            model_name='userfeedback',
            name='original',
        ),
        migrations.RemoveField(
            model_name='userfeedback',
            name='original_group',
        ),
        migrations.RemoveField(
            model_name='userfeedback',
            name='original_owner',
        ),
        # Feedback now carries its own owner (nullable, preserved on user
        # deletion via SET_NULL), an optional photo, and the feedback text.
        migrations.AddField(
            model_name='userfeedback',
            name='owner',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='userfeedback',
            name='photo',
            field=imagekit.models.fields.ProcessedImageField(blank=True, null=True, upload_to='profile_photos'),
        ),
        migrations.AddField(
            model_name='userfeedback',
            name='text',
            field=models.TextField(default=''),
            preserve_default=False,
        ),
    ]
| 30.285714 | 122 | 0.605542 |
ace3400bcc7102515e170e4270acc2b465889aa0 | 1,425 | py | Python | ENV/lib/python3.5/site-packages/pyrogram/api/types/text_plain.py | block1o1/CryptoPredicted | 7f660cdc456fb8252b3125028f31fd6f5a3ceea5 | [
"MIT"
] | 4 | 2021-10-14T21:22:25.000Z | 2022-03-12T19:58:48.000Z | ENV/lib/python3.5/site-packages/pyrogram/api/types/text_plain.py | inevolin/CryptoPredicted | 7f660cdc456fb8252b3125028f31fd6f5a3ceea5 | [
"MIT"
] | null | null | null | ENV/lib/python3.5/site-packages/pyrogram/api/types/text_plain.py | inevolin/CryptoPredicted | 7f660cdc456fb8252b3125028f31fd6f5a3ceea5 | [
"MIT"
] | 1 | 2022-03-15T22:52:53.000Z | 2022-03-15T22:52:53.000Z | # Pyrogram - Telegram MTProto API Client Library for Python
# Copyright (C) 2017-2018 Dan Tès <https://github.com/delivrance>
#
# This file is part of Pyrogram.
#
# Pyrogram is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Pyrogram is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Pyrogram. If not, see <http://www.gnu.org/licenses/>.
from io import BytesIO
from pyrogram.api.core import *
class TextPlain(Object):
    """Plain-text element.

    Attributes:
        ID: ``0x744694e0``

    Args:
        text: ``str``
    """

    ID = 0x744694e0

    def __init__(self, text: str):
        self.text = text  # string

    @staticmethod
    def read(b: BytesIO, *args) -> "TextPlain":
        # This constructor has no flags; the payload is a single string.
        return TextPlain(String.read(b))

    def write(self) -> bytes:
        buffer = BytesIO()
        # Constructor ID first, then the serialized string payload.
        buffer.write(Int(self.ID, False))
        buffer.write(String(self.text))
        return buffer.getvalue()
ace34220b9709d78bf6a15f4641afe3d9b8aaa0a | 10,560 | py | Python | env/lib/python2.7/site-packages/djcelery/tests/test_schedulers.py | jlwysf/onduty | 20d90583a6996d037912af08eb29a6d6fa06bf66 | [
"MIT"
] | null | null | null | env/lib/python2.7/site-packages/djcelery/tests/test_schedulers.py | jlwysf/onduty | 20d90583a6996d037912af08eb29a6d6fa06bf66 | [
"MIT"
] | null | null | null | env/lib/python2.7/site-packages/djcelery/tests/test_schedulers.py | jlwysf/onduty | 20d90583a6996d037912af08eb29a6d6fa06bf66 | [
"MIT"
] | null | null | null | from __future__ import absolute_import
from datetime import datetime, timedelta
from itertools import count
from celery.five import monotonic
from celery.schedules import schedule, crontab
from celery.utils.timeutils import timedelta_seconds
from djcelery import schedulers
from djcelery import celery
from djcelery.app import app
from djcelery.models import PeriodicTask, IntervalSchedule, CrontabSchedule
from djcelery.models import PeriodicTasks
from djcelery.tests.utils import unittest
from djcelery.compat import unicode
def create_model_interval(schedule, **kwargs):
    """Create an (unsaved) periodic-task model with an interval schedule."""
    interval = IntervalSchedule.from_schedule(schedule)
    return create_model(interval=interval, **kwargs)
def create_model_crontab(schedule, **kwargs):
    """Create an (unsaved) periodic-task model with a crontab schedule."""
    cron = CrontabSchedule.from_schedule(schedule)
    return create_model(crontab=cron, **kwargs)
_next_id_get = count(0)
_next_id = lambda: next(_next_id_get)
def create_model(Model=PeriodicTask, **kwargs):
    """Build (without saving) a *Model* instance with unique name/task
    fields and fixed routing defaults; keyword arguments override them."""
    entry = {
        'name': 'thefoo{0}'.format(_next_id()),
        'task': 'djcelery.unittest.add{0}'.format(_next_id()),
        'args': '[2, 2]',
        'kwargs': '{"callback": "foo"}',
        'queue': 'xaz',
        'routing_key': 'cpu',
        'exchange': 'foo',
    }
    entry.update(kwargs)
    return Model(**entry)
class EntryTrackSave(schedulers.ModelEntry):
    """Schedule entry that counts calls to save(), for test assertions."""
    def __init__(self, *args, **kwargs):
        # ``saved`` must exist before the base __init__ runs, since that
        # may itself trigger a save().
        self.saved = 0
        super(EntryTrackSave, self).__init__(*args, **kwargs)
    def save(self):
        self.saved += 1
        super(EntryTrackSave, self).save()
class EntrySaveRaises(schedulers.ModelEntry):
    """Schedule entry whose save() always fails, for error-handling tests."""
    def save(self):
        raise RuntimeError('this is expected')
class TrackingScheduler(schedulers.DatabaseScheduler):
    """DatabaseScheduler that counts sync() ("flush") calls in ``flushed``."""
    Entry = EntryTrackSave
    def __init__(self, *args, **kwargs):
        # ``flushed`` must exist before the base __init__ runs, since that
        # may itself call sync().
        self.flushed = 0
        schedulers.DatabaseScheduler.__init__(self, *args, **kwargs)
    def sync(self):
        self.flushed += 1
        schedulers.DatabaseScheduler.sync(self)
class test_ModelEntry(unittest.TestCase):
    """Tests for the model-backed schedule entry (via EntryTrackSave)."""
    Entry = EntryTrackSave
    def tearDown(self):
        PeriodicTask.objects.all().delete()
    def test_entry(self):
        # An entry built from a model exposes deserialized args/kwargs,
        # a schedule, a zero run counter, and the AMQP routing options.
        m = create_model_interval(schedule(timedelta(seconds=10)))
        e = self.Entry(m)
        self.assertListEqual(e.args, [2, 2])
        self.assertDictEqual(e.kwargs, {'callback': 'foo'})
        self.assertTrue(e.schedule)
        self.assertEqual(e.total_run_count, 0)
        self.assertIsInstance(e.last_run_at, datetime)
        self.assertDictContainsSubset({'queue': 'xaz',
                                       'exchange': 'foo',
                                       'routing_key': 'cpu'}, e.options)
        # Advancing via next() must bump last_run_at and the run counter.
        right_now = celery.now()
        m2 = create_model_interval(schedule(timedelta(seconds=10)),
                                   last_run_at=right_now)
        self.assertTrue(m2.last_run_at)
        e2 = self.Entry(m2)
        self.assertIs(e2.last_run_at, right_now)
        e3 = e2.next()
        self.assertGreater(e3.last_run_at, e2.last_run_at)
        self.assertEqual(e3.total_run_count, 1)
class test_DatabaseScheduler(unittest.TestCase):
    """Integration tests for the database-backed beat scheduler."""
    Scheduler = TrackingScheduler
    def setUp(self):
        # Start from a clean table and an empty static schedule, then store
        # three periodic tasks: two interval-based and one crontab-based.
        PeriodicTask.objects.all().delete()
        self.prev_schedule = celery.conf.CELERYBEAT_SCHEDULE
        celery.conf.CELERYBEAT_SCHEDULE = {}
        m1 = create_model_interval(schedule(timedelta(seconds=10)))
        m2 = create_model_interval(schedule(timedelta(minutes=20)))
        m3 = create_model_crontab(crontab(minute='2,4,5'))
        for obj in m1, m2, m3:
            obj.save()
        self.s = self.Scheduler(app=app)
        # Re-fetch so the instances reflect stored state.
        self.m1 = PeriodicTask.objects.get(name=m1.name)
        self.m2 = PeriodicTask.objects.get(name=m2.name)
        self.m3 = PeriodicTask.objects.get(name=m3.name)
    def tearDown(self):
        celery.conf.CELERYBEAT_SCHEDULE = self.prev_schedule
        PeriodicTask.objects.all().delete()
    def test_constructor(self):
        self.assertIsInstance(self.s._dirty, set)
        self.assertIsNone(self.s._last_sync)
        self.assertTrue(self.s.sync_every)
    def test_all_as_schedule(self):
        # Three stored tasks plus the built-in celery.backend_cleanup.
        sched = self.s.schedule
        self.assertTrue(sched)
        self.assertEqual(len(sched), 4)
        self.assertIn('celery.backend_cleanup', sched)
        for n, e in sched.items():
            self.assertIsInstance(e, self.s.Entry)
    def test_schedule_changed(self):
        # Changes saved through the ORM must be visible on the next
        # schedule access; deleted tasks must disappear.
        self.m2.args = '[16, 16]'
        self.m2.save()
        e2 = self.s.schedule[self.m2.name]
        self.assertListEqual(e2.args, [16, 16])
        self.m1.args = '[32, 32]'
        self.m1.save()
        e1 = self.s.schedule[self.m1.name]
        self.assertListEqual(e1.args, [32, 32])
        e1 = self.s.schedule[self.m1.name]
        self.assertListEqual(e1.args, [32, 32])
        self.m3.delete()
        self.assertRaises(KeyError, self.s.schedule.__getitem__, self.m3.name)
    def test_should_sync(self):
        # A fresh scheduler wants to sync; within sync_every of the last
        # sync it does not; after that window it does again.
        self.assertTrue(self.s.should_sync())
        self.s._last_sync = monotonic()
        self.assertFalse(self.s.should_sync())
        self.s._last_sync -= self.s.sync_every
        self.assertTrue(self.s.should_sync())
    def test_reserve(self):
        # Reserving an entry marks it dirty without forcing extra flushes.
        e1 = self.s.schedule[self.m1.name]
        self.s.schedule[self.m1.name] = self.s.reserve(e1)
        self.assertEqual(self.s.flushed, 1)
        e2 = self.s.schedule[self.m2.name]
        self.s.schedule[self.m2.name] = self.s.reserve(e2)
        self.assertEqual(self.s.flushed, 1)
        self.assertIn(self.m2.name, self.s._dirty)
    def test_sync_saves_last_run_at(self):
        # sync() must persist a modified last_run_at for dirty entries.
        e1 = self.s.schedule[self.m2.name]
        last_run = e1.last_run_at
        last_run2 = last_run - timedelta(days=1)
        e1.model.last_run_at = last_run2
        self.s._dirty.add(self.m2.name)
        self.s.sync()
        e2 = self.s.schedule[self.m2.name]
        self.assertEqual(e2.last_run_at, last_run2)
    def test_sync_syncs_before_save(self):
        # Get the entry for m2
        e1 = self.s.schedule[self.m2.name]
        # Increment the entry (but make sure it doesn't sync)
        self.s._last_sync = monotonic()
        e2 = self.s.schedule[e1.name] = self.s.reserve(e1)
        self.assertEqual(self.s.flushed, 1)
        # Fetch the raw object from db, change the args
        # and save the changes.
        m2 = PeriodicTask.objects.get(pk=self.m2.pk)
        m2.args = '[16, 16]'
        m2.save()
        # get_schedule should now see the schedule has changed.
        # and also sync the dirty objects.
        e3 = self.s.schedule[self.m2.name]
        self.assertEqual(self.s.flushed, 2)
        self.assertEqual(e3.last_run_at, e2.last_run_at)
        self.assertListEqual(e3.args, [16, 16])
    def test_sync_not_dirty(self):
        # Syncing with nothing dirty must be a no-op (not an error).
        self.s._dirty.clear()
        self.s.sync()
    def test_sync_object_gone(self):
        # A dirty name whose row no longer exists must not break sync().
        self.s._dirty.add('does-not-exist')
        self.s.sync()
    def test_sync_rollback_on_save_error(self):
        # A failing save during sync must propagate the error.
        self.s.schedule[self.m1.name] = EntrySaveRaises(self.m1)
        self.s._dirty.add(self.m1.name)
        self.assertRaises(RuntimeError, self.s.sync)
class test_models(unittest.TestCase):
    """Tests for schedule model string representations and conversions."""
    def test_IntervalSchedule_unicode(self):
        self.assertEqual(unicode(IntervalSchedule(every=1, period='seconds')),
                         'every second')
        self.assertEqual(unicode(IntervalSchedule(every=10, period='seconds')),
                         'every 10 seconds')
    def test_CrontabSchedule_unicode(self):
        self.assertEqual(unicode(CrontabSchedule(minute=3,
                                                 hour=3,
                                                 day_of_week=None)),
                         '3 3 * * * (m/h/d/dM/MY)')
        self.assertEqual(unicode(CrontabSchedule(minute=3,
                                                 hour=3,
                                                 day_of_week='tue',
                                                 day_of_month='*/2',
                                                 month_of_year='4,6')),
                         '3 3 tue */2 4,6 (m/h/d/dM/MY)')
    def test_PeriodicTask_unicode_interval(self):
        p = create_model_interval(schedule(timedelta(seconds=10)))
        self.assertEqual(unicode(p),
                         '{0}: every 10.0 seconds'.format(p.name))
    def test_PeriodicTask_unicode_crontab(self):
        p = create_model_crontab(crontab(hour='4, 5', day_of_week='4, 5'))
        self.assertEqual(unicode(p),
                         '{0}: * 4,5 4,5 * * (m/h/d/dM/MY)'.format(p.name))
    def test_PeriodicTask_schedule_property(self):
        # The model's schedule property must round-trip to celery schedule
        # objects with fully expanded field sets.
        p1 = create_model_interval(schedule(timedelta(seconds=10)))
        s1 = p1.schedule
        self.assertEqual(timedelta_seconds(s1.run_every), 10)
        p2 = create_model_crontab(crontab(hour='4, 5',
                                          minute='10,20,30',
                                          day_of_month='1-7',
                                          month_of_year='*/3'))
        s2 = p2.schedule
        self.assertSetEqual(s2.hour, set([4, 5]))
        self.assertSetEqual(s2.minute, set([10, 20, 30]))
        self.assertSetEqual(s2.day_of_week, set([0, 1, 2, 3, 4, 5, 6]))
        self.assertSetEqual(s2.day_of_month, set([1, 2, 3, 4, 5, 6, 7]))
        self.assertSetEqual(s2.month_of_year, set([1, 4, 7, 10]))
    def test_PeriodicTask_unicode_no_schedule(self):
        p = create_model()
        self.assertEqual(unicode(p), '{0}: {{no schedule}}'.format(p.name))
    def test_CrontabSchedule_schedule(self):
        s = CrontabSchedule(minute='3, 7', hour='3, 4', day_of_week='*',
                            day_of_month='1, 16', month_of_year='1, 7')
        self.assertEqual(s.schedule.minute, set([3, 7]))
        self.assertEqual(s.schedule.hour, set([3, 4]))
        self.assertEqual(s.schedule.day_of_week, set([0, 1, 2, 3, 4, 5, 6]))
        self.assertEqual(s.schedule.day_of_month, set([1, 16]))
        self.assertEqual(s.schedule.month_of_year, set([1, 7]))
class test_model_PeriodicTasks(unittest.TestCase):
    """Tests for the PeriodicTasks change-tracking model."""
    def setUp(self):
        PeriodicTasks.objects.all().delete()
    def test_track_changes(self):
        # last_change() is None until a task is saved, and is bumped on
        # every subsequent save.
        self.assertIsNone(PeriodicTasks.last_change())
        m1 = create_model_interval(schedule(timedelta(seconds=10)))
        m1.save()
        x = PeriodicTasks.last_change()
        self.assertTrue(x)
        m1.args = '(23, 24)'
        m1.save()
        y = PeriodicTasks.last_change()
        self.assertTrue(y)
        self.assertGreater(y, x)
| 35.918367 | 79 | 0.607197 |
ace3427660b900c772b28c3f87a6083691682c1b | 2,170 | py | Python | eventsourcing/tests/core_tests/test_simple_application.py | alexanderlarin/eventsourcing | 6f2a4ded3c783ba3ee465243a48f66ecdee20f52 | [
"BSD-3-Clause"
] | null | null | null | eventsourcing/tests/core_tests/test_simple_application.py | alexanderlarin/eventsourcing | 6f2a4ded3c783ba3ee465243a48f66ecdee20f52 | [
"BSD-3-Clause"
] | null | null | null | eventsourcing/tests/core_tests/test_simple_application.py | alexanderlarin/eventsourcing | 6f2a4ded3c783ba3ee465243a48f66ecdee20f52 | [
"BSD-3-Clause"
] | null | null | null | from unittest import TestCase
from eventsourcing.tests.sequenced_item_tests.test_django_record_manager import DjangoTestCase
from eventsourcing.application.django import DjangoApplication
from eventsourcing.application.notificationlog import NotificationLogReader
from eventsourcing.application.snapshotting import SnapshottingApplication
from eventsourcing.application.sqlalchemy import SQLAlchemyApplication
from eventsourcing.domain.model.events import assert_event_handlers_empty, DomainEvent
from eventsourcing.tests.core_tests.test_aggregate_root import ExampleAggregateRoot
from eventsourcing.utils.random import encode_random_bytes
class TestSimpleApplication(TestCase):
    """End-to-end smoke test for a simple event-sourced application:
    save an aggregate, find it in the repository, and see its creation
    event in the notification log."""
    application_class = SQLAlchemyApplication
    def test(self):
        with self.get_application() as app:
            # Start with a new table.
            # (drop/setup are each called twice -- presumably to verify
            # the operations are idempotent.)
            app.drop_table()
            app.drop_table()
            app.setup_table()
            app.setup_table()
            # Check the application's persistence policy,
            # repository, and event store, are working.
            aggregate = ExampleAggregateRoot.__create__()
            aggregate.__save__()
            self.assertTrue(aggregate.id in app.repository)
            # Check the notifications.
            reader = NotificationLogReader(app.notification_log)
            notifications = reader.read_list()
            self.assertEqual(1, len(notifications))
            topic = 'eventsourcing.tests.core_tests.test_aggregate_root#ExampleAggregateRoot.Created'
            self.assertEqual(topic, notifications[0]['topic'])
            app.drop_table()
    def get_application(self):
        # Fresh application with encryption enabled and all domain events
        # persisted.
        return self.application_class(
            cipher_key=encode_random_bytes(16),
            persist_event_type=DomainEvent,
        )
    def tearDown(self):
        # Check the close() method leaves everything unsubscribed.
        assert_event_handlers_empty()
class TestDjangoApplication(DjangoTestCase, TestSimpleApplication):
    """Run the same application tests against the Django infrastructure."""
    application_class = DjangoApplication
class TestSnapshottingApplication(TestSimpleApplication):
    """Run the same application tests with snapshotting mixed in."""
    application_class = SnapshottingApplication.mixin(SQLAlchemyApplication)
| 36.166667 | 101 | 0.738249 |
ace3429f1a43629a293ff6b1db291bdbcf6e837f | 5,541 | py | Python | application/ceres/sandbox.py | traxtar3/slapt | a799b2f9b4b290a3c1cca2018f2826e1c4e94232 | [
"MIT"
] | null | null | null | application/ceres/sandbox.py | traxtar3/slapt | a799b2f9b4b290a3c1cca2018f2826e1c4e94232 | [
"MIT"
] | null | null | null | application/ceres/sandbox.py | traxtar3/slapt | a799b2f9b4b290a3c1cca2018f2826e1c4e94232 | [
"MIT"
] | null | null | null | import pandas as pd
def mkTaskData():
    """Build the hard-coded demo tasking table.

    Each row is one (satellite, site) tasking request; every request shares
    the same priority/type/report/routing/stop values.  Returns a pandas
    DataFrame with the same columns and row order as the original
    hand-written table.
    """
    columns = ["Satellite", "Priority", "Task Type", "Voice Report",
               "Routing", "Stop", "Site"]
    # Fields shared by every request.
    common = {"Priority": "3D", "Task Type": "PosNeg", "Voice Report": "none",
              "Routing": "routine", "Stop": "UFN"}
    # (satellite, site) pairs in original row order.  Pairs that were
    # commented out in the original table are intentionally omitted:
    # (25544, THL), (27880, FYL), (45589, COD), (26846, CAV).
    tasking = [
        ("25544", "BLE"), ("25544", "CAV"), ("25544", "COD"),
        ("25544", "CLR"), ("25544", "FYL"), ("25544", "EGL"),
        ("27880", "BLE"), ("27880", "CAV"), ("27880", "COD"),
        ("27880", "CLR"), ("27880", "THL"), ("27880", "EGL"),
        ("45589", "BLE"), ("45589", "CAV"),
        ("44861", "CLR"), ("44861", "THL"), ("44861", "FYL"), ("44861", "EGL"),
        ("43931", "BLE"), ("43931", "COD"), ("26846", "CLR"),
        ("43931", "THL"), ("26846", "FYL"), ("43931", "EGL"),
    ]
    rows = [dict(common, Satellite=sat, Site=site) for sat, site in tasking]
    # Build in one shot: DataFrame.append was deprecated in pandas 1.4 and
    # removed in 2.0, and repeated appends are quadratic anyway.
    taskData = pd.DataFrame(rows, columns=columns)
    return taskData
ace343bb8370427544a67915b534475d5764f673 | 3,147 | py | Python | vendor-local/lib/python/debug_toolbar/panels/cache.py | Koenkk/popcorn_maker | 0978b9f98dacd4e8eb753404b24eb584f410aa11 | [
"BSD-3-Clause"
] | 285 | 2019-12-23T09:50:21.000Z | 2021-12-08T09:08:49.000Z | vendor-local/lib/python/debug_toolbar/panels/cache.py | Koenkk/popcorn_maker | 0978b9f98dacd4e8eb753404b24eb584f410aa11 | [
"BSD-3-Clause"
] | null | null | null | vendor-local/lib/python/debug_toolbar/panels/cache.py | Koenkk/popcorn_maker | 0978b9f98dacd4e8eb753404b24eb584f410aa11 | [
"BSD-3-Clause"
] | 16 | 2015-02-18T21:43:31.000Z | 2021-11-09T22:50:03.000Z | import time
import inspect
from django.core import cache
from django.core.cache.backends.base import BaseCache
from django.utils.translation import ugettext_lazy as _
from debug_toolbar.panels import DebugPanel
class CacheStatTracker(BaseCache):
    """A small class used to track cache calls.

    Wraps a real cache backend and records, for each operation, a tuple of
    (duration, operation name, args, calling frame info) in ``self.calls``,
    plus aggregate counters and total time in milliseconds.
    """
    def __init__(self, cache):
        self.cache = cache
        self.reset()

    def reset(self):
        """Clear all recorded calls and counters."""
        self.calls = []
        self.hits = 0
        self.misses = 0
        self.sets = 0
        self.gets = 0
        # Bug fix: this counter used to be named ``get_many``, which
        # shadowed the get_many() method on the instance and made it
        # uncallable (TypeError: 'int' object is not callable).
        self.get_many_calls = 0
        self.deletes = 0
        self.total_time = 0

    def _get_func_info(self):
        # Two frames up is the code that invoked the cache method:
        # (filename, line number, function name, source context lines).
        stack = inspect.stack()[2]
        return (stack[1], stack[2], stack[3], stack[4])

    def get(self, key, default=None):
        t = time.time()
        value = self.cache.get(key, default)
        this_time = time.time() - t
        self.total_time += this_time * 1000
        # NOTE: a stored value of None is indistinguishable from a miss here.
        if value is None:
            self.misses += 1
        else:
            self.hits += 1
        self.gets += 1
        self.calls.append((this_time, 'get', (key,), self._get_func_info()))
        return value

    def set(self, key, value, timeout=None):
        t = time.time()
        self.cache.set(key, value, timeout)
        this_time = time.time() - t
        self.total_time += this_time * 1000
        self.sets += 1
        self.calls.append((this_time, 'set', (key, value, timeout), self._get_func_info()))

    def delete(self, key):
        t = time.time()
        self.cache.delete(key)
        this_time = time.time() - t
        self.total_time += this_time * 1000
        self.deletes += 1
        self.calls.append((this_time, 'delete', (key,), self._get_func_info()))

    def get_many(self, keys):
        t = time.time()
        results = self.cache.get_many(keys)
        this_time = time.time() - t
        self.total_time += this_time * 1000
        self.get_many_calls += 1
        for key, value in results.iteritems():
            if value is None:
                self.misses += 1
            else:
                self.hits += 1
        self.calls.append((this_time, 'get_many', (keys,), self._get_func_info()))
        # Bug fix: previously the fetched results were silently discarded,
        # so callers of get_many() received None.
        return results
class CacheDebugPanel(DebugPanel):
    """
    Panel that displays the cache statistics.
    """
    name = 'Cache'
    template = 'debug_toolbar/panels/cache.html'
    has_content = True
    def __init__(self, *args, **kwargs):
        super(CacheDebugPanel, self).__init__(*args, **kwargs)
        # This is hackish but to prevent threading issues is somewhat needed
        if isinstance(cache.cache, CacheStatTracker):
            # Already wrapped (e.g. by an earlier request): just reset stats.
            cache.cache.reset()
            self.cache = cache.cache
        else:
            # Monkey-patch the global cache with the tracking wrapper.
            self.cache = CacheStatTracker(cache.cache)
            cache.cache = self.cache
    def nav_title(self):
        # Toolbar label showing the total cache time in milliseconds.
        return _('Cache: %.2fms') % self.cache.total_time
    def title(self):
        return _('Cache Usage')
    def url(self):
        return ''
    def process_response(self, request, response):
        # Record the stats that the panel template renders.
        self.record_stats({
            'cache_calls': len(self.cache.calls),
            'cache_time': self.cache.total_time,
            'cache': self.cache,
        })
| 29.688679 | 91 | 0.580553 |
ace343c36509b508241f175dba353697bfd039e1 | 10,404 | py | Python | angr-doc/api-doc/source/conf.py | Ruide/angr-dev | 964dc80c758e25c698c2cbcc454ef5954c5fa0a0 | [
"BSD-2-Clause"
] | null | null | null | angr-doc/api-doc/source/conf.py | Ruide/angr-dev | 964dc80c758e25c698c2cbcc454ef5954c5fa0a0 | [
"BSD-2-Clause"
] | null | null | null | angr-doc/api-doc/source/conf.py | Ruide/angr-dev | 964dc80c758e25c698c2cbcc454ef5954c5fa0a0 | [
"BSD-2-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
#
# angr documentation build configuration file, created by
# sphinx-quickstart on Sun Feb 14 00:29:24 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
from recommonmark.parser import CommonMarkParser
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('..'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.napoleon',  # Google/NumPy-style docstring support
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
# Parse Markdown sources with recommonmark's CommonMark parser.
source_parsers = {
    '.md': CommonMarkParser,
}
source_suffix = ['.rst', '.md']
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'angr'
copyright = u'2017, The angr project'
author = u'The angr project'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'6.7.4.12'
# The full version, including alpha/beta/rc tags.
release = u'6.7.4.12'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
intersphinx_mapping = {'python': ('https://docs.python.org/2.7', None)}
autoclass_content = "both"
autodoc_member_order = 'bysource'
# Default options applied to every autodoc directive.
# FIX: a comma was missing after 'show-inheritance', so Python's implicit
# string-literal concatenation produced the single bogus flag
# 'show-inheritanceundoc-members' — 'undoc-members' was silently ignored.
autodoc_default_flags = [
    'members',
    # 'special-members',
    'show-inheritance',
    # 'private-members',
    'undoc-members',
]
def autodoc_skip_member(app, what, name, obj, skip, options):
    """``autodoc-skip-member`` hook: hide noisy dunder attributes.

    Returns True for any member whose name contains one of the excluded
    dunders (substring match, as in the original); otherwise defers to
    Sphinx's own decision in ``skip``.
    """
    hidden = ('__weakref__', '__doc__', '__module__', '__dict__')
    if any(token in name for token in hidden):
        return True
    return skip
def setup(app):
    """Sphinx extension entry point: register the member-skipping hook."""
    handler = autodoc_skip_member
    app.connect('autodoc-skip-member', handler)
# -- Options for HTML output ----------------------------------------------

# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes.
# Read the Docs theme (third-party package sphinx_rtd_theme).
html_theme = 'sphinx_rtd_theme'

# Theme options are theme-specific and customize the look and feel of a theme
# further.  For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}

# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []

# The name for this set of Sphinx documents.  If None, it defaults to
# "<project> v<release> documentation".
#html_title = None

# A shorter title for the navigation bar.  Default is the same as html_title.
#html_short_title = None

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None

# The name of an image file (within the static path) to use as favicon of the
# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []

# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}

# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}

# If false, no module index is generated.
#html_domain_indices = True

# If false, no index is generated.
#html_use_index = True

# If true, the index is split into individual pages for each letter.
#html_split_index = False

# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True

# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True

# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True

# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it.  The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''

# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None

# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
#   'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
#   'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'

# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}

# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'

# Output file base name for HTML help builder.
htmlhelp_basename = 'angrdoc'
# -- Options for LaTeX output ---------------------------------------------

latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #'papersize': 'letterpaper',

    # The font size ('10pt', '11pt' or '12pt').
    #'pointsize': '10pt',

    # Additional stuff for the LaTeX preamble.
    #'preamble': '',

    # Latex figure (float) alignment
    #'figure_align': 'htbp',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    (master_doc, 'angr.tex', u'angr Documentation',
     u'The angr project', 'manual'),
]

# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None

# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False

# If true, show page references after internal links.
#latex_show_pagerefs = False

# If true, show URL addresses after external links.
#latex_show_urls = False

# Documents to append as an appendix to all manuals.
#latex_appendices = []

# If false, no module index is generated.
#latex_domain_indices = True

# -- Options for manual page output ---------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
# Section 3 is the "library functions" man section.
man_pages = [
    ('angr', 'angr', u'angr analysis module', [author], 3),
    ('simuvex', 'simuvex', u'angr emulation and instrumentation module', [author], 3),
    ('claripy', 'claripy', u'angr numerical and constraint-solving module', [author], 3),
    ('cle', 'cle', u'angr binary loader', [author], 3),
    ('pyvex', 'pyvex', u'angr binary translator', [author], 3),
    ('archinfo', 'archinfo', u'angr architecture information repository', [author], 3),
]

# If true, show URL addresses after external links.
#man_show_urls = False

# -- Options for Texinfo output -------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
# NOTE(review): 'One line description of project.' is the sphinx-quickstart
# placeholder and should be replaced with a real description.
texinfo_documents = [
    (master_doc, 'angr', u'angr Documentation',
     author, 'angr', 'One line description of project.',
     'Miscellaneous'),
]

# Documents to append as an appendix to all manuals.
#texinfo_appendices = []

# If false, no module index is generated.
#texinfo_domain_indices = True

# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'

# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
| 32.820189 | 89 | 0.703287 |
ace344a062cb91f30e4d4e86420a6acd1371f0f1 | 1,133 | py | Python | examples/proxy_smtp_port.py | askvrtsv/ProxyBroker | fe8e8be00685a34d7235903ebe25ea8ab1f99cc5 | [
"Apache-2.0"
] | 3,157 | 2015-12-29T18:50:51.000Z | 2022-03-31T09:53:14.000Z | examples/proxy_smtp_port.py | askvrtsv/ProxyBroker | fe8e8be00685a34d7235903ebe25ea8ab1f99cc5 | [
"Apache-2.0"
] | 177 | 2015-12-04T03:07:20.000Z | 2022-03-19T19:06:07.000Z | examples/proxy_smtp_port.py | askvrtsv/ProxyBroker | fe8e8be00685a34d7235903ebe25ea8ab1f99cc5 | [
"Apache-2.0"
] | 859 | 2016-01-07T04:17:55.000Z | 2022-03-24T14:40:32.000Z | """Find 10 working proxies supporting CONNECT method
to 25 port (SMTP) and save them to a file."""
import asyncio
from proxybroker import Broker
async def save(proxies, filename):
    """Drain the *proxies* queue until a ``None`` sentinel arrives,
    writing each proxy to *filename* as one ``smtp://host:port`` line."""
    with open(filename, 'w') as out:
        proxy = await proxies.get()
        while proxy is not None:
            out.write('smtp://{}:{}\n'.format(proxy.host, proxy.port))
            proxy = await proxies.get()
def main():
    """Find 10 working proxies that support CONNECT to port 25 (SMTP)
    and write them to proxies.txt."""
    found = asyncio.Queue()
    broker = Broker(found, judges=['smtp://smtp.gmail.com'], max_tries=1)

    # Check proxy in spam databases (DNSBL). By default is disabled.
    # more databases: http://www.dnsbl.info/dnsbl-database-check.php
    dnsbl = [
        'bl.spamcop.net',
        'cbl.abuseat.org',
        'dnsbl.sorbs.net',
        'zen.spamhaus.org',
        'bl.mcafee.com',
        'spam.spamrats.com',
    ]

    # Run the search and the file writer concurrently; save() stops when
    # the broker puts its terminating None into the queue.
    work = asyncio.gather(
        broker.find(types=['CONNECT:25'], dnsbl=dnsbl, limit=10),
        save(found, filename='proxies.txt'),
    )
    asyncio.get_event_loop().run_until_complete(work)


if __name__ == '__main__':
    main()
| 25.75 | 75 | 0.597529 |
ace34578e7144ec46c286aeb3121cf8a694ce742 | 7,013 | py | Python | tests/app/api_v0/test_subject.py | AspectLeft/server | fb40c4f8591659617332bd936195e7216bf404bb | [
"BSD-3-Clause"
] | null | null | null | tests/app/api_v0/test_subject.py | AspectLeft/server | fb40c4f8591659617332bd936195e7216bf404bb | [
"BSD-3-Clause"
] | null | null | null | tests/app/api_v0/test_subject.py | AspectLeft/server | fb40c4f8591659617332bd936195e7216bf404bb | [
"BSD-3-Clause"
] | null | null | null | from pathlib import Path
import orjson.orjson
from redis import Redis
from sqlalchemy.orm import Session
from starlette.testclient import TestClient
from pol import sa, config
from pol.db.tables import ChiiSubjectField
# Directory holding on-disk test fixtures next to this module.
fixtures_path = Path(__file__).parent.joinpath("fixtures")


def test_subject_not_found(client: TestClient):
    # A subject id beyond the fixture data must 404 with a JSON body.
    response = client.get("/v0/subjects/2000000")
    assert response.status_code == 404
    assert response.headers["content-type"] == "application/json"


def test_subject_not_valid(client: TestClient):
    # Non-integer and non-positive subject ids are rejected with 422.
    response = client.get("/v0/subjects/hello")
    assert response.status_code == 422
    assert response.headers["content-type"] == "application/json"

    response = client.get("/v0/subjects/0")
    assert response.status_code == 422
    assert response.headers["content-type"] == "application/json"


def test_subject_basic(client: TestClient):
    # Basic payload sanity: id, name and nsfw flag for a known subject.
    response = client.get("/v0/subjects/2")
    assert response.status_code == 200
    assert response.headers["content-type"] == "application/json"

    data = response.json()
    assert data["id"] == 2
    assert data["name"] == "坟场"
    assert not response.json()["nsfw"]


def test_subject_locked(client: TestClient):
    # Subject 2 is locked in the fixtures; the flag must be exposed.
    response = client.get("/v0/subjects/2")
    assert response.status_code == 200
    assert response.headers["content-type"] == "application/json"

    data = response.json()
    assert data["locked"]


def test_subject_nsfw_auth_200(client: TestClient, auth_header):
    """authorized 200 nsfw subject"""
    response = client.get("/v0/subjects/16", headers=auth_header)
    assert response.status_code == 200
    assert response.headers["content-type"] == "application/json"


def test_subject_redirect(client: TestClient):
    # A merged subject answers with a cacheable 307 to the canonical id.
    response = client.get("/v0/subjects/18", allow_redirects=False)
    assert response.status_code == 307
    assert response.headers["location"] == "/v0/subjects/19"
    assert response.headers["cache-control"] == "public, max-age=300"
def test_subject_empty_image(client: TestClient, mock_subject):
    # A subject without a cover image reports images as null.
    mock_subject(200)
    response = client.get("/v0/subjects/200")
    assert response.status_code == 200
    data = response.json()
    assert data["images"] is None


def test_subject_ep_query_limit_offset(client: TestClient):
    response = client.get("/v0/episodes", params={"subject_id": 8, "limit": 5})
    assert response.status_code == 200
    data = response.json()["data"]
    assert isinstance(data, list)
    assert len(data) == 5
    ids = [x["id"] for x in data]

    # Shifting the window by one must reproduce the tail of the first page.
    new_data = client.get(
        "/v0/episodes", params={"subject_id": 8, "limit": 4, "offset": 1}
    ).json()["data"]
    assert ids[1:] == [x["id"] for x in new_data]


def test_subject_ep_type(client: TestClient):
    # Filtering by episode type returns exactly the matching fixture ids.
    response = client.get("/v0/episodes", params={"type": 3, "subject_id": 253})
    assert response.status_code == 200
    data = response.json()["data"]
    assert [x["id"] for x in data] == [103233, 103234, 103235]


def test_subject_characters(client: TestClient):
    response = client.get("/v0/subjects/8/characters")
    assert response.status_code == 200
    data = response.json()
    assert isinstance(data, list)
    assert data


def test_subject_persons(client: TestClient):
    response = client.get("/v0/subjects/4/persons")
    assert response.status_code == 200
    data = response.json()
    assert isinstance(data, list)
    assert data


def test_subject_subjects_ban(client: TestClient):
    # Related subjects of a banned subject are hidden behind a 404.
    response = client.get("/v0/subjects/5/subjects")
    assert response.status_code == 404


def test_subject_subjects(client: TestClient):
    response = client.get("/v0/subjects/11/subjects")
    assert response.status_code == 200
    data = response.json()
    assert isinstance(data, list)
    assert data
def test_subject_cache_broken_purge(client: TestClient, redis_client: Redis):
    # Seed the cache with a record that does not match the response model;
    # the endpoint must discard it and rebuild the entry from the database.
    cache_key = config.CACHE_KEY_PREFIX + "subject:1"
    redis_client.set(cache_key, orjson.dumps({"id": 10, "test": "1"}))
    response = client.get("/v0/subjects/1")
    assert response.status_code == 200, "broken cache should be purged"
    # The rebuilt cache entry matches the response and the bogus key is gone.
    in_cache = orjson.loads(redis_client.get(cache_key))
    assert response.json()["name"] == in_cache["name"]
    assert "test" not in in_cache
def test_subject_tags(client: TestClient):
    # Tags are returned with their usage counts, ordered as stored.
    response = client.get("/v0/subjects/2")
    assert response.json()["tags"] == [
        {"name": "陈绮贞", "count": 9},
        {"name": "中配", "count": 1},
        {"name": "银魂中配", "count": 1},
        {"name": "神还原", "count": 1},
        {"name": "冷泉夜月", "count": 1},
        {"name": "银他妈", "count": 1},
        {"name": "陈老师", "count": 1},
        {"name": "银魂", "count": 1},
        {"name": "治愈系", "count": 1},
        {"name": "恶搞", "count": 1},
    ]


def test_subject_tags_empty(client: TestClient, mock_subject):
    # A freshly mocked subject has no tags at all.
    sid = 15234523
    mock_subject(sid)
    response = client.get(f"/v0/subjects/{sid}")
    assert response.json()["tags"] == []


def test_subject_tags_none(client: TestClient, mock_subject, db_session: Session):
    """
    should exclude a tag if name is None.

    todo: can count be None too?
    """
    sid = 15234524
    mock_subject(sid)
    # Raw serialized tag blob (fixture captured from subject 2585) that
    # contains a tag whose name is None.
    field_tags_with_none_val = (
        fixtures_path.joinpath("subject_2585_tags.txt").read_bytes().strip()
    )
    db_session.execute(
        sa.update(ChiiSubjectField)
        .where(ChiiSubjectField.field_sid == sid)
        .values(field_tags=field_tags_with_none_val)
    )
    db_session.commit()
    response = client.get(f"/v0/subjects/{sid}")
    # The None-named tag must be filtered out of the response.
    assert response.json()["tags"] == [
        {"name": "炮姐", "count": 1956},
        {"name": "超电磁炮", "count": 1756},
        {"name": "J.C.STAFF", "count": 1746},
        {"name": "御坂美琴", "count": 1367},
        {"name": "百合", "count": 1240},
        {"name": "2009年10月", "count": 917},
        {"name": "bilibili", "count": 795},
        {"name": "TV", "count": 709},
        {"name": "黑子", "count": 702},
        {"name": "科学超电磁炮", "count": 621},
        {"name": "魔法禁书目录", "count": 518},
        {"name": "2009", "count": 409},
        {"name": "漫画改", "count": 288},
        {"name": "傲娇娘", "count": 280},
        {"name": "校园", "count": 156},
        {"name": "战斗", "count": 144},
        {"name": "长井龙雪", "count": 123},
        {"name": "漫改", "count": 110},
        {"name": "姐控", "count": 107},
        {"name": "轻小说改", "count": 93},
        {"name": "科幻", "count": 82},
        {"name": "超能力", "count": 73},
        {"name": "日常", "count": 58},
        {"name": "奇幻", "count": 54},
        {"name": "豊崎愛生", "count": 53},
        {"name": "長井龍雪", "count": 47},
        {"name": "某科学的超电磁炮", "count": 47},
        {"name": "佐藤利奈", "count": 38},
        {"name": "新井里美", "count": 34},
    ]
def test_subject_cache_header_public(client: TestClient, redis_client: Redis):
    """A plain subject response is publicly cacheable and not NSFW."""
    response = client.get("/v0/subjects/1")
    # FIX: the previous failure message ("broken cache should be purged")
    # was copy-pasted from test_subject_cache_broken_purge and described a
    # different scenario; drop it so failures are not misleading.
    assert response.status_code == 200
    assert response.headers["cache-control"] == "public, max-age=300"
    assert not response.json()["nsfw"]
| 31.877273 | 82 | 0.627264 |
ace345cac4642df7e486124136a971a8e3a3218b | 2,819 | py | Python | tools/ota_e2e_tests/aws_ota_test/aws_ota_test_case_back_to_back_downloads.py | ictk-solution-dev/amazon-freertos | cc76512292ddfb70bba3030dbcb740ef3c6ead8b | [
"MIT"
] | 2 | 2020-06-23T08:05:58.000Z | 2020-06-24T01:25:51.000Z | tools/ota_e2e_tests/aws_ota_test/aws_ota_test_case_back_to_back_downloads.py | LibreWireless/amazon-freertos-uno | 2ddb5c0ac906e4ab5340062641776f44e0f1d67d | [
"MIT"
] | 2 | 2022-03-29T05:16:50.000Z | 2022-03-29T05:16:50.000Z | tools/ota_e2e_tests/aws_ota_test/aws_ota_test_case_back_to_back_downloads.py | ictk-solution-dev/amazon-freertos | cc76512292ddfb70bba3030dbcb740ef3c6ead8b | [
"MIT"
] | null | null | null | """
FreeRTOS
Copyright (C) 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
http://aws.amazon.com/freertos
http://www.FreeRTOS.org
"""
from .aws_ota_test_case import OtaTestCase
from .aws_ota_test_result import OtaTestResult
class OtaTestBackToBackDownloads(OtaTestCase):
is_positive = True
def __buildAndOtaInputVersion(self, x, y, z):
# Build x.y.z for download
self._otaProject.setApplicationVersion(x, y, z)
# Build the OTA image.
self._otaProject.buildProject()
# Start an OTA Update.
otaUpdateId = self._otaAwsAgent.quickCreateOtaUpdate(self._otaConfig, [self._protocol])
# Poll on completion
jobStatus, summary = self._otaAwsAgent.pollOtaUpdateCompletion(otaUpdateId, self._otaConfig['ota_timeout_sec'])
return jobStatus, summary
def run(self):
# Build 0.9.1 for download
jobStatus, summary = self.__buildAndOtaInputVersion(0, 9, 1)
if jobStatus.status != 'SUCCEEDED':
return OtaTestResult.testResultFromJobStatus(self.getName(), jobStatus, self._positive, summary)
# Build 0.9.2 for download
jobStatus, summary = self.__buildAndOtaInputVersion(0, 9, 2)
if jobStatus.status != 'SUCCEEDED':
return OtaTestResult.testResultFromJobStatus(self.getName(), jobStatus, self._positive, summary)
# Build 0.9.3 for download
jobStatus, summary = self.__buildAndOtaInputVersion(0, 9, 3)
if jobStatus.status != 'SUCCEEDED':
return OtaTestResult.testResultFromJobStatus(self.getName(), jobStatus, self._positive, summary)
return OtaTestResult.testResultFromJobStatus(self.getName(), jobStatus, self._positive, summary)
| 46.213115 | 120 | 0.726144 |
ace345f1c83715f3a05b9919150c4d200305d7b3 | 9,024 | py | Python | tensorflow/python/kernel_tests/sparse_xent_op_test.py | jylinman/tensorflow | 5248d111c3aeaf9f560cd77bff0f183f38e31e0b | [
"Apache-2.0"
] | 2 | 2016-03-15T16:14:49.000Z | 2016-07-07T16:16:05.000Z | tensorflow/python/kernel_tests/sparse_xent_op_test.py | jylinman/tensorflow | 5248d111c3aeaf9f560cd77bff0f183f38e31e0b | [
"Apache-2.0"
] | null | null | null | tensorflow/python/kernel_tests/sparse_xent_op_test.py | jylinman/tensorflow | 5248d111c3aeaf9f560cd77bff0f183f38e31e0b | [
"Apache-2.0"
] | 2 | 2018-01-22T05:57:45.000Z | 2018-09-24T09:48:22.000Z | # Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for SparseSoftmaxCrossEntropyWithLogits op."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
import time
import numpy as np
import tensorflow as tf
from tensorflow.python.client import graph_util
from tensorflow.python.ops import sparse_ops
class SparseXentTest(tf.test.TestCase):
  """Checks SparseSoftmaxCrossEntropyWithLogits against a NumPy reference."""

  def _npXent(self, features, labels):
    # NumPy reference implementation; returns (per-example loss, backprop).
    batch_dim = 0
    class_dim = 1
    batch_size = features.shape[batch_dim]
    # Shift by the per-row max before exponentiating for numerical stability.
    e = np.exp(features -
               np.reshape(np.amax(features, axis=class_dim), [batch_size, 1]))
    probs = e / np.reshape(np.sum(e, axis=class_dim), [batch_size, 1])
    labels_mat = np.zeros_like(probs).astype(probs.dtype)
    labels_mat[np.arange(batch_size), labels] = 1.0
    bp = (probs - labels_mat)
    # The 1e-20 epsilon guards against log(0).
    l = -np.sum(labels_mat * np.log(probs + 1.0e-20), axis=1)
    return l, bp

  def _testXent(self, np_features, np_labels, use_gpu=False):
    # Runs the TF op and compares both loss and backprop with the reference.
    np_loss, np_backprop = self._npXent(np_features, np_labels)
    with self.test_session(use_gpu=use_gpu) as sess:
      loss = tf.nn.sparse_softmax_cross_entropy_with_logits(
          np_features, np_labels)
      # The op's second output holds the gradient w.r.t. the logits.
      backprop = loss.op.outputs[1]
      tf_loss, tf_backprop = sess.run([loss, backprop])
    self.assertAllClose(np_loss, tf_loss)
    self.assertAllClose(np_backprop, tf_backprop)

  def _testAll(self, features, labels):
    # Exercises both the CPU and GPU code paths.
    self._testXent(features, labels, use_gpu=False)
    self._testXent(features, labels, use_gpu=True)

  def _testSingleClass(self, use_gpu=False):
    with self.test_session(use_gpu=use_gpu) as sess:
      loss = tf.nn.sparse_softmax_cross_entropy_with_logits(
          np.array([[1.], [-1.], [0.]]).astype(np.float32),
          np.array([1, 1, 1]).astype(np.int64))
      backprop = loss.op.outputs[1]
      tf_loss, tf_backprop = sess.run([loss, backprop])
    # loss = -1.0*log(1.0), 1.0*log(1.0), 0.0*log(0.0) == 1.0
    self.assertAllClose([0.0, 0.0, 0.0], tf_loss)
    self.assertAllClose([[2.0], [1.0], [0.0]], tf_backprop)

  # def testSingleClass(self):
  #   self._testSingleClass(True)
  #   self._testSingleClass(False)

  def testRankTooLarge(self):
    # Rank-3 logits must be rejected with a clear error.
    np_features = np.array(
        [[[1., 1., 1., 1.]], [[1., 2., 3., 4.]]]).astype(np.float32)
    np_labels = np.array([1, 2]).astype(np.int64)
    self.assertRaisesRegexp(
        ValueError, "must have rank 2",
        tf.nn.sparse_softmax_cross_entropy_with_logits, np_features, np_labels)

  def testNpXent(self):
    # We create 2 batches of logits for testing.
    # batch 0 is the boring uniform distribution: 1, 1, 1, 1, with target 3.
    # batch 1 has a bit of difference: 1, 2, 3, 4, with target 0.
    features = [[1., 1., 1., 1.], [1., 2., 3., 4.]]
    labels = [3, 0]

    # For batch 0, we expect the uniform distribution: 0.25, 0.25, 0.25, 0.25
    # With a hard target 3, the backprop is [0.25, 0.25, 0.25, -0.75]
    # The loss for this batch is -log(0.25) = 1.386
    #
    # For batch 1, we have:
    # exp(0) = 1
    # exp(1) = 2.718
    # exp(2) = 7.389
    # exp(3) = 20.085
    # SUM = 31.192
    # So we have as probabilities:
    # exp(0) / SUM = 0.032
    # exp(1) / SUM = 0.087
    # exp(2) / SUM = 0.237
    # exp(3) / SUM = 0.644
    # With a hard 1, the backprop is [0.032 - 1.0 = -0.968, 0.087, 0.237, 0.644]
    # The loss for this batch is [1.0 * -log(0.25), 1.0 * -log(0.032)]
    # = [1.3862, 3.4420]
    np_loss, np_backprop = self._npXent(
        np.array(features), np.array(labels, dtype=np.int64))
    self.assertAllClose(np.array([[0.25, 0.25, 0.25, -0.75],
                                  [-0.968, 0.087, 0.237, 0.6439]]),
                        np_backprop,
                        rtol=1.e-3, atol=1.e-3)
    self.assertAllClose(np.array([1.3862, 3.4420]), np_loss,
                        rtol=1.e-3, atol=1.e-3)

  def testShapeMismatch(self):
    # 2-D label tensor does not match rank-2 logits + rank-1 labels contract.
    with self.test_session():
      with self.assertRaises(ValueError):
        tf.nn.sparse_softmax_cross_entropy_with_logits(
            [[0., 1.], [2., 3.]], [[0, 2]])

  def testNotMatrix(self):
    with self.test_session():
      with self.assertRaises(ValueError):
        tf.nn.sparse_softmax_cross_entropy_with_logits(
            [0., 1., 2., 3.], [0, 2])

  def testFloat(self):
    self._testAll(
        np.array([[1., 1., 1., 1.], [1., 2., 3., 4.]]).astype(np.float32),
        np.array([3, 0]).astype(np.int64))

  def testDouble(self):
    # NOTE(review): only exercised with use_gpu=False — presumably the
    # float64 kernel is CPU-only; confirm before enabling on GPU.
    self._testXent(
        np.array([[1., 1., 1., 1.], [1., 2., 3., 4.]]).astype(np.float64),
        np.array([0, 3]).astype(np.int64),
        use_gpu=False)

  def testGradient(self):
    # Numerically checks the registered gradient of the fused op.
    with self.test_session():
      l = tf.constant([3, 0, 1], dtype=tf.int64, name="l")
      f = tf.constant([0.1, 0.2, 0.3, 0.4,
                       0.1, 0.4, 0.9, 1.6,
                       0.1, 0.8, 2.7, 6.4], shape=[3, 4],
                      dtype=tf.float64, name="f")
      x = tf.nn.sparse_softmax_cross_entropy_with_logits(f, l, name="xent")
      err = tf.test.compute_gradient_error(f, [3, 4], x, [3])
    print("cross entropy gradient err = ", err)
    self.assertLess(err, 5e-8)
def _sparse_vs_dense_xent_benchmark_dense(labels, logits):
  """Builds the dense benchmark graph: sparse_to_dense + dense xent.

  Returns a (total cross-entropy, gradient w.r.t. logits) pair of ops.
  """
  labels = tf.identity(labels)
  logits = tf.identity(logits)
  with tf.device("/cpu:0"):  # Sparse-to-dense must be on CPU
    batch_size = tf.shape(logits)[0]
    num_entries = tf.shape(logits)[1]
    length = batch_size * num_entries
    # Offset each row's label into a flat index space before scattering
    # a one-hot target matrix.
    labels += num_entries * tf.range(batch_size)
    target = sparse_ops.sparse_to_dense(
        labels, tf.pack([length]), 1.0, 0.0)
  target = tf.reshape(target, tf.pack([-1, num_entries]))
  crossent = tf.nn.softmax_cross_entropy_with_logits(
      logits, target, name="SequenceLoss/CrossEntropy")
  crossent_sum = tf.reduce_sum(crossent)
  grads = tf.gradients([crossent_sum], [logits])[0]
  return (crossent_sum, grads)
def _sparse_vs_dense_xent_benchmark_sparse(labels, logits):
  """Builds the sparse benchmark graph using the fused sparse xent op.

  Returns a (total cross-entropy, gradient w.r.t. logits) pair of ops.
  """
  # The fused op takes integer class ids directly (no one-hot target).
  sparse_labels = tf.identity(labels.astype(np.int64))
  dense_logits = tf.identity(logits)
  crossent = tf.nn.sparse_softmax_cross_entropy_with_logits(
      dense_logits, sparse_labels, name="SequenceLoss/CrossEntropy")
  total = tf.reduce_sum(crossent)
  grad = tf.gradients([total], [dense_logits])[0]
  return (total, grad)
def sparse_vs_dense_xent_benchmark(batch_size, num_entries, use_gpu):
  """Times the dense vs. sparse xent graphs and prints one table row."""
  config = tf.ConfigProto()
  config.allow_soft_placement = True
  # Cap GPU memory so several benchmark sessions can coexist.
  config.gpu_options.per_process_gpu_memory_fraction = 0.3
  labels = np.random.randint(num_entries, size=batch_size).astype(np.int32)
  logits = np.random.randn(batch_size, num_entries).astype(np.float32)

  def _timer(sess, ops):
    # Warm in
    for _ in range(20):
      sess.run(ops)

    # Timing run
    start = time.time()
    for _ in range(20):
      sess.run(ops)
    end = time.time()

    return (end - start)/20.0  # Average runtime per iteration

  # Using sparse_to_dense and softmax_cross_entropy_with_logits
  with tf.Session(config=config) as sess:
    if not use_gpu:
      with tf.device(graph_util.pin_to_cpu):
        ops = _sparse_vs_dense_xent_benchmark_dense(labels, logits)
    else:
      ops = _sparse_vs_dense_xent_benchmark_dense(labels, logits)
    delta_dense = _timer(sess, ops)

  # Using sparse_softmax_cross_entropy_with_logits
  with tf.Session(config=config) as sess:
    if not use_gpu:
      with tf.device(graph_util.pin_to_cpu):
        ops = _sparse_vs_dense_xent_benchmark_sparse(labels, logits)
    else:
      ops = _sparse_vs_dense_xent_benchmark_sparse(labels, logits)
    delta_sparse = _timer(sess, ops)

  # Columns: batch, depth, gpu, dt(dense), dt(sparse), ratio.
  print(
      "%d \t %d \t %s \t %f \t %f \t %f"
      % (batch_size, num_entries, use_gpu, delta_dense, delta_sparse,
         delta_sparse/delta_dense))
def main(_):
  """Prints the benchmark table over a grid of shapes and devices."""
  print("Sparse Xent vs. SparseToDense + Xent")
  print("batch \t depth \t gpu \t dt(dense) \t dt(sparse) "
        "\t dt(sparse)/dt(dense)")
  for use_gpu in (False, True):
    for batch_size in (32, 64, 128):
      for num_entries in (100, 1000, 10000):
        sparse_vs_dense_xent_benchmark(
            batch_size, num_entries, use_gpu)
    # Two extra large-vocabulary shapes beyond the grid above.
    sparse_vs_dense_xent_benchmark(
        32, 100000, use_gpu)
    sparse_vs_dense_xent_benchmark(
        8, 1000000, use_gpu)


if __name__ == "__main__":
  # With --benchmarks, run the benchmark via tf.app.run(); otherwise run
  # the unit tests.
  if "--benchmarks" in sys.argv:
    sys.argv.remove("--benchmarks")
    tf.app.run()
  else:
    tf.test.main()
| 36.096 | 80 | 0.644393 |
ace345f7722fb0b8edfe30012afca37e16be53fd | 3,047 | py | Python | tools/datasets_convert/voc_aug.py | UESTC-Liuxin/SkmtSeg | 1251de57fae967aca395644d1c70a9ba0bb52271 | [
"Apache-2.0"
] | 2 | 2020-12-22T08:40:05.000Z | 2021-03-30T08:09:44.000Z | tools/datasets_convert/voc_aug.py | UESTC-Liuxin/SkmtSeg | 1251de57fae967aca395644d1c70a9ba0bb52271 | [
"Apache-2.0"
] | null | null | null | tools/datasets_convert/voc_aug.py | UESTC-Liuxin/SkmtSeg | 1251de57fae967aca395644d1c70a9ba0bb52271 | [
"Apache-2.0"
] | null | null | null | import argparse
import os.path as osp
from functools import partial
import numpy as np
from PIL import Image
from scipy.io import loadmat
AUG_LEN = 10582
def convert_mat(mat_file, in_dir, out_dir):
    """Convert one SBD ``.mat`` annotation into a PNG label map."""
    mat = loadmat(osp.join(in_dir, mat_file))
    # The class segmentation lives inside the GTcls struct of the .mat file.
    seg = mat['GTcls'][0]['Segmentation'][0].astype(np.uint8)
    out_path = osp.join(out_dir, mat_file.replace('.mat', '.png'))
    Image.fromarray(seg).save(out_path, 'PNG')
def generate_aug_list(merged_list, excluded_list):
    """Return the items of *merged_list* that are absent from *excluded_list*.

    Duplicates are collapsed and ordering is not preserved (set semantics),
    exactly as in the original implementation.
    """
    return list(set(merged_list).difference(excluded_list))
def parse_args():
    """Parse command-line options for the VOC aug conversion script."""
    parser = argparse.ArgumentParser(
        description='Convert PASCAL VOC annotations to mmsegmentation format')
    parser.add_argument('devkit_path', help='pascal voc devkit path')
    parser.add_argument('aug_path', help='pascal voc aug path')
    parser.add_argument('-o', '--out_dir', help='output path')
    parser.add_argument(
        '--nproc', default=1, type=int, help='number of process')
    return parser.parse_args()
def main():
    # Convert SBD .mat annotations to PNG and regenerate the trainaug/aug
    # split files for PASCAL VOC 2012.
    args = parse_args()
    devkit_path = args.devkit_path
    aug_path = args.aug_path
    nproc = args.nproc
    if args.out_dir is None:
        out_dir = osp.join(devkit_path, 'VOC2012', 'SegmentationClassAug')
    else:
        out_dir = args.out_dir
    # NOTE(review): mmcv is used here but never imported at module top —
    # the script needs `import mmcv` to run.
    mmcv.mkdir_or_exist(out_dir)
    in_dir = osp.join(aug_path, 'dataset', 'cls')
    # Convert every .mat annotation in parallel.
    mmcv.track_parallel_progress(
        partial(convert_mat, in_dir=in_dir, out_dir=out_dir),
        list(mmcv.scandir(in_dir, suffix='.mat')),
        nproc=nproc)

    # All image ids that appear in the SBD train/val splits.
    full_aug_list = []
    with open(osp.join(aug_path, 'dataset', 'train.txt')) as f:
        full_aug_list += [line.strip() for line in f]
    with open(osp.join(aug_path, 'dataset', 'val.txt')) as f:
        full_aug_list += [line.strip() for line in f]

    with open(
            osp.join(devkit_path, 'VOC2012/ImageSets/Segmentation',
                     'train.txt')) as f:
        ori_train_list = [line.strip() for line in f]
    with open(
            osp.join(devkit_path, 'VOC2012/ImageSets/Segmentation',
                     'val.txt')) as f:
        val_list = [line.strip() for line in f]

    # trainaug = (original train + SBD) minus the VOC validation ids.
    aug_train_list = generate_aug_list(ori_train_list + full_aug_list,
                                       val_list)
    assert len(aug_train_list) == AUG_LEN, 'len(aug_train_list) != {}'.format(
        AUG_LEN)

    with open(
            osp.join(devkit_path, 'VOC2012/ImageSets/Segmentation',
                     'trainaug.txt'), 'w') as f:
        f.writelines(line + '\n' for line in aug_train_list)

    # aug = SBD ids that are in neither the VOC train nor val splits.
    aug_list = generate_aug_list(full_aug_list, ori_train_list + val_list)
    assert len(aug_list) == AUG_LEN - len(
        ori_train_list), 'len(aug_list) != {}'.format(AUG_LEN -
                                                      len(ori_train_list))
    with open(
            osp.join(devkit_path, 'VOC2012/ImageSets/Segmentation', 'aug.txt'),
            'w') as f:
        f.writelines(line + '\n' for line in aug_list)

    print('Done!')


if __name__ == '__main__':
    main()
| 33.483516 | 79 | 0.629143 |
ace347a5cc37245890ac9df369c5a63dcde52cc5 | 280 | py | Python | adv/julietta.py | hcc123915/dl | 7425e5271b72323dce117f6a6a85e1dde4941a16 | [
"Apache-2.0"
] | null | null | null | adv/julietta.py | hcc123915/dl | 7425e5271b72323dce117f6a6a85e1dde4941a16 | [
"Apache-2.0"
] | null | null | null | adv/julietta.py | hcc123915/dl | 7425e5271b72323dce117f6a6a85e1dde4941a16 | [
"Apache-2.0"
] | null | null | null | import adv_test
import adv
def module():
return Julietta
class Julietta(adv.Adv):
comment = 'do not use fs'
pass
if __name__ == '__main__':
conf = {}
conf['acl'] = """
`s1
`s3,seq=4
"""
adv_test.test(module(), conf, verbose=0)
| 14 | 44 | 0.546429 |
ace348497f4737ec48e42efe14a8e180afa629c8 | 1,605 | py | Python | OpenGLWrapper_JE/venv/Lib/site-packages/OpenGL/GLES2/EXT/draw_buffers_indexed.py | JE-Chen/je_old_repo | a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5 | [
"MIT"
] | null | null | null | OpenGLWrapper_JE/venv/Lib/site-packages/OpenGL/GLES2/EXT/draw_buffers_indexed.py | JE-Chen/je_old_repo | a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5 | [
"MIT"
] | null | null | null | OpenGLWrapper_JE/venv/Lib/site-packages/OpenGL/GLES2/EXT/draw_buffers_indexed.py | JE-Chen/je_old_repo | a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5 | [
"MIT"
] | null | null | null | '''OpenGL extension EXT.draw_buffers_indexed
This module customises the behaviour of the
OpenGL.raw.GLES2.EXT.draw_buffers_indexed to provide a more
Python-friendly API
Overview (from the spec)
This extension builds upon the EXT_draw_buffers extension.
In EXT_draw_buffers (part of OpenGL ES 3.0), separate values could
be written to each color buffer, but the blend enable, blend functions,
blend equations and color write masks are global and apply to all color
outputs.
This extension provides the ability to independently
* enable or disable blending,
* set the blend equations,
* set the blend functions, and
* set the color write masks
per color output.
This extension introduces indexed versions of the enable,
blend equation, blend function, and color mask commands, as
well as associated indexed queries in order to control and
query these states independently on a per-color output basis.
The official definition of this extension is available here:
http://www.opengl.org/registry/specs/EXT/draw_buffers_indexed.txt
'''
from OpenGL import platform, constant, arrays
from OpenGL import extensions, wrapper
import ctypes
from OpenGL.raw.GLES2 import _types, _glgets
from OpenGL.raw.GLES2.EXT.draw_buffers_indexed import *
from OpenGL.raw.GLES2.EXT.draw_buffers_indexed import _EXTENSION_NAME
def glInitDrawBuffersIndexedEXT():
'''Return boolean indicating whether this extension is available'''
from OpenGL import extensions
return extensions.hasGLExtension( _EXTENSION_NAME )
### END AUTOGENERATED SECTION | 37.325581 | 73 | 0.776324 |
ace3495c5fd7893575e05003f045d6b50afb1295 | 4,522 | py | Python | examples/sdf_renderer.py | ppwwyyxx/taichi | ef0c3367bb06ad78b3457b8f93b5370f14b1d9c4 | [
"MIT"
] | 1 | 2020-08-04T05:43:54.000Z | 2020-08-04T05:43:54.000Z | examples/sdf_renderer.py | ppwwyyxx/taichi | ef0c3367bb06ad78b3457b8f93b5370f14b1d9c4 | [
"MIT"
] | null | null | null | examples/sdf_renderer.py | ppwwyyxx/taichi | ef0c3367bb06ad78b3457b8f93b5370f14b1d9c4 | [
"MIT"
] | null | null | null | import taichi as ti
import time
import math
import numpy as np
ti.init(arch=ti.gpu)
res = 1280, 720
color_buffer = ti.Vector(3, dt=ti.f32, shape=res)
max_ray_depth = 6
eps = 1e-4
inf = 1e10
fov = 0.23
dist_limit = 100
camera_pos = ti.Vector([0.0, 0.32, 3.7])
light_pos = [-1.5, 0.6, 0.3]
light_normal = [1.0, 0.0, 0.0]
light_radius = 2.0
@ti.func
def intersect_light(pos, d):
light_loc = ti.Vector(light_pos)
dot = -ti.dot(d, ti.Vector(light_normal))
dist = ti.dot(d, light_loc - pos)
dist_to_light = inf
if dot > 0 and dist > 0:
D = dist / dot
dist_to_center = (light_loc - (pos + D * d)).norm_sqr()
if dist_to_center < light_radius**2:
dist_to_light = D
return dist_to_light
@ti.func
def out_dir(n):
u = ti.Vector([1.0, 0.0, 0.0])
if abs(n[1]) < 1 - eps:
u = ti.normalized(ti.cross(n, ti.Vector([0.0, 1.0, 0.0])))
v = ti.cross(n, u)
phi = 2 * math.pi * ti.random()
ay = ti.sqrt(ti.random())
ax = ti.sqrt(1 - ay**2)
return ax * (ti.cos(phi) * u + ti.sin(phi) * v) + ay * n
@ti.func
def make_nested(f):
f = f * 40
i = int(f)
if f < 0:
if i % 2 == 1:
f -= ti.floor(f)
else:
f = ti.floor(f) + 1 - f
f = (f - 0.2) / 40
return f
# https://www.iquilezles.org/www/articles/distfunctions/distfunctions.htm
@ti.func
def sdf(o):
wall = min(o[1] + 0.1, o[2] + 0.4)
sphere = (o - ti.Vector([0.0, 0.35, 0.0])).norm() - 0.36
q = (o - ti.Vector([0.8, 0.3, 0])).abs() - ti.Vector([0.3, 0.3, 0.3])
box = ti.Vector([max(0, q[0]), max(0, q[1]),
max(0, q[2])]).norm() + min(q.max(), 0)
O = o - ti.Vector([-0.8, 0.3, 0])
d = ti.Vector([ti.Vector([O[0], O[2]]).norm() - 0.3, abs(O[1]) - 0.3])
cylinder = min(d.max(), 0.0) + ti.Vector([max(0, d[0]),
max(0, d[1])]).norm()
geometry = make_nested(min(sphere, box, cylinder))
geometry = max(geometry, -(0.32 - (o[1] * 0.6 + o[2] * 0.8)))
return min(wall, geometry)
@ti.func
def ray_march(p, d):
j = 0
dist = 0.0
while j < 100 and sdf(p + dist * d) > 1e-6 and dist < inf:
dist += sdf(p + dist * d)
j += 1
return min(inf, dist)
@ti.func
def sdf_normal(p):
d = 1e-3
n = ti.Vector([0.0, 0.0, 0.0])
sdf_center = sdf(p)
for i in ti.static(range(3)):
inc = p
inc[i] += d
n[i] = (1 / d) * (sdf(inc) - sdf_center)
return ti.normalized(n)
@ti.func
def next_hit(pos, d):
closest, normal, c = inf, ti.Vector.zero(ti.f32,
3), ti.Vector.zero(ti.f32, 3)
ray_march_dist = ray_march(pos, d)
if ray_march_dist < dist_limit and ray_march_dist < closest:
closest = ray_march_dist
normal = sdf_normal(pos + d * closest)
hit_pos = pos + d * closest
t = int((hit_pos[0] + 10) * 1.1 + 0.5) % 3
c = ti.Vector(
[0.4 + 0.3 * (t == 0), 0.4 + 0.2 * (t == 1), 0.4 + 0.3 * (t == 2)])
return closest, normal, c
@ti.kernel
def render():
for u, v in color_buffer:
aspect_ratio = res[0] / res[1]
pos = camera_pos
d = ti.Vector([
(2 * fov * (u + ti.random()) / res[1] - fov * aspect_ratio - 1e-5),
2 * fov * (v + ti.random()) / res[1] - fov - 1e-5, -1.0
])
d = ti.normalized(d)
throughput = ti.Vector([1.0, 1.0, 1.0])
depth = 0
hit_light = 0.00
while depth < max_ray_depth:
closest, normal, c = next_hit(pos, d)
depth += 1
dist_to_light = intersect_light(pos, d)
if dist_to_light < closest:
hit_light = 1
depth = max_ray_depth
else:
hit_pos = pos + closest * d
if normal.norm_sqr() != 0:
d = out_dir(normal)
pos = hit_pos + 1e-4 * d
throughput *= c
else:
depth = max_ray_depth
color_buffer[u, v] += throughput * hit_light
gui = ti.GUI('SDF Path Tracer', res)
last_t = 0
for i in range(50000):
render()
interval = 10
if i % interval == 0 and i > 0:
print("{:.2f} samples/s".format(interval / (time.time() - last_t)))
last_t = time.time()
img = color_buffer.to_numpy(as_vector=True) * (1 / (i + 1))
img = img / img.mean() * 0.24
gui.set_image(np.sqrt(img))
gui.show()
| 27.573171 | 79 | 0.500221 |
ace34a4c3a4e17cd226d41fe0af3e88e3731ed17 | 2,347 | py | Python | windowsTerminal2b24.py | Base24/base24-helpers | ad8c9d5e1016774a8ca75be6c7a75f98d6426f20 | [
"MIT"
] | null | null | null | windowsTerminal2b24.py | Base24/base24-helpers | ad8c9d5e1016774a8ca75be6c7a75f98d6426f20 | [
"MIT"
] | null | null | null | windowsTerminal2b24.py | Base24/base24-helpers | ad8c9d5e1016774a8ca75be6c7a75f98d6426f20 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
"""Convert profiles.json to base24 scheme
"""
import os
import sys
import argparse
import yaml
import commentjson
from metprint import (
LogType,
Logger
)
import base24tools
def genBase24s(winTerm):
"""Generate the base24 json object
Args:
winTerm (str): a dictionary representing the source theme
Returns:
dict[]: base24 dicts to write to scheme file
"""
base24s = []
for scheme in winTerm:
base24 = {"author": "WinTerm2B24", "scheme": scheme["name"]}
base24lookup = {
"base00": "background",
"base01": "black", #Black
"base02": "brightBlack", #Bright black
"base05": "foreground", #Bright black
"base06": "white", #White
"base07": "brightWhite", #Bright white
"base08": "red", #Red
"base09": "yellow", #Yellow
"base0A": "brightYellow", #Bright yellow
"base0B": "green", #Green
"base0C": "cyan", #Cyan
"base0D": "blue", #Blue
"base0E": "purple", #Purple
"base12": "brightRed", #Bright red
"base13": "brightYellow", #Bright yellow
"base14": "brightGreen", #Bright green
"base15": "brightCyan", #Bright cyan
"base16": "brightBlue", #Bright blue
"base17": "brightPurple", #Bright purple
}
base24s.append(base24tools.process(base24, base24lookup, scheme, 2, 5))
return base24s
def winTerm2hex(filename):
"""Generate the shemes (without #)
Args:
filename (str): filename from args
Returns:
dict: a dictionary representing the source theme
"""
profiles = commentjson.loads(open(filename).read())
for scheme in profiles["schemes"]:
for colour in scheme:
scheme[colour] = scheme[colour].replace("#", "")
return profiles["schemes"]
def main():
''' Main entry point for cli '''
parser = argparse.ArgumentParser(
description="Convert profiles.json to base24 scheme")
parser.add_argument("file", action="store",
help="profiles.json")
args = parser.parse_args()
# Check for and report level8 errors
if not os.path.isfile(args.file):
Logger().logPrint(args.file + " is not a valid file", LogType.ERROR)
sys.exit(1)
filename = args.file
base24s = genBase24s(winTerm2hex(filename))
for base24 in base24s:
with open(base24["scheme"]+".yaml", "w") as outfile:
Logger().logPrint("writing \"" + base24["scheme"] + "\" to file", LogType.SUCCESS)
yaml.dump(base24, outfile)
if __name__ == '__main__':
main()
| 24.195876 | 85 | 0.680017 |
ace34a8460a199964c7461019cd808a0763349c3 | 708 | py | Python | solutions/832-flipping-an-image.py | lk-hang/leetcode | 4c8735463bdcb9f48666e03a39eb03ee9f625cec | [
"MIT"
] | null | null | null | solutions/832-flipping-an-image.py | lk-hang/leetcode | 4c8735463bdcb9f48666e03a39eb03ee9f625cec | [
"MIT"
] | null | null | null | solutions/832-flipping-an-image.py | lk-hang/leetcode | 4c8735463bdcb9f48666e03a39eb03ee9f625cec | [
"MIT"
] | null | null | null | """
Given a binary matrix A, we want to flip the image horizontally, then invert it, and return the resulting image.
To flip an image horizontally means that each row of the image is reversed. For example, flipping [1, 1, 0] horizontally results in [0, 1, 1].
To invert an image means that each 0 is replaced by 1, and each 1 is replaced by 0. For example, inverting [0, 1, 1] results in [1, 0, 0].
"""
from typing import List
class Solution:
def flipAndInvertImage(self, A: List[List[int]]) -> List[List[int]]:
sol = []
for row in A:
new_row = []
for col in row[::-1]:
new_row.append(1 - col)
sol.append(new_row)
return sol
| 39.333333 | 143 | 0.629944 |
ace34b531ad792ededfdb0297df1439a3497bba6 | 2,266 | py | Python | get_fb_token.py | amagrabi/preference-learning | 4a82675a5b8cf5efa0cb23dec68a70e446b11a7b | [
"Apache-2.0"
] | 1 | 2019-05-27T16:15:46.000Z | 2019-05-27T16:15:46.000Z | get_fb_token.py | amagrabi/preference-learning | 4a82675a5b8cf5efa0cb23dec68a70e446b11a7b | [
"Apache-2.0"
] | null | null | null | get_fb_token.py | amagrabi/preference-learning | 4a82675a5b8cf5efa0cb23dec68a70e446b11a7b | [
"Apache-2.0"
] | null | null | null | # Used from https://github.com/philipperemy/Deep-Learning-Tinder/blob/master/tinder_token.py
import getpass
import re
import requests
import robobrowser
MOBILE_USER_AGENT = "Tinder/7.5.3 (iPhone; iOS 10.3.2; Scale/2.00)"
FB_AUTH = "https://www.facebook.com/v2.6/dialog/oauth?redirect_uri=fb464891386855067%3A%2F%2Fauthorize%2F&display=touch&state=%7B%22challenge%22%3A%22IUUkEUqIGud332lfu%252BMJhxL4Wlc%253D%22%2C%220_auth_logger_id%22%3A%2230F06532-A1B9-4B10-BB28-B29956C71AB1%22%2C%22com.facebook.sdk_client_state%22%3Atrue%2C%223_method%22%3A%22sfvc_auth%22%7D&scope=user_birthday%2Cuser_photos%2Cuser_education_history%2Cemail%2Cuser_relationship_details%2Cuser_friends%2Cuser_work_history%2Cuser_likes&response_type=token%2Csigned_request&default_audience=friends&return_scopes=true&auth_type=rerequest&client_id=464891386855067&ret=login&sdk=ios&logger_id=30F06532-A1B9-4B10-BB28-B29956C71AB1&ext=1470840777&hash=AeZqkIcf-NEW6vBd"
def get_fb_access_token(email, password):
s = robobrowser.RoboBrowser(user_agent=MOBILE_USER_AGENT, parser="lxml")
s.open(FB_AUTH)
f = s.get_form()
f["pass"] = password
f["email"] = email
s.submit_form(f)
f = s.get_form()
try:
s.submit_form(f, submit=f.submit_fields['__CONFIRM__'])
access_token = re.search(
r"access_token=([\w\d]+)", s.response.content.decode()).groups()[0]
return access_token
except Exception as ex:
print("access token could not be retrieved. Check your username and password.")
print("Official error: %s" % ex)
return {"error": "access token could not be retrieved. Check your username and password."}
def get_fb_id(access_token):
if "error" in access_token:
return {"error": "access token could not be retrieved"}
"""Gets facebook ID from access token"""
req = requests.get(
'https://graph.facebook.com/me?access_token=' + access_token)
return req.json()["id"]
if __name__ == '__main__':
email = input('Facebook email: ')
password = getpass.getpass('Facebook password: ')
fb_access_token = get_fb_access_token(email, password)
print(f'Facebook access token: {fb_access_token}')
fb_id = get_fb_id(fb_access_token)
print(f'Facebook id: {fb_id}')
| 48.212766 | 715 | 0.737864 |
ace34c0aaa13a2f29244ef8a890493e1c3632300 | 706 | py | Python | projects/04/test.py | chapnitsky/Nand2Tetris | bef03c5e8f286d377a315c79793d4b49f86af713 | [
"MIT"
] | null | null | null | projects/04/test.py | chapnitsky/Nand2Tetris | bef03c5e8f286d377a315c79793d4b49f86af713 | [
"MIT"
] | null | null | null | projects/04/test.py | chapnitsky/Nand2Tetris | bef03c5e8f286d377a315c79793d4b49f86af713 | [
"MIT"
] | null | null | null | import os
import subprocess
TOOLS_DIR = f'../../tools'
extention = 'bat' if os.name == 'nt' else 'sh'
HWSimulator = os.path.join(os.path.abspath(TOOLS_DIR), f'HardwareSimulator.{extention}')
CPUEmulator = os.path.join(os.path.abspath(TOOLS_DIR), f'CPUEmulator.{extention}')
VMEmulator = os.path.join(os.path.abspath(TOOLS_DIR), f'VMEmulator.{extention}')
success_msg = b"End of script - Comparison ended successfully" + os.linesep.encode('ascii')
chips = ['Mult'#, 'Fill' #Fill.tst needs manual help, can you think of refactoring the test?
]
for chip in chips:
assert subprocess.check_output([CPUEmulator, f'{chip}/{chip}.tst']) == success_msg, (f'Hardware simulator failure on chip {chip}') | 44.125 | 135 | 0.72238 |
ace34ce886683246458e999290adfb92c2741155 | 89 | py | Python | tests/integration.py | Matvey-Kuk/spark-python | 69b8d8c708fd032077dcccb01a8466705b33c4a7 | [
"MIT"
] | null | null | null | tests/integration.py | Matvey-Kuk/spark-python | 69b8d8c708fd032077dcccb01a8466705b33c4a7 | [
"MIT"
] | 102 | 2017-01-30T05:50:10.000Z | 2022-03-07T18:56:23.000Z | tests/integration.py | Matvey-Kuk/cspark-python | 69b8d8c708fd032077dcccb01a8466705b33c4a7 | [
"MIT"
] | null | null | null | import os
from cspark.Updater import Updater
ACCESS_TOKEN = os.environ['ACCESS_TOKEN'] | 14.833333 | 41 | 0.797753 |
ace34dd19ec130f1f4a79a4093aa84c452565482 | 391,642 | py | Python | pysnmp-with-texts/LJ5200-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 8 | 2019-05-09T17:04:00.000Z | 2021-06-09T06:50:51.000Z | pysnmp-with-texts/LJ5200-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 4 | 2019-05-31T16:42:59.000Z | 2020-01-31T21:57:17.000Z | pysnmp-with-texts/LJ5200-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 10 | 2019-04-30T05:51:36.000Z | 2022-02-16T03:33:41.000Z | #
# PySNMP MIB module LJ5200-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/LJ5200-MIB
# Produced by pysmi-0.3.4 at Wed May 1 14:07:54 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
OctetString, Integer, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "OctetString", "Integer", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsUnion, SingleValueConstraint, ConstraintsIntersection, ValueRangeConstraint, ValueSizeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsUnion", "SingleValueConstraint", "ConstraintsIntersection", "ValueRangeConstraint", "ValueSizeConstraint")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
Counter32, Bits, ModuleIdentity, NotificationType, Unsigned32, TimeTicks, Integer32, MibScalar, MibTable, MibTableRow, MibTableColumn, IpAddress, iso, MibIdentifier, Gauge32, ObjectIdentity, Counter64 = mibBuilder.importSymbols("SNMPv2-SMI", "Counter32", "Bits", "ModuleIdentity", "NotificationType", "Unsigned32", "TimeTicks", "Integer32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "IpAddress", "iso", "MibIdentifier", "Gauge32", "ObjectIdentity", "Counter64")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
# Root of the HP enterprise subtree (enterprises.hp = 1.3.6.1.4.1.11).
hp = MibIdentifier((1, 3, 6, 1, 4, 1, 11))
# Base OID for HP's network PML (Printer Management Language) object tree;
# every identifier below hangs off this node.
netPMLmgmt = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2))
class DisplayString(OctetString):
    """MIB-local DisplayString textual convention.

    The LJ5200 MIB redeclares DisplayString as a plain OCTET STRING
    subtype with no extra constraints; this class deliberately shadows
    the DisplayString imported from SNMPv2-TC above, exactly as the
    pysmi compiler emits it.
    """
# --- device subtree (netPMLmgmt.1) -------------------------------------
# Each MibIdentifier below registers one interior OID node; setLabel()
# attaches the hyphenated name used by the original MIB source (Python
# identifiers cannot contain '-').
device = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1))
device_system = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1)).setLabel("device-system")
status_system = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 2)).setLabel("status-system")
test = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 5))
processing_subsystem = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 3)).setLabel("processing-subsystem")
pml = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 3, 4))
control_panel_display = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 2, 65)).setLabel("control-panel-display")
settings_system = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 1)).setLabel("settings-system")
# Interface branch: five SIMM memory-slot nodes, each with two banks.
interface = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4))
simm = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 1))
simm1 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 1, 1))
simm1_bank = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 1, 1, 6)).setLabel("simm1-bank")
simm1_bank1 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 1, 1, 6, 1)).setLabel("simm1-bank1")
simm1_bank2 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 1, 1, 6, 2)).setLabel("simm1-bank2")
simm2 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 1, 2))
simm2_bank = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 1, 2, 6)).setLabel("simm2-bank")
simm2_bank1 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 1, 2, 6, 1)).setLabel("simm2-bank1")
simm2_bank2 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 1, 2, 6, 2)).setLabel("simm2-bank2")
simm3 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 1, 3))
simm3_bank = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 1, 3, 6)).setLabel("simm3-bank")
simm3_bank1 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 1, 3, 6, 1)).setLabel("simm3-bank1")
simm3_bank2 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 1, 3, 6, 2)).setLabel("simm3-bank2")
simm4 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 1, 4))
simm4_bank = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 1, 4, 6)).setLabel("simm4-bank")
simm4_bank1 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 1, 4, 6, 1)).setLabel("simm4-bank1")
simm4_bank2 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 1, 4, 6, 2)).setLabel("simm4-bank2")
simm5 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 1, 5))
simm5_bank = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 1, 5, 6)).setLabel("simm5-bank")
simm5_bank1 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 1, 5, 6, 1)).setLabel("simm5-bank1")
simm5_bank2 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 1, 5, 6, 2)).setLabel("simm5-bank2")
# Job-settings branch under device-system.
job = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6))
settings_job = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 1)).setLabel("settings-job")
operating_system = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 19)).setLabel("operating-system")
# Page-description-language (PDL) branch under processing-subsystem.
pdl = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 3, 3))
settings_pdl = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 3, 3, 1)).setLabel("settings-pdl")
status_pdl = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 3, 3, 2)).setLabel("status-pdl")
background_message = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 2, 37)).setLabel("background-message")
background_message1 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 2, 37, 1)).setLabel("background-message1")
background_message2 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 2, 37, 2)).setLabel("background-message2")
# --- destination subsystem (netPMLmgmt.1.4): the print engine ----------
destination_subsystem = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4)).setLabel("destination-subsystem")
print_engine = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1)).setLabel("print-engine")
menus = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 13))
# --- error log (device-system.11): fifty fixed entry slots -------------
# errorN is node N under errorlog; entries are individually addressed
# rather than modeled as a conceptual SNMP table.
errorlog = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11))
error1 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 1))
error2 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 2))
error3 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 3))
error4 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 4))
error5 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 5))
error6 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 6))
error7 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 7))
error8 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 8))
error9 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 9))
error10 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 10))
error11 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 11))
error12 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 12))
error13 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 13))
error14 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 14))
error15 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 15))
error16 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 16))
error17 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 17))
error18 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 18))
error19 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 19))
error20 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 20))
error21 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 21))
error22 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 22))
error23 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 23))
error24 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 24))
error25 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 25))
error26 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 26))
error27 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 27))
error28 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 28))
error29 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 29))
error30 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 30))
error31 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 31))
error32 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 32))
error33 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 33))
error34 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 34))
error35 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 35))
error36 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 36))
error37 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 37))
error38 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 38))
error39 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 39))
error40 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 40))
error41 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 41))
error42 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 42))
error43 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 43))
error44 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 44))
error45 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 45))
error46 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 46))
error47 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 47))
error48 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 48))
error49 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 49))
error50 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 50))
# --- channel, display, PDL variants, job info, source subsystem --------
channel = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 6))
display = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 2, 20))
display_status = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 2, 20, 1)).setLabel("display-status")
id = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 3))
pdl_pcl = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 3, 3, 3)).setLabel("pdl-pcl")
pdl_postscript = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 3, 3, 4)).setLabel("pdl-postscript")
socket_ping = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 7)).setLabel("socket-ping")
# Active / held print-job status nodes under the job branch.
active_print_jobs = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 2)).setLabel("active-print-jobs")
job_being_parsed = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 2, 1)).setLabel("job-being-parsed")
job_info = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 5)).setLabel("job-info")
job_info_attribute = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 5, 23)).setLabel("job-info-attribute")
job_info_accounting = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 5, 28)).setLabel("job-info-accounting")
held_job = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 7)).setLabel("held-job")
held_job_info = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 7, 1)).setLabel("held-job-info")
held_job_control = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 7, 2)).setLabel("held-job-control")
# Source subsystem (netPMLmgmt.1.2): spooler, I/O ports, MIO slots.
source_subsystem = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 2)).setLabel("source-subsystem")
spooler = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 2, 4))
settings_spooler = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 2, 4, 1)).setLabel("settings-spooler")
pjl = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 3, 5))
mio = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 3))
mio1 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 3, 1))
mio4 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 3, 4))
io = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 2, 1))
settings_io = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 2, 1, 1)).setLabel("settings-io")
ports = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 2, 1, 3))
port1 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 2, 1, 3, 1))
tables = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 7))
# Remote-procedure-call branch under device-system.
remote_procedure_call = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 13)).setLabel("remote-procedure-call")
settings_rpc = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 13, 1)).setLabel("settings-rpc")
status_rpc = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 13, 2)).setLabel("status-rpc")
# --- file system, resource manager, mass storage (device-system) -------
file_system = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 10)).setLabel("file-system")
settings_file_system = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 10, 1)).setLabel("settings-file-system")
file_systems = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 10, 3)).setLabel("file-systems")
# Note: the MIB defines volumes 2-4 only; no file-system1 node exists here.
file_system2 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 10, 3, 2)).setLabel("file-system2")
file_system3 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 10, 3, 3)).setLabel("file-system3")
file_system4 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 10, 3, 4)).setLabel("file-system4")
resource_manager = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 12)).setLabel("resource-manager")
mass_storage_resources = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 12, 3)).setLabel("mass-storage-resources")
mass_storage_block_driver = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 15)).setLabel("mass-storage-block-driver")
settings_mass_storage_bd = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 15, 1)).setLabel("settings-mass-storage-bd")
status_mass_storage_bd = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 15, 2)).setLabel("status-mass-storage-bd")
device_configure = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 1, 32)).setLabel("device-configure")
# --- print engine: settings, input trays, output bins, media -----------
settings_prt_eng = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 1)).setLabel("settings-prt-eng")
marking_agent_density = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 1, 9)).setLabel("marking-agent-density")
status_prt_eng = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 2)).setLabel("status-prt-eng")
# Three input trays under print-engine.3.3.
intray = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 3))
settings_intray = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 3, 1)).setLabel("settings-intray")
intrays = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 3, 3))
intray1 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 3, 3, 1))
intray2 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 3, 3, 2))
intray3 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 3, 3, 3))
# One output bin under print-engine.4.3.
outbin = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 4))
settings_outbin = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 4, 1)).setLabel("settings-outbin")
outbins = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 4, 3))
outbin1 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 4, 3, 1))
# Media branch: 21 fixed media-info slots plus mode/type/count/size nodes.
print_media = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8)).setLabel("print-media")
settings_print_media = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8, 1)).setLabel("settings-print-media")
media_info = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8, 3)).setLabel("media-info")
media1 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8, 3, 1))
media2 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8, 3, 2))
media3 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8, 3, 3))
media4 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8, 3, 4))
media5 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8, 3, 5))
media6 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8, 3, 6))
media7 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8, 3, 7))
media8 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8, 3, 8))
media9 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8, 3, 9))
media10 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8, 3, 10))
media11 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8, 3, 11))
media12 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8, 3, 12))
media13 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8, 3, 13))
media14 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8, 3, 14))
media15 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8, 3, 15))
media16 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8, 3, 16))
media17 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8, 3, 17))
media18 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8, 3, 18))
media19 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8, 3, 19))
media20 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8, 3, 20))
media21 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8, 3, 21))
media_modes = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8, 4)).setLabel("media-modes")
media_types = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8, 8)).setLabel("media-types")
media_counts = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8, 7)).setLabel("media-counts")
media_size = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8, 5)).setLabel("media-size")
# --- accounting subtree (device-system.16) -----------------------------
accounting = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 16))
printer_accounting = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 16, 1)).setLabel("printer-accounting")
printed_media_usage = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 16, 1, 1)).setLabel("printed-media-usage")
printed_modes_accounting = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 16, 4)).setLabel("printed-modes-accounting")
printed_modes_usage = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 16, 4, 1)).setLabel("printed-modes-usage")
source_tray_accounting = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 16, 5)).setLabel("source-tray-accounting")
source_tray_usage = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 16, 5, 1)).setLabel("source-tray-usage")
destination_bin_accounting = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 16, 6)).setLabel("destination-bin-accounting")
destination_bin_usage = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 16, 6, 1)).setLabel("destination-bin-usage")
marking_agent = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 5)).setLabel("marking-agent")
settings_marking_agent = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 5, 1)).setLabel("settings-marking-agent")
consumables = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 10))
consumables_1 = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 10, 1)).setLabel("consumables-1")
consumable_status = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 10, 1, 1)).setLabel("consumable-status")
consumable_string = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 10, 8)).setLabel("consumable-string")
consumables_status = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 10, 5)).setLabel("consumables-status")
consumables_life = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 10, 5, 1)).setLabel("consumables-life")
print_meter = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 11)).setLabel("print-meter")
printer_average = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 11, 1)).setLabel("printer-average")
webserver_proc_sub = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 3, 9)).setLabel("webserver-proc-sub")
settings_webserver = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 3, 9, 1)).setLabel("settings-webserver")
firmware_download = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 18)).setLabel("firmware-download")
upgradable_devices = MibIdentifier((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 20)).setLabel("upgradable-devices")
# PERM-STORE-INIT-OCCURRED: read-only OctetString scalar reporting which
# permanent-storage devices were (re)initialized, as a collection bitmask.
perm_store_init_occurred = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 2, 10), OctetString()).setLabel("perm-store-init-occurred").setMaxAccess("readonly")
# Texts are only attached when the builder was asked to load them (saves memory otherwise).
if mibBuilder.loadTexts: perm_store_init_occurred.setStatus('optional')
if mibBuilder.loadTexts: perm_store_init_occurred.setDescription('This object will set the cAllPermDevices bit when a full perm storage initialization occurs (as would be the case for a brand new system or as a result of a powerup key sequence or <<hidden>> object request). If only one device was initialized (as would be the case if a disk were added to an existing system or a formatter were swapped out), then only the appropriate collection bits will be returned. If there are no collection bits set then this indicates that no initialization took place.')
# SELF-TEST: read-write enumerated scalar; writing eNonDestructiveSelfTest(4)
# starts a device self test, reading reports the test currently executing.
self_test = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 5, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 4))).clone(namedValues=NamedValues(("eNotInASelfTest", 1), ("eNonDestructiveSelfTest", 4)))).setLabel("self-test").setMaxAccess("readwrite")
if mibBuilder.loadTexts: self_test.setStatus('optional')
if mibBuilder.loadTexts: self_test.setDescription('Writing this object allows a device self test to be started. Reading this object provides an indication what self-test is currently executing, if any. Actual self-test operation is device specific. A self test may not be allowed at any arbitrary time. If the device supports the requested self test, but can not start the execution of the self test when requested, the device will respond with <genErr>. If a non-destructive self test is being started, the device will generate a response before the self test is completed. RECOMMENDATION: If the device is ready (i.e. the NOT-READY-PRINTER object does not contain any items, except maybe being off-line) and is idle (i.e. the NOT-IDLE object does not contain any items), this request should always succeed. This provides a mechanism for driver writers to always determine if the action will succeed or not. Additional information: The eNonDestructiveSelfTest performs limited testing on the printer and its attached paper handling devices. After the self-test is complete a configuration page is printed. The recommended way to cause a configuration page to be printed is to use the PRINT-INTERNAL-PAGE object. Setting this object to eNonDestructiveSelfTest results in a status of <noError> and a value of eNonDestructiveSelfTest being returned. If the printer is not idle (whether printing an external job or printing an internal page), this action will be delayed until the next job boundary. See the PRINT-INTERNAL-PAGE object for more details.')
# PRINT-INTERNAL-PAGE: read-write enumerated scalar; writing a page enum queues
# an internally generated page (configuration page, demo pages, usage page,
# font lists, "show me how" pages), reading reports the page being printed.
print_internal_page = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 5, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 7, 8, 9, 100, 101, 253, 254, 255, 350, 450, 1400, 1401, 1402, 1403, 1404, 1405))).clone(namedValues=NamedValues(("eNotPrintingAnInternalPage", 1), ("ePrintingAnUnknownInternalPage", 2), ("eDeviceDemoPage1ConfigurationPage", 3), ("eDeviceDemoPage5ErrorLog", 7), ("eDeviceDemoPage6FileSystemDirectoryListing", 8), ("eDeviceDemoPage7MenuMap", 9), ("ePrintUsagePage", 100), ("eSuppliesPage", 101), ("eDeviceCleaningPage", 253), ("eDevicePaperPathTest", 254), ("eDevicePageRegistrationPage", 255), ("ePCLFontList1", 350), ("ePSFontList", 450), ("eDeviceShowMeHowPageClearJams", 1400), ("eDeviceShowMeHowPageLoadTrays", 1401), ("eDeviceShowMeHowPageLoadSpecialMedia", 1402), ("eDeviceShowMeHowPagePrintBothSides", 1403), ("eDeviceShowMeHowPageSupportedPaper", 1404), ("eDeviceShowMeHowPageMoreHelp", 1405)))).setLabel("print-internal-page").setMaxAccess("readwrite")
if mibBuilder.loadTexts: print_internal_page.setStatus('optional')
# FIX: the description string originally contained a raw line break inside a
# double-quoted literal (a SyntaxError in Python); repaired with implicit
# adjacent-string concatenation so the description text is unchanged.
if mibBuilder.loadTexts: print_internal_page.setDescription("Writing this object to a value other than eNotPrintingAnInternalPage causes the device to attempt to print an internally generated page. Reading this object provides an indication what internally generated page is currently being printed, if any. The actual page is device specific. Some devices may support continuously printing an internally generated page. Setting this object to eNotPrintingAnInternalPage terminates continuously printing internally generated pages. An internally generated page may not be allowed to be printed at any arbitrary time; under what conditions it will succeed or fail is device specific. If the device supports the requested page, but can not start printing the page when requested, the device will respond with <genErr>. RECOMMENDATION: If the device is ready (i.e. the NOT-READY-PRINTER object does not contain any items, except maybe being off-line) and is idle (i.e. the NOT-IDLE object does not contain any items), this request should always succeed. This provides a mechanism for driver writers to always determine if the action will succeed or not. Additional information: Previous products used ePCLDemoPage1ConfigurationPage(300) for the Configuration or Self Test page. This product uses eDeviceDemoPage1ConfigurationPage(3). $product_str does not support continuously printing a demo page. When this object is set to a valid value, the status returned is <noError> and the value returned is ePrintingAnUnknownInternalPage. If the printer is idle, the page will be printed immediately. If the printer is currently printing another job, the internal page requested will not print until that job is finished. Setting this object to a valid value causes the desired page to be formatted and put in the printer's print queue. While the page is being formatted a get on this object will return the value ePrintingAnUnknownInternalPage. "
    "Once the page (or pages) is finished being formatted, this object returns a value of eNotPrintingAnInternalPage, even though the page may not have finished being printed. Setting this object multiple times will queue up the requests, but only a limited number will be queued. Once the queue is full, additional requests will be ignored.")
# Localization capability scalars: comma-separated language (ISO 639) and
# country (ISO 3166) code lists, both read-only OctetStrings.
localization_languages_supported = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 2, 52), OctetString()).setLabel("localization-languages-supported").setMaxAccess("readonly")
if mibBuilder.loadTexts: localization_languages_supported.setStatus('optional')
if mibBuilder.loadTexts: localization_languages_supported.setDescription('The list of languages supported by the device. The languages are primarily, but not limited to, two character codes from ISO 639, each separated by a comma character. Additional information: This string will always be in the Roman-8 character set. See prtLocalizationLanguage for details about each language value.')
localization_countries_supported = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 2, 53), OctetString()).setLabel("localization-countries-supported").setMaxAccess("readonly")
if mibBuilder.loadTexts: localization_countries_supported.setStatus('optional')
if mibBuilder.loadTexts: localization_countries_supported.setDescription('The list of countries supported by the device. The countries are primarily, but not limited to, two character codes from ISO 3166, each separated by a comma character. Additional information: This string will always be in the Roman-8 character set. See prtLocalizationCountry for details about each country value.')
# Control-panel scalars: simulate/read button presses, track display changes
# via a change counter and CRC, and fetch a graphical snapshot of the display.
control_panel_button_press = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 2, 60), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29))).clone(namedValues=NamedValues(("eMenuPlusButton", 2), ("eMenuMinusButton", 3), ("eItemPlusButton", 4), ("eItemMinusButton", 5), ("eValuePlusButton", 6), ("eValueMinusButton", 7), ("eSelectButton", 8), ("eCancelJobButton", 9), ("ePauseResumeButton", 10), ("eUpArrowButton", 11), ("eDownArrowButton", 12), ("eBackButton", 13), ("eQuestionMarkButton", 14), ("eClearButton", 15), ("eNumericButton0", 16), ("eNumericButton1", 17), ("eNumericButton2", 18), ("eNumericButton3", 19), ("eNumericButton4", 20), ("eNumericButton5", 21), ("eNumericButton6", 22), ("eNumericButton7", 23), ("eNumericButton8", 24), ("eNumericButton9", 25), ("eRotateButton", 26), ("eInfoButton", 27), ("eMenuButton", 28), ("eStopButton", 29)))).setLabel("control-panel-button-press").setMaxAccess("readwrite")
if mibBuilder.loadTexts: control_panel_button_press.setStatus('optional')
if mibBuilder.loadTexts: control_panel_button_press.setDescription('Writing this object simulates pressing a button on the control panel. Reading it will return the last key pressed either on the control panel or via PML. The device POS will specify which keys are supported. Additional information: Writing this object simulates pressing a button on the control panel. Reading it will return the last key pressed either on the control panel or via PML. The device POS will specify which keys are supported. NOTE:eGoButton(1) has been removed from the enum list. Go button is not supported as this is changed to Menu.')
# Increments whenever the front-panel display contents change (rolls over).
control_panel_display_contents_change_counter = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 2, 63), Integer32()).setLabel("control-panel-display-contents-change-counter").setMaxAccess("readonly")
if mibBuilder.loadTexts: control_panel_display_contents_change_counter.setStatus('optional')
if mibBuilder.loadTexts: control_panel_display_contents_change_counter.setDescription('A counter which increments whenever the contents of the front panel display changes. This object is implemented as a 32-bit signed integer which rolls over to zero when it reaches a maximum value.')
# CRC of the current display contents (per the description, 8-bit on this product).
control_panel_display_contents_crc = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 2, 64), Integer32()).setLabel("control-panel-display-contents-crc").setMaxAccess("readonly")
if mibBuilder.loadTexts: control_panel_display_contents_crc.setStatus('optional')
if mibBuilder.loadTexts: control_panel_display_contents_crc.setDescription('Reading this object returns a 32-bit Cyclical Redundancy Check (CRC) which represents the current contents of the display. Additional information: This object has been implimented as an 8-bit CRC for this product.')
# GIF image of the display; large images span multiple instances (note the
# extra OID arc 65.1 — this scalar lives one level below its siblings).
control_panel_display_graphical_contents = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 2, 65, 1), OctetString()).setLabel("control-panel-display-graphical-contents").setMaxAccess("readonly")
if mibBuilder.loadTexts: control_panel_display_graphical_contents.setStatus('optional')
if mibBuilder.loadTexts: control_panel_display_graphical_contents.setDescription('Reading this object returns a graphical file format image representing the current pixel content of the display. The device POS will specify the expected screen resolution, color depth of the display and graphics file format for a given product (eg. 160x64x1 GIF format OR 640x240x4 JPEG format). If the image is large enough that it needs to be returned in multiple objects then each array object will contain a portion of the image. The image will then need to be reconstructed by a host application. An application that needs to determine if their is an additional object to be retreived will need to perform a GETNEXT operation until there are no more objects in the sub-tree. Additional information: This object returns a GIF image that represents the current contents of the 160x64x1 control panel display. Most display images require more than one instance of this object in order to retreive the complete GIF image.')
# Power-management scalars: Energy Star sleep timeout (seconds; device snaps
# unsupported values to the nearest supported one) and current sleep state.
energy_star = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 1, 1), Integer32()).setLabel("energy-star").setMaxAccess("readwrite")
if mibBuilder.loadTexts: energy_star.setStatus('optional')
if mibBuilder.loadTexts: energy_star.setDescription('Returns or changes the Energy Star sleep value. If the value is greater than zero, then the device will go into energy saving sleep mode after the print engine has been idle for the number of seconds specified by this object. A value of zero means Energy Star is disabled and the device will not go to sleep based on print engine idle time. The value must be non-negative. Additional information: Returns or changes the Energy Star sleep value. The device will go into energy saving sleep mode after the print engine has been idle for the number of seconds specified by this object. LaserJet 5500 supports values of 0, 60, 900, 1800, 3600, 5400, 7200, 14400 seconds. A value of 0 means never enter sleep mode based on the print engine idle time. Setting to an unsupported value causes the printer to substitute a value (listed below) and to return <noError> status. Setting this value when the printer is in sleep mode will not cause it to wakeup unless it is set to 0. The values are as follow: <=0 snap to 0 >=1 and <= 479 snap to 60 (1 minute) >=480 and <= 1349 snap to 900 (15 minutes) >=1350 and <= 2249 snap to 1800 (30 minutes) >=2250 and <= 3149 snap to 2700 (45 minutes) >=3150 and <= 4499 snap to 3600 (1 hour) >=4500 and <= 6299 snap to 5400 (90 minutes) >=6300 and <= 10799 snap to 7200 (2 hours) >=10800 snap to 14400 (4 hours).')
# SLEEP-MODE: eTrue while asleep; writing eFalse wakes a sleeping device.
sleep_mode = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("eFalse", 1), ("eTrue", 2)))).setLabel("sleep-mode").setMaxAccess("readwrite")
if mibBuilder.loadTexts: sleep_mode.setStatus('optional')
if mibBuilder.loadTexts: sleep_mode.setDescription("Returns eTrue if the device is in energy saving sleep mode, otherwise returns eFalse. Setting SLEEP-MODE to eFalse causes the device to wake up, if it is in sleep mode. Setting SLEEP-MODE to eTrue causes the device to go into sleep mode. Additional information: This object returns eTrue if the device is in energy saving sleep mode, otherwise it returns eFalse. Setting this object to eTrue while the printer is awake will not change the printer's current state and will return <genErr> status. NOTE: This object should behave this way when the printer does not have an instant-on fuser. Setting this object to eTrue while printer is already in Sleep Mode will not change the printer's current state and will return <noError> status. Setting this object to eFalse while printer is already awake will not change the printer's current state and will return <noError> status. Setting this object to eFalse while the printer is asleep causes the device to wake up. ")
# Job-control scalars: online/offline state, continue-after-error trigger,
# and auto-continue policy.
on_off_line = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 2, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("eOnline", 1), ("eOffline", 2), ("eOfflineAtEndOfJob", 3)))).setLabel("on-off-line").setMaxAccess("readwrite")
if mibBuilder.loadTexts: on_off_line.setStatus('optional')
if mibBuilder.loadTexts: on_off_line.setDescription("To bring the PDL processing sub-system on or off line. If the device is a printer, then the printer does not process print job data when the printer is off line. PML communication persists. Additional information: When the printer is in the Offline state, printing will stop as soon as possible (no more sheets of media are pulled from input trays). The I/O is taken offline also. Setting to eOnline has the following affect: Printer will immediately go to or remain in the Online state; <noError> status is returned. If the printer's current state is Offline with a pending error condition that prevents the printer from going to the Online state, the printer will remain in the Offline state; <genErr> status is returned (see the Control Panel ERS for a list of error conditions). Setting to eOffline has the following affect: Printer will immediately go to or remain in the Offline state; <noError> status is returned. If pages are being printed, those pages will complete with the printer in the Offline state. Setting to eOfflineAtEndOfJob has the following affect: If not in a job or already in the Offline state, the printer will immediately go to or remain in the Offline state; <noError> status is returned. If in a job and the current state is Online, the printer will remain in the Online state, with the value of this object as eOfflineAtEndOfJob, until the end of the job; <noError> status is returned. At the end of the job, the printer goes to the Offline state and the value of this object becomes eOffline. Setting this object to eOffline or eOnline before the end of the job causes the action for that value to be taken immediately.")
# 'continue' is a Python reserved word, so pysmi prefixes the variable name;
# the exported MIB label is still "continue". Write-only action object.
pysmi_continue = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 2, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1))).clone(namedValues=NamedValues(("eInitiateAction", 1)))).setLabel("continue").setMaxAccess("writeonly")
if mibBuilder.loadTexts: pysmi_continue.setStatus('optional')
if mibBuilder.loadTexts: pysmi_continue.setDescription("A device can support a class of errors called continuable errors. When a continuable error is encountered, the device requires a continue event to occur before the device will continue operation. One continue event is setting the CONTINUE object to eInitiateAction. Devices can support other continue events, like auto-continue. A continue event causes the continuable error to be acknowledged, thus allowing the device to continue. Each device needs to list the continuable errors. If the device doesn't currently have an unacknowledged continuable error, the response will contain <genErr>. Additional information: See the CLEARABLE-WARNING and AUTO-CONTINUE objects for the errors that this object will clear.")
auto_continue = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 2, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("eOff", 1), ("eOn", 2)))).setLabel("auto-continue").setMaxAccess("readwrite")
if mibBuilder.loadTexts: auto_continue.setStatus('optional')
if mibBuilder.loadTexts: auto_continue.setDescription('Indicates if the device will automatically continue after encountering a continuable error. If AUTO-CONTINUE is set to eOn, the device will automatically generate continue event to acknowledge continuable errors. If AUTO-CONTINUE is set to eOff, then some other continue event will have to acknowledge the continuable error. Additional information: If this is set to eOn the device displays an error message and goes offline for ten seconds. After ten seconds the printer automatically returns to the online state. If this is set to eOff then the device displays an error message and goes offline. It remains offline until the operator presses the GO key or until the CONTINUE object is set. If the printer is not idle, the new value may not take effect until a job boundary is reached. If a get is done on this object before the job boundary is reached, the value last set will be returned.')
simm1_type = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 7, 9, 10))).clone(namedValues=NamedValues(("eEmpty", 1), ("eUnknown", 2), ("eUnSupported", 3), ("eReadOnlyMemory", 4), ("eVolatileRandomAccessMemory", 5), ("eFlashMemory", 7), ("eRamRom", 9), ("eVolatileRAMOnBoardMemory", 10)))).setLabel("simm1-type").setMaxAccess("readonly")
if mibBuilder.loadTexts: simm1_type.setStatus('optional')
if mibBuilder.loadTexts: simm1_type.setDescription("Returns an indication of the type of option installed in SIMM slot 1. eEmpty means the device did not detect any option installed in the interface slot. eUnknown means the device doesn't recognize the installed option. eUnSupported means the device recognizes the installed option, but does not support the option. eReadOnlyMemory means the installed option contains ROM Ics. eVolatileRandomAccessMemory means the installed option contains RAM ICs that loose data when the power is turned off. eNonVolatileRandomAccessMemory means that the installed option contains RAM ICs that do not loose data when the power is turned off. eFlashMemory means that the installed option contains a type of non-volatile RAM that needs to be erased before it can be written. eDiskDrive means the installed option contains a disk drive. eRamRom means the installed option contains both volatile random access memory and read only memory. eInputPHD means the installed option is an input paper handling device. eOutputPHD means the installed option is an output paper handling device. eIOCard means the installed option is an I/O card. Additional information: This object is used for describing DIMMs instead of SIMMs on $product_str. eRamRom is used to denote $product_str's combo simm.")
simm1_capacity = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 1, 1, 5), Integer32()).setLabel("simm1-capacity").setMaxAccess("readonly")
if mibBuilder.loadTexts: simm1_capacity.setStatus('optional')
if mibBuilder.loadTexts: simm1_capacity.setDescription('Returns an indication of the capacity of the SIMM installed in SIMM slot 1. The capacity is dependent on the type of option, as indicated by the SIMM1-TYPE object. This object has no meaning if the installed option type is eUnknown or eReadOnlyMemory. This object contains the size, in bytes, if the installed option type is eVolatileRandomAccessMemory, eNonVolatileRandomAccessMemory, eFlashMemory, or eDiskDrive. If the type of the installed option is eRamRom, this object contains the size, in bytes, of the random access memory. If the type of the installed option is eInputPHD, the capacity indicates the number of input trays supported by the installed option. If the type of the installed option is eOutputPHD, the capacity indicates the number of output bins supported by the installed option. If the type of the installed option is eIOCard, the capacity indicates the number of logical I/O ports supported by the I/O card. Additional information: Returns an indication of the capacity of the installed option in bytes. This object is not supported unless the SIMM1-TYPE type is eVolatileRandomAccessMemory, eRamRom, or eFlashMemory. For eRamRom only the size of the Ram portion of the SIMM is returned.')
simm1_bank1_type = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 1, 1, 6, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 7, 9, 10))).clone(namedValues=NamedValues(("eEmpty", 1), ("eUnknown", 2), ("eUnSupported", 3), ("eReadOnlyMemory", 4), ("eVolatileRandomAccessMemory", 5), ("eFlashMemory", 7), ("eRamRom", 9), ("eVolatileRAMOnBoardMemory", 10)))).setLabel("simm1-bank1-type").setMaxAccess("readonly")
if mibBuilder.loadTexts: simm1_bank1_type.setStatus('optional')
if mibBuilder.loadTexts: simm1_bank1_type.setDescription('Returns an indication of the type of option installed in Bank 1 of SIMM slot 1. See SIMM1-TYPE for a description. Additional information: This object is used for describing the type of DIMM banks. Each physical DIMM slot has up to 2 banks.')
simm1_bank1_capacity = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 1, 1, 6, 1, 2), Integer32()).setLabel("simm1-bank1-capacity").setMaxAccess("readonly")
if mibBuilder.loadTexts: simm1_bank1_capacity.setStatus('optional')
if mibBuilder.loadTexts: simm1_bank1_capacity.setDescription('Returns an indication of the capacity of Bank 1 of the SIMM installed in SIMM slot 1. See SIMM1-CAPACITY for a description. Additional information: Returns an indication of the capacity of the installed bank option in bytes. This object is not supported unless the SIMM1-BANK1-TYPE type is eReadOnlyMemory, eFlashMemory, eEDORandomAccessMemory, eSDRandomAccessMemory, eSRandomAccessMemory, or eFPMRandomAccessMemory.')
simm1_bank2_type = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 1, 1, 6, 2, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 7, 9, 10))).clone(namedValues=NamedValues(("eEmpty", 1), ("eUnknown", 2), ("eUnSupported", 3), ("eReadOnlyMemory", 4), ("eVolatileRandomAccessMemory", 5), ("eFlashMemory", 7), ("eRamRom", 9), ("eVolatileRAMOnBoardMemory", 10)))).setLabel("simm1-bank2-type").setMaxAccess("readonly")
if mibBuilder.loadTexts: simm1_bank2_type.setStatus('optional')
if mibBuilder.loadTexts: simm1_bank2_type.setDescription('Returns an indication of the type of option installed in Bank 2 of SIMM slot 1. See SIMM1-TYPE for a description. Additional information: This object is used for describing the type of DIMM banks. Each physical DIMM slot has up to 2 banks.')
simm1_bank2_capacity = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 1, 1, 6, 2, 2), Integer32()).setLabel("simm1-bank2-capacity").setMaxAccess("readonly")
if mibBuilder.loadTexts: simm1_bank2_capacity.setStatus('optional')
if mibBuilder.loadTexts: simm1_bank2_capacity.setDescription('Returns an indication of the capacity of Bank 2 of the SIMM installed in SIMM slot 1. See SIMM1-CAPACITY for a description. Additional information: Returns an indication of the capacity of the installed bank option in bytes. This object is not supported unless the SIMM1-BANK2-TYPE type is eReadOnlyMemory, eFlashMemory, eEDORandomAccessMemory, eSDRandomAccessMemory, eSRandomAccessMemory, or eFPMRandomAccessMemory.')
simm2_type = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 1, 2, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 7, 9, 10))).clone(namedValues=NamedValues(("eEmpty", 1), ("eUnknown", 2), ("eUnSupported", 3), ("eReadOnlyMemory", 4), ("eVolatileRandomAccessMemory", 5), ("eFlashMemory", 7), ("eRamRom", 9), ("eVolatileRAMOnBoardMemory", 10)))).setLabel("simm2-type").setMaxAccess("readonly")
if mibBuilder.loadTexts: simm2_type.setStatus('optional')
if mibBuilder.loadTexts: simm2_type.setDescription("Returns an indication of the type of option installed in SIMM slot 2. See SIMM1-TYPE for a description. Additional information: This object is used for describing DIMMs instead of SIMMs on $product_str. eRamRom is used to denote $product_str's combo simm.")
simm2_capacity = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 1, 2, 5), Integer32()).setLabel("simm2-capacity").setMaxAccess("readonly")
if mibBuilder.loadTexts: simm2_capacity.setStatus('optional')
if mibBuilder.loadTexts: simm2_capacity.setDescription('Returns an indication of the capacity of the SIMM installed in SIMM slot 2. See SIMM1-CAPACITY for a description. Additional information: This object is used for describing DIMMs instead of SIMMs on $product_str. Returns an indication of the capacity of the installed option in bytes. This object is not supported unless the SIMM2-TYPE type is eVolatileRandomAccessMemory, eRamRom, or eFlashMemory. For eRamRom only the size of the Ram portion of the SIMM is returned.')
simm2_bank1_type = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 1, 2, 6, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 7, 9, 10))).clone(namedValues=NamedValues(("eEmpty", 1), ("eUnknown", 2), ("eUnSupported", 3), ("eReadOnlyMemory", 4), ("eVolatileRandomAccessMemory", 5), ("eFlashMemory", 7), ("eRamRom", 9), ("eVolatileRAMOnBoardMemory", 10)))).setLabel("simm2-bank1-type").setMaxAccess("readonly")
if mibBuilder.loadTexts: simm2_bank1_type.setStatus('optional')
if mibBuilder.loadTexts: simm2_bank1_type.setDescription('Returns an indication of the type of option installed in Bank 1 of SIMM slot 2. See SIMM1-TYPE for a description. Additional information: This object is used for describing the type of DIMM banks. Each physical DIMM slot has up to 2 banks.')
simm2_bank1_capacity = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 1, 2, 6, 1, 2), Integer32()).setLabel("simm2-bank1-capacity").setMaxAccess("readonly")
if mibBuilder.loadTexts: simm2_bank1_capacity.setStatus('optional')
if mibBuilder.loadTexts: simm2_bank1_capacity.setDescription('Returns an indication of the capacity of Bank 1 of the SIMM installed in SIMM slot 2. See SIMM1-CAPACITY for a description. Additional information: Returns an indication of the capacity of the installed bank option in bytes. This object is not supported unless the SIMM2-BANK1-TYPE type is eReadOnlyMemory, eFlashMemory, eEDORandomAccessMemory, eSDRandomAccessMemory, eSRandomAccessMemory, or eFPMRandomAccessMemory.')
simm2_bank2_type = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 1, 2, 6, 2, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 7, 9, 10))).clone(namedValues=NamedValues(("eEmpty", 1), ("eUnknown", 2), ("eUnSupported", 3), ("eReadOnlyMemory", 4), ("eVolatileRandomAccessMemory", 5), ("eFlashMemory", 7), ("eRamRom", 9), ("eVolatileRAMOnBoardMemory", 10)))).setLabel("simm2-bank2-type").setMaxAccess("readonly")
if mibBuilder.loadTexts: simm2_bank2_type.setStatus('optional')
if mibBuilder.loadTexts: simm2_bank2_type.setDescription('Returns an indication of the type of option installed in Bank 2 of SIMM slot 2. See SIMM1-TYPE for a description. Additional information: This object is used for describing the type of DIMM banks. Each physical DIMM slot has up to 2 banks.')
simm2_bank2_capacity = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 1, 2, 6, 2, 2), Integer32()).setLabel("simm2-bank2-capacity").setMaxAccess("readonly")
if mibBuilder.loadTexts: simm2_bank2_capacity.setStatus('optional')
if mibBuilder.loadTexts: simm2_bank2_capacity.setDescription('Returns an indication of the capacity of Bank 2 of the SIMM installed in SIMM slot 2. See SIMM1-CAPACITY for a description. Additional information: Returns an indication of the capacity of the installed bank option in bytes. This object is not supported unless the SIMM2-BANK2-TYPE type is eReadOnlyMemory, eFlashMemory, eEDORandomAccessMemory, eSDRandomAccessMemory, eSRandomAccessMemory, or eFPMRandomAccessMemory.')
# --- SIMM slot 3: slot-level type/capacity plus per-bank (1 and 2) type/capacity ---
# Same generated pattern as slot 2: enumerated "type" scalars and plain Integer32
# "capacity" scalars, all optional and read-only, distinguished only by OID suffix.
simm3_type = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 1, 3, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 7, 9, 10))).clone(namedValues=NamedValues(("eEmpty", 1), ("eUnknown", 2), ("eUnSupported", 3), ("eReadOnlyMemory", 4), ("eVolatileRandomAccessMemory", 5), ("eFlashMemory", 7), ("eRamRom", 9), ("eVolatileRAMOnBoardMemory", 10)))).setLabel("simm3-type").setMaxAccess("readonly")
if mibBuilder.loadTexts: simm3_type.setStatus('optional')
if mibBuilder.loadTexts: simm3_type.setDescription('Returns an indication of the type of option installed in SIMM slot 3. See SIMM1-TYPE for a description.')
simm3_capacity = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 1, 3, 5), Integer32()).setLabel("simm3-capacity").setMaxAccess("readonly")
if mibBuilder.loadTexts: simm3_capacity.setStatus('optional')
if mibBuilder.loadTexts: simm3_capacity.setDescription('Returns an indication of the capacity of the SIMM installed in SIMM slot 3. See SIMM1-CAPACITY for a description. Additional information: This object is used for describing DIMMs instead of SIMMs on $product_str. Returns an indication of the capacity of the installed option in bytes. This object is not supported unless the SIMM3-TYPE type is eVolatileRandomAccessMemory, eRamRom, or eFlashMemory. For eRamRom only the size of the Ram portion of the simm is returned.')
# Bank 1 of slot 3.
simm3_bank1_type = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 1, 3, 6, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 7, 9, 10))).clone(namedValues=NamedValues(("eEmpty", 1), ("eUnknown", 2), ("eUnSupported", 3), ("eReadOnlyMemory", 4), ("eVolatileRandomAccessMemory", 5), ("eFlashMemory", 7), ("eRamRom", 9), ("eVolatileRAMOnBoardMemory", 10)))).setLabel("simm3-bank1-type").setMaxAccess("readonly")
if mibBuilder.loadTexts: simm3_bank1_type.setStatus('optional')
if mibBuilder.loadTexts: simm3_bank1_type.setDescription('Returns an indication of the type of option installed in Bank 1 of SIMM slot 3. See SIMM1-TYPE for a description. Additional information: This object is used for describing the type of DIMM banks. Each physical DIMM slot has up to 2 banks.')
simm3_bank1_capacity = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 1, 3, 6, 1, 2), Integer32()).setLabel("simm3-bank1-capacity").setMaxAccess("readonly")
if mibBuilder.loadTexts: simm3_bank1_capacity.setStatus('optional')
if mibBuilder.loadTexts: simm3_bank1_capacity.setDescription('Returns an indication of the capacity of Bank 1 of the SIMM installed in SIMM slot 3. See SIMM1-CAPACITY for a description. Additional information: Returns an indication of the capacity of the installed bank option in bytes. This object is not supported unless the SIMM3-BANK1-TYPE type is eReadOnlyMemory, eFlashMemory, eEDORandomAccessMemory, eSDRandomAccessMemory, eSRandomAccessMemory, or eFPMRandomAccessMemory.')
# Bank 2 of slot 3.
simm3_bank2_type = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 1, 3, 6, 2, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 7, 9, 10))).clone(namedValues=NamedValues(("eEmpty", 1), ("eUnknown", 2), ("eUnSupported", 3), ("eReadOnlyMemory", 4), ("eVolatileRandomAccessMemory", 5), ("eFlashMemory", 7), ("eRamRom", 9), ("eVolatileRAMOnBoardMemory", 10)))).setLabel("simm3-bank2-type").setMaxAccess("readonly")
if mibBuilder.loadTexts: simm3_bank2_type.setStatus('optional')
if mibBuilder.loadTexts: simm3_bank2_type.setDescription('Returns an indication of the type of option installed in Bank 2 of SIMM slot 3. See SIMM1-TYPE for a description. Additional information: This object is used for describing the type of DIMM banks. Each physical DIMM slot has up to 2 banks.')
simm3_bank2_capacity = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 1, 3, 6, 2, 2), Integer32()).setLabel("simm3-bank2-capacity").setMaxAccess("readonly")
if mibBuilder.loadTexts: simm3_bank2_capacity.setStatus('optional')
if mibBuilder.loadTexts: simm3_bank2_capacity.setDescription('Returns an indication of the capacity of Bank 2 of the SIMM installed in SIMM slot 3. See SIMM1-CAPACITY for a description. Additional information: Returns an indication of the capacity of the installed bank option in bytes. This object is not supported unless the SIMM3-BANK2-TYPE type is eReadOnlyMemory, eFlashMemory, eEDORandomAccessMemory, eSDRandomAccessMemory, eSRandomAccessMemory, or eFPMRandomAccessMemory.')
# --- SIMM slot 4: slot-level type/capacity plus per-bank (1 and 2) type/capacity ---
# Identical structure to slot 3; only the slot index in the OID (…, 4, 1, 4, …) differs.
simm4_type = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 1, 4, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 7, 9, 10))).clone(namedValues=NamedValues(("eEmpty", 1), ("eUnknown", 2), ("eUnSupported", 3), ("eReadOnlyMemory", 4), ("eVolatileRandomAccessMemory", 5), ("eFlashMemory", 7), ("eRamRom", 9), ("eVolatileRAMOnBoardMemory", 10)))).setLabel("simm4-type").setMaxAccess("readonly")
if mibBuilder.loadTexts: simm4_type.setStatus('optional')
if mibBuilder.loadTexts: simm4_type.setDescription('Returns an indication of the type of option installed in SIMM slot 4. See SIMM1-TYPE for a description.')
simm4_capacity = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 1, 4, 5), Integer32()).setLabel("simm4-capacity").setMaxAccess("readonly")
if mibBuilder.loadTexts: simm4_capacity.setStatus('optional')
if mibBuilder.loadTexts: simm4_capacity.setDescription('Returns an indication of the capacity of the SIMM installed in SIMM slot 4. See SIMM1-CAPACITY for a description. Additional information: This object is used for describing DIMMs instead of SIMMs on $product_str. Returns an indication of the capacity of the installed option in bytes. This object is not supported unless the SIMM4-TYPE type is eVolatileRandomAccessMemory, eRamRom, or eFlashMemory. For eRamRom only the size of the Ram portion of the simm is returned.')
# Bank 1 of slot 4.
simm4_bank1_type = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 1, 4, 6, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 7, 9, 10))).clone(namedValues=NamedValues(("eEmpty", 1), ("eUnknown", 2), ("eUnSupported", 3), ("eReadOnlyMemory", 4), ("eVolatileRandomAccessMemory", 5), ("eFlashMemory", 7), ("eRamRom", 9), ("eVolatileRAMOnBoardMemory", 10)))).setLabel("simm4-bank1-type").setMaxAccess("readonly")
if mibBuilder.loadTexts: simm4_bank1_type.setStatus('optional')
if mibBuilder.loadTexts: simm4_bank1_type.setDescription('Returns an indication of the type of option installed in Bank 1 of SIMM slot 4. See SIMM1-TYPE for a description. Additional information: This object is used for describing the type of DIMM banks. Each physical DIMM slot has up to 2 banks.')
simm4_bank1_capacity = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 1, 4, 6, 1, 2), Integer32()).setLabel("simm4-bank1-capacity").setMaxAccess("readonly")
if mibBuilder.loadTexts: simm4_bank1_capacity.setStatus('optional')
if mibBuilder.loadTexts: simm4_bank1_capacity.setDescription('Returns an indication of the capacity of Bank 1 of the SIMM installed in SIMM slot 4. See SIMM1-CAPACITY for a description. Additional information: Returns an indication of the capacity of the installed bank option in bytes. This object is not supported unless the SIMM4-BANK1-TYPE type is eReadOnlyMemory, eFlashMemory, eEDORandomAccessMemory, eSDRandomAccessMemory, eSRandomAccessMemory, or eFPMRandomAccessMemory.')
# Bank 2 of slot 4.
simm4_bank2_type = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 1, 4, 6, 2, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 7, 9, 10))).clone(namedValues=NamedValues(("eEmpty", 1), ("eUnknown", 2), ("eUnSupported", 3), ("eReadOnlyMemory", 4), ("eVolatileRandomAccessMemory", 5), ("eFlashMemory", 7), ("eRamRom", 9), ("eVolatileRAMOnBoardMemory", 10)))).setLabel("simm4-bank2-type").setMaxAccess("readonly")
if mibBuilder.loadTexts: simm4_bank2_type.setStatus('optional')
if mibBuilder.loadTexts: simm4_bank2_type.setDescription('Returns an indication of the type of option installed in Bank 2 of SIMM slot 4. See SIMM1-TYPE for a description. Additional information: This object is used for describing the type of DIMM banks. Each physical DIMM slot has up to 2 banks.')
simm4_bank2_capacity = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 1, 4, 6, 2, 2), Integer32()).setLabel("simm4-bank2-capacity").setMaxAccess("readonly")
if mibBuilder.loadTexts: simm4_bank2_capacity.setStatus('optional')
if mibBuilder.loadTexts: simm4_bank2_capacity.setDescription('Returns an indication of the capacity of Bank 2 of the SIMM installed in SIMM slot 4. See SIMM1-CAPACITY for a description. Additional information: Returns an indication of the capacity of the installed bank option in bytes. This object is not supported unless the SIMM4-BANK2-TYPE type is eReadOnlyMemory, eFlashMemory, eEDORandomAccessMemory, eSDRandomAccessMemory, eSRandomAccessMemory, or eFPMRandomAccessMemory.')
# --- SIMM slot 5: slot-level type/capacity plus per-bank (1 and 2) type/capacity ---
# Identical structure to slots 3 and 4; only the slot index in the OID (…, 4, 1, 5, …) differs.
simm5_type = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 1, 5, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 7, 9, 10))).clone(namedValues=NamedValues(("eEmpty", 1), ("eUnknown", 2), ("eUnSupported", 3), ("eReadOnlyMemory", 4), ("eVolatileRandomAccessMemory", 5), ("eFlashMemory", 7), ("eRamRom", 9), ("eVolatileRAMOnBoardMemory", 10)))).setLabel("simm5-type").setMaxAccess("readonly")
if mibBuilder.loadTexts: simm5_type.setStatus('optional')
if mibBuilder.loadTexts: simm5_type.setDescription('Returns an indication of the type of option installed in SIMM slot 5. See SIMM1-TYPE for a description.')
simm5_capacity = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 1, 5, 5), Integer32()).setLabel("simm5-capacity").setMaxAccess("readonly")
if mibBuilder.loadTexts: simm5_capacity.setStatus('optional')
if mibBuilder.loadTexts: simm5_capacity.setDescription('Returns an indication of the capacity of the SIMM installed in SIMM slot 5. See SIMM1-CAPACITY for a description. Additional information: This object is used for describing DIMMs instead of SIMMs on $product_str. Returns an indication of the capacity of the installed option in bytes. This object is not supported unless the SIMM5-TYPE type is eVolatileRandomAccessMemory, eRamRom, or eFlashMemory. For eRamRom only the size of the Ram portion of the simm is returned.')
# Bank 1 of slot 5.
simm5_bank1_type = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 1, 5, 6, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 7, 9, 10))).clone(namedValues=NamedValues(("eEmpty", 1), ("eUnknown", 2), ("eUnSupported", 3), ("eReadOnlyMemory", 4), ("eVolatileRandomAccessMemory", 5), ("eFlashMemory", 7), ("eRamRom", 9), ("eVolatileRAMOnBoardMemory", 10)))).setLabel("simm5-bank1-type").setMaxAccess("readonly")
if mibBuilder.loadTexts: simm5_bank1_type.setStatus('optional')
if mibBuilder.loadTexts: simm5_bank1_type.setDescription('Returns an indication of the type of option installed in Bank 1 of SIMM slot 5. See SIMM1-TYPE for a description. Additional information: This object is used for describing the type of DIMM banks. Each physical DIMM slot has up to 2 banks.')
simm5_bank1_capacity = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 1, 5, 6, 1, 2), Integer32()).setLabel("simm5-bank1-capacity").setMaxAccess("readonly")
if mibBuilder.loadTexts: simm5_bank1_capacity.setStatus('optional')
if mibBuilder.loadTexts: simm5_bank1_capacity.setDescription('Returns an indication of the capacity of Bank 1 of the SIMM installed in SIMM slot 5. See SIMM1-CAPACITY for a description. Additional information: Returns an indication of the capacity of the installed bank option in bytes. This object is not supported unless the SIMM5-BANK1-TYPE type is eReadOnlyMemory, eFlashMemory, eEDORandomAccessMemory, eSDRandomAccessMemory, eSRandomAccessMemory, or eFPMRandomAccessMemory.')
# Bank 2 of slot 5.
simm5_bank2_type = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 1, 5, 6, 2, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 7, 9, 10))).clone(namedValues=NamedValues(("eEmpty", 1), ("eUnknown", 2), ("eUnSupported", 3), ("eReadOnlyMemory", 4), ("eVolatileRandomAccessMemory", 5), ("eFlashMemory", 7), ("eRamRom", 9), ("eVolatileRAMOnBoardMemory", 10)))).setLabel("simm5-bank2-type").setMaxAccess("readonly")
if mibBuilder.loadTexts: simm5_bank2_type.setStatus('optional')
if mibBuilder.loadTexts: simm5_bank2_type.setDescription('Returns an indication of the type of option installed in Bank 2 of SIMM slot 5. See SIMM1-TYPE for a description. Additional information: This object is used for describing the type of DIMM banks. Each physical DIMM slot has up to 2 banks.')
simm5_bank2_capacity = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 1, 5, 6, 2, 2), Integer32()).setLabel("simm5-bank2-capacity").setMaxAccess("readonly")
if mibBuilder.loadTexts: simm5_bank2_capacity.setStatus('optional')
if mibBuilder.loadTexts: simm5_bank2_capacity.setDescription('Returns an indication of the capacity of Bank 2 of the SIMM installed in SIMM slot 5. See SIMM1-CAPACITY for a description. Additional information: Returns an indication of the capacity of the installed bank option in bytes. This object is not supported unless the SIMM5-BANK2-TYPE type is eReadOnlyMemory, eFlashMemory, eEDORandomAccessMemory, eSDRandomAccessMemory, eSRandomAccessMemory, or eFPMRandomAccessMemory.')
# --- Job and PDL-processing control objects ---
# cancel_job: write-only; writing a job ID in 0..32767 cancels that print job.
cancel_job = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 32767))).setLabel("cancel-job").setMaxAccess("writeonly")
if mibBuilder.loadTexts: cancel_job.setStatus('optional')
if mibBuilder.loadTexts: cancel_job.setDescription("Cancels the print job whose ID matches the value written to the CANCEL-JOB object. The host first learns the job ID using the CURRENT-JOB-PARSING-ID command. If the printer has completely processed the job, the printer responds with <badValue>. If the value of the CURRENT-JOB-PARSING-ID is smaller than the value written to the CANCEL-JOB object, then the printer responds with <badValue>. When read, returns the value of the last job ID what was canceled, or -1 to indicate no job has been canceled. Additional information: If the value written matches the ID of a job that is currently being canceled (for any reason), the printer responds with <noError>. $product_str uses job ID's in the range of 0..32767. Because the ID number can wrap to zero, CURRENT-JOB-PARSING-ID may be smaller than the value written to this object; when this occurs, no error will result as long as the ID is for a currently processing job. This object is write only, so the comment in the general description stating the job ID or -1 will be returned on a read does not apply.")
# os_execute_file: write-only OctetString; the written string names a directory,
# an executable path, and optional arguments for the device OS to run.
os_execute_file = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 19, 1), OctetString()).setLabel("os-execute-file").setMaxAccess("writeonly")
if mibBuilder.loadTexts: os_execute_file.setStatus('optional')
if mibBuilder.loadTexts: os_execute_file.setDescription("This object's input is a null-terminated string representing a fully-qualified path name for an executable file. This object causes the file to be executed by the OS. Additional information: This object's input is a null-terminated string of two or more whitespace-separated tokens. The first token is a path to a directory to make the current working directory. The second token is a path to an executable file to be executed. Any remaining whitespace-separated tokens are optional and will be passed as parameters to the executable. The paths to the directory and executable can be either PJL style (e.g., 1:\\app\\example) or UNIX-style (e.g., /hpmnt/dsk_ide1a/app/example). The executable is run in a separate process.")
# form_feed: write-only trigger (single enum value eInitiateAction) that flushes
# the current page when the PDL subsystem is in the form-feed-needed state.
form_feed = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 3, 3, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1))).clone(namedValues=NamedValues(("eInitiateAction", 1)))).setLabel("form-feed").setMaxAccess("writeonly")
if mibBuilder.loadTexts: form_feed.setStatus('optional')
if mibBuilder.loadTexts: form_feed.setDescription('Instructs the PDL processing sub-system to finishing processing the current page of the current job. Form feed is also known as close page or eject page. If the PDL processing sub-system is in a FORM-FEED-NEEDED state, this causes the device to flush or finish processing the current page of the current job. If the device is not in the FORM-FEED-NEEDED state, an <genErr> will occur.')
# form_feed_needed: read-only eFalse/eTrue flag paired with form_feed above.
form_feed_needed = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 3, 3, 2, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("eFalse", 1), ("eTrue", 2)))).setLabel("form-feed-needed").setMaxAccess("readonly")
if mibBuilder.loadTexts: form_feed_needed.setStatus('optional')
if mibBuilder.loadTexts: form_feed_needed.setDescription("Indicates if the PDL processing sub-system has made marks on the current page and the source subsystem has been idle for a device specific amount of time. Additional information: $product_str will set this object to eTrue when it has made marks on the current page, the IO-TIMEOUT has expired while PCL was running in `backward-compatibility mode' (which is caused by jobs consisting purely of PCL data with no prepended PJL commands), and no data is pending on another I/O. Once it is set to eTrue, more data on the same I/O will cause this object to be set to eFalse, until the above conditions are met again.")
# --- Front-panel background status message and warning-control objects ---
# Two read-write strings (max 16 octets each) overriding lines 1 and 2 of the
# device's built-in background status display.
background_status_msg_line1_part1 = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 2, 37, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 16))).setLabel("background-status-msg-line1-part1").setMaxAccess("readwrite")
if mibBuilder.loadTexts: background_status_msg_line1_part1.setStatus('optional')
if mibBuilder.loadTexts: background_status_msg_line1_part1.setDescription("The string displayed on the device's front panel in place of the printer's built-in background status string. An example built-in background status string is '00 READY'. Additional information: The display size for the $product_str printers is 2 X 16. The value of this object and the current value of BACKGROUND-STATUS-MSG-LINE2-PART1 are displayed together on the 2-line display, but they must be set independently. If line 2 has been set, and the next message to be displayed only requires line 1, BACKGROUND-STATUS-MSG-LINE2-PART1 must be set to the null string to clear it. This object allows a message to be displayed when it is the highest priority message. Setting this object does not guarantee the message will be displayed; and reading it returns the value last written, not the currently displayed message (use prtConsoleDisplayBufferText to read the display). The priority assigned for displaying this message is one lower than the READY message. In other words, the only message that can be replaced by these objects is the READY message. To clear the message, write a null string to both this object and BACKGROUND-STATUS-MSG-LINE2-PART1.")
background_status_msg_line2_part1 = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 2, 37, 2, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 16))).setLabel("background-status-msg-line2-part1").setMaxAccess("readwrite")
if mibBuilder.loadTexts: background_status_msg_line2_part1.setStatus('optional')
if mibBuilder.loadTexts: background_status_msg_line2_part1.setDescription("The string displayed on the device's front panel in place of the printer's built-in background status string. An example built-in background status string is '00 READY'. Additional information: See BACKGROUND-STATUS-MSG-LINE1-PART1")
# error_log_clear: write-only trigger (eClearErrorLog) that empties the
# error-log sub-tree defined below (error1..error11 in this chunk).
error_log_clear = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 2, 38), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1))).clone(namedValues=NamedValues(("eClearErrorLog", 1)))).setLabel("error-log-clear").setMaxAccess("writeonly")
if mibBuilder.loadTexts: error_log_clear.setStatus('optional')
if mibBuilder.loadTexts: error_log_clear.setDescription("Setting this object clears all the entries in the error log sub-tree. Additional information: Setting this object removes all errors that have been stored in the printer's non-volatile memory.")
# clearable_warning: read-write enum (eOn=2, eJob=3) controlling how transient
# warnings are tracked/cleared; note eOff is mentioned in the text but is not a
# settable value in this device's constraint set.
clearable_warning = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2, 3))).clone(namedValues=NamedValues(("eOn", 2), ("eJob", 3)))).setLabel("clearable-warning").setMaxAccess("readwrite")
if mibBuilder.loadTexts: clearable_warning.setStatus('optional')
if mibBuilder.loadTexts: clearable_warning.setDescription('Returns or controls how the device will track clearable warnings. A clearable warning indicates a transient event in the device. The device will continue after the transient event occurs. If CLEARABLE-WARNING is eOff, the device does not track clearable warnings. If CLEARABLE-WARNING is eOn, all clearable warnings will be tracked until cleared (acknowledged). If CLEARABLE-WARNING is eJob, a clearable warning generated due to an event that occurs because of the print job being processed will be automatically cleared when the device has finished processing the job. Example clearable warning events include the device altering resolution or page protection due to memory constraints. The POS will document what transient events are treated as clearable warnings. Additional information: If set to eOn, the warning is displayed until the GO key is pressed or the CONTINUE object is set. If set to eJob, the warning is displayed until the end of the job in which it was generated.')
# --- Error log entries 1..11 ---
# Each entry N is a pair of read-only Integer32 scalars under OID branch
# (…, 1, 1, 11, N): a time stamp (engine page count at the time of the error,
# per the ERROR1-TIME-STAMP text) and a packed error code (error number in the
# upper 16 bits, sub-codes in the lower 16 — see the ERROR1-CODE description).
# Entries 2..11 differ from entry 1 only in the OID index and refer back to the
# entry-1 descriptions for semantics.
error1_time_stamp = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 1, 1), Integer32()).setLabel("error1-time-stamp").setMaxAccess("readonly")
if mibBuilder.loadTexts: error1_time_stamp.setStatus('optional')
if mibBuilder.loadTexts: error1_time_stamp.setDescription("Contains some sort of time stamp indicating when error 1 occurred. Example time stamps include the actual time the error occurred (in seconds since Jan. 1, 1970), and the total engine page count. The device POS documents the meaning of the time stamp. Additional information: This item contains the engine page count when the error occurred. If there is currently no error entry for this object, a '0' will be returned. Note that '0' may also be returned when there is a valid error, but a current page count was unavailable. If ERROR1-CODE object also returns '0', then an error has not yet been logged for this object. See ERROR1-CODE for an explanation of the order used for storing errors.")
error1_code = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 1, 2), Integer32()).setLabel("error1-code").setMaxAccess("readonly")
if mibBuilder.loadTexts: error1_code.setStatus('optional')
if mibBuilder.loadTexts: error1_code.setDescription("Contains a device specific error code. Each device POS should list what errors are logged to the error log and the meaning of each supported error code value. Additional information: Returns a device specific error code. If the error code returned is '0', then the printer has not yet logged an error for this object. When the maximum number of errors is reached (30 on LaserJet printers), and a new error occurs, the error in ERROR1-CODE will be replaced by the one in ERROR2-CODE, and so on until the last error object will be given the value of the new error. The error number is returned in the upper 16 bits. If it is 68, 69, 79, or 80, then the sub code is returned in the lower 16 bits (eg. 68 001C, where the subcode is already a hexadecimal value). If the error number is any other number, then the 1st sub code (XX) will be in bits 15-8 and the 2nd sub code (YY) will be in bits 7-0 (eg. 55.04.02) where XX=04 and YY=02). See the Control Panel ERS for specific information about the meaning of each code. Example: If the error is 68 001A, then the value returned will be 4456474. To break it down: 4456474 = 0x0044001A The upper 16 bits: 0x0044 = 68 The lower 16 bits: 0x001A = 001A Which is the error: 68 001A Example: If the error is 55.04.241, then the error code will be 3605745. To break it down: 3605745 = 0x003704F1 The upper 16 bits: 0x0037 = 55 The upper byte of the lower 16 bits: 0x04 = 04 The lower byte of the lower 16 bits: 0xF1 = 241 Which is the error: 55.04.241")
error2_time_stamp = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 2, 1), Integer32()).setLabel("error2-time-stamp").setMaxAccess("readonly")
if mibBuilder.loadTexts: error2_time_stamp.setStatus('optional')
if mibBuilder.loadTexts: error2_time_stamp.setDescription('Contains some sort of time stamp indicating when error 2 occurred. Example time stamps include the actual time the error occurred (in seconds since Jan. 1, 1970), and the total engine page count. The device POS documents the meaning of the time stamp. Additional information: See ERROR1-TIME-STAMP.')
error2_code = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 2, 2), Integer32()).setLabel("error2-code").setMaxAccess("readonly")
if mibBuilder.loadTexts: error2_code.setStatus('optional')
if mibBuilder.loadTexts: error2_code.setDescription('Contains a device specific error code. Each device POS should list what errors are logged to the error log and the meaning of each supported error code value. Additional information: See ERROR1-CODE.')
error3_time_stamp = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 3, 1), Integer32()).setLabel("error3-time-stamp").setMaxAccess("readonly")
if mibBuilder.loadTexts: error3_time_stamp.setStatus('optional')
if mibBuilder.loadTexts: error3_time_stamp.setDescription('Contains some sort of time stamp indicating when error 3 occurred. Example time stamps include the actual time the error occurred (in seconds since Jan. 1, 1970), and the total engine page count. The device POS documents the meaning of the time stamp. Additional information: See ERROR1-TIME-STAMP.')
error3_code = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 3, 2), Integer32()).setLabel("error3-code").setMaxAccess("readonly")
if mibBuilder.loadTexts: error3_code.setStatus('optional')
if mibBuilder.loadTexts: error3_code.setDescription('Contains a device specific error code. Each device POS should list what errors are logged to the error log and the meaning of each supported error code value. Additional information: See ERROR1-CODE.')
error4_time_stamp = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 4, 1), Integer32()).setLabel("error4-time-stamp").setMaxAccess("readonly")
if mibBuilder.loadTexts: error4_time_stamp.setStatus('optional')
if mibBuilder.loadTexts: error4_time_stamp.setDescription('Contains some sort of time stamp indicating when error 4 occurred. Example time stamps include the actual time the error occurred (in seconds since Jan. 1, 1970), and the total engine page count. The device POS documents the meaning of the time stamp. Additional information: See ERROR1-TIME-STAMP.')
error4_code = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 4, 2), Integer32()).setLabel("error4-code").setMaxAccess("readonly")
if mibBuilder.loadTexts: error4_code.setStatus('optional')
if mibBuilder.loadTexts: error4_code.setDescription('Contains a device specific error code. Each device POS should list what errors are logged to the error log and the meaning of each supported error code value. Additional information: See ERROR1-CODE.')
error5_time_stamp = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 5, 1), Integer32()).setLabel("error5-time-stamp").setMaxAccess("readonly")
if mibBuilder.loadTexts: error5_time_stamp.setStatus('optional')
if mibBuilder.loadTexts: error5_time_stamp.setDescription('Contains some sort of time stamp indicating when error 5 occurred. Example time stamps include the actual time the error occurred (in seconds since Jan. 1, 1970), and the total engine page count. The device POS documents the meaning of the time stamp. Additional information: See ERROR1-TIME-STAMP.')
error5_code = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 5, 2), Integer32()).setLabel("error5-code").setMaxAccess("readonly")
if mibBuilder.loadTexts: error5_code.setStatus('optional')
if mibBuilder.loadTexts: error5_code.setDescription('Contains a device specific error code. Each device POS should list what errors are logged to the error log and the meaning of each supported error code value. Additional information: See ERROR1-CODE.')
error6_time_stamp = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 6, 1), Integer32()).setLabel("error6-time-stamp").setMaxAccess("readonly")
if mibBuilder.loadTexts: error6_time_stamp.setStatus('optional')
if mibBuilder.loadTexts: error6_time_stamp.setDescription('Contains some sort of time stamp indicating when error 6 occurred. Example time stamps include the actual time the error occurred (in seconds since Jan. 1, 1970), and the total engine page count. The device POS documents the meaning of the time stamp. Additional information: See ERROR1-TIME-STAMP.')
error6_code = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 6, 2), Integer32()).setLabel("error6-code").setMaxAccess("readonly")
if mibBuilder.loadTexts: error6_code.setStatus('optional')
if mibBuilder.loadTexts: error6_code.setDescription('Contains a device specific error code. Each device POS should list what errors are logged to the error log and the meaning of each supported error code value. Additional information: See ERROR1-CODE.')
error7_time_stamp = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 7, 1), Integer32()).setLabel("error7-time-stamp").setMaxAccess("readonly")
if mibBuilder.loadTexts: error7_time_stamp.setStatus('optional')
if mibBuilder.loadTexts: error7_time_stamp.setDescription('Contains some sort of time stamp indicating when error 7 occurred. Example time stamps include the actual time the error occurred (in seconds since Jan. 1, 1970), and the total engine page count. The device POS documents the meaning of the time stamp. Additional information: See ERROR1-TIME-STAMP.')
error7_code = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 7, 2), Integer32()).setLabel("error7-code").setMaxAccess("readonly")
if mibBuilder.loadTexts: error7_code.setStatus('optional')
if mibBuilder.loadTexts: error7_code.setDescription('Contains a device specific error code. Each device POS should list what errors are logged to the error log and the meaning of each supported error code value. Additional information: See ERROR1-CODE.')
error8_time_stamp = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 8, 1), Integer32()).setLabel("error8-time-stamp").setMaxAccess("readonly")
if mibBuilder.loadTexts: error8_time_stamp.setStatus('optional')
if mibBuilder.loadTexts: error8_time_stamp.setDescription('Contains some sort of time stamp indicating when error 8 occurred. Example time stamps include the actual time the error occurred (in seconds since Jan. 1, 1970), and the total engine page count. The device POS documents the meaning of the time stamp. Additional information: See ERROR1-TIME-STAMP.')
error8_code = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 8, 2), Integer32()).setLabel("error8-code").setMaxAccess("readonly")
if mibBuilder.loadTexts: error8_code.setStatus('optional')
if mibBuilder.loadTexts: error8_code.setDescription('Contains a device specific error code. Each device POS should list what errors are logged to the error log and the meaning of each supported error code value. Additional information: See ERROR1-CODE.')
error9_time_stamp = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 9, 1), Integer32()).setLabel("error9-time-stamp").setMaxAccess("readonly")
if mibBuilder.loadTexts: error9_time_stamp.setStatus('optional')
if mibBuilder.loadTexts: error9_time_stamp.setDescription('Contains some sort of time stamp indicating when error 9 occurred. Example time stamps include the actual time the error occurred (in seconds since Jan. 1, 1970), and the total engine page count. The device POS documents the meaning of the time stamp. Additional information: See ERROR1-TIME-STAMP.')
error9_code = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 9, 2), Integer32()).setLabel("error9-code").setMaxAccess("readonly")
if mibBuilder.loadTexts: error9_code.setStatus('optional')
if mibBuilder.loadTexts: error9_code.setDescription('Contains a device specific error code. Each device POS should list what errors are logged to the error log and the meaning of each supported error code value. Additional information: See ERROR1-CODE.')
error10_time_stamp = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 10, 1), Integer32()).setLabel("error10-time-stamp").setMaxAccess("readonly")
if mibBuilder.loadTexts: error10_time_stamp.setStatus('optional')
if mibBuilder.loadTexts: error10_time_stamp.setDescription('Contains some sort of time stamp indicating when error 10 occurred. Example time stamps include the actual time the error occurred (in seconds since Jan. 1, 1970), and the total engine page count. The device POS documents the meaning of the time stamp. Additional information: See ERROR1-TIME-STAMP.')
error10_code = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 10, 2), Integer32()).setLabel("error10-code").setMaxAccess("readonly")
if mibBuilder.loadTexts: error10_code.setStatus('optional')
if mibBuilder.loadTexts: error10_code.setDescription('Contains a device specific error code. Each device POS should list what errors are logged to the error log and the meaning of each supported error code value. Additional information: See ERROR1-CODE.')
error11_time_stamp = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 11, 1), Integer32()).setLabel("error11-time-stamp").setMaxAccess("readonly")
if mibBuilder.loadTexts: error11_time_stamp.setStatus('optional')
if mibBuilder.loadTexts: error11_time_stamp.setDescription('Contains some sort of time stamp indicating when error 11 occurred. Example time stamps include the actual time the error occurred (in seconds since Jan. 1, 1970), and the total engine page count. The device POS documents the meaning of the time stamp. Additional information: See ERROR1-TIME-STAMP.')
error11_code = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 11, 2), Integer32()).setLabel("error11-code").setMaxAccess("readonly")
if mibBuilder.loadTexts: error11_code.setStatus('optional')
if mibBuilder.loadTexts: error11_code.setDescription('Contains a device specific error code. Each device POS should list what errors are logged to the error log and the meaning of each supported error code value. Additional information: See ERROR1-CODE.')
error12_time_stamp = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 12, 1), Integer32()).setLabel("error12-time-stamp").setMaxAccess("readonly")
if mibBuilder.loadTexts: error12_time_stamp.setStatus('optional')
if mibBuilder.loadTexts: error12_time_stamp.setDescription('Contains some sort of time stamp indicating when error 12 occurred. Example time stamps include the actual time the error occurred (in seconds since Jan. 1, 1970), and the total engine page count. The device POS documents the meaning of the time stamp. Additional information: See ERROR1-TIME-STAMP.')
error12_code = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 12, 2), Integer32()).setLabel("error12-code").setMaxAccess("readonly")
if mibBuilder.loadTexts: error12_code.setStatus('optional')
if mibBuilder.loadTexts: error12_code.setDescription('Contains a device specific error code. Each device POS should list what errors are logged to the error log and the meaning of each supported error code value. Additional information: See ERROR1-CODE.')
error13_time_stamp = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 13, 1), Integer32()).setLabel("error13-time-stamp").setMaxAccess("readonly")
if mibBuilder.loadTexts: error13_time_stamp.setStatus('optional')
if mibBuilder.loadTexts: error13_time_stamp.setDescription('Contains some sort of time stamp indicating when error 13 occurred. Example time stamps include the actual time the error occurred (in seconds since Jan. 1, 1970), and the total engine page count. The device POS documents the meaning of the time stamp. Additional information: See ERROR1-TIME-STAMP.')
error13_code = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 13, 2), Integer32()).setLabel("error13-code").setMaxAccess("readonly")
if mibBuilder.loadTexts: error13_code.setStatus('optional')
if mibBuilder.loadTexts: error13_code.setDescription('Contains a device specific error code. Each device POS should list what errors are logged to the error log and the meaning of each supported error code value. Additional information: See ERROR1-CODE.')
error14_time_stamp = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 14, 1), Integer32()).setLabel("error14-time-stamp").setMaxAccess("readonly")
if mibBuilder.loadTexts: error14_time_stamp.setStatus('optional')
if mibBuilder.loadTexts: error14_time_stamp.setDescription('Contains some sort of time stamp indicating when error 14 occurred. Example time stamps include the actual time the error occurred (in seconds since Jan. 1, 1970), and the total engine page count. The device POS documents the meaning of the time stamp. Additional information: See ERROR1-TIME-STAMP.')
error14_code = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 14, 2), Integer32()).setLabel("error14-code").setMaxAccess("readonly")
if mibBuilder.loadTexts: error14_code.setStatus('optional')
if mibBuilder.loadTexts: error14_code.setDescription('Contains a device specific error code. Each device POS should list what errors are logged to the error log and the meaning of each supported error code value. Additional information: See ERROR1-CODE.')
error15_time_stamp = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 15, 1), Integer32()).setLabel("error15-time-stamp").setMaxAccess("readonly")
if mibBuilder.loadTexts: error15_time_stamp.setStatus('optional')
if mibBuilder.loadTexts: error15_time_stamp.setDescription('Contains some sort of time stamp indicating when error 15 occurred. Example time stamps include the actual time the error occurred (in seconds since Jan. 1, 1970), and the total engine page count. The device POS documents the meaning of the time stamp. Additional information: See ERROR1-TIME-STAMP.')
error15_code = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 15, 2), Integer32()).setLabel("error15-code").setMaxAccess("readonly")
if mibBuilder.loadTexts: error15_code.setStatus('optional')
if mibBuilder.loadTexts: error15_code.setDescription('Contains a device specific error code. Each device POS should list what errors are logged to the error log and the meaning of each supported error code value. Additional information: See ERROR1-CODE.')
error16_time_stamp = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 16, 1), Integer32()).setLabel("error16-time-stamp").setMaxAccess("readonly")
if mibBuilder.loadTexts: error16_time_stamp.setStatus('optional')
if mibBuilder.loadTexts: error16_time_stamp.setDescription('Contains some sort of time stamp indicating when error 16 occurred. Example time stamps include the actual time the error occurred (in seconds since Jan. 1, 1970), and the total engine page count. The device POS documents the meaning of the time stamp. Additional information: See ERROR1-TIME-STAMP.')
error16_code = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 16, 2), Integer32()).setLabel("error16-code").setMaxAccess("readonly")
if mibBuilder.loadTexts: error16_code.setStatus('optional')
if mibBuilder.loadTexts: error16_code.setDescription('Contains a device specific error code. Each device POS should list what errors are logged to the error log and the meaning of each supported error code value. Additional information: See ERROR1-CODE.')
error17_time_stamp = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 17, 1), Integer32()).setLabel("error17-time-stamp").setMaxAccess("readonly")
if mibBuilder.loadTexts: error17_time_stamp.setStatus('optional')
if mibBuilder.loadTexts: error17_time_stamp.setDescription('Contains some sort of time stamp indicating when error 17 occurred. Example time stamps include the actual time the error occurred (in seconds since Jan. 1, 1970), and the total engine page count. The device POS documents the meaning of the time stamp. Additional information: See ERROR1-TIME-STAMP.')
error17_code = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 17, 2), Integer32()).setLabel("error17-code").setMaxAccess("readonly")
if mibBuilder.loadTexts: error17_code.setStatus('optional')
if mibBuilder.loadTexts: error17_code.setDescription('Contains a device specific error code. Each device POS should list what errors are logged to the error log and the meaning of each supported error code value. Additional information: See ERROR1-CODE.')
error18_time_stamp = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 18, 1), Integer32()).setLabel("error18-time-stamp").setMaxAccess("readonly")
if mibBuilder.loadTexts: error18_time_stamp.setStatus('optional')
if mibBuilder.loadTexts: error18_time_stamp.setDescription('Contains some sort of time stamp indicating when error 18 occurred. Example time stamps include the actual time the error occurred (in seconds since Jan. 1, 1970), and the total engine page count. The device POS documents the meaning of the time stamp. Additional information: See ERROR1-TIME-STAMP.')
error18_code = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 18, 2), Integer32()).setLabel("error18-code").setMaxAccess("readonly")
if mibBuilder.loadTexts: error18_code.setStatus('optional')
if mibBuilder.loadTexts: error18_code.setDescription('Contains a device specific error code. Each device POS should list what errors are logged to the error log and the meaning of each supported error code value. Additional information: See ERROR1-CODE.')
error19_time_stamp = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 19, 1), Integer32()).setLabel("error19-time-stamp").setMaxAccess("readonly")
if mibBuilder.loadTexts: error19_time_stamp.setStatus('optional')
if mibBuilder.loadTexts: error19_time_stamp.setDescription('Contains some sort of time stamp indicating when error 19 occurred. Example time stamps include the actual time the error occurred (in seconds since Jan. 1, 1970), and the total engine page count. The device POS documents the meaning of the time stamp. Additional information: See ERROR1-TIME-STAMP.')
error19_code = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 19, 2), Integer32()).setLabel("error19-code").setMaxAccess("readonly")
if mibBuilder.loadTexts: error19_code.setStatus('optional')
if mibBuilder.loadTexts: error19_code.setDescription('Contains a device specific error code. Each device POS should list what errors are logged to the error log and the meaning of each supported error code value. Additional information: See ERROR1-CODE.')
error20_time_stamp = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 20, 1), Integer32()).setLabel("error20-time-stamp").setMaxAccess("readonly")
if mibBuilder.loadTexts: error20_time_stamp.setStatus('optional')
if mibBuilder.loadTexts: error20_time_stamp.setDescription('Contains some sort of time stamp indicating when error 20 occurred. Example time stamps include the actual time the error occurred (in seconds since Jan. 1, 1970), and the total engine page count. The device POS documents the meaning of the time stamp. Additional information: See ERROR1-TIME-STAMP.')
error20_code = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 20, 2), Integer32()).setLabel("error20-code").setMaxAccess("readonly")
if mibBuilder.loadTexts: error20_code.setStatus('optional')
if mibBuilder.loadTexts: error20_code.setDescription('Contains a device specific error code. Each device POS should list what errors are logged to the error log and the meaning of each supported error code value. Additional information: See ERROR1-CODE.')
error21_time_stamp = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 21, 1), Integer32()).setLabel("error21-time-stamp").setMaxAccess("readonly")
if mibBuilder.loadTexts: error21_time_stamp.setStatus('optional')
if mibBuilder.loadTexts: error21_time_stamp.setDescription('Contains some sort of time stamp indicating when error 21 occurred. Example time stamps include the actual time the error occurred (in seconds since Jan. 1, 1970), and the total engine page count. The device POS documents the meaning of the time stamp. Additional information: See ERROR1-TIME-STAMP.')
error21_code = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 21, 2), Integer32()).setLabel("error21-code").setMaxAccess("readonly")
if mibBuilder.loadTexts: error21_code.setStatus('optional')
if mibBuilder.loadTexts: error21_code.setDescription('Contains a device specific error code. Each device POS should list what errors are logged to the error log and the meaning of each supported error code value. Additional information: See ERROR1-CODE.')
error22_time_stamp = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 22, 1), Integer32()).setLabel("error22-time-stamp").setMaxAccess("readonly")
if mibBuilder.loadTexts: error22_time_stamp.setStatus('optional')
if mibBuilder.loadTexts: error22_time_stamp.setDescription('Contains some sort of time stamp indicating when error 22 occurred. Example time stamps include the actual time the error occurred (in seconds since Jan. 1, 1970), and the total engine page count. The device POS documents the meaning of the time stamp. Additional information: See ERROR1-TIME-STAMP.')
error22_code = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 22, 2), Integer32()).setLabel("error22-code").setMaxAccess("readonly")
if mibBuilder.loadTexts: error22_code.setStatus('optional')
if mibBuilder.loadTexts: error22_code.setDescription('Contains a device specific error code. Each device POS should list what errors are logged to the error log and the meaning of each supported error code value. Additional information: See ERROR1-CODE.')
error23_time_stamp = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 23, 1), Integer32()).setLabel("error23-time-stamp").setMaxAccess("readonly")
if mibBuilder.loadTexts: error23_time_stamp.setStatus('optional')
if mibBuilder.loadTexts: error23_time_stamp.setDescription('Contains some sort of time stamp indicating when error 23 occurred. Example time stamps include the actual time the error occurred (in seconds since Jan. 1, 1970), and the total engine page count. The device POS documents the meaning of the time stamp. Additional information: See ERROR1-TIME-STAMP.')
error23_code = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 23, 2), Integer32()).setLabel("error23-code").setMaxAccess("readonly")
if mibBuilder.loadTexts: error23_code.setStatus('optional')
if mibBuilder.loadTexts: error23_code.setDescription('Contains a device specific error code. Each device POS should list what errors are logged to the error log and the meaning of each supported error code value. Additional information: See ERROR1-CODE.')
error24_time_stamp = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 24, 1), Integer32()).setLabel("error24-time-stamp").setMaxAccess("readonly")
if mibBuilder.loadTexts: error24_time_stamp.setStatus('optional')
if mibBuilder.loadTexts: error24_time_stamp.setDescription('Contains some sort of time stamp indicating when error 24 occurred. Example time stamps include the actual time the error occurred (in seconds since Jan. 1, 1970), and the total engine page count. The device POS documents the meaning of the time stamp. Additional information: See ERROR1-TIME-STAMP.')
error24_code = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 24, 2), Integer32()).setLabel("error24-code").setMaxAccess("readonly")
if mibBuilder.loadTexts: error24_code.setStatus('optional')
if mibBuilder.loadTexts: error24_code.setDescription('Contains a device specific error code. Each device POS should list what errors are logged to the error log and the meaning of each supported error code value. Additional information: See ERROR1-CODE.')
error25_time_stamp = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 25, 1), Integer32()).setLabel("error25-time-stamp").setMaxAccess("readonly")
if mibBuilder.loadTexts: error25_time_stamp.setStatus('optional')
if mibBuilder.loadTexts: error25_time_stamp.setDescription('Contains some sort of time stamp indicating when error 25 occurred. Example time stamps include the actual time the error occurred (in seconds since Jan. 1, 1970), and the total engine page count. The device POS documents the meaning of the time stamp. Additional information: See ERROR1-TIME-STAMP.')
error25_code = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 25, 2), Integer32()).setLabel("error25-code").setMaxAccess("readonly")
if mibBuilder.loadTexts: error25_code.setStatus('optional')
if mibBuilder.loadTexts: error25_code.setDescription('Contains a device specific error code. Each device POS should list what errors are logged to the error log and the meaning of each supported error code value. Additional information: See ERROR1-CODE.')
error26_time_stamp = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 26, 1), Integer32()).setLabel("error26-time-stamp").setMaxAccess("readonly")
if mibBuilder.loadTexts: error26_time_stamp.setStatus('optional')
if mibBuilder.loadTexts: error26_time_stamp.setDescription('Contains some sort of time stamp indicating when error 26 occurred. Example time stamps include the actual time the error occurred (in seconds since Jan. 1, 1970), and the total engine page count. The device POS documents the meaning of the time stamp. Additional information: See ERROR1-TIME-STAMP.')
error26_code = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 26, 2), Integer32()).setLabel("error26-code").setMaxAccess("readonly")
if mibBuilder.loadTexts: error26_code.setStatus('optional')
if mibBuilder.loadTexts: error26_code.setDescription('Contains a device specific error code. Each device POS should list what errors are logged to the error log and the meaning of each supported error code value. Additional information: See ERROR1-CODE.')
error27_time_stamp = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 27, 1), Integer32()).setLabel("error27-time-stamp").setMaxAccess("readonly")
if mibBuilder.loadTexts: error27_time_stamp.setStatus('optional')
if mibBuilder.loadTexts: error27_time_stamp.setDescription('Contains some sort of time stamp indicating when error 27 occurred. Example time stamps include the actual time the error occurred (in seconds since Jan. 1, 1970), and the total engine page count. The device POS documents the meaning of the time stamp. Additional information: See ERROR1-TIME-STAMP.')
error27_code = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 27, 2), Integer32()).setLabel("error27-code").setMaxAccess("readonly")
if mibBuilder.loadTexts: error27_code.setStatus('optional')
if mibBuilder.loadTexts: error27_code.setDescription('Contains a device specific error code. Each device POS should list what errors are logged to the error log and the meaning of each supported error code value. Additional information: See ERROR1-CODE.')
error28_time_stamp = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 28, 1), Integer32()).setLabel("error28-time-stamp").setMaxAccess("readonly")
if mibBuilder.loadTexts: error28_time_stamp.setStatus('optional')
if mibBuilder.loadTexts: error28_time_stamp.setDescription('Contains some sort of time stamp indicating when error 28 occurred. Example time stamps include the actual time the error occurred (in seconds since Jan. 1, 1970), and the total engine page count. The device POS documents the meaning of the time stamp. Additional information: See ERROR1-TIME-STAMP.')
error28_code = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 28, 2), Integer32()).setLabel("error28-code").setMaxAccess("readonly")
if mibBuilder.loadTexts: error28_code.setStatus('optional')
if mibBuilder.loadTexts: error28_code.setDescription('Contains a device specific error code. Each device POS should list what errors are logged to the error log and the meaning of each supported error code value. Additional information: See ERROR1-CODE.')
error29_time_stamp = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 29, 1), Integer32()).setLabel("error29-time-stamp").setMaxAccess("readonly")
if mibBuilder.loadTexts: error29_time_stamp.setStatus('optional')
if mibBuilder.loadTexts: error29_time_stamp.setDescription('Contains some sort of time stamp indicating when error 29 occurred. Example time stamps include the actual time the error occurred (in seconds since Jan. 1, 1970), and the total engine page count. The device POS documents the meaning of the time stamp. Additional information: See ERROR1-TIME-STAMP.')
error29_code = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 29, 2), Integer32()).setLabel("error29-code").setMaxAccess("readonly")
if mibBuilder.loadTexts: error29_code.setStatus('optional')
if mibBuilder.loadTexts: error29_code.setDescription('Contains a device specific error code. Each device POS should list what errors are logged to the error log and the meaning of each supported error code value. Additional information: See ERROR1-CODE.')
error30_time_stamp = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 30, 1), Integer32()).setLabel("error30-time-stamp").setMaxAccess("readonly")
if mibBuilder.loadTexts: error30_time_stamp.setStatus('optional')
if mibBuilder.loadTexts: error30_time_stamp.setDescription('Contains some sort of time stamp indicating when error 30 occurred. Example time stamps include the actual time the error occurred (in seconds since Jan. 1, 1970), and the total engine page count. The device POS documents the meaning of the time stamp. Additional information: See ERROR1-TIME-STAMP.')
error30_code = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 30, 2), Integer32()).setLabel("error30-code").setMaxAccess("readonly")
if mibBuilder.loadTexts: error30_code.setStatus('optional')
if mibBuilder.loadTexts: error30_code.setDescription('Contains a device specific error code. Each device POS should list what errors are logged to the error log and the meaning of each supported error code value. Additional information: See ERROR1-CODE.')
error31_time_stamp = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 31, 1), Integer32()).setLabel("error31-time-stamp").setMaxAccess("readonly")
if mibBuilder.loadTexts: error31_time_stamp.setStatus('optional')
if mibBuilder.loadTexts: error31_time_stamp.setDescription('Contains some sort of time stamp indicating when error 31 occurred. Example time stamps include the actual time the error occurred (in seconds since Jan. 1, 1970), and the total engine page count. The device POS documents the meaning of the time stamp. Additional information: See ERROR1-TIME-STAMP.')
error31_code = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 31, 2), Integer32()).setLabel("error31-code").setMaxAccess("readonly")
if mibBuilder.loadTexts: error31_code.setStatus('optional')
if mibBuilder.loadTexts: error31_code.setDescription('Contains a device specific error code. Each device POS should list what errors are logged to the error log and the meaning of each supported error code value. Additional information: See ERROR1-CODE.')
error32_time_stamp = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 32, 1), Integer32()).setLabel("error32-time-stamp").setMaxAccess("readonly")
if mibBuilder.loadTexts: error32_time_stamp.setStatus('optional')
if mibBuilder.loadTexts: error32_time_stamp.setDescription('Contains some sort of time stamp indicating when error 32 occurred. Example time stamps include the actual time the error occurred (in seconds since Jan. 1, 1970), and the total engine page count. The device POS documents the meaning of the time stamp. Additional information: See ERROR1-TIME-STAMP.')
error32_code = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 32, 2), Integer32()).setLabel("error32-code").setMaxAccess("readonly")
if mibBuilder.loadTexts: error32_code.setStatus('optional')
if mibBuilder.loadTexts: error32_code.setDescription('Contains a device specific error code. Each device POS should list what errors are logged to the error log and the meaning of each supported error code value. Additional information: See ERROR1-CODE.')
error33_time_stamp = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 33, 1), Integer32()).setLabel("error33-time-stamp").setMaxAccess("readonly")
if mibBuilder.loadTexts: error33_time_stamp.setStatus('optional')
if mibBuilder.loadTexts: error33_time_stamp.setDescription('Contains some sort of time stamp indicating when error 33 occurred. Example time stamps include the actual time the error occurred (in seconds since Jan. 1, 1970), and the total engine page count. The device POS documents the meaning of the time stamp. Additional information: See ERROR1-TIME-STAMP.')
error33_code = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 33, 2), Integer32()).setLabel("error33-code").setMaxAccess("readonly")
if mibBuilder.loadTexts: error33_code.setStatus('optional')
if mibBuilder.loadTexts: error33_code.setDescription('Contains a device specific error code. Each device POS should list what errors are logged to the error log and the meaning of each supported error code value. Additional information: See ERROR1-CODE.')
error34_time_stamp = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 34, 1), Integer32()).setLabel("error34-time-stamp").setMaxAccess("readonly")
if mibBuilder.loadTexts: error34_time_stamp.setStatus('optional')
if mibBuilder.loadTexts: error34_time_stamp.setDescription('Contains some sort of time stamp indicating when error 34 occurred. Example time stamps include the actual time the error occurred (in seconds since Jan. 1, 1970), and the total engine page count. The device POS documents the meaning of the time stamp. Additional information: See ERROR1-TIME-STAMP.')
error34_code = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 34, 2), Integer32()).setLabel("error34-code").setMaxAccess("readonly")
if mibBuilder.loadTexts: error34_code.setStatus('optional')
if mibBuilder.loadTexts: error34_code.setDescription('Contains a device specific error code. Each device POS should list what errors are logged to the error log and the meaning of each supported error code value. Additional information: See ERROR1-CODE.')
error35_time_stamp = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 35, 1), Integer32()).setLabel("error35-time-stamp").setMaxAccess("readonly")
if mibBuilder.loadTexts: error35_time_stamp.setStatus('optional')
if mibBuilder.loadTexts: error35_time_stamp.setDescription('Contains some sort of time stamp indicating when error 35 occurred. Example time stamps include the actual time the error occurred (in seconds since Jan. 1, 1970), and the total engine page count. The device POS documents the meaning of the time stamp. Additional information: See ERROR1-TIME-STAMP.')
error35_code = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 35, 2), Integer32()).setLabel("error35-code").setMaxAccess("readonly")
if mibBuilder.loadTexts: error35_code.setStatus('optional')
if mibBuilder.loadTexts: error35_code.setDescription('Contains a device specific error code. Each device POS should list what errors are logged to the error log and the meaning of each supported error code value. Additional information: See ERROR1-CODE.')
error36_time_stamp = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 36, 1), Integer32()).setLabel("error36-time-stamp").setMaxAccess("readonly")
if mibBuilder.loadTexts: error36_time_stamp.setStatus('optional')
if mibBuilder.loadTexts: error36_time_stamp.setDescription('Contains some sort of time stamp indicating when error 36 occurred. Example time stamps include the actual time the error occurred (in seconds since Jan. 1, 1970), and the total engine page count. The device POS documents the meaning of the time stamp. Additional information: See ERROR1-TIME-STAMP.')
error36_code = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 36, 2), Integer32()).setLabel("error36-code").setMaxAccess("readonly")
if mibBuilder.loadTexts: error36_code.setStatus('optional')
if mibBuilder.loadTexts: error36_code.setDescription('Contains a device specific error code. Each device POS should list what errors are logged to the error log and the meaning of each supported error code value. Additional information: See ERROR1-CODE.')
error37_time_stamp = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 37, 1), Integer32()).setLabel("error37-time-stamp").setMaxAccess("readonly")
if mibBuilder.loadTexts: error37_time_stamp.setStatus('optional')
if mibBuilder.loadTexts: error37_time_stamp.setDescription('Contains some sort of time stamp indicating when error 37 occurred. Example time stamps include the actual time the error occurred (in seconds since Jan. 1, 1970), and the total engine page count. The device POS documents the meaning of the time stamp. Additional information: See ERROR1-TIME-STAMP.')
error37_code = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 37, 2), Integer32()).setLabel("error37-code").setMaxAccess("readonly")
if mibBuilder.loadTexts: error37_code.setStatus('optional')
if mibBuilder.loadTexts: error37_code.setDescription('Contains a device specific error code. Each device POS should list what errors are logged to the error log and the meaning of each supported error code value. Additional information: See ERROR1-CODE.')
error38_time_stamp = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 38, 1), Integer32()).setLabel("error38-time-stamp").setMaxAccess("readonly")
if mibBuilder.loadTexts: error38_time_stamp.setStatus('optional')
if mibBuilder.loadTexts: error38_time_stamp.setDescription('Contains some sort of time stamp indicating when error 38 occurred. Example time stamps include the actual time the error occurred (in seconds since Jan. 1, 1970), and the total engine page count. The device POS documents the meaning of the time stamp. Additional information: See ERROR1-TIME-STAMP.')
error38_code = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, 38, 2), Integer32()).setLabel("error38-code").setMaxAccess("readonly")
if mibBuilder.loadTexts: error38_code.setStatus('optional')
# Trailing description for error-log entry 38 (the error38-code scalar itself
# is defined earlier in the file).
if mibBuilder.loadTexts: error38_code.setDescription('Contains a device specific error code. Each device POS should list what errors are logged to the error log and the meaning of each supported error code value. Additional information: See ERROR1-CODE.')
# Error-log entries 39 through 50 are exactly uniform: one read-only
# "errorNN-time-stamp" scalar and one read-only "errorNN-code" scalar per
# slot, differing only in the entry number embedded in the OID, the label,
# and the time-stamp description text.  The generated unrolled statements
# are collapsed into a single loop; the globals() assignments keep the
# module-level names (error39_time_stamp ... error50_code) identical to the
# generated originals so later references/exports still resolve.
_ERR_TS_DESC = (
    'Contains some sort of time stamp indicating when error %d occurred. '
    'Example time stamps include the actual time the error occurred (in seconds '
    'since Jan. 1, 1970), and the total engine page count. The device POS '
    'documents the meaning of the time stamp. Additional information: See '
    'ERROR1-TIME-STAMP.'
)
_ERR_CODE_DESC = (
    'Contains a device specific error code. Each device POS should list what '
    'errors are logged to the error log and the meaning of each supported '
    'error code value. Additional information: See ERROR1-CODE.'
)
for _entry in range(39, 51):
    # errorNN-time-stamp scalar (OID ...11.NN.1).
    _ts = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, _entry, 1), Integer32()).setLabel('error%d-time-stamp' % _entry).setMaxAccess('readonly')
    if mibBuilder.loadTexts:
        _ts.setStatus('optional')
        _ts.setDescription(_ERR_TS_DESC % _entry)
    # errorNN-code scalar (OID ...11.NN.2).
    _code = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 11, _entry, 2), Integer32()).setLabel('error%d-code' % _entry).setMaxAccess('readonly')
    if mibBuilder.loadTexts:
        _code.setStatus('optional')
        _code.setDescription(_ERR_CODE_DESC)
    globals()['error%d_time_stamp' % _entry] = _ts
    globals()['error%d_code' % _entry] = _code
# channelPrinterAlert: read-only octet string carrying the six prtAlert*
# values (24 bytes, six big-endian sint32 fields) for the most recent
# critical alert, so the I/O card can forward them in a printerAlert trap.
channelprinteralert = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 6, 2), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: channelprinteralert.setStatus('optional')
# Typo fixes relative to the generated text: "critical evnet" -> "critical
# event", and the table header "Filed" -> "Field".
if mibBuilder.loadTexts: channelprinteralert.setDescription("Identifies the values of prtAlertIndex, prtAlertSeverityLevel, prtAlertGroup, prtAlertGroupIndex, prtAlertLocation, and prtAlertCode for the latest critical event in the prtAlertTable. The binary string is defined as following: <table> ----------------------------------------------------- Field | Field | Description Offset | Length | ----------------------------------------------------- 0 | 4 | the value of prtAlertIndex 4 | 4 | the value of prtAlertSeverityLevel 8 | 4 | the value of prtAlertGroup 12 | 4 | the value of prtAlertGroupIndex 16 | 4 | the value of prtAlertLocation 20 | 4 | the value of prtAlertCode ------------------------------------------------------ </table> Each field is in Big Endian style. Additional information: This object is used to pass alert information from the peripheral to the IIO card. The standard printer MIB contains the following description: printerAlert TRAP-TYPE ENTERPRISE printerV1Alert VARIABLES { prtAlertIndex, prtAlertSeverityLevel, prtAlertGroup, prtAlertGroupIndex, prtAlertLocation, prtAlertCode } DESCRIPTION 'This trap is sent whenever a critical event is added to the prtAlertTable.' In order to provide this information in the trap packet, the IIO card enables traps on channelPrinterAlert. When a critical alert is generated the peripheral fills the appropriate value into this object and sends it to the card. This object is a structure which contains 24 bytes of data. The structure is: struct structAlertInfo { sint32 prtAlertIndex; sint32 prtAlertSeverityLevel; sint32 prtAlertGroup; sint32 prtAlertGroupIndex; sint32 prtAlertLocation; sint32 prtAlertCode; } thisAlertData;")
# --- Device identification / installation scalars ---
# install-date: read-write, fixed 13-octet 'YYYYMMDDHHmmZ' string (per its
# description); writability is gated by a hidden control object.
install_date = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 2, 8), OctetString().subtype(subtypeSpec=ValueSizeConstraint(13, 13)).setFixedLength(13)).setLabel("install-date").setMaxAccess("readwrite")
if mibBuilder.loadTexts: install_date.setStatus('optional')
if mibBuilder.loadTexts: install_date.setDescription("Identifies the date that the device was installed. The format of the string is 'YYYYMMDDHHmmZ'. Where: YYYY is the year. MM is the month (1-12). DD is the day (1-31). HH is the hour of the day (0-23). mm are the minutes (0-59). 'Z' designates Greenwich Mean Time; if 'Z' not specified, value is local time. Device POS must specify the conditions for setting this object. Additional information: Setting the <<hidden>> object will enable setting this object.")
# timestamp: write-only, fixed 15-octet 'YYYYMMDDHHMMSS' UTC clock setter.
timestamp = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 2, 13), OctetString().subtype(subtypeSpec=ValueSizeConstraint(15, 15)).setFixedLength(15)).setMaxAccess("writeonly")
if mibBuilder.loadTexts: timestamp.setStatus('optional')
if mibBuilder.loadTexts: timestamp.setDescription("Sets the printer's current time in (UTC). The format of the string is 'YYYYMMDDHHMMSS'. Where YYYY is the year. MM is the month (1-12) DD is the day of the month (1-31) HH is the hour of the day (0-23) MM is the minutes (0-59) SS is the seconds (0-59) Device POS must specify the conditions for setting this object.")
# service-id: read-write fixed 5-octet 'YYDDD' encoded date; writes succeed
# only after the hidden MANUFACTURING-CONTROL object has been set.
service_id = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 2, 19), OctetString().subtype(subtypeSpec=ValueSizeConstraint(5, 5)).setFixedLength(5)).setLabel("service-id").setMaxAccess("readwrite")
if mibBuilder.loadTexts: service_id.setStatus('optional')
if mibBuilder.loadTexts: service_id.setDescription("A read of this object will return the current SERVICE ID value in the printer. The format is 'YYDDD' where: YY = calendar year - 1990 DDD = (calendar month - 1) * 30 + (calendar day of the month or 30, if > 30) A write of this object will only succeed if the MANUFACTURING-CONTROL PML object has been set with the correct <<hidden>>. If the write operation is not allowed, this object will return an <genErr> status. Additional information: Setting the <<hidden>> object will enable setting this object.")
# show-address: enum toggle (eOff=1 / eAuto=3) for showing the IP address
# next to the READY message on the control panel.
show_address = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 2, 20, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 3))).clone(namedValues=NamedValues(("eOff", 1), ("eAuto", 3)))).setLabel("show-address").setMaxAccess("readwrite")
if mibBuilder.loadTexts: show_address.setStatus('optional')
if mibBuilder.loadTexts: show_address.setDescription('If this object is set to eAuto, and the device has an IP address, the IP address of the device will be shown with the READY message. If this object is set to eOff, the IP address will not be shown.')
# serial-number: read-only, 0-10 octets; reset to XXXXXXXXXX by an NVRAM init
# (per its description).
serial_number = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 3, 3), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 10))).setLabel("serial-number").setMaxAccess("readonly")
if mibBuilder.loadTexts: serial_number.setStatus('optional')
if mibBuilder.loadTexts: serial_number.setDescription('Identifies the serial number for the device. If the SERIAL-NUMBER object is set by the user, then setting the object does not need to be protected. If the SERIAL-NUMBER object is set at the factory, then the <<hidden>> object must be set correctly before the SERIAL-NUMBER object is writable. If this is a writable object, the POS should indicate the maximum supported string length. If possible, encode the serial number in a symbol set (like Roman-8) that matches the ASCII character set and limit the characters used to ASCII characters. Additional information: This value IS AFFECTED BY NVRAM resets, it is set to the default value of XXXXXXXXXX, when a NVRAM init is done.')
# --- Firmware identification and user-assigned strings ---
# fw-rom-datecode: read-only yyyymmdd date code of the base system firmware.
fw_rom_datecode = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 3, 5), OctetString()).setLabel("fw-rom-datecode").setMaxAccess("readonly")
if mibBuilder.loadTexts: fw_rom_datecode.setStatus('optional')
if mibBuilder.loadTexts: fw_rom_datecode.setDescription('Identifies the base system firmware date code. The date code will be encoded in the yyyymmdd format. There may be several versions of the base system firmware. The date code associated with the version of the base system firmware that is being used is reported. There may be other date code objects for other specific modules such as fonts, localization modules, etc.; these other datecode objects are device specific.')
# fw-rom-revision: read-only "major.minor" revision of the system firmware ROM.
fw_rom_revision = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 3, 6), OctetString()).setLabel("fw-rom-revision").setMaxAccess("readonly")
if mibBuilder.loadTexts: fw_rom_revision.setStatus('optional')
if mibBuilder.loadTexts: fw_rom_revision.setDescription('This identifies the system code firmware ROM revision code. The format for a revision is major_revision.minor_revision. There may be other ROM revision code objects for other specific ROMs such as font ROMs, localization ROMs, etc; these other ROM revision code objects are device specific.')
# device-name: read-write user string, constrained to 0-32 octets; oversize
# writes are truncated per the description.
device_name = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 3, 10), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setLabel("device-name").setMaxAccess("readwrite")
if mibBuilder.loadTexts: device_name.setStatus('optional')
if mibBuilder.loadTexts: device_name.setDescription('User defined device name. The POS should indicate the maximum supported string length. If the user entered string is too long, the device will store as much as possible and will return the <OKNearestLegal ValueSubstituted>.Additional information: The maximum supported string length is 32 characters. If the user entered string is too long, the device will store the first 32 characters and will return the <noError> status.')
# device-location: read-write user string (device truncates to 16 chars per
# the description; no size constraint is encoded on the OctetString here).
device_location = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 3, 11), OctetString()).setLabel("device-location").setMaxAccess("readwrite")
if mibBuilder.loadTexts: device_location.setStatus('optional')
if mibBuilder.loadTexts: device_location.setDescription('User defined device location. The POS should indicate the maximum supported string length. If the user entered string is too long, the device will store as much as possible and will return the <OKNearestLegal ValueSubstituted>.Additional information: The maximum supported string length is 16 characters. If the user entered string is too long, the device will store the first 16 characters and will return the <noError> status.')
# asset-number: read-write user string (device truncates to 8 chars per the
# description).
asset_number = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 3, 12), OctetString()).setLabel("asset-number").setMaxAccess("readwrite")
if mibBuilder.loadTexts: asset_number.setStatus('optional')
if mibBuilder.loadTexts: asset_number.setDescription('User defined asset number. The POS should indicate the maximum supported string length. If the user entered string is too long, the device will store as much as possible and will return the <OKNearestLegal ValueSubstituted>.Additional information: The maximum supported string length is 8 characters. If the user entered string is too long, the device will store the first 8 characters and will return the <noError> status.')
# --- PDL default-settings scalars ---
# default-copies: read-write; device snaps out-of-range writes (1..32000 per
# the description).
default_copies = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 3, 3, 1, 4), Integer32()).setLabel("default-copies").setMaxAccess("readwrite")
if mibBuilder.loadTexts: default_copies.setStatus('optional')
if mibBuilder.loadTexts: default_copies.setDescription('Returns or changes default copies. Default copies is the default values used by the PDL to control the number of copies of each page in the print job that are printed. The print job can override this value. The list of supported values should be documented in the device POS. Additional information: The supported values are 1 through 32000. Setting to an unsupported value causes the printer to substitute in a snap value and to return <noError> status. The snaps are as follows: <1 snaps to 1 >999 snaps to 32000')
# default-lines-per-page: read-write; snapped to 5..128 per the description.
default_lines_per_page = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 3, 3, 1, 11), Integer32()).setLabel("default-lines-per-page").setMaxAccess("readwrite")
if mibBuilder.loadTexts: default_lines_per_page.setStatus('optional')
if mibBuilder.loadTexts: default_lines_per_page.setDescription('Returns or changes the default number of lines per page. The POS indicates the supported values. An unsupported value causes the printer to use the closest supported value, causing the printer to return the <noError>. Additional information: The supported values in $product_str are 5 to 128. Setting to an unsupported value causes the the printer to substitute in a snap value and to return <noError> status. The snap values are as follow: <5 snaps to 5 >128 snaps to 128')
# default-vmi: read-write vertical motion index, centipoints per line.
default_vmi = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 3, 3, 1, 12), Integer32()).setLabel("default-vmi").setMaxAccess("readwrite")
if mibBuilder.loadTexts: default_vmi.setStatus('optional')
if mibBuilder.loadTexts: default_vmi.setDescription('Returns or changes the default vertical motion index. The unit of measure for VMI is centipoints per line. The POS indicates the supported values. An unsupported value causes the printer to use the closest supported value, causing the printer to return <noError>.')
# default-media-size: read-write enum restricted to the media sizes this
# device supports; unsupported writes are rejected per the description.
default_media_size = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 3, 3, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 10, 11, 15, 17, 18, 19, 24, 25, 26, 27, 33, 34, 35, 39, 44, 45, 46, 71, 72, 80, 81, 89, 90, 91, 100, 101, 120, 258, 282, 32767))).clone(namedValues=NamedValues(("eUSExecutive", 1), ("eUSLetter", 2), ("eUSLegal", 3), ("eFoolscap", 10), ("eLedger", 11), ("eStatement", 15), ("eROC16K", 17), ("eJISExecutive", 18), ("eROC8K", 19), ("eISOandJISA6", 24), ("eISOandJISA5", 25), ("eISOandJISA4", 26), ("eISOandJISA3", 27), ("ePRC8K270X390", 33), ("ePRC16K195X270", 34), ("ePRC8K260X368", 35), ("eISORA3", 39), ("eJISB6", 44), ("eJISB5", 45), ("eJISB4", 46), ("eJapanesePostcardSingle", 71), ("eJapanesePostcardDouble", 72), ("eMonarch", 80), ("eCommercial10", 81), ("ePRC16K184X260", 89), ("eInternationalDL", 90), ("eInternationalC5", 91), ("eInternationalB5", 100), ("eCustom", 101), ("eTabloidExtra", 120), ("eUSLetterR", 258), ("eISOandJISA4R", 282), ("eUnknownMediaSize", 32767)))).setLabel("default-media-size").setMaxAccess("readwrite")
if mibBuilder.loadTexts: default_media_size.setStatus('optional')
if mibBuilder.loadTexts: default_media_size.setDescription("This indicates the default media size. A write of an unsupported value causes an <ErrorInvalidOrUnsupported Value>. Complete list of supported media sizes along with their dimensions are listed in the ''Media Size Table'' near the end of this document. (for a full list of media size enums see the end of this file) ")
# cold-reset-media-size: the media size restored as DEFAULT-MEDIA-SIZE on a
# cold reset; limited to Letter (2) or A4 (26).
cold_reset_media_size = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 3, 3, 1, 19), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2, 26))).clone(namedValues=NamedValues(("eUSLetter", 2), ("eISOandJISA4", 26)))).setLabel("cold-reset-media-size").setMaxAccess("readwrite")
if mibBuilder.loadTexts: cold_reset_media_size.setStatus('optional')
if mibBuilder.loadTexts: cold_reset_media_size.setDescription('Returns or sets the media size that is used as the DEFAULT-MEDIA-SIZE when a cold reset occurs.')
# reprint: jam-recovery setting (eOff/eOn/eAuto).
reprint = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 3, 3, 1, 36), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("eOff", 1), ("eOn", 2), ("eAuto", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: reprint.setStatus('optional')
if mibBuilder.loadTexts: reprint.setDescription('Returns or changes the reprint (jam recovery) setting. If eOn, then the device will reprint pages jammed pages. If eOff, the device will not attempt to reprint jammed pages. If eAuto, a device dependent algorithm (which should be documented in the POS) will be used to determine if the page gets reprinted. This object controls all PDLs, unless a specific PDL supports its own reprint control mechanism. To date, only PostScript has a PDL reprint control mechanism.')
# --- PCL and PostScript personality scalars ---
# pcl-total-page-count: read-only NVRAM-backed page counter (updated at
# least every 10 pages per the description; wraps at 2^24-1).
pcl_total_page_count = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 3, 3, 3, 5), Integer32()).setLabel("pcl-total-page-count").setMaxAccess("readonly")
if mibBuilder.loadTexts: pcl_total_page_count.setStatus('optional')
if mibBuilder.loadTexts: pcl_total_page_count.setDescription('Total number of PCL pages printed by the device. Additional information: In $product_str the PCL page count is kept in NVRAM, and the NVRAM value is updated at least every 10 pages. NOTE: The value returned by this object will be incremented every page but if power is lost between NVRAM updates, up to 9 pages of the page count may be lost. The page count counter will be reset to zero after 16,777,215 (2^24-1) pages. The page count is incremented when a sheet of media is pulled from an input tray. A duplex printed sheet will cause this counter to be incremented by two.')
# pcl-default-font-height: read-write; centipoints (point size * 100),
# snapped to 400..99975 in steps of 25 per the description.
pcl_default_font_height = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 3, 3, 3, 13), Integer32()).setLabel("pcl-default-font-height").setMaxAccess("readwrite")
if mibBuilder.loadTexts: pcl_default_font_height.setStatus('optional')
if mibBuilder.loadTexts: pcl_default_font_height.setDescription("Returns or changes the default PCL height. Height is an approximate measure of the body of the type in centipoints. A centipoint is 1/7200 inch. Height applies only to proportional fonts. Point size, in points, can be converted to font height, in centipoints, by multiplying the point size by 100. The POS indicates the supported values. An unsupported value causes the printer to use the closest supported value, causing the printer to return <noError>. ''Closest'' means the smallest absolute difference. Additional information: Supported values range from 400 to 99975, in increments of 25 units. Setting to an unsupported value causes the printer to substitute in a snap value and to return <noError> status. The snap values are as follow: <=400 snaps to 400 >=99975 snaps to 99975 Unsupported values in the range 400 to 99975 snap DOWN to the previous supported value (i.e 25293 snaps to 25275 or 75038 snaps to 75025).")
# pcl-default-font-source: NVRAM default font source enum (internal,
# permanent soft, or one of the ROM SIMM slots).
pcl_default_font_source = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 3, 3, 3, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 11, 12, 13, 14))).clone(namedValues=NamedValues(("eInternal", 1), ("ePermanentSoft", 2), ("eRomSimm2", 11), ("eRomSimm3", 12), ("eRomSimm4", 13), ("eRomSimm5", 14)))).setLabel("pcl-default-font-source").setMaxAccess("readwrite")
if mibBuilder.loadTexts: pcl_default_font_source.setStatus('optional')
if mibBuilder.loadTexts: pcl_default_font_source.setDescription('Returns or changes the value of the default font source variable in NVRAM.')
# pcl-default-font-number: NVRAM default font number, constrained 0..255.
pcl_default_font_number = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 3, 3, 3, 15), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setLabel("pcl-default-font-number").setMaxAccess("readwrite")
if mibBuilder.loadTexts: pcl_default_font_number.setStatus('optional')
if mibBuilder.loadTexts: pcl_default_font_number.setDescription('Returns or changes the value of the default font number variable in NVRAM. Writing an unsupported value causes the printer to generate an <badValue>. Additional information: Valid numbers or 0 - 255, but only font numbers that are included in the PCL FONT LIST are selectable.')
# pcl-default-font-width: read-write fixed-space font width in centipoints
# (7200 / pitch); snapped to 44..9999 per the description.
pcl_default_font_width = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 3, 3, 3, 16), Integer32()).setLabel("pcl-default-font-width").setMaxAccess("readwrite")
if mibBuilder.loadTexts: pcl_default_font_width.setStatus('optional')
if mibBuilder.loadTexts: pcl_default_font_width.setDescription("Returns or changes the default PCL font width. Width is expressed as the width of a character in centipoints. A centipoint is 1/7200 inch. Width applies only to fixed space fonts. Pitch, in character per inch, can be converted to font width, in centipoints, by dividing 7200 by the pitch. The POS indicates the supported values. An unsupported value causes the printer to use the closest supported value, causing the printer to return <noError>. ''Closest'' means the smallest absolute difference. Additional information: Setting to an unsupported value causes the printer to substitue in a snap value (listed below) and return <noError> status. The snap values for pitch are as follow: <=44 snaps to 44 >=9999 snaps to 9999")
# postscript-total-page-count: read-only; only present when the PostScript
# option is installed (per the description).
postscript_total_page_count = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 3, 3, 4, 5), Integer32()).setLabel("postscript-total-page-count").setMaxAccess("readonly")
if mibBuilder.loadTexts: postscript_total_page_count.setStatus('optional')
if mibBuilder.loadTexts: postscript_total_page_count.setDescription('Total number of PostScript pages printed by the device. Additional information: This object is only supported if the PostScript option is installed. In $product_str the count is kept in NVRAM, and the NVRAM value is updated at least every 10 sheets. NOTE: The value returned by this object will be incremented every sheet but if power is lost between NVRAM updates up to 9 sheets of the count may be lost. The counter will be reset to zero after 16,777,215 (2^24-1) pages. ')
# postscript-print-errors: eOff/eOn toggle for printing a PostScript error
# page.
postscript_print_errors = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 3, 3, 4, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("eOff", 1), ("eOn", 2)))).setLabel("postscript-print-errors").setMaxAccess("readwrite")
if mibBuilder.loadTexts: postscript_print_errors.setStatus('optional')
if mibBuilder.loadTexts: postscript_print_errors.setDescription('Returns or changes the value of the print PostScript errors setting. If eOn, PostScript prints an error page showing the error encountered and the stack at the time of the error. Additional information: This object is only supported if the PostScript option is installed.')
# --- Device capability scalars (machine-generated pysnmp MIB code) ---
# Each scalar is declared as a pysnmp MibScalar at a fixed OID under the HP
# enterprise arc (1.3.6.1.4.1.11.2.3.9.4.2...), followed by two statements
# guarded by `mibBuilder.loadTexts` that attach the SMI STATUS and
# DESCRIPTION texts only when the builder was asked to load MIB texts.
# The DESCRIPTION strings are part of the generated MIB data and must not
# be edited by hand.

# Read-only collection: which collated-originals ("mopy") features the
# printer supports.
collated_originals_support = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 2, 42), OctetString()).setLabel("collated-originals-support").setMaxAccess("readonly")
if mibBuilder.loadTexts: collated_originals_support.setStatus('optional')
if mibBuilder.loadTexts: collated_originals_support.setDescription("Indicates that the printer can create multiple, collated 'originals' of a job and shows the level of support for this capability. cCollatedOriginals - The printer can create multiple collated originals, or 'mopies'. This is the base functionality required by the other collection items. cProofAndHold - The printer saves a copy of the job while printing the first mopy. Later, this held job can be released and the rest of the mopies will print. cSecurityPrint - The printer spools the job and holds the job until the user releases the job by entering a <<hidden>> at the printer's control panel. cAutoHighlight - The printer prints the job a number of times with a mail distribution list prepended to each job. A different mailing address is highlighted on each mopy. cCollatedAtSpeed - The printer can create multiple collated copies where all copies after the original are printed at engine speed. Additional information: Indicates that the printer can create multiple, collated originals of a job and shows the level of support for this capability. cCollatedOriginals - The printer can create multiple collated originals, or mopies. This is the base functionality required by the other collection items. Without disk, this object will not exist. This will be set at boot-up initialization. If the disk fails, in any way, then the disk error functionality will handle the situation. A color printer (LaserJet 4550) that only has a RAMDISK installed is treated like there is no disk and therefore this object will not exist. The value of this object will be: !cCollatedOriginals - Only FLASH installed cCollatedOriginals - IDE is installed combined with/without FLASH cCollatedOriginals - RAMDISK is On combined with/without FLASH")
# Read-only: bytes of memory available for job processing (driver work space).
host_application_available_memory = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 2, 59), Integer32()).setLabel("host-application-available-memory").setMaxAccess("readonly")
if mibBuilder.loadTexts: host_application_available_memory.setStatus('optional')
if mibBuilder.loadTexts: host_application_available_memory.setDescription('Returns the amount of memory, in bytes, that the device has available for job-related processing or resources. Also known as driver work space (DWS) memory.')
# Read-only version string (0-10 octets) of the Socket Ping Job Events capability.
socket_ping_job_events_version = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 7, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 10))).setLabel("socket-ping-job-events-version").setMaxAccess("readonly")
if mibBuilder.loadTexts: socket_ping_job_events_version.setStatus('optional')
if mibBuilder.loadTexts: socket_ping_job_events_version.setDescription("This object reports the current version of the Socket Ping Job Events capability. The version number is returned as a string in the format MAJOR.MINOR.SUB version. (eg. 1.0.1) If socket ping is not supported by this printer then the object is either not implemented or it returns a '0'.")
# --- Job tracking scalars ---
# Read-only fixed-length (16-octet) binary record describing a changed
# job-info object; the layout is documented in the DESCRIPTION below.
job_info_change_id = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 1, 3), OctetString().subtype(subtypeSpec=ValueSizeConstraint(16, 16)).setFixedLength(16)).setLabel("job-info-change-id").setMaxAccess("readonly")
if mibBuilder.loadTexts: job_info_change_id.setStatus('optional')
if mibBuilder.loadTexts: job_info_change_id.setDescription('Returns the current value of an object in the job-info sub-tree whose value has changed. Define the object identifier for the object whose value has changed as job-info.required-field.optional-field.job-id. job-info represents the OID prefix of all objects in the job-info sub-tree. Required-field represents the OID field value that follows the job-info prefix. Since some objects in the job-info sub-tree have two OID fields between the job-info prefix and the job-id, the optional-field represents the OID field between the required-field and the job-id field, if present. Using this definition, the format for the JOB-INFO-CHANGE-ID binary value can be described as follows: Bytes 0-3 : required-field Bytes 4-7 : optional-field, or all zeros if there is no optional field Bytes 8-11: Job ID. Bytes 12-n : The value of the object job-info. required-field.optional-field.job-id. All multi-byte values stored in Motorola (big-endian) format, where the most significant byte occurs first. Additional information: $product_str will use this object to report changes to the job-info-pages-printed and job-info-state objects. The required-field (bytes 0 through 3) will designate whether the change to be reported involves the pages printed (13) or job state (15). The optional-field (bytes 4 through 7) will always be zeroes. The value-field (bytes 12 through 15) will contain the new value for pages printed or job state cast to a 32-bit integer. Note: It is possible that traps generated by this object have the same value. This is caused by the architecture of $product_str. In the $product_str System, it is highly probable that the object value will change so rapidly that when the trap is processed, PML will read the same value twice. This is timing related and will generally be seen at the end of a job.')
# Read-write: minutes before the printer auto-deletes a held job (the only
# readwrite scalar in this section; unsupported values snap per DESCRIPTION).
hold_job_timeout = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 1, 10), Integer32()).setLabel("hold-job-timeout").setMaxAccess("readwrite")
if mibBuilder.loadTexts: hold_job_timeout.setStatus('optional')
if mibBuilder.loadTexts: hold_job_timeout.setDescription('The time, in minutes, that the printer will wait before automatically deleting a held job. This allows the printer to automatically clean up jobs that have been forgotten (held but never released). Additional information: This only applies to temporary held jobs, i.e., HOLD=ON or PROOF. This is a global timer that only affects the jobs that are sent after it is set. A value of 0 means never delete the jobs. Setting it to an unsupported value causes the printer to substitute a value (listed below) and to return <noError> status. LaserJet 4600 supports values of 0, 60, 240, 1440, and 10080 The values are as follows: <=0 snap to 0 (disabled) >=1 and <=150 snap to 60 (60 minutes) >=151 and <=840 snap to 240 (4 hours) >=841 and <=2880 snap to 1440 (1 day) >=2881 snap to 10080 (1 week)')
# Read-only: printer-assigned ID of the job currently being parsed;
# constrained to -1..2147483647 (-1 means no job is being parsed).
current_job_parsing_id = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-1, 2147483647))).setLabel("current-job-parsing-id").setMaxAccess("readonly")
if mibBuilder.loadTexts: current_job_parsing_id.setStatus('optional')
if mibBuilder.loadTexts: current_job_parsing_id.setDescription("Contains the printer assigned job identification for the job currently being processed by the processing PDL sub-system. The job ID is a monotonically increasing number. The job ID may be reset to zero at power-up and may roll over to zero after reaching some maximum value. Additional information: A value of -1 is returned when the printer is not parsing a job. When data for a new job is detected, this object is updated (the job may or may not turn out to be a Null Job); however, the trap does not occur until the printer determines that it is not a Null Job. (A job is considered to be a 'null job' if it has no name or job attribute, causes no pages to be printed, and consumes MIN_JOB_SIZE (9) or fewer bytes.) A trapped value will differ from the previous value by one or more. See the Job Boundary ERS for details on what constitutes a job boundary. Some job-info- objects are created when the first data bytes are received. If the printer determines that the job is a Null Job, the job-info- objects related to the Null Job are deleted. $product_str retains the job-info- objects for the MAX_JOBS_IN_LIST (32) most recent jobs that are not Null Jobs. The first job received after power-up will have job ID 1, and the job ID will increment to 2,147,483,647 before rolling to zero. To distinguish whether a power cycle or a rollover causes a reduction in the job ID value, the object prtGeneralConfigChanges can be watched and if it increments at the same time as the drop is observed in the value of CURRENT-JOB-PARSING-ID, then a power cycle is the most likely cause.")
# --- Per-job info scalars (dynamic objects; the last OID field is the job ID) ---
# All scalars in this group are read-only and optional-status.

# Job name split across two 40-character objects (name1 + name2).
job_info_name1 = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 5, 1), OctetString()).setLabel("job-info-name1").setMaxAccess("readonly")
if mibBuilder.loadTexts: job_info_name1.setStatus('optional')
if mibBuilder.loadTexts: job_info_name1.setDescription("Contains the first part of this print job's name. The last OID field for this dynamic object contains the job ID. Additional information: The job name is the string specified by the NAME= parameter of the @PJL JOB command which allows a maximum of 80 characters. The first 40 characters are in this object and the second 40 are in job-info-name2. The symbol set is Roman-8. When jobs are nested, the value assigned to this object is the name provided by the most recent @PJL JOB NAME= command. If no name is provided, a null string is returned.")
job_info_name2 = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 5, 2), OctetString()).setLabel("job-info-name2").setMaxAccess("readonly")
if mibBuilder.loadTexts: job_info_name2.setStatus('optional')
if mibBuilder.loadTexts: job_info_name2.setDescription("Contains the second part of this print job's name. The last OID field for this dynamic object contains the job ID. Additional information: See job-info-name1.")
# Collection: which printer sub-systems (source/processing/destination)
# currently hold part of this job.
job_info_stage = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 5, 10), OctetString()).setLabel("job-info-stage").setMaxAccess("readonly")
if mibBuilder.loadTexts: job_info_stage.setStatus('optional')
if mibBuilder.loadTexts: job_info_stage.setDescription("Indicates what printer sub-systems are currently processing this print job. The last OID field for this dynamic object contains the job ID. cSourceSubsystem - some of the job is in the printer I/O subsystem. cProcessingSubsystem - some of the job is in the printer imaging processing subsystem. cDestinationSubsystem - some of the job is being printed. Additional information: For $product_str, cSourceSubsystem and cProcessingSubsystem will always be set and cleared together. They are set when the beginning of the job is detected, and they are cleared when the end of the job is parsed. A non-printing job will never set cDestinationSubsystem. When a page is ready to be printed (the intermediate has been built and the page is `closed' by the personality), cDestinationSubsystem will be set even if a page of a previous job is currently being printed. cDestinationSubsystem remains set until the last page of the job has finished printing. If a page requires extensive processing and allows all the previous pages of a job to complete printing, cDestinationSubsystem will remain set until the last page of the job has finished printing.")
# Which I/O port the job arrived on (maps to the IO/PORTS sub-tree numbering).
job_info_io_source = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 5, 11), Integer32()).setLabel("job-info-io-source").setMaxAccess("readonly")
if mibBuilder.loadTexts: job_info_io_source.setStatus('optional')
if mibBuilder.loadTexts: job_info_io_source.setDescription('Indicates which I/O source, the print job was received over. The value maps to port numbering scheme supported in the DEVICE/SOURCE-SUBSYSTEM/IO/PORTS sub-tree.')
# Pages processed by the PDL processing subsystem (one per closed page).
job_info_pages_processed = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 5, 12), Integer32()).setLabel("job-info-pages-processed").setMaxAccess("readonly")
if mibBuilder.loadTexts: job_info_pages_processed.setStatus('optional')
if mibBuilder.loadTexts: job_info_pages_processed.setDescription("Indicates the number of pages processed by the processing subsystem. If neither the cSourceSubsystem or the cProcessingSubsystem items are in the associated JOB-INFO-STAGE object, then this object contains the total number of pages processed for this job. The last OID field for this dynamic object contains the job ID. Additional information: This object is incremented by one when a page is processed (`closed' by the personality), regardless of the number of pages that are printed as a result of the single page processed. In other words, it does not matter how many identical copies of a page are printed or no pages may be printed if operating in silent run mode, this count is incremented exactly once. A page is considered processed when all of the input data has been processed for a page (that is, when the intermediate has been produced, and the personality has `closed' the page. At this time, the image of the page is not necessarily completely formed.)")
# Pages physically printed (each copy counted individually).
job_info_pages_printed = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 5, 13), Integer32()).setLabel("job-info-pages-printed").setMaxAccess("readonly")
if mibBuilder.loadTexts: job_info_pages_printed.setStatus('optional')
if mibBuilder.loadTexts: job_info_pages_printed.setDescription('Indicates the number of pages printed by the destination subsystem. If none of the cSourceSubsystem, cProcessingSubsystem or cDestinationSubsystem items are in the associated JOB-INFO-STAGE object, then this object contains the total number of pages printed for this job. This value may increase by two each time for duplex jobs. The last OID field for this dynamic object contains the job ID. Additional information: If multiple copies of a page are printed, each copy is counted individually.')
# Approximate job size in bytes as processed by the processing subsystem.
job_info_size = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 5, 14), Integer32()).setLabel("job-info-size").setMaxAccess("readonly")
if mibBuilder.loadTexts: job_info_size.setStatus('optional')
if mibBuilder.loadTexts: job_info_size.setDescription('Indicates the number of bytes of data processed by the processing subsystem. If neither of the cSourceSubsystem or cProcessingSubsystem items are in the associated JOB-INFO-STAGE object, then this object contains the size of this job, in bytes. The last OID field for this dynamic object contains the job ID. Additional information: This count may be incremented by values other than one as blocks of data are processed; blocks of data (which may be as large as 2K bytes) will be processed in a varying amount of time. During the processing of a job and even when a job completes, an exact count of the number of I/O bytes processed by the job is not necessarily to be expected.')
# Enumerated job state (aborted/waiting/printed/terminating/cancelled/...).
job_info_state = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 5, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(3, 4, 5, 7, 10, 11, 12, 13))).clone(namedValues=NamedValues(("eAborted", 3), ("eWaitingForResources", 4), ("ePrinted", 5), ("eTerminating", 7), ("eCancelled", 10), ("eProcessing", 11), ("eScanning", 12), ("eSending", 13)))).setLabel("job-info-state").setMaxAccess("readonly")
if mibBuilder.loadTexts: job_info_state.setStatus('optional')
if mibBuilder.loadTexts: job_info_state.setDescription('Indicates the state of the job. The last OID field for this dynamic object contains the job ID. eAborted - the print job was aborted. eWaitingForResources - the print job requires resources that are not currently available. Example resources that can cause the job to wait include the print engine or PDL processor being unavailable. The print engine could be unavailable due to paper out, paper jam, staple out, stapler jam, marking agent low, output bin full, etc. The PDL processor could be unavailable due to an off-line condition. Each printer specific object specification should state which conditions cause a job to be waiting for resources and also state which objects can be retrieved by an application to determine the exact cause of a resource being unavailable. ePrinted - the job has printed. The related JOB-INFO- OUTCOME object indicates if any problems were encountered while the job was processed. eRetained - the job can be reprinted. eTerminating - the job was aborted or cancelled and is currently is terminating. eInterrupted - the job has been interrupted. The job can be continued. ePaused - the job has been paused. The job can be continuted. eCancelled - the job has been cancelled. eProcessing - the job is currently being printed normally. ')
# Outcome enum with a single defined value (eOk); kept for compatibility per DESCRIPTION.
job_info_outcome = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 5, 19), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(3))).clone(namedValues=NamedValues(("eOk", 3)))).setLabel("job-info-outcome").setMaxAccess("readonly")
if mibBuilder.loadTexts: job_info_outcome.setStatus('optional')
if mibBuilder.loadTexts: job_info_outcome.setDescription('Indicates if any warning or error conditions were encountered while processing the assoicated job. The last OID field for this dynamic object contains the job ID. Additional information: In $product_str, warnings and errors are not recorded in this object. Although no meaningful information can be obtained from this object, it is kept around for the compatibility needs of existing software.')
# Bit collections of logical and physical output bins used by the job.
job_info_outbins_used = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 5, 20), OctetString()).setLabel("job-info-outbins-used").setMaxAccess("readonly")
if mibBuilder.loadTexts: job_info_outbins_used.setStatus('optional')
if mibBuilder.loadTexts: job_info_outbins_used.setDescription("Indicates which output bins this job has delivered printed pages to. The last OID field for this dynamic object contains the job ID. Additional information: The bins designated by this collection include the printer's own output bins as well as the `logical output bins' associated with any attached external paper handling devices. (The mapping of the physical output bins of an external paper handling device to its `logical output bins' depends on the usage mode of the device. For instance, in `mailbox mode', there is one-to-one correspondence of `logical output bins' to physical bins, whereas in `stacker mode', one `logical output bin' may map to several physical bins.) Although LaserJet 4100 and LaserJet 4550 both come with two output bins -- a face-down bin on the top of the device and a face-up bin at the back of the device -- firmware will not provide a means of selecting between the two, nor will it report which of the two gets used. For this reason, bit 0 (cOutbin1) of this collection is being used to designate both of these bins together.")
job_info_physical_outbins_used = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 5, 22), OctetString()).setLabel("job-info-physical-outbins-used").setMaxAccess("readonly")
if mibBuilder.loadTexts: job_info_physical_outbins_used.setStatus('optional')
if mibBuilder.loadTexts: job_info_physical_outbins_used.setDescription("Indicates which physical output bins this job has delivered printed pages to. The last OID field for this dynamic object contains the job ID. Additional information: The output bins designated by the bits of this collection include the standard output bin(s) plus the physical bins of attached external paper handling devices. The configuration (if any) of external paper handling devices will determine the mapping of individual bits in this collection to the physical bins of the devices. For LaserJet 4100 and LaserJet 4550, the cOutbin1 is used for Face Down bin, cOutbin2 is used for Face Up bin, and 3-15 are used for the 13 optional output bins (12 plus an extension to the Face Up bin) for backwards compatibility with LaserJet 8000/LaserJet 8100 and forwards compatibility with LaserJet 8150. However, since they cannot distinguish output going to the Face Up or Face Down bin in the firmware because the diverter can only be moved manually, bit 0 (cOutbin1) is being used to designate both of these bits together. If an external paper handling output device is connected, then bit 2 (cOutbin3) will represent the device's first output bin (and so on).")
# --- PJL job attribute slots ---
# Sixteen identical read-only scalars (job-info-attr-1 .. job-info-attr-16),
# one per PJL "SET JOBATTR=" value received for a job. Each is an
# OctetString of 1-80 octets at consecutive OIDs ...6.5.23.1 through
# ...6.5.23.16; attr-1 carries the full semantics in its DESCRIPTION and
# the rest refer back to it.
job_info_attr_1 = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 5, 23, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 80))).setLabel("job-info-attr-1").setMaxAccess("readonly")
if mibBuilder.loadTexts: job_info_attr_1.setStatus('optional')
if mibBuilder.loadTexts: job_info_attr_1.setDescription('Returns the value that was set in PJL via the SET JOBATTR= command. Attribute objects are saved sequentially, starting with 1, after the start of a job. If more attributes are set than there are objects to store them, the excess JOBATTR values are ignored. If the corresponding SET JOBATTR= command has not been received when a get is done for this object, a status of <noSuchName> will be returned.')
job_info_attr_2 = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 5, 23, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 80))).setLabel("job-info-attr-2").setMaxAccess("readonly")
if mibBuilder.loadTexts: job_info_attr_2.setStatus('optional')
if mibBuilder.loadTexts: job_info_attr_2.setDescription('Returns the value that was set in PJL via the SET JOBATTR= command. See JOB-INFO-ATTR-1 for details.')
job_info_attr_3 = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 5, 23, 3), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 80))).setLabel("job-info-attr-3").setMaxAccess("readonly")
if mibBuilder.loadTexts: job_info_attr_3.setStatus('optional')
if mibBuilder.loadTexts: job_info_attr_3.setDescription('Returns the value that was set in PJL via the SET JOBATTR= command. See JOB-INFO-ATTR-1 for details.')
job_info_attr_4 = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 5, 23, 4), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 80))).setLabel("job-info-attr-4").setMaxAccess("readonly")
if mibBuilder.loadTexts: job_info_attr_4.setStatus('optional')
if mibBuilder.loadTexts: job_info_attr_4.setDescription('Returns the value that was set in PJL via the SET JOBATTR= command. See JOB-INFO-ATTR-1 for details.')
job_info_attr_5 = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 5, 23, 5), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 80))).setLabel("job-info-attr-5").setMaxAccess("readonly")
if mibBuilder.loadTexts: job_info_attr_5.setStatus('optional')
if mibBuilder.loadTexts: job_info_attr_5.setDescription('Returns the value that was set in PJL via the SET JOBATTR= command. See JOB-INFO-ATTR-1 for details.')
job_info_attr_6 = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 5, 23, 6), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 80))).setLabel("job-info-attr-6").setMaxAccess("readonly")
if mibBuilder.loadTexts: job_info_attr_6.setStatus('optional')
if mibBuilder.loadTexts: job_info_attr_6.setDescription('Returns the value that was set in PJL via the SET JOBATTR= command. See JOB-INFO-ATTR-1 for details.')
job_info_attr_7 = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 5, 23, 7), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 80))).setLabel("job-info-attr-7").setMaxAccess("readonly")
if mibBuilder.loadTexts: job_info_attr_7.setStatus('optional')
if mibBuilder.loadTexts: job_info_attr_7.setDescription('Returns the value that was set in PJL via the SET JOBATTR= command. See JOB-INFO-ATTR-1 for details.')
job_info_attr_8 = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 5, 23, 8), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 80))).setLabel("job-info-attr-8").setMaxAccess("readonly")
if mibBuilder.loadTexts: job_info_attr_8.setStatus('optional')
if mibBuilder.loadTexts: job_info_attr_8.setDescription('Returns the value that was set in PJL via the SET JOBATTR= command. See JOB-INFO-ATTR-1 for details.')
job_info_attr_9 = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 5, 23, 9), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 80))).setLabel("job-info-attr-9").setMaxAccess("readonly")
if mibBuilder.loadTexts: job_info_attr_9.setStatus('optional')
if mibBuilder.loadTexts: job_info_attr_9.setDescription('Returns the value that was set in PJL via the SET JOBATTR= command. See JOB-INFO-ATTR-1 for details.')
job_info_attr_10 = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 5, 23, 10), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 80))).setLabel("job-info-attr-10").setMaxAccess("readonly")
if mibBuilder.loadTexts: job_info_attr_10.setStatus('optional')
if mibBuilder.loadTexts: job_info_attr_10.setDescription('Returns the value that was set in PJL via the SET JOBATTR= command. See JOB-INFO-ATTR-1 for details.')
job_info_attr_11 = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 5, 23, 11), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 80))).setLabel("job-info-attr-11").setMaxAccess("readonly")
if mibBuilder.loadTexts: job_info_attr_11.setStatus('optional')
if mibBuilder.loadTexts: job_info_attr_11.setDescription('Returns the value that was set in PJL via the SET JOBATTR= command. See JOB-INFO-ATTR-1 for details.')
job_info_attr_12 = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 5, 23, 12), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 80))).setLabel("job-info-attr-12").setMaxAccess("readonly")
if mibBuilder.loadTexts: job_info_attr_12.setStatus('optional')
if mibBuilder.loadTexts: job_info_attr_12.setDescription('Returns the value that was set in PJL via the SET JOBATTR= command. See JOB-INFO-ATTR-1 for details.')
job_info_attr_13 = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 5, 23, 13), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 80))).setLabel("job-info-attr-13").setMaxAccess("readonly")
if mibBuilder.loadTexts: job_info_attr_13.setStatus('optional')
if mibBuilder.loadTexts: job_info_attr_13.setDescription('Returns the value that was set in PJL via the SET JOBATTR= command. See JOB-INFO-ATTR-1 for details.')
job_info_attr_14 = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 5, 23, 14), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 80))).setLabel("job-info-attr-14").setMaxAccess("readonly")
if mibBuilder.loadTexts: job_info_attr_14.setStatus('optional')
if mibBuilder.loadTexts: job_info_attr_14.setDescription('Returns the value that was set in PJL via the SET JOBATTR= command. See JOB-INFO-ATTR-1 for details.')
job_info_attr_15 = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 5, 23, 15), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 80))).setLabel("job-info-attr-15").setMaxAccess("readonly")
if mibBuilder.loadTexts: job_info_attr_15.setStatus('optional')
if mibBuilder.loadTexts: job_info_attr_15.setDescription('Returns the value that was set in PJL via the SET JOBATTR= command. See JOB-INFO-ATTR-1 for details.')
job_info_attr_16 = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 5, 23, 16), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 80))).setLabel("job-info-attr-16").setMaxAccess("readonly")
if mibBuilder.loadTexts: job_info_attr_16.setStatus('optional')
if mibBuilder.loadTexts: job_info_attr_16.setDescription('Returns the value that was set in PJL via the SET JOBATTR= command. See JOB-INFO-ATTR-1 for details.')
# --- Collated multi-copy ("mopy") progress scalars ---
# Read-only counters describing a collated multi-copy job: copies requested,
# page position within the current copy, pages per copy, and copies done.

# Number of collated copies requested (e.g. via PJL QTY or PostScript page device).
job_info_requested_originals = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 5, 24), Integer32()).setLabel("job-info-requested-originals").setMaxAccess("readonly")
if mibBuilder.loadTexts: job_info_requested_originals.setStatus('optional')
if mibBuilder.loadTexts: job_info_requested_originals.setDescription('The number of requested collated copies. The value was supplied with the job via PJL QTY variable, the PostScript Collate and NumCopies entries in the page device dictionary, or via some other mechansim.')
# Page number being printed within the current copy.
job_info_page_count_current_original = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 5, 25), Integer32()).setLabel("job-info-page-count-current-original").setMaxAccess("readonly")
if mibBuilder.loadTexts: job_info_page_count_current_original.setStatus('optional')
if mibBuilder.loadTexts: job_info_page_count_current_original.setDescription('The page number being printed within the current copy of a collated multi-copy job. This value changes when the job-info-pages-printed changes. The job-info-pages-printed indicates the total number of pages printed in the job, while this object indicates the number of pages printed for this copy of a collated multi-copy job. Additional information: The number of pages in the current copy of a collated multi-copy job which have been completely printed and safely delivered to the output bin.')
# Pages in one copy of the job.
job_info_pages_in_original = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 5, 26), Integer32()).setLabel("job-info-pages-in-original").setMaxAccess("readonly")
if mibBuilder.loadTexts: job_info_pages_in_original.setStatus('optional')
if mibBuilder.loadTexts: job_info_pages_in_original.setDescription('Number of pages in a single copy of a collated multi-copy job.')
# Copies fully printed and delivered so far.
job_info_printed_originals = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 5, 27), Integer32()).setLabel("job-info-printed-originals").setMaxAccess("readonly")
if mibBuilder.loadTexts: job_info_printed_originals.setStatus('optional')
if mibBuilder.loadTexts: job_info_printed_originals.setDescription('Number of collated copies completely printed and delivered to the output bin at time of query.')
# --- Job accounting scalars (OID branch ...6.5.28.x) ---
# Read-only accounting data for a completed job: media size/type (decided by
# the job's first page), finishing option, simplex/duplex and mono/color page
# counts, and per-colorant pixel usage reported in kilodots.

# Media size enum; per DESCRIPTION, valid only after the job finishes printing.
job_info_accounting_media_size = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 5, 28, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 11, 17, 18, 19, 25, 26, 27, 45, 46, 65, 72, 80, 81, 90, 91, 100, 101, 258, 282, 32767))).clone(namedValues=NamedValues(("eUSExecutive", 1), ("eUSLetter", 2), ("eUSLegal", 3), ("eLedger", 11), ("eROC16K", 17), ("eJISExecutive", 18), ("eROC8K", 19), ("eISOandJISA5", 25), ("eISOandJISA4", 26), ("eISOandJISA3", 27), ("eJISB5", 45), ("eJISB4", 46), ("eISOB5", 65), ("eJapansePostcardDouble", 72), ("eMonarch", 80), ("eCommercial10", 81), ("eInternationalDL", 90), ("eInternationalC5", 91), ("eInternationalB5", 100), ("eCustom", 101), ("eUSLetterR", 258), ("eISOandJISA4R", 282), ("eUnknownMediaSize", 32767)))).setLabel("job-info-accounting-media-size").setMaxAccess("readonly")
if mibBuilder.loadTexts: job_info_accounting_media_size.setStatus('optional')
if mibBuilder.loadTexts: job_info_accounting_media_size.setDescription('Contains the media size of the printed job. The media size of the first page will decide the media size of the entire job. The return value of this object will only be valid when the printer finishes printing the entire job.')
# Media type enum (standard, letterhead, transparency, user types, ...).
job_info_accounting_media_type = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 5, 28, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 7, 8, 9, 10, 11, 12, 13, 14, 16, 17, 18, 19, 20))).clone(namedValues=NamedValues(("eUnknownMedia", 1), ("eStandardType", 2), ("ePreprinted", 3), ("eBond", 4), ("eLetterhead", 5), ("eTransparency", 7), ("eLabels", 8), ("eRecycled", 9), ("eColored", 10), ("eCardStock", 11), ("eRough", 12), ("ePrepunched", 13), ("eHeavy", 14), ("eUserType1", 16), ("eUserType2", 17), ("eUserType3", 18), ("eUserType4", 19), ("eUserType5", 20)))).setLabel("job-info-accounting-media-type").setMaxAccess("readonly")
if mibBuilder.loadTexts: job_info_accounting_media_type.setStatus('optional')
if mibBuilder.loadTexts: job_info_accounting_media_type.setDescription('Contains the media type of the printed job. The media type of the first page will decide the media type of the entire job. The return value of this object will only be valid when the printer finishes printing the entire job.')
# Finishing option enum applied to the job.
job_info_accounting_finishing_options = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 5, 28, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("eNoFinish", 1), ("eOffset", 2), ("ePunch", 3), ("eStapler", 4), ("eFinisher", 5)))).setLabel("job-info-accounting-finishing-options").setMaxAccess("readonly")
if mibBuilder.loadTexts: job_info_accounting_finishing_options.setStatus('optional')
if mibBuilder.loadTexts: job_info_accounting_finishing_options.setDescription('Contains the finishing option used on the printed job. The finishing option specified for the first page will decide the finishing option of the entire job. The return value of this object will only be valid when the printer finishes printing the entire job.')
# Simplex / duplex page counts for the job.
job_info_accounting_media_simplex_count = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 5, 28, 4), Integer32()).setLabel("job-info-accounting-media-simplex-count").setMaxAccess("readonly")
if mibBuilder.loadTexts: job_info_accounting_media_simplex_count.setStatus('optional')
if mibBuilder.loadTexts: job_info_accounting_media_simplex_count.setDescription('Contains the total number of simplex pages printed in a particular job.')
job_info_accounting_media_duplex_count = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 5, 28, 5), Integer32()).setLabel("job-info-accounting-media-duplex-count").setMaxAccess("readonly")
if mibBuilder.loadTexts: job_info_accounting_media_duplex_count.setStatus('optional')
if mibBuilder.loadTexts: job_info_accounting_media_duplex_count.setDescription('Contains the total number of duplex pages printed in a particular job.')
# Grayscale / color impression counts for the job.
job_info_accounting_grayscale_impression_count = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 5, 28, 6), Integer32()).setLabel("job-info-accounting-grayscale-impression-count").setMaxAccess("readonly")
if mibBuilder.loadTexts: job_info_accounting_grayscale_impression_count.setStatus('optional')
if mibBuilder.loadTexts: job_info_accounting_grayscale_impression_count.setDescription('Contains the total number of monochrome pages printed in a particular job.')
job_info_accounting_color_impression_count = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 5, 28, 7), Integer32()).setLabel("job-info-accounting-color-impression-count").setMaxAccess("readonly")
if mibBuilder.loadTexts: job_info_accounting_color_impression_count.setStatus('optional')
if mibBuilder.loadTexts: job_info_accounting_color_impression_count.setDescription('Contains the total number of color pages printed in a particular job.')
# Per-colorant (K/Y/C/M) pixel usage, reported in kilodots (1000 dots).
job_info_accounting_black_dots = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 5, 28, 8), Integer32()).setLabel("job-info-accounting-black-dots").setMaxAccess("readonly")
if mibBuilder.loadTexts: job_info_accounting_black_dots.setStatus('optional')
if mibBuilder.loadTexts: job_info_accounting_black_dots.setDescription('Contains the total number of black pixels used in a particular job. Additional information: This object reports kilodots. (A kilodot is 1000 dots.)')
job_info_accounting_yellow_dots = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 5, 28, 9), Integer32()).setLabel("job-info-accounting-yellow-dots").setMaxAccess("readonly")
if mibBuilder.loadTexts: job_info_accounting_yellow_dots.setStatus('optional')
if mibBuilder.loadTexts: job_info_accounting_yellow_dots.setDescription('Contains the total number of yellow pixels used in a particular job. Additional information: This object reports kilodots. (A kilodot is 1000 dots.)')
job_info_accounting_cyan_dots = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 5, 28, 10), Integer32()).setLabel("job-info-accounting-cyan-dots").setMaxAccess("readonly")
if mibBuilder.loadTexts: job_info_accounting_cyan_dots.setStatus('optional')
if mibBuilder.loadTexts: job_info_accounting_cyan_dots.setDescription('Contains the total number of cyan pixels used in a particular job. Additional information: This object reports kilodots. (A kilodot is 1000 dots.)')
job_info_accounting_magenta_dots = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 5, 28, 11), Integer32()).setLabel("job-info-accounting-magenta-dots").setMaxAccess("readonly")
if mibBuilder.loadTexts: job_info_accounting_magenta_dots.setStatus('optional')
if mibBuilder.loadTexts: job_info_accounting_magenta_dots.setDescription('Contains the total number of magenta pixels used in a particular job. Additional information: This object reports kilodots. (A kilodot is 1000 dots.)')
job_info_accounting_job_type = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 5, 28, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 1000))).clone(namedValues=NamedValues(("ePrintJob", 1), ("eIPPJob", 2), ("eCopyJob", 3), ("eCopyInterruptJob", 4), ("eJetSendJob", 5), ("eInternalPage", 6), ("eCleaningPage", 7), ("eAutoCleaningPage", 8), ("eDigitalSendJob", 9), ("eWebPrintJob", 10), ("eFaxPrintJob", 11), ("eRetrievedJob", 12), ("ePhotoCardPrintJob", 13), ("eUnknownJob", 1000)))).setLabel("job-info-accounting-job-type").setMaxAccess("readonly")
if mibBuilder.loadTexts: job_info_accounting_job_type.setStatus('optional')
if mibBuilder.loadTexts: job_info_accounting_job_type.setDescription('Keeps track of what type of job is processed. ')
# --- Held-job (job retention) objects (OID prefix ...1.6.7.*) ---
# Read-only attributes of jobs retained on disk (user name, job name,
# retention mode, security, quantity, PIN), plus write-only/readwrite
# management actions (print, delete, queue size, enable).
# NOTE(review): the setDescription call below spans several physical lines;
# it is reproduced exactly as generated — do not reflow it.
held_job_user_name = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 7, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 40))).setLabel("held-job-user-name").setMaxAccess("readonly")
if mibBuilder.loadTexts: held_job_user_name.setStatus('optional')
if mibBuilder.loadTexts: held_job_user_name.setDescription('User name that is obtained by the driver through some internal method or from user input. Additional information: The following is a general description for all the JOB-MANAGEMENT objects described below. These object describe the attributes of the dynamic list containing all the stored jobs on the disk available for printing or deleting via the job-management feature. The jobs on this list are not deleted from the disk unless explicitly specified by the user. A print job may be specified for retention by PJL commands in the data stream. Following is a list of the PJL commands (i.e. these comments describe the PJL implementation and in some cases do not reflect the PML implementation.): @PJL SET HOLD=OFF|ON|PROOF|STORE|PRINT GENERAL DESCRIPTION: This variable specifies the retention classification of the job. The values indicate whether the job is printed immediately and/or stored. OFF: The job is printed but not retained on disk after printing. This is the default value. ON: This setting may be thought of as free proof-and-hold. The requested number of copies will be printed and the job will be temporarily stored on disk. The job will then be available for printing additional copies through the control panel and through PML. There will be a limit to the number of these temporary jobs that may be stored, and when the limit is exceeded the oldest job will be removed from the disk. PROOF: One copy of the job is printed and remaining copies are stored on disk. The job is then available to select for printing via the control panel menus or PML. The job will be deleted from the disk when the disk space is needed for another proof and hold job but only after the additional copies have been printed. It will also be deleted when the user sends down another proof and hold job with the same job name, or the user explicitly deletes the job. STORE: The job is not printed immediately but is retained on disk. 
The job is available to select for printing via the control panel DEFAULT VALUE: OFF The value will be stored in RAM only, not in NVRAM. The legal PJL commands are SET and INQUIRE. DEFAULT is not allowed. The variable will appear in the PJL INFO VARIABLES list. @PJL SET USERNAME=<80 bytes> GENERAL DESCRIPTION: Eighty-character user name that is obtained by the driver through some internal method or from user input. If the job stream does not contain a USERNAME the default value will be NO USER NAME. The driver is responsible for determining the size of the printers control panel and sending a string of appropriate length. DEFAULT VALUE: NO USER NAME The value will be stored in RAM only, not in NVRAM. The legal PJL commands are SET and, INQUIRE. DEFAULT is not allowed. The variable will appear in the PJL INFO VARIABLES list. @PJL SET JOBNAME=<80 bytes> GENERAL DESCRIPTION: Eighty-character job name that may be generated by the driver or obtained from user input. This value may be used in conjunction with the USERNAME to select a job from the front panel. If the job stream does not contain a JOBNAME, the printer will assume no job name; each subsequent job that is sent down by the same user would replace the users last job on disk. The driver is responsible for determining the size of the printers control panel and sending a string of appropriate length. NOTE: The limit of 80 bytes is a PJL limit. The limit for PML will be 40 bytes. DEFAULT VALUE: NULL STRING The value will be stored in RAM only, not in NVRAM. The legal PJL commands are SET and INQUIRE. DEFAULT is not allowed. The variable will appear in the PJL INFO VARIABLES list. @PJL SET HOLDTYPE=PUBLIC|PRIVATE GENERAL DESCRIPTION: This variable specifies the privacy level of the job. PUBLIC: The job does not require a PIN in order to be released for printing. This is the default value. PRIVATE: The job requires a PIN in order to be released for printing. The PIN is specified by the HOLDKEY variable. 
If the HOLDTYPE is set to PRIVATE, a HOLDKEY value must be specified. If no HOLDKEY is specified, the job will be considered PUBLIC. DEFAULT VALUE: PUBLIC The value will be stored in RAM only, not in NVRAM. The legal PJL commands are SET and INQUIRE. DEFAULT is not allowed. The variable will appear in the PJL INFO VARIABLES list. @PJL SET HOLDKEY=4 digits, 0000...9999 GENERAL DESCRIPTION: A 4-digit string, each digit 0-9, that is specified in the job stream and then required to be entered in order to release the job for printing. If a HOLDTYPE is PRIVATE, a HOLDKEY value must be specified. If no HOLDKEY value is specified, a job will be considered PUBLIC. DEFAULT VALUE: NULL STRING The value will be stored in RAM only, not in NVRAM. The legal PJL commands are SET and INQUIRE. DEFAULT is not allowed. The variable will appear in the PJL INFO VARIABLES list. The format for the OID is as follows: 3.4.1.6.x.y <-----> | | | | \\ / | job-id in the system / \\ PML_JOB_MGNT_ROOT_OID \\ job attribute 1..6 x values are 1..6: 1) user-name: user name that is obtained by the driver through some internal method or from user input. 2) job-name: job name may be generated by the driver or obtained from user input. 3) hold: indicates the retention classification of the job. The values indicate whether the job is printed immediately and/or stored. There are 4 options: OFF: The job is printed but not retained on the disk. ON: The job is printed and stored temporarily on the disk. STORE: The job is not printed but stored on the disk. PROOF: One copy of the job is printed and the remaining copies are stored on the disk. 4) holdtype: The variable specifies the security level of the job. PUBLIC: The job does not require a PIN in order to release the job for printing. PRIVATE: The job requires a PIN in order to be released for printing. 5) quantity: number of copies to print. Valid values are 0..999. 
6) pin: A 4 digit string, each digit is 0-9, that is specified in the job stream and then required to be entered in order to release the job for printing. y : an unsigned 32-bit number which uniquely identifies the job. The job id for the job remains the same for the job until it is deleted from the held jobs list. For example, the OID below is asking for the user name of the job whose id is 3. The application would issue: OID get 3.4.1.6.1.3 For example, the <getnext> OID values sequence is as follows: OID returned OID getnext 3.4.1.6 3.4.1.6.1.1 getnext 3.4.1.6.1.1 3.4.1.6.1.2 getnext 3.4.1.6.2.1 3.4.1.6.2.2 *** getnext 3.4.1.6.1.6 3.4.1.6.1.7 getnext 3.4.1.6.2.1 3.4.1.6.2.2 getnext 3.4.1.6.5.2828 3.4.1.6.6.1 NOTE: for example above, job id 2828 is the last job in the list of held jobs. *** Also supported is <getnext> on the job attributes: OID returned OID getnext 3.4.1.6.1 3.4.1.6.1.1 getnext 3.4.1.6.6 3.4.1.6.6.1')
held_job_job_name = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 7, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 40))).setLabel("held-job-job-name").setMaxAccess("readonly")
if mibBuilder.loadTexts: held_job_job_name.setStatus('optional')
if mibBuilder.loadTexts: held_job_job_name.setDescription('The job name may be generated by the driver or obtained from user input.')
# Retention mode: enumerated off/on/store/proof.
held_job_retention = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 7, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("eHoldOff", 1), ("eHoldOn", 2), ("eHoldStore", 3), ("eHoldProof", 4)))).setLabel("held-job-retention").setMaxAccess("readonly")
if mibBuilder.loadTexts: held_job_retention.setStatus('optional')
if mibBuilder.loadTexts: held_job_retention.setDescription('Indicates the retention classification of the job. The values indicate whether the job is printed immediately or stored. There are 4 options: eHoldOff: The job is printed but not retained on the disk. eHoldOn: The job is printed and stored temporarily on the disk. eHoldStore: The job is not printed but stored on the disk. eHoldProof: One copy of the job is printed and the remaining copies are stored on the disk. ')
# Security level: public (no PIN) vs. private (PIN required).
held_job_security = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 7, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("eHoldTypePublic", 1), ("eHoldTypePrivate", 2)))).setLabel("held-job-security").setMaxAccess("readonly")
if mibBuilder.loadTexts: held_job_security.setStatus('optional')
if mibBuilder.loadTexts: held_job_security.setDescription('The variable specifies the security level of the job. eHoldTypePublic: The job does not require a PIN in order to release the job for printing. eHoldTypePrivate: The job requires a PIN in order to be released for printing. ')
held_job_quantity = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 7, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 999))).setLabel("held-job-quantity").setMaxAccess("readonly")
if mibBuilder.loadTexts: held_job_quantity.setStatus('optional')
if mibBuilder.loadTexts: held_job_quantity.setDescription('Number of copies to print.')
held_job_pin = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 7, 1, 6), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 4))).setLabel("held-job-pin").setMaxAccess("readonly")
if mibBuilder.loadTexts: held_job_pin.setStatus('optional')
if mibBuilder.loadTexts: held_job_pin.setDescription('A string that is specified in the job stream and then required to be entered in order to release the job for printing. PIN stands for Personal Identification Number. Additional information: Must be a 4 digit string, each digit must be 0..9 or a null string if there is no pin. For security purposes, you can no longer get the value of the PIN.')
# Management actions. held-job-print takes a fixed 12-byte command string
# (job id, copy count, optional PIN per the description).
held_job_print = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 7, 2, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(12, 12)).setFixedLength(12)).setLabel("held-job-print").setMaxAccess("writeonly")
if mibBuilder.loadTexts: held_job_print.setStatus('optional')
if mibBuilder.loadTexts: held_job_print.setDescription('Instructs the printer to schedule the specified held job for printing with the specified number of copies. The job-id is used to identify which job to print. A held job can also be printed from the control panel. Additional information: Bytes 0-3 is the job id of the job to print. Bytes 4-7 is the number of copies to print. Bytes 8-11 (optional) contain the PIN for a Private job. ')
held_job_delete = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 7, 2, 2), Integer32()).setLabel("held-job-delete").setMaxAccess("writeonly")
if mibBuilder.loadTexts: held_job_delete.setStatus('optional')
if mibBuilder.loadTexts: held_job_delete.setDescription('Instructs the printer to delete the specified held job from the list. The job-id is used to identify which job to delete. A held job can also be deleted from the control panel. Additional information: Setting this to a value that is not a Held Job on the system or is a Private Held Job returns <ErrUnsupValue>. To delete a private Held Job, you must use the PML object HELD-JOB-PRINT with a quantity of 0 and supply the correct HELD-JOB-PIN with the request. (See HELD-JOB-PRINT)')
held_job_set_queue_size = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 7, 2, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 100))).setLabel("held-job-set-queue-size").setMaxAccess("readwrite")
if mibBuilder.loadTexts: held_job_set_queue_size.setStatus('optional')
if mibBuilder.loadTexts: held_job_set_queue_size.setDescription('Sets the maximum number of jobs which can be stored in the held job list. Additional information: Sets the size of the temporary job lists queue.')
held_job_enable = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 6, 7, 2, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("eDisabled", 1), ("eEnabled", 2)))).setLabel("held-job-enable").setMaxAccess("readwrite")
if mibBuilder.loadTexts: held_job_enable.setStatus('optional')
if mibBuilder.loadTexts: held_job_enable.setDescription('Enables or disables Job Retention (Job Hold). The valid values are eDisabled and eEnabled. (Specifying an invalid mode causes an <badValue> error to be returned.) When eDisabled is specified all Job Retention (Hold, Proof, Store, PIN Printing) is disabled. When eEnabled is specified, the Job Retention characteristics of a given job are defined by the PJL variable SET HOLD. Additional information: When disabled, held jobs are not removed, but must be explicitly removed through the Control Panel or the PML object HELD-JOB-DELETE.')
# --- Mopy (multiple-original copy) mode and print-engine defaults ---
# NOTE(review): mopy-mode's enum only declares eOff(1)/eStandard(4)/eEnhanced(5),
# yet its description text also mentions eAuto — generated-source inconsistency
# inherited from the MIB; left as-is because description text is runtime data.
mopy_mode = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 2, 4, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 4, 5))).clone(namedValues=NamedValues(("eOff", 1), ("eStandard", 4), ("eEnhanced", 5)))).setLabel("mopy-mode").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mopy_mode.setStatus('optional')
if mibBuilder.loadTexts: mopy_mode.setDescription('Controls or reports how mopies are generated. eOff turns off the mopy feature. eAuto allows the device to determine the best method for generating mopies based on the device configuration. eStandard spools the I/O data and replays the I/O data multiple times to generate the mopies. eEnhanced spools the rasterized page images and replays the rasterized data to generate the mopies. Typically, the rasterized data will be larger and will allow all but the first mopy to print at speed. If the job being mopied is not extremely complex, then the first mopy will print at speed also.')
# Default black resolution, dots per inch; per the descriptions the vertical
# and horizontal values are kept in lock-step by the device.
default_vertical_black_resolution = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 3, 3, 1, 8), Integer32()).setLabel("default-vertical-black-resolution").setMaxAccess("readwrite")
if mibBuilder.loadTexts: default_vertical_black_resolution.setStatus('optional')
if mibBuilder.loadTexts: default_vertical_black_resolution.setDescription('Returns or changes the value of the default vertical black resolution. The units are dots per inch. Additional information: In $product_str changing this OID also causes DEFAULT-HORIZONTAL-BLACK RESOLUTION to change. DEFAULT-HORIZONTAL-BLACK-RESOLUTION and DEFAULT-VERTICAL-BLACK-RESOLUTION must always be the same. The supported values are: LaserJet 4100: 300, 600, 1200 Color Products: 600 LaserJet 9000: 300, 600 Setting to an unsupported value causes the printer to substitute in a snap value and to return <noError> status. The snap values are as follow: LaserJet 4100: 450 >= n < 900 snaps to 600 n >= 900 snaps to 1200 Color Products: n snaps to 600 LaserJet 9000: n < 450 snaps to 300 n >=450 snaps to 600 ')
default_horizontal_black_resolution = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 3, 3, 1, 9), Integer32()).setLabel("default-horizontal-black-resolution").setMaxAccess("readwrite")
if mibBuilder.loadTexts: default_horizontal_black_resolution.setStatus('optional')
if mibBuilder.loadTexts: default_horizontal_black_resolution.setDescription('Returns or changes the value of the default horizontal black resolution. The units are dots per inch. Additional information: In $product_str changing this object also causes DEFAULT-VERTICAL-BLACK RESOLUTION to change. DEFAULT-HORIZONTAL-BLACK-RESOLUTION and DEFAULT-VERTICAL-BLACK-RESOLUTION must always be the same. The supported values are: LaserJet 4100: 300, 600, 1200 Color Products: 600 LaserJet 9000: 300, 600 Setting to an unsupported value causes the printer to substitute in a snap value and to return <noError> status. The snap values are as follow: LaserJet 4100: n < 450 snaps to 300 450 >= n < 900 snaps to 600 n >= 900 snaps to 1200 Color Products: n snaps to 600 LaserJet 9000: n < 450 snaps to 300 n >=450 snaps to 600 ')
# Page protection: only eOn(2) is accepted by the constraint on this product,
# even though the description also describes eOff/eAuto behavior.
default_page_protect = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 3, 3, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2))).clone(namedValues=NamedValues(("eOn", 2)))).setLabel("default-page-protect").setMaxAccess("readwrite")
if mibBuilder.loadTexts: default_page_protect.setStatus('optional')
if mibBuilder.loadTexts: default_page_protect.setDescription('Returns or changes the default page protection behavior. If eOff, the device does not reserve memory for holding the entire raster form of a processed page. If eOn, then memory is reserved. If eAuto, the device determines the amount of memory to reserve. Additional information: Setting to eOn causes MET to be more conservative, but it will not allocate memory to hold an entire rasterized page.')
default_bits_per_pixel = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 3, 3, 1, 39), Integer32()).setLabel("default-bits-per-pixel").setMaxAccess("readwrite")
if mibBuilder.loadTexts: default_bits_per_pixel.setStatus('optional')
if mibBuilder.loadTexts: default_bits_per_pixel.setDescription('Controls the number of levels used (per pixel) when printing grayscale or color images.')
# --- Front-panel date/time display and real-time clock objects ---
date_display = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 1, 22), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(4, 5, 6))).clone(namedValues=NamedValues(("eDateDisplayMMM-DD-YYYY", 4), ("eDateDisplayDD-MMM-YYYY", 5), ("eDateDisplayYYYY-MMM-DD", 6)))).setLabel("date-display").setMaxAccess("readwrite")
if mibBuilder.loadTexts: date_display.setStatus('optional')
if mibBuilder.loadTexts: date_display.setDescription('Controls front-panel date display format. Additional information: Controls front-panel date display format.')
# Raw clock value: a packed 7-byte C struct (yr/mon/day/wday/hr/min/sec),
# each field one unsigned byte, as documented in the description text.
date_and_time = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 2, 17), OctetString()).setLabel("date-and-time").setMaxAccess("readwrite")
if mibBuilder.loadTexts: date_and_time.setStatus('optional')
if mibBuilder.loadTexts: date_and_time.setDescription('A C structure containing the following fields: typedef struct { ubyte yr; /* year: 0 to 99 */ ubyte mon; /* month: 1 to 12 */ ubyte day; /* day: 1 to 31 */ ubyte wday; /* Day of week: 1 to 07 */ ubyte hr; /* hour: 0 to 23 */ ubyte min; /* minute: 0 to 59 */ ubyte sec; /* second: 0 to 59 */ } date_t; where ubyte is an unsigned byte (0-255). Additional information: A C structure containing the following fields: typedef struct { ubyte yr; /* year: 3 to 99 */ ubyte mon; /* month: 1 to 12 */ ubyte day; /* day: 1 to 31 */ ubyte wday; /* Day of week: 1 to 07 */ ubyte hr; /* hour: 0 to 23 */ ubyte min; /* minute: 0 to 59 */ ubyte sec; /* second: 0 to 59 */ } date_t; where ubyte is an unsigned byte (0-255).')
time_display = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 2, 28), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("eTimeDisplayTwelveHour", 1), ("eTimeDisplayTwentyFourHour", 2)))).setLabel("time-display").setMaxAccess("readwrite")
if mibBuilder.loadTexts: time_display.setStatus('optional')
if mibBuilder.loadTexts: time_display.setDescription('Controls front-panel time display format. Set to eTimeDisplayTwelveHour for AM/PM display. Set to eTimeDisplayTwentyFourHour for military-type display. Additional information: Controls front-panel time display format. Set to eTimeDisplayTwelveHour for AM/PM display. Set to eTimeDisplayTwentyFourHour for military-type display.')
# --- MIO/EIO slot information (read-only model/manufacturing/type triples) ---
# Slot 1 is a physical card slot; per the descriptions, "slot" 4 is the
# embedded JDI LAN interface.
mio1_model_name = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 3, 1, 2), OctetString()).setLabel("mio1-model-name").setMaxAccess("readonly")
if mibBuilder.loadTexts: mio1_model_name.setStatus('optional')
if mibBuilder.loadTexts: mio1_model_name.setDescription('Returns product information identifying the I/O card. Example: XXXX.')
mio1_manufacturing_info = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 3, 1, 3), OctetString()).setLabel("mio1-manufacturing-info").setMaxAccess("readonly")
if mibBuilder.loadTexts: mio1_manufacturing_info.setStatus('optional')
if mibBuilder.loadTexts: mio1_manufacturing_info.setDescription('Returns information describing the manufacture of the I/O card installed in MIO/EIO slot 1. May include serial number and firmware revision. Additional information: The format of the string returned is determined by the manufacturer of the EIO device. There is no standard for content of the string.')
mio1_type = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 3, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 8, 12))).clone(namedValues=NamedValues(("eEmpty", 1), ("eUnknown", 2), ("eDiskDrive", 8), ("eIOCard", 12)))).setLabel("mio1-type").setMaxAccess("readonly")
if mibBuilder.loadTexts: mio1_type.setStatus('optional')
if mibBuilder.loadTexts: mio1_type.setDescription('Returns an indication of the type of option installed in MIO/EIO slot 1. See SIMM1-TYPE for an explanation of the enumerations.')
mio4_model_name = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 3, 4, 2), OctetString()).setLabel("mio4-model-name").setMaxAccess("readonly")
if mibBuilder.loadTexts: mio4_model_name.setStatus('optional')
if mibBuilder.loadTexts: mio4_model_name.setDescription('Returns product information identifying the I/O card. Example: XXXX. Additional information: Returns a string describing the firmware version of the embedded JDI LAN card.')
mio4_manufacturing_info = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 3, 4, 3), OctetString()).setLabel("mio4-manufacturing-info").setMaxAccess("readonly")
if mibBuilder.loadTexts: mio4_manufacturing_info.setStatus('optional')
if mibBuilder.loadTexts: mio4_manufacturing_info.setDescription('Returns information describing the manufacture of the I/O card installed in MIO/EIO interface 4. May include serial number and firmware revision. Additional information: Returns a string describing the embedded JDI LAN card.')
# mio4-type is restricted to eEmpty/eIOCard only (unlike mio1-type, which
# also allows eUnknown/eDiskDrive).
mio4_type = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 4, 3, 4, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 12))).clone(namedValues=NamedValues(("eEmpty", 1), ("eIOCard", 12)))).setLabel("mio4-type").setMaxAccess("readonly")
if mibBuilder.loadTexts: mio4_type.setStatus('optional')
if mibBuilder.loadTexts: mio4_type.setDescription('Returns an indication of the type of option installed in MIO/EIO interface 4. See SIMM1-TYPE for an explanation of the enumerations. Additional information: Returns eEmpty if the embedded JDI LAN card is disabled. Returns eIOCard if the embedded JDI LAN card is enabled.')
# --- I/O channel and parallel-port configuration objects ---
# I/O timeout in seconds, constrained to 5..300.
io_timeout = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 2, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(5, 300))).setLabel("io-timeout").setMaxAccess("readwrite")
if mibBuilder.loadTexts: io_timeout.setStatus('optional')
if mibBuilder.loadTexts: io_timeout.setDescription('The amount of time, in seconds, to wait for more print job data to be received before an I/O timeout occurs. The I/O channel being timed is the I/O channel that received the data associated with the current print job. If an I/O timeout occurs, the PDL processing sub-system assumes all the data associated with the current print job has been received, and processes the end of job in a PDL specific manner. The POS specifies the supported values. Additional information: If an I/O timeout occurs,the PDL processing sub-system will consider it an end of job condition only if there is data from another I/O subsystem waiting to be processed. The supported values are 5 to 300 seconds. Setting to a value outside the supported range returns <noError> status and the value will be snapped to the nearest supported value.')
# io-switch is read-only with eYes as its only allowed value: channel
# switching is always on for this product.
io_switch = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 2, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1))).clone(namedValues=NamedValues(("eYes", 1)))).setLabel("io-switch").setMaxAccess("readonly")
if mibBuilder.loadTexts: io_switch.setStatus('optional')
if mibBuilder.loadTexts: io_switch.setDescription('Indicates if the device will switch between I/O channels when a job boundary is encountered and print job data is available on another I/O channel.')
port1_parallel_speed = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 2, 1, 3, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("eSlow", 1), ("eFast", 2)))).setLabel("port1-parallel-speed").setMaxAccess("readwrite")
if mibBuilder.loadTexts: port1_parallel_speed.setStatus('optional')
if mibBuilder.loadTexts: port1_parallel_speed.setDescription('Returns or changes the maximum parallel I/O port speed, for port 1. This object is only supported if this port is a parallel port. An eSlow setting causes a 10 us busy pulse per received byte of data. An eFast setting causes a 1.5 us busy pulse per received byte of data. In rare cases, setting this value to eFast can cause the parallel port to no longer transfer data reliably. Additional information: When the value of this object is changed, it takes effect immediately. It is recommended that the printer be offline and not in a job when this object is changed.')
port1_parallel_bidirectionality = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 2, 1, 3, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("eUnidirectional", 1), ("eBidirectional", 2)))).setLabel("port1-parallel-bidirectionality").setMaxAccess("readwrite")
if mibBuilder.loadTexts: port1_parallel_bidirectionality.setStatus('optional')
if mibBuilder.loadTexts: port1_parallel_bidirectionality.setDescription('Returns or changes whether the parallel I/O port supports bidirectional communication for port 1. This object is only supported if this port is a parallel port. Additional information: A get on this object returns the current mode for the parallel port. Setting this object specifies whether or not bidirectional communications will be allowed.')
# --- Logical-channel count and RPC/NFS binding objects ---
# NOTE(review): unlike every other scalar in this module, this one has no
# .setLabel(...) call — presumably an artifact of the MIB compiler output;
# left unchanged since adding one would alter the registered metadata.
channelnumberofchannels = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 6, 1), Integer32()).setMaxAccess("writeonly")
if mibBuilder.loadTexts: channelnumberofchannels.setStatus('optional')
if mibBuilder.loadTexts: channelnumberofchannels.setDescription('An indication of how many print data channels the I/O card supports. Additional information: This object is used by the IIO card to tell the peripheral firmware how many logical channels will be used by the card. This object can only be set once per physical channel. If an attempt is made to set it a subsequent time it is ignored and an return code of <ErrorActionCanNotBePerformedNow.is returned. If new firmware is dynamically downloaded to the Blazers Plus card, and if that firmware uses more logical channels, it is necessary to reset the printer.')
# Berkeley-sockets-style protocol addresses for the RPC/NFS service; the
# binary layout is documented in the description text.
rpc_bind_protocol_address = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 13, 1, 2), OctetString()).setLabel("rpc-bind-protocol-address").setMaxAccess("readonly")
if mibBuilder.loadTexts: rpc_bind_protocol_address.setStatus('optional')
if mibBuilder.loadTexts: rpc_bind_protocol_address.setDescription('Array of Berkeley sockets style protocol addresses used to bind RPC to a communications protocol family. Setting an instance of this array object to a zero length binary value disables the transport protocol indicated by that instance. All multi-bytes fields are in network (or big-endian) order. Bytes 1 and 2 indicate the transport protocol. Some of the transport protocol mapping information can be found in RFC 1010 Assigned Numbers. A list of interesting transport protocol number mappings include: <table> Protocol | Number ---------+------- UDP/IP | 17 IPX | 1000 MLC | 4660 </table> Bytes 3 and 4 indicate the address family. The address family uses the same mapping as the BSD sockets address family. A list of interesting address family mappings include: <table> Address Family | Number ---------------+------- Internet | 2 NetWare | 6 MLC | 22136 </table> The format the fifth and following bytes is dependent on the address family. For the Internet address family, bytes 5 and 6 contain the port number, bytes 7 through 10 contain the IP address, and the following eight bytes are unused. For NetWare, bytes 5 through 8 are the network number, bytes 9 through 14 are the node number, and bytes 15 and 16 are the socket number. Additional information: The length of the binary value is zero if the instance of an object in the rpc-bind-protocol-address array is not in use. Returns <noSuchName> status if attempting to access this object and there is no storage device is installed.')
rpc_bound_protocol_address = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 13, 2, 3), OctetString()).setLabel("rpc-bound-protocol-address").setMaxAccess("readonly")
if mibBuilder.loadTexts: rpc_bound_protocol_address.setStatus('optional')
if mibBuilder.loadTexts: rpc_bound_protocol_address.setDescription('An array of Berkeley sockets style protocol addresses that the NFS service has been bound to successful. The format is that same as the array of RPC-BIND-PROTOCOL-ADDRESS of objects. Additional information: Returns <noSuchName> status if attempting to access this object and there is no storage device is installed.')
# file-system-max-open-files (OID ...10.1.2): read-only Integer32 — the
# guaranteed number of simultaneously open files (1-500 per the description).
file_system_max_open_files = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 10, 1, 2), Integer32()).setLabel("file-system-max-open-files").setMaxAccess("readonly")
if mibBuilder.loadTexts: file_system_max_open_files.setStatus('optional')
if mibBuilder.loadTexts: file_system_max_open_files.setDescription('The number of open files allowed at one time. Opening a file when the maximum number of files are currently open will fail. Additional information: Indicates the number of open files a personality (e.g. PCL or PostScript) is guaranteed to be able to open before the file system runs out of file handles. This object is between 1 and 500. Returns <noSuchName> status if attempting to access this object and there is no storage device is installed.')
# file-system-set-system-partition-writeable (OID ...10.1.6): write-only
# OCTET STRING carrying a C struct (8 hidden bytes + volume number) that
# unlocks a system partition for writing.
file_system_set_system_partition_writeable = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 10, 1, 6), OctetString()).setLabel("file-system-set-system-partition-writeable").setMaxAccess("writeonly")
if mibBuilder.loadTexts: file_system_set_system_partition_writeable.setStatus('optional')
if mibBuilder.loadTexts: file_system_set_system_partition_writeable.setDescription('This object allows the system partition to be written to. It consists of a C structure containing the following fields: typedef struct { ubyte <<hidden>>[8]; ubyte volumenumber; } fs_writeable_system_partition_t; which is described below: Bytes 0 - 7: contain the <<hidden>> Byte 8 : is the volume number Access to this command is controlled by the <<hidden>>. If the <<hidden>> supplied is incorrect the command will fail. The volumenumber is a volume number of an existing system partition. Additional information: Returns <noSuchName> status if attempting to access this object and there is no storage device is installed. Returns <badValue> if the <<hidden>> is incorrect or if the volume requested is not present.')
# file-system-set-system-partition-readonly (OID ...10.1.7): write-only
# Integer32 — the volume number of a system partition to flip back to
# READ-ONLY.
file_system_set_system_partition_readonly = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 10, 1, 7), Integer32()).setLabel("file-system-set-system-partition-readonly").setMaxAccess("writeonly")
if mibBuilder.loadTexts: file_system_set_system_partition_readonly.setStatus('optional')
if mibBuilder.loadTexts: file_system_set_system_partition_readonly.setDescription('Changes a system partition to be READ-ONLY. The value is the volume number to change. If the volume number specified is NOT a system partition an error is returned. Additional information: Returns <noSuchName> status if attempting to access this object and there is no storage device is installed. Returns <badValue> if the volume requested is not present.')
# file-system-delete-files (OID ...10.1.8): write-only OCTET STRING carrying
# an fs_delete_files_t struct (user id, authentication data, NUL-terminated
# path/glob) that deletes matching files after authentication.
file_system_delete_files = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 10, 1, 8), OctetString()).setLabel("file-system-delete-files").setMaxAccess("writeonly")
if mibBuilder.loadTexts: file_system_delete_files.setStatus('optional')
# NOTE(review): the string literal below appears to span two physical lines
# without continuation syntax in this view — in valid Python it must be a
# single line; this is most likely line-wrapping introduced by extraction.
# Verify against the original generated file before editing.
if mibBuilder.loadTexts: file_system_delete_files.setDescription('Setting this object causes the specified filename to be deleted, after first validating that the authentication data is correct for the specified user ID. The format for this object is a C structure: typedef struct { sint32 UserId; uint16 AuthenticationDataLen ubyte AuthenticationData[] char Filename[]; } fs_delete_files_t; which is described below: Bytes 0 - 3: contains a user id represented as a multi-byte value that is stored in big-endian format, where the most significant byte occurs first. Bytes 4 - 5 : Length of the Athentication data that follows starting at offset 6. Stored as a multi-byte value that is stored in big-endian format, where the most significant byte occurs first. Bytes 6 - 6+AuthenticationDataLen : a ubyte array containing the Authentication data used to verify access for this operation. Bytes starting at offset (6+AuthenticationDataLen+1): A null terminated character array representing the ASCII file name to be deleted. The length of the string will be limited by the remaining space in the object. This string represents a fully-qualified path name which may specify a filename or a regular expression that may match multiple files (e.g <path>/*.exe). Access to this command is controlled by the UserId and the authentication data. If the UserID or authentication data supplied is incorrect the command will fail. The device POS will specify any limitations to the length of the filename string, what constitutes a correct user ID, what constitutes correct authentication data, and the significance of any return values. Additional information: Setting this object causes the specified filename to be deleted, after first validating that the authentication data is correct for the specified user ID. This object is always present. 
The format for this object is a C structure: typedef struct { sint32 UserId; uint16 AuthenticationDataLen ubyte AuthenticationData[] char Filename[]; } fs_delete_files_t; Bytes 0 - 3: contains a user id represented as a multi-byte value that is stored in big-endian format, where the most significant byte occurs first. Bytes 4 - 5 : Length of the authentication data that follows starting at offset 6. Stored as a multi-byte value that is stored in big-endian format, where the most significant byte occurs first. Bytes 6 - 6+AuthenticationDataLen : a ubyte array containing the authentication data used to verify access for this operation. Bytes starting at offset (6+AuthenticationDataLen+1): A null terminated character array representing the ASCII file name to be deleted. The length of the string will be limited by the remaining space in the object. This string represents a fully-qualified path name which may specify a filename or a file-type regular expression that may match multiple files (e.g., <path>/*.exe). Access to this command is controlled by the UserId and the authentication data. If the UserID or authentication data supplied is incorrect the command will fail. The device POS will specify any limitations to the length of the filename string, what constitutes a correct user ID, what constitutes correct authentication data, and the significance of any return values.')
# file-system{2,3,4}-initialize-volume (OIDs ...10.3.{2,3,4}.6): write-only
# enumerated Integer32 accepting only eInitializing(2). Each triggers
# initialization of one of up to three mass storage devices; the sub-tree
# index tracks hrDeviceIndex, which starts at 2 for mass storage devices.
file_system2_initialize_volume = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 10, 3, 2, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2))).clone(namedValues=NamedValues(("eInitializing", 2)))).setLabel("file-system2-initialize-volume").setMaxAccess("writeonly")
if mibBuilder.loadTexts: file_system2_initialize_volume.setStatus('optional')
if mibBuilder.loadTexts: file_system2_initialize_volume.setDescription('Setting this object to eInitializing causes file system 2 to be initialized. Reading this object indicates if the file system is currently being initialized. Additional information: The hrDeviceIndex value for the mass storage device is the same value that is used to index into the FILE-SYSTEM sub-tree. Since this product supports up to 3 physical mass storage device, and since the hrDeviceIndex for the mass storage devices will start at 2 if the mass storage device is installed, the FILE-SYSTEM2-INITIALIZE-VOLUME object will be the object that allows the mass storage device to be initialized that is the 1st device.')
# Same as above for the 2nd mass storage device (file system 3).
file_system3_initialize_volume = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 10, 3, 3, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2))).clone(namedValues=NamedValues(("eInitializing", 2)))).setLabel("file-system3-initialize-volume").setMaxAccess("writeonly")
if mibBuilder.loadTexts: file_system3_initialize_volume.setStatus('optional')
if mibBuilder.loadTexts: file_system3_initialize_volume.setDescription('Setting this object to eInitializing causes file system 3 to be initialized. Reading this object indicates if the file system is currently being initialized. Additional information: The hrDeviceIndex value for the mass storage device is the same value that is used to index into the FILE-SYSTEM sub-tree. Since this product supports up to 3 physical mass storage device, and since the hrDeviceIndex for the mass storage devices will start at 2 if the mass storage device is installed, the FILE-SYSTEM3-INITIALIZE-VOLUME object will be the object that allows the mass storage device to be initialized that is the 2nd device.')
# Same as above for the 3rd mass storage device (file system 4).
file_system4_initialize_volume = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 10, 3, 4, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2))).clone(namedValues=NamedValues(("eInitializing", 2)))).setLabel("file-system4-initialize-volume").setMaxAccess("writeonly")
if mibBuilder.loadTexts: file_system4_initialize_volume.setStatus('optional')
if mibBuilder.loadTexts: file_system4_initialize_volume.setDescription('Setting this object to eInitializing causes file system 4 to be initialized. Reading this object indicates if the file system is currently being initialized. Additional information: The hrDeviceIndex value for the mass storage device is the same value that is used to index into the FILE-SYSTEM sub-tree. Since this product supports up to 3 physical mass storage devices, and since the hrDeviceIndex for the mass storage devices will start at 2 if the mass storage device is installed, the FILE-SYSTEM4-INITIALIZE-VOLUME object will be the object that allows the mass storage device to be initialized that is the 3rd device.')
# mass-storage-resource-change-counter (OID ...12.3.1): read-only Integer32
# that changes whenever a mass-storage-based resource is added or deleted
# (bumped by writes to the companion object below).
mass_storage_resource_change_counter = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 12, 3, 1), Integer32()).setLabel("mass-storage-resource-change-counter").setMaxAccess("readonly")
if mibBuilder.loadTexts: mass_storage_resource_change_counter.setStatus('optional')
if mibBuilder.loadTexts: mass_storage_resource_change_counter.setDescription('A counter which changes when a mass storage based resource has been added or deleted. Additional information: The value of this counter changes each time the MASS-STORAGE-RESOURCE-CHANGED object is set to eTrue. The value also changes when the mass storage device is initialized. However, the value does not change when a mass storage device is removed and a different mass storage device is installed. Initializing all volumes sets this object back to the factory default value. A reboot sets this object back to the factory devalut value. Returns <noSuchName> status if attempting to access this object and there is no storage device is installed.')
# mass-storage-resource-changed (OID ...12.3.2): write-only enumerated
# Integer32 accepting only eTrue(2); setting it increments the counter above.
mass_storage_resource_changed = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 12, 3, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2))).clone(namedValues=NamedValues(("eTrue", 2)))).setLabel("mass-storage-resource-changed").setMaxAccess("writeonly")
if mibBuilder.loadTexts: mass_storage_resource_changed.setStatus('optional')
if mibBuilder.loadTexts: mass_storage_resource_changed.setDescription('Setting to eTrue causes MASS-STORAGE-RESOURCE-CHANGE-COUNTER to be incremented. Additional information: Returns <noSuchName> status if attempting to access this object and there is no storage device is installed.')
# ram-disk-mode (OID ...15.1.1): read-write enumerated Integer32 — only
# eOff(1) and eAuto(3) are valid on this product (no eOn per the
# "Additional information" text).
ram_disk_mode = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 15, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 3))).clone(namedValues=NamedValues(("eOff", 1), ("eAuto", 3)))).setLabel("ram-disk-mode").setMaxAccess("readwrite")
if mibBuilder.loadTexts: ram_disk_mode.setStatus('optional')
if mibBuilder.loadTexts: ram_disk_mode.setDescription('Returns or controls RAM disk support. eOFF turns off the RAM disk functionality. eOn turns on the RAM disk functionality and creates a RAM disk whose size is controlled by the RAM-DISK-SIZE object. eAuto turns on the RAM disk functionality and creates a RAM disk size determined by the printer based on installed options and other memory related settings. Additional information: Returns or controls RAM disk support. eOFF turns off the RAM disk functionality. eAuto turns on the RAM disk functionality and creates a RAM disk size determined by the printer based on the amount of installed memory.')
# ram-disk-size (OID ...15.1.2): read-only on this product (the generic text
# says read-or-control, but max-access here is readonly).
ram_disk_size = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 15, 1, 2), Integer32()).setLabel("ram-disk-size").setMaxAccess("readonly")
if mibBuilder.loadTexts: ram_disk_size.setStatus('optional')
if mibBuilder.loadTexts: ram_disk_size.setDescription('Returns or controls the size of the RAM disk. The device POS specifies the minimum memory requirements. The object MAXIMUM-RAM-DISK-MEMORY specifies the maximum memory available for the RAM disk. Additional information: Returns the size of the RAM disk.')
# maximum-ram-disk-memory (OID ...15.2.1): read-only Integer32 — maximum
# additional memory (bytes) available to grow the RAM disk, not the maximum
# configurable size (the description itself calls the name misleading).
maximum_ram_disk_memory = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 15, 2, 1), Integer32()).setLabel("maximum-ram-disk-memory").setMaxAccess("readonly")
if mibBuilder.loadTexts: maximum_ram_disk_memory.setStatus('optional')
if mibBuilder.loadTexts: maximum_ram_disk_memory.setDescription("This object's name is misleading. This object does not return the maximum configurable RAM disk size. Instead, it returns the maximum amount of memory, in bytes, that can used to increase the size of the RAM disk. Additional information: This object returns the maximum amount of additional memory that is available for increasing the size of the RAM disk.")
# page-frame-memory-available (OID ...2.72): read-only Integer32 — onboard
# page-frame memory; the generic text says bytes but the product-specific
# text says kilobytes, so consult the device POS for the actual unit.
page_frame_memory_available = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 2, 72), Integer32()).setLabel("page-frame-memory-available").setMaxAccess("readonly")
if mibBuilder.loadTexts: page_frame_memory_available.setStatus('optional')
if mibBuilder.loadTexts: page_frame_memory_available.setDescription('Returns the abount of page-frame memory in bytes available in the system. Additional information: Total amount of onboard pageframe memory, in kilobytes, present in the printer.')
# device-configure-printer-parameters (OID ...1.32.12): read-write
# OCTET STRING (1-256 bytes) — an encoded configuration string used to switch
# duplex/simplex configuration on LaserJet 4650-class engines.
device_configure_printer_parameters = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 1, 32, 12), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 256))).setLabel("device-configure-printer-parameters").setMaxAccess("readwrite")
if mibBuilder.loadTexts: device_configure_printer_parameters.setStatus('optional')
if mibBuilder.loadTexts: device_configure_printer_parameters.setDescription('Allow the printer to be configured as a duplex or simplex printer. There will be a <<hidden>> encoded in this string and decoded by the printer firmware. If the <<hidden>> check passes the printer will be configured accordingly. Additional information: Used to configure Laserjet 4650 engines for duplex enabled or not. Encoded configuration string is passed in, which is decoded by firmware. Firmware verifies config. string is valid, and retrieves device Configuration data. Expandable to accommodate future products configuration needs.')
# job-input-auto-continue-timeout (OID ...2.35): read-write Integer32 in
# [-1, 3600] seconds; -1 = never auto-continue, 0 = continue immediately.
job_input_auto_continue_timeout = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 2, 35), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-1, 3600))).setLabel("job-input-auto-continue-timeout").setMaxAccess("readwrite")
if mibBuilder.loadTexts: job_input_auto_continue_timeout.setStatus('optional')
if mibBuilder.loadTexts: job_input_auto_continue_timeout.setDescription('The number of seconds the device waits after a job related continuable error occurs before automatically continuing. An example job related continuable error is the job requesting a media size that is not available. After the timeout expires, the device will continue processing the job as if a continue event occurred, such as the front panel continue key being pressed. If the value is -1, the device does not automatically continue after a job related continuable error occurs. If the value is 0, the device immediately continues. If the value is greater than 0, the value represents the timeout value in seconds. Additional information: The number of seconds the device waits after a job related continuable error occurs before automatically continuing. An example job related continuable error is the job requesting a media size that is not available. After the timeout expires, the device will continue processing the job accourding to the action defined by JOB-INPUT-AUTO-CONTINUE-MODE. If the value is -1, the device does not automatically continue after a job related continuable error occurs. If the value is 0, the device immediately continues. If the value is greater than 0, the value represents the timeout value in seconds. The data for this object is stored in NVRAM.')
# job-input-auto-continue-mode (OID ...2.36): read-write OCTET STRING
# collection (bitmask) selecting the action taken when the requested media is
# unavailable; at least one bit must be set or the set returns <badValue>.
job_input_auto_continue_mode = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 2, 36), OctetString()).setLabel("job-input-auto-continue-mode").setMaxAccess("readwrite")
if mibBuilder.loadTexts: job_input_auto_continue_mode.setStatus('optional')
if mibBuilder.loadTexts: job_input_auto_continue_mode.setDescription('Returns or sets the device behavior when the desired media is not currently available. cCancelJob - The device cancels the job. The device POS should explain what happens if this item is not the only item in the collection. cAutoMediaSizeOverride - The device is allowed to substitute a different size media. cAutoMediaNameOverride - The device is allowed to substitute a different media name. cUSMediaSizeOverride - The device is allowed to substitute US media sizes (letter, etc.) for ISO media sizes (A4, etc.). cISOMediaSizeOverride - The device is allowed to substitute ISO media sizes (A4, etc.) for US media sizes (letter, etc.). Additional information: Returns or sets the device behavior when the desired media is not currently available. cCancelJob - The device cancels the job regardless of other item settings. cAutoMediaSizeOverride - The device is allowed to substitute a different size media. cAutoMediaNameOverride - The device is allowed to substitute a different media name. -- cUSMediaSizeOverride - The device is allowed to substitute -- US media sizes (letter, etc.) for ISO media sizes -- (A4, etc.). -- cISOMediaSizeOverride - The device is allowed to substitute -- ISO media sizes (A4, etc.) for US media sizes -- (letter, etc.). At least one bit of the collection must be set; setting this object to zero will cause a status of <badValue> to be returned. This object describes the action that is performed when the JOB-INPUT-AUTO-CONTINUE-TIMEOUT expires. ')
# job-output-auto-continue-timeout (OID ...2.40): read-write Integer32 in
# [-1, 3600] seconds to wait on a full output bin; -1 waits for a continue
# event.
job_output_auto_continue_timeout = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 2, 40), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-1, 3600))).setLabel("job-output-auto-continue-timeout").setMaxAccess("readwrite")
if mibBuilder.loadTexts: job_output_auto_continue_timeout.setStatus('optional')
if mibBuilder.loadTexts: job_output_auto_continue_timeout.setDescription('Returns or sets the time that the printer will wait after an output bin becomes full and the printer is trying to deliver a sheet of media to that output bin. When the timeout expires, the job is processed according to the OUTBINn-OVERRIDE-MODE. A value of -1 indicates that the printer will wait for a continue event. A non-negative value is the number of seconds to wait. Additional information: Returns or sets the time that the printer will wait after an output bin becomes full and the printer is trying to deliver a sheet of media to that output bin. When the timeout expires, the job is processed according to the OUTBINn-OVERRIDE-MODE. A value of -1 indicates that the printer will wait for a continue event. A non- negative value is the number of seconds to wait.')
# model-number (OID ...3.1): read-only OCTET STRING — HP corporate price
# list model number; does not vary with installed options on this product.
model_number = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 3, 1), OctetString()).setLabel("model-number").setMaxAccess("readonly")
if mibBuilder.loadTexts: model_number.setStatus('optional')
if mibBuilder.loadTexts: model_number.setDescription('Identifies the device model number as listed in the HP corporate price list (e.g. C2121A for DeskJet 500C). The string is as specific as possible. Products should note in POS if the model number on the CPL changes but the device reports the previous model number. If the model number changes based on the installed options, the POS should indicate if only the base model number is returned, or if the device senses the installed options and returns the correct model number. If possible, encode the model number in a symbol set (like Roman-8) that matches the ASCII character set and limit the characters used to ASCII characters. Additional information: Identifies the device model number as listed in the HP corporate price list (e.g. C2121A for DeskJet 500C). The string is as specific as possible. The value of this object does not change based on the installed options. The default of this object is the same on all $product_str printers.')
# model-name (OID ...3.2): read-only OCTET STRING (1-32 bytes) — kept short
# for display purposes; independent of installed options.
model_name = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 3, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 32))).setLabel("model-name").setMaxAccess("readonly")
if mibBuilder.loadTexts: model_name.setStatus('optional')
if mibBuilder.loadTexts: model_name.setDescription("Identifies the device model name (e.g. ''DeskJet 1200C''). The string is as specific as possible. Capitalization and spacing should match family naming conventions. Products should note in POS if the model name on the HP corporate price list changes but the device reports the previous device name. If the model name changes based on the installed options, the POS should indicate if only the base model name is returned, or if the device senses the installed options and returns the correct model name. If possible, encode the model name in a symbol set (like Roman-8) that matches the ASCII character set and limit the characters used to ASCII characters. Additional information: Since the value of this object is frequently used in displaying a list of printers, it is kept relatively short in case systems have limited width for their display area. The model name does not change based on sensing of installed options.")
# formatter-serial-number (OID ...3.20): read-only OCTET STRING — formatter
# serial number prefixed with PML_UNICODE_PREFIX.
formatter_serial_number = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 3, 20), OctetString()).setLabel("formatter-serial-number").setMaxAccess("readonly")
if mibBuilder.loadTexts: formatter_serial_number.setStatus('optional')
if mibBuilder.loadTexts: formatter_serial_number.setDescription('This object returns the formatter serial number for the device. The value returned from this object is the FORMATTERNUMBER system variable. If possible, encode the serial number in a symbol set (like Roman-8) that matches the ASCII character set and limit the characters used to ASCII characters. Additional information: Returns the formatter serial number, prefixed with the PML_UNICODE_PREFIX..')
# engine-self-diagnostic (OID ...5.7): read-only OCTET STRING — binary list
# of two-byte motor/clutch/solenoid/sensor failure codes.
engine_self_diagnostic = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 5, 7), OctetString()).setLabel("engine-self-diagnostic").setMaxAccess("readonly")
if mibBuilder.loadTexts: engine_self_diagnostic.setStatus('optional')
if mibBuilder.loadTexts: engine_self_diagnostic.setDescription('The ENGINE-SELF-DIAGNOSTIC object reveals current engine failures; it returns a binary string of two-byte motor, clutch, solenoid, and sensor failure codes. Additional information: The ENGINE-SELF-DIAGNOSTIC object reveals current Engine Failures; it returns a BINARY string of two-byte Motor, Clutch, Solenoid and Sensor failure codes.')
# default-media-name (OID ...3.3.1.22): read-write OCTET STRING — the media
# name in effect until a print job command changes it; must match one of the
# MEDIAn-NAME objects. Stored in NVRAM.
default_media_name = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 3, 3, 1, 22), OctetString()).setLabel("default-media-name").setMaxAccess("readwrite")
if mibBuilder.loadTexts: default_media_name.setStatus('optional')
if mibBuilder.loadTexts: default_media_name.setDescription('Returns or sets the media name that is used until the media name is changed by a print job command. Additional information: Returns or sets the media name that is used until the media name is changed by a print job command. This string must be one of the MEDIAn-NAME objects. This object is localized if the corresponding MEDIAn-NAME object is localized. The data for this object is stored in NVRAM.')
# override-media-name (OID ...4.1.1.2): read-write OCTET STRING — substitute
# media name while a paper mount request is pending; <noSuchName> otherwise,
# <badValue> if the name is not an available MEDIAn-NAME.
override_media_name = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 1, 2), OctetString()).setLabel("override-media-name").setMaxAccess("readwrite")
if mibBuilder.loadTexts: override_media_name.setStatus('optional')
if mibBuilder.loadTexts: override_media_name.setDescription('Sets a string identifying the media name that is to be used in place of the currently requested media. The substitution will continue until another media is selected. If set to a named media that is not currently available the requested media is not overridden. Additional information: When a request is received to print on a size and type of media that is not currently available, this object contains the desired media name as set by the print job. This object should be set to a media name that is currently available in the printer. If a paper mount request is not pending, attempting to get or set this object will cause <noSuchName> to be returned. Setting this object to a string other than one of the MEDIAn-NAME objects (MEDIA-NAMES-AVAILABLE is applied) will cause a status of <badValue> to be returned.')
# override-media-size (OID ...4.1.1.3): read-write enumerated Integer32 over
# the product's supported media-size codes (US, ISO/JIS, PRC, envelopes,
# eCustom, rotated sizes, eUnknownMediaSize); only valid while a paper mount
# request is pending.
override_media_size = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 11, 17, 18, 19, 25, 26, 27, 33, 34, 35, 45, 46, 65, 72, 80, 81, 89, 90, 91, 100, 101, 258, 282, 32767))).clone(namedValues=NamedValues(("eUSExecutive", 1), ("eUSLetter", 2), ("eUSLegal", 3), ("eLedger", 11), ("eROC16K", 17), ("eJISExecutive", 18), ("eROC8K", 19), ("eISOandJISA5", 25), ("eISOandJISA4", 26), ("eISOandJISA3", 27), ("ePRC8K270X390", 33), ("ePRC16K195X270", 34), ("ePRC8K260X368", 35), ("eJISB5", 45), ("eJISB4", 46), ("eISOB5", 65), ("eJapanesePostcardDouble", 72), ("eMonarch", 80), ("eCommercial10", 81), ("ePRC16K184X260", 89), ("eInternationalDL", 90), ("eInternationalC5", 91), ("eInternationalB5", 100), ("eCustom", 101), ("eUSLetterR", 258), ("eISOandJISA4R", 282), ("eUnknownMediaSize", 32767)))).setLabel("override-media-size").setMaxAccess("readwrite")
if mibBuilder.loadTexts: override_media_size.setStatus('optional')
if mibBuilder.loadTexts: override_media_size.setDescription('Sets the media size that is to be used in place of the currently requested media size. Additional information: When a request is received to print on a size and type of media that is not currently available, this object contains the desired size as set by the print job. This object should be set to a media size that is currently available to the printer. If a paper mount request is not pending, attempting to get or set this object will cause <noSuchName> to be returned.')
# print-density (OID ...4.1.1.5): read-write Integer32 constrained to [0, 5]
# here even though the description talks about a 0-10 range — the subtype
# constraint is authoritative for what sets will be accepted.
print_density = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 5))).setLabel("print-density").setMaxAccess("readwrite")
if mibBuilder.loadTexts: print_density.setStatus('optional')
if mibBuilder.loadTexts: print_density.setDescription('Returns or sets the print density setting. Print density is the instantaneous amount of marking agent applied to the paper while printing. A value of zero has the lowest print density, yielding a lighter page. A value of 10 has the highest print density, yielding a darker page. Each POS should document what values in the 0 to 10 range are supported. Additional information: Returns or sets the print density setting. Print density is the instantaneous amount of marking agent applied to the paper while printing. A value of zero has the lowest print density, yielding a lighter page. A value of 10 has the highest print density, yielding a darker page. Each POS should document what values in the 0 to 10 range are supported.')
# marking-agent-density-setting (OID ...4.1.1.9.1): read-write Integer32 —
# per-marking-agent density; lower is lighter, higher is darker.
marking_agent_density_setting = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 1, 9, 1), Integer32()).setLabel("marking-agent-density-setting").setMaxAccess("readwrite")
if mibBuilder.loadTexts: marking_agent_density_setting.setStatus('optional')
if mibBuilder.loadTexts: marking_agent_density_setting.setDescription('Returns or sets the marking agent density setting for each of the marking agents installed. The Marking Agent (aka Print) density is the instantaneous amount of marking agent applied to the media while printing. A value of zero has the lowest print density, yielding a lighter page. A value of 10 has the highest print density, yielding a darker page. The device POS will document what values are supported. Additional information: Returns or sets the print density setting. Print density is the instantaneous amount of marking agent applied to the paper while printing. A value of zero has the lowest print density, yielding a lighter page. A value of 10 has the highest print density, yielding a darker page.')
# duplex-page-count (OID ...4.1.2.22): read-only Integer32 — lifetime count
# of duplexed sheets; NVRAM copy is flushed every 10 sheets and the counter
# wraps at 2^24-1.
duplex_page_count = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 2, 22), Integer32()).setLabel("duplex-page-count").setMaxAccess("readonly")
if mibBuilder.loadTexts: duplex_page_count.setStatus('optional')
if mibBuilder.loadTexts: duplex_page_count.setDescription('Total number of sheets of media that have been duplex printed. A sheet is counted if it travels through the duplex page path, regardless of whether or not marks are made on the page. The POS will indicate if the value is kept in NVRAM. Additional information: Total number of sheets of media that have been duplex printed. A sheet is counted if it travels through the duplex page path, regardless of whether or not marks are made on the page. This value is kept in NVRAM however the NVRAM value is only updated every 10 sheets. NOTE: The value returned by this object will be incremented every sheet but if power is lost between NVRAM updates up to 9 sheets of the count may be lost. The counter will be reset to zero after 16,777,215 (2^24-1) pages. ')
# print-engine-revision (OID ...4.1.2.26): read-only OCTET STRING
# (1-16 bytes), Roman-8 symbol set.
print_engine_revision = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 2, 26), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 16))).setLabel("print-engine-revision").setMaxAccess("readonly")
if mibBuilder.loadTexts: print_engine_revision.setStatus('optional')
if mibBuilder.loadTexts: print_engine_revision.setDescription('Print engine revision string. Additional information: Print engine revision string. The symbol set for this string is Roman-8. ')
# input-tray-auto-select (OID ...4.1.3.1.2): read-write enumerated Integer32
# (eOff=1/eOn=2) — whether the device falls through to the next unlocked
# tray in the auto-select sequence when the current tray cannot feed.
input_tray_auto_select = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 3, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("eOff", 1), ("eOn", 2)))).setLabel("input-tray-auto-select").setMaxAccess("readwrite")
if mibBuilder.loadTexts: input_tray_auto_select.setStatus('optional')
if mibBuilder.loadTexts: input_tray_auto_select.setDescription('Indicates if the device will automatically try to load media from the next input media tray in the auto-select sequence (defined by each device) when it cannot load media from the current tray. Locked trays will not be permitted in the auto-select sequence. This object has no meaning if there is only one unlocked input media tray. Additional information: Indicates if the device will automatically try to load media from the next input media tray in the auto-select sequence (defined by each device) when it cannot load media from the current tray. Locked trays will not be permitted in the auto-select sequence. This object has no meaning if there is only one unlocked input media tray.')
# custom-paper-feed-dim (OID ...4.1.3.1.8): read-write Integer32 — custom
# media height (feed direction); units selected by CUSTOM-PAPER-DIM-UNIT
# (micrometers or 1/10000 inch).
custom_paper_feed_dim = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 3, 1, 8), Integer32()).setLabel("custom-paper-feed-dim").setMaxAccess("readwrite")
if mibBuilder.loadTexts: custom_paper_feed_dim.setStatus('optional')
if mibBuilder.loadTexts: custom_paper_feed_dim.setDescription("Sets the printer's custom paper dimension in the feed direction (direction parallel to the direction of paper feeding). The value for this object is specified in micrometers or tenthousandths of an inch, depending upon the value of CUSTOM-PAPER-DIM-UNIT. The valid range is engine-dependent and should be documented in the POS for each product. Additional information: Get/Set custom paper dimension (height). The return/set value is either in micrometers or 10,000ths of inches. A tray has to be in custom switch or without media size sensor to be able to set the custom dimension. If it is successfully setting the dimension value, the size of a tray is set to CUSTOM.")
# custom-paper-xfeed-dim (OID ...4.1.3.1.9): read-write Integer32 — custom
# media width (cross-feed direction); same unit selection as above.
custom_paper_xfeed_dim = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 3, 1, 9), Integer32()).setLabel("custom-paper-xfeed-dim").setMaxAccess("readwrite")
if mibBuilder.loadTexts: custom_paper_xfeed_dim.setStatus('optional')
if mibBuilder.loadTexts: custom_paper_xfeed_dim.setDescription("Sets the printer's custom paper dimension in the cross-feed direction (direction ninety degrees relative to the direction of paper feeding). The value for this object is specified in micrometers or tenthousandths of an inch, depending upon the value of CUSTOM-PAPER-DIM-UNIT. The valid range is engine-dependent and should be documented in the POS for each product. Additional information: Get/Set custom paper dimension (width). The return/set value is either in micrometers or 10,000ths of inches. A tray has to be in custom switch or without media size sensor to be able to set the custom dimension. If it is successfully setting the dimension value, the size of a tray is set to CUSTOM.")
# default-custom-paper-dim-unit (OID ...4.1.3.1.10): read-write enumerated
# Integer32 — eTenThousandthsOfInches(3) or eMicrometers(4) for the default
# custom paper dimensions below.
default_custom_paper_dim_unit = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 3, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(3, 4))).clone(namedValues=NamedValues(("eTenThousandthsOfInches", 3), ("eMicrometers", 4)))).setLabel("default-custom-paper-dim-unit").setMaxAccess("readwrite")
if mibBuilder.loadTexts: default_custom_paper_dim_unit.setStatus('optional')
if mibBuilder.loadTexts: default_custom_paper_dim_unit.setDescription("The units of measure used to specify the width and height of the printer's default custom paper size. The unit of measure of eTenThousandthsOfInches is 0.0001 inches. Additional information: The units of measure used to specify the width and height of the printer's default custom paper size. The unit of measure of eTenThousandthsOfInches is 0.0001 inches.")
# default-custom-paper-feed-dim (OID ...4.1.3.1.11): read-write Integer32 —
# default custom paper height (feed direction), in the unit selected by
# DEFAULT-CUSTOM-PAPER-DIM-UNIT.
default_custom_paper_feed_dim = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 3, 1, 11), Integer32()).setLabel("default-custom-paper-feed-dim").setMaxAccess("readwrite")
if mibBuilder.loadTexts: default_custom_paper_feed_dim.setStatus('optional')
if mibBuilder.loadTexts: default_custom_paper_feed_dim.setDescription("Sets the printer's default custom paper size dimension in the feed direction (direction parallel to the direction of paper feeding). The value for this object is specified in micrometers or tenthousandths of an inch, depending upon the value of DEFAULT-CUSTOM-PAPER-DIM-UNIT. The valid range is engine-dependent and should be documented in the POS for each product. Additional information: Sets the printer's default custom paper size dimension in the feed direction (direction parallel to the direction of paper feeding). The value for this object is specified in micrometers or tenthousandths of an inch, depending upon the value of DEFAULT-CUSTOM-PAPER-DIM-UNIT. The valid range is engine-dependent and should be documented in the POS for each product.")
default_custom_paper_xfeed_dim = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 3, 1, 12), Integer32()).setLabel("default-custom-paper-xfeed-dim").setMaxAccess("readwrite")
if mibBuilder.loadTexts: default_custom_paper_xfeed_dim.setStatus('optional')
if mibBuilder.loadTexts: default_custom_paper_xfeed_dim.setDescription("Sets the printer's default custom paper size dimension in the cross-feed direction (direction ninety degrees relative to the direction of paper feeding). The value for this object is specified in micrometers or tenthousandths of an inch, depending upon the value of DEFAULT-CUSTOM-PAPER-DIM-UNIT. The valid range is engine-dependent and should be documented in the POS for each product. Additional information: Sets the printer's default custom paper size dimension in the cross-feed direction (direction ninety degrees relative to the direction of paper feeding). The value for this object is specified in micrometers or tenthousandths of an inch, depending upon the value of DEFAULT-CUSTOM-PAPER-DIM-UNIT. The valid range is engine-dependent and should be documented in the POS for each product.")
input_tray_max_media_feed_dim = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 3, 1, 14), Integer32()).setLabel("input-tray-max-media-feed-dim").setMaxAccess("readonly")
if mibBuilder.loadTexts: input_tray_max_media_feed_dim.setStatus('optional')
if mibBuilder.loadTexts: input_tray_max_media_feed_dim.setDescription("The maximum physical media size in the feed direction of this input device expressed in units of measure specified by INPUT- TRAY-MIN-MAX-DIM-UNIT. A value of (-1) implies 'unlimited', a value of (-2) implies 'unknown'. Additional information: The maximum physical media size in the feed direction of this input device expressed in units of measure specified by PrtInputDimUnit. A value of (-1) implies 'unlimited', a value of (-2) implies 'unknown'. Refer to Printer Management Standards web page, http://bldlabs.boi.hp.com/BLDPrinterLab/Project/PrinterManagement, for more details in the original format of the Standard Printer MIB.")
input_tray_max_media_xfeed_dim = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 3, 1, 15), Integer32()).setLabel("input-tray-max-media-xfeed-dim").setMaxAccess("readonly")
if mibBuilder.loadTexts: input_tray_max_media_xfeed_dim.setStatus('optional')
if mibBuilder.loadTexts: input_tray_max_media_xfeed_dim.setDescription("The maximum physical media size across the feed direction of a particular input device expressed in units of measure specified by INPUT-TRAY-MIN-MAX-DIM-UNIT. A value of (-1) implies 'unlimited', a value of (-2) implies 'unknown'. Additional information: The maximum physical media size across the feed direction of this input device expressed in units of measure specified by PrtInputDimUnit. A value of (-1) implies 'unlimited', a value of (-2) implies 'unknown'. Refer to Printer Management Standards web page, http://bldlabs.boi.hp.com/BLDPrinterLab/Project/PrinterManagement, f or more details in the original format of the Standard Printer MIB.")
input_tray_min_media_feed_dim = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 3, 1, 16), Integer32()).setLabel("input-tray-min-media-feed-dim").setMaxAccess("readonly")
if mibBuilder.loadTexts: input_tray_min_media_feed_dim.setStatus('optional')
if mibBuilder.loadTexts: input_tray_min_media_feed_dim.setDescription("The minimum physical media size in the feed direction of a particular input device expressed in units of measure specified by PrtInputMinMaxDimUnit. A value of (-1) implies 'unlimited', a value of (-2) implies 'unknown'. Additional information: The minimum physical media size in the feed direction of this input device expressed in units of measure specified by PrtInputDimUnit. A value of (-1) implies 'unlimited', a value of (-2) implies 'unknown'. Refer to Printer Management Standards web page, http://bldlabs.boi.hp.com/BLDPrinterLab/Project/PrinterManagement, for more details in the original format of the Standard Printer MIB.")
input_tray_min_media_xfeed_dim = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 3, 1, 17), Integer32()).setLabel("input-tray-min-media-xfeed-dim").setMaxAccess("readonly")
if mibBuilder.loadTexts: input_tray_min_media_xfeed_dim.setStatus('optional')
if mibBuilder.loadTexts: input_tray_min_media_xfeed_dim.setDescription("The minimum physical media size across the feed direction of a particular input device expressed in units of measure specified by PrtInputMinMaxDimUnit. A value of (-1) implies 'unlimited', a value of (-2) implies 'unknown'. Additional information: The minimum physical media size across the feed direction of this input device expressed in units of measure specified by PrtInputDimUnit. A value of (-1) implies 'unlimited', a value of (-2) implies 'unknown'. Refer to Printer Management Standards web page, http://bldlabs.boi.hp.com/BLDPrinterLab/Project/PrinterManagement, for more details in the original format of the Standard Printer MIB.")
# --- Per-tray media-size / paper-handling-device scalars (trays 1..3) ---
# Each tray has a read-write "media size loaded" enum (value set differs per
# tray — tray 1 accepts more sizes, e.g. envelopes and postcards) and a
# read-only "phd" number identifying the paper handling device hosting it.
# NOTE(review): the duplicated word "configuredconfigured" in the tray 1
# description below is carried over verbatim from the source MIB text; it is
# a runtime string, not a comment, so it is intentionally left untouched here.
tray1_media_size_loaded = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 3, 3, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 10, 11, 15, 17, 18, 19, 24, 25, 26, 27, 33, 34, 35, 44, 45, 46, 71, 72, 80, 81, 89, 90, 91, 100, 101, 120, 258, 282, 32764, 32765))).clone(namedValues=NamedValues(("eUSExecutive", 1), ("eUSLetter", 2), ("eUSLegal", 3), ("eFoolscap", 10), ("eLedger", 11), ("eMini", 15), ("eROC16K", 17), ("eJISExecutive", 18), ("eROC8K", 19), ("eISOandJISA6", 24), ("eISOandJISA5", 25), ("eISOandJISA4", 26), ("eISOandJISA3", 27), ("ePRC8K270X390", 33), ("ePRC16K195X270", 34), ("ePRC8K260X368", 35), ("eJISB6", 44), ("eJISB5", 45), ("eJISB4", 46), ("eJapansePostcardSingle", 71), ("eJapansePostcardDouble", 72), ("eMonarch", 80), ("eCommercial10", 81), ("ePRC16K184X260", 89), ("eInternationalDL", 90), ("eInternationalC5", 91), ("eInternationalB5", 100), ("eCustom", 101), ("eTabloidExtra", 120), ("eUSLetterR", 258), ("eISOandJISA4R", 282), ("eAnyCustomSize", 32764), ("eAnySize", 32765)))).setLabel("tray1-media-size-loaded").setMaxAccess("readwrite")
if mibBuilder.loadTexts: tray1_media_size_loaded.setStatus('optional')
if mibBuilder.loadTexts: tray1_media_size_loaded.setDescription("Returns the media size that is currently configuredconfigured in tray #1. This object can be set to indicate the media size currently loaded, if the printer supports input trays that can not sense the media size. Complete list of supported media sizes along with their dimensions are listed in the ''Media Size Table'' near the end of this document. Additional information: Returns the media size that is currently configuredconfigured in tray #1. This object can be set to indicate the media size currently loaded, if the printer supports input trays that can not sense the media size. Complete list of supported media sizes along with their dimensions are listed in the ''Media Size Table'' near the end of this document.")
tray1_phd = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 3, 3, 1, 12), Integer32()).setLabel("tray1-phd").setMaxAccess("readonly")
if mibBuilder.loadTexts: tray1_phd.setStatus('optional')
if mibBuilder.loadTexts: tray1_phd.setDescription('Provides the number of the Paper Handling Device that contains this input tray. Additional information: Provides the number of the Paper Handling Device that contains this input tray.')
# Trays 2 and 3 share the same (smaller) enum of cassette-style media sizes.
tray2_media_size_loaded = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 3, 3, 2, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 10, 11, 17, 18, 19, 25, 26, 27, 33, 34, 35, 45, 46, 89, 101, 258, 282, 32764, 32765))).clone(namedValues=NamedValues(("eUSExecutive", 1), ("eUSLetter", 2), ("eUSLegal", 3), ("eFoolscap", 10), ("eLedger", 11), ("eROC16K", 17), ("eJISExecutive", 18), ("eROC8K", 19), ("eISOandJISA5", 25), ("eISOandJISA4", 26), ("eISOandJISA3", 27), ("ePRC8K270X390", 33), ("ePRC16K195X270", 34), ("ePRC8K260X368", 35), ("eJISB5", 45), ("eJISB4", 46), ("ePRC16K184X260", 89), ("eCustom", 101), ("eUSLetterR", 258), ("eISOandJISA4R", 282), ("eAnyCustomSize", 32764), ("eAnySize", 32765)))).setLabel("tray2-media-size-loaded").setMaxAccess("readwrite")
if mibBuilder.loadTexts: tray2_media_size_loaded.setStatus('optional')
if mibBuilder.loadTexts: tray2_media_size_loaded.setDescription("Returns the media size that is currently configured in tray #2. Complete list of supported media sizes along with their dimensions are listed in the ''Media Size Table'' near the end of this document. Additional information: Returns the media size that is currently configured in tray #2.")
tray2_phd = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 3, 3, 2, 12), Integer32()).setLabel("tray2-phd").setMaxAccess("readonly")
if mibBuilder.loadTexts: tray2_phd.setStatus('optional')
if mibBuilder.loadTexts: tray2_phd.setDescription('Provides the number of the Paper Handling Device that contains this input tray. Additional information: Provides the number of the Paper Handling Device that contains this input tray.')
tray3_media_size_loaded = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 3, 3, 3, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 10, 11, 17, 18, 19, 25, 26, 27, 33, 34, 35, 45, 46, 89, 101, 258, 282, 32764, 32765))).clone(namedValues=NamedValues(("eUSExecutive", 1), ("eUSLetter", 2), ("eUSLegal", 3), ("eFoolscap", 10), ("eLedger", 11), ("eROC16K", 17), ("eJISExecutive", 18), ("eROC8K", 19), ("eISOandJISA5", 25), ("eISOandJISA4", 26), ("eISOandJISA3", 27), ("ePRC8K270X390", 33), ("ePRC16K195X270", 34), ("ePRC8K260X368", 35), ("eJISB5", 45), ("eJISB4", 46), ("ePRC16K184X260", 89), ("eCustom", 101), ("eUSLetterR", 258), ("eISOandJISA4R", 282), ("eAnyCustomSize", 32764), ("eAnySize", 32765)))).setLabel("tray3-media-size-loaded").setMaxAccess("readwrite")
if mibBuilder.loadTexts: tray3_media_size_loaded.setStatus('optional')
if mibBuilder.loadTexts: tray3_media_size_loaded.setDescription("Returns the media size that is currently configured in tray #3. Complete list of supported media sizes along with their dimensions are listed in the ''Media Size Table'' near the end of this document. Additional information: Returns the media size that is currently configured in tray #3.")
tray3_phd = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 3, 3, 3, 12), Integer32()).setLabel("tray3-phd").setMaxAccess("readonly")
if mibBuilder.loadTexts: tray3_phd.setStatus('optional')
if mibBuilder.loadTexts: tray3_phd.setDescription('Provides the number of the Paper Handling Device that contains this input tray. Additional information: Provides the number of the Paper Handling Device that contains this input tray.')
# --- Output-bin behavior and media configuration scalars ---
# overflow_bin: read-write bin number used when the current bin fills and
# printing is allowed to continue (persisted in NVRAM per the description).
overflow_bin = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 4, 1, 4), Integer32()).setLabel("overflow-bin").setMaxAccess("readwrite")
if mibBuilder.loadTexts: overflow_bin.setStatus('optional')
if mibBuilder.loadTexts: overflow_bin.setDescription('Returns or sets the bin that will be used for additional sheets of media when the current bin is full and printing is allowed to continue. Additional information: Returns or sets the bin that will be used for additional sheets of media when the current bin is full and printing is allowed to continue. The data for this object is stored in NVRAM.')
# outbin1_override_mode: bit-flag OctetString selecting the action taken when
# an output-bin condition stops printing (cancel job / overflow / ignore).
outbin1_override_mode = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 4, 3, 1, 9), OctetString()).setLabel("outbin1-override-mode").setMaxAccess("readwrite")
if mibBuilder.loadTexts: outbin1_override_mode.setStatus('optional')
if mibBuilder.loadTexts: outbin1_override_mode.setDescription('Returns or sets the device behavior when this output bin condition causes printing to stop. cCancelJob - The device cancels the job. cOutbinFullOverride - The device sends subsequent media to the overflow bin. cOutbinAttentionOverride - The device ignores the attention condition and continues printing. cBinderAttentionOverride - The device ignores the binder attention condition and continues printing. Additional information: Returns or sets the device behavior when this output bin condition causes printing to stop. cCancelJob - The device cancels the job, regardless of other bit settings. cOutbinFullOverride - The device sends subsequent media to the overflow bin. If this bin is the overflow bin, this bit is ignored. cOutbinAttentionOverride - The device ignores the attention condition and continues printing. cBinderAttentionOverride - The device ignores the binder attention condition and continues printing. This object describes the action that is performed when the JOB-OUTPUT-AUTO-CONTINUE-TIMEOUT expires. If no bits are set, no override action is taken (the printer will continue to wait).')
# media_names_available: bitmask controlling which MEDIAx-NAME objects below
# are exposed; unavailable objects answer <noSuchName> per the description.
media_names_available = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8, 1, 1), OctetString()).setLabel("media-names-available").setMaxAccess("readwrite")
if mibBuilder.loadTexts: media_names_available.setStatus('optional')
if mibBuilder.loadTexts: media_names_available.setDescription('The value of this object controls which of the MEDIAx-NAME objects are supported. If a bit is set to zero, then attempting to get or set the corresponding MEDIAx-NAME objects will return <noSuchName>. Additional information: Setting a bit to one will cause the corresponding MEDIAn- objects to be available (attempting to access an unavailable object will return <noSuchName>). MEDIA1- objects are always present, as this is the default media. If this object is set to a value that does not include cMediaName2Available, that bit will be set and a status of <noError> will be returned.')
# north_edge_offset: signed image shift (in 300 dpi dots) relative to the
# leading (North) edge of the medium; 0 restores the factory position.
north_edge_offset = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8, 1, 2), Integer32()).setLabel("north-edge-offset").setMaxAccess("readwrite")
if mibBuilder.loadTexts: north_edge_offset.setStatus('optional')
if mibBuilder.loadTexts: north_edge_offset.setDescription('Returns or sets the number of 300 dpi dots by which the image is shifted. Shift is relative to the leading edge of the medium as the medium flows through the marking engine with the side to be imaged facing the observer. The leading edge is the North edge and the other edges are defined by the normal compass layout of directions with the compass facing the observer. The adjustment is for all pages printed. A positive value moves the image away from the leading edge of the medium. A negative value moves the image closer to the leading edge of the medium. The value 0 will return the image to its factory default position. Additional information: Returns or sets the number of 300 dpi dots by which the image is shifted. Shift is relative to the leading edge of the medium as the medium flows through the marking engine with the side to be imaged facing the observer. The leading edge is the North edge and the other edges are defined by the normal compass layout of directions with the compass facing the ob server. The adjustment is for all pages printed. A positive value moves the image away from the leading edge of the medium. A negative value moves the image closer to the leading edge of the medium. The value 0 will return the image to its factory default position. The value of this object is stored in NVRAM.')
# --- Media type #1 scalars (the reference set for media 2..N below) ---
# Four objects per media type under OID ...4.1.8.3.<media>.<attr>:
#   .1 name (OctetString, 1..24 chars, read-write)
#   .2 short-name (OctetString, 1..9 chars, control-panel display)
#   .3 page-count (Integer32, read-only)
#   .4 engine-media-mode (Integer32, read-write)
# Later media types' descriptions simply refer back to these MEDIA1- objects.
media1_name = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8, 3, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 24))).setLabel("media1-name").setMaxAccess("readwrite")
if mibBuilder.loadTexts: media1_name.setStatus('optional')
if mibBuilder.loadTexts: media1_name.setDescription('Media 1 name. Additional information: The symbol set for this string is Roman-8.')
media1_short_name = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8, 3, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 9))).setLabel("media1-short-name").setMaxAccess("readwrite")
if mibBuilder.loadTexts: media1_short_name.setStatus('optional')
if mibBuilder.loadTexts: media1_short_name.setDescription("Length restricted version of the media name 1. The length restriction is required to allow the media name to be displayed on the device's control panel. The device POS must state the maximum number of characters allowed. If the device also has a limitation on what characters in a character set can be used (e.g. only uppercase characters allowed), the POS should also state character restrictions. Additional information: Length restricted version of the media name 1. The length restriction is required to allow the media name to be displayed on the device's control panel. The maximum supported string length is 9 characters. If the user entered string is too long, the device will store the first 9 characters and will return the <noError> status. The characters must be in the range 20H to 7FH except 5C cannot be used. The default symbol set is Roman-8 for English; additional legal symbol sets are ISOLatin5, ISOLatin2 and Windows31J. Setting this object with characters outside of the range or of an illegal symbol set will cause an error status of <badValue> to be returned. This string is localized according to prtConsoleLocalization. If this object represents a standard type, and the user attempts to set the object, 'OK Nearest Legal Value Substituted' will be returned, and the standard value is retained. If this object represents a user defined type, and the user attempts to set the object, then the set will be successfull.")
media1_page_count = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8, 3, 1, 3), Integer32()).setLabel("media1-page-count").setMaxAccess("readonly")
if mibBuilder.loadTexts: media1_page_count.setStatus('optional')
if mibBuilder.loadTexts: media1_page_count.setDescription('Number of sheets of media 1 that have been printed. The device POS should state whether this value is lost across a power cycle or kept in NVRAM. Additional information: Number of sheets of media 1 that have been printed. This page count is saved in NVRAM after every 10 pages. The maximum value is 4,294,967,295 which will never be reached in normal operation. The page count is incremented when a sheet of media is pulled from an input tray. A duplex printed sheet will cause this counter to be incremented by one.')
media1_engine_media_mode = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8, 3, 1, 4), Integer32()).setLabel("media1-engine-media-mode").setMaxAccess("readwrite")
if mibBuilder.loadTexts: media1_engine_media_mode.setStatus('optional')
if mibBuilder.loadTexts: media1_engine_media_mode.setDescription('The engine processing characteristics that are to be applied to this media. The processing characteristics are device specific. Additional information: Displays the engine processing characterstics that are applied to this media type.')
media2_name = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8, 3, 2, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 24))).setLabel("media2-name").setMaxAccess("readwrite")
if mibBuilder.loadTexts: media2_name.setStatus('optional')
if mibBuilder.loadTexts: media2_name.setDescription('Media 2 name. Additional information: See MEDIA1-NAME.')
media2_short_name = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8, 3, 2, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 9))).setLabel("media2-short-name").setMaxAccess("readwrite")
if mibBuilder.loadTexts: media2_short_name.setStatus('optional')
if mibBuilder.loadTexts: media2_short_name.setDescription('Length restricted version of the media name 2. See MEDIA1-SHORT-NAME for details. Additional information: See MEDIA1-SHORT-NAME.')
media2_page_count = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8, 3, 2, 3), Integer32()).setLabel("media2-page-count").setMaxAccess("readonly")
if mibBuilder.loadTexts: media2_page_count.setStatus('optional')
if mibBuilder.loadTexts: media2_page_count.setDescription('Number of sheets of media 2 that have been printed. See MEDIA1-PAGE-COUNT for details. Additional information: See MEDIA1-PAGE-COUNT.')
media2_engine_media_mode = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8, 3, 2, 4), Integer32()).setLabel("media2-engine-media-mode").setMaxAccess("readwrite")
if mibBuilder.loadTexts: media2_engine_media_mode.setStatus('optional')
if mibBuilder.loadTexts: media2_engine_media_mode.setDescription('The engine processing characteristics that are to be applied to this media. The processing characteristics are device specific. Additional information: See the description for MEDIA1-ENGINE-MEDIA-MODE.')
media3_name = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8, 3, 3, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 24))).setLabel("media3-name").setMaxAccess("readwrite")
if mibBuilder.loadTexts: media3_name.setStatus('optional')
if mibBuilder.loadTexts: media3_name.setDescription('Media 3 name. Additional information: See MEDIA1-NAME.')
media3_short_name = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8, 3, 3, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 9))).setLabel("media3-short-name").setMaxAccess("readwrite")
if mibBuilder.loadTexts: media3_short_name.setStatus('optional')
if mibBuilder.loadTexts: media3_short_name.setDescription('Length restricted version of the media name 3. See MEDIA1-SHORT-NAME for details. Additional information: See MEDIA1-SHORT-NAME.')
media3_page_count = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8, 3, 3, 3), Integer32()).setLabel("media3-page-count").setMaxAccess("readonly")
if mibBuilder.loadTexts: media3_page_count.setStatus('optional')
if mibBuilder.loadTexts: media3_page_count.setDescription('Number of sheets of media 3 that have been printed. See MEDIA1-PAGE-COUNT for details. Additional information: See MEDIA1-PAGE-COUNT.')
media3_engine_media_mode = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8, 3, 3, 4), Integer32()).setLabel("media3-engine-media-mode").setMaxAccess("readwrite")
if mibBuilder.loadTexts: media3_engine_media_mode.setStatus('optional')
if mibBuilder.loadTexts: media3_engine_media_mode.setDescription('The engine processing characteristics that are to be applied to this media. The processing characteristics are device specific. Additional information: See the description for MEDIA1-ENGINE-MEDIA-MODE.')
media4_name = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8, 3, 4, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 24))).setLabel("media4-name").setMaxAccess("readwrite")
if mibBuilder.loadTexts: media4_name.setStatus('optional')
if mibBuilder.loadTexts: media4_name.setDescription('Media 4 name. Additional information: See MEDIA1-NAME.')
media4_short_name = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8, 3, 4, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 9))).setLabel("media4-short-name").setMaxAccess("readwrite")
if mibBuilder.loadTexts: media4_short_name.setStatus('optional')
if mibBuilder.loadTexts: media4_short_name.setDescription('Length restricted version of the media name 4. See MEDIA1-SHORT-NAME for details. Additional information: See MEDIA1-SHORT-NAME.')
media4_page_count = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8, 3, 4, 3), Integer32()).setLabel("media4-page-count").setMaxAccess("readonly")
if mibBuilder.loadTexts: media4_page_count.setStatus('optional')
if mibBuilder.loadTexts: media4_page_count.setDescription('Number of sheets of media 4 that have been printed. See MEDIA1-PAGE-COUNT for details. Additional information: See MEDIA1-PAGE-COUNT.')
media4_engine_media_mode = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8, 3, 4, 4), Integer32()).setLabel("media4-engine-media-mode").setMaxAccess("readwrite")
if mibBuilder.loadTexts: media4_engine_media_mode.setStatus('optional')
if mibBuilder.loadTexts: media4_engine_media_mode.setDescription('The engine processing characteristics that are to be applied to this media. The processing characteristics are device specific. Additional information: See the description for MEDIA1-ENGINE-MEDIA-MODE.')
media5_name = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8, 3, 5, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 24))).setLabel("media5-name").setMaxAccess("readwrite")
if mibBuilder.loadTexts: media5_name.setStatus('optional')
if mibBuilder.loadTexts: media5_name.setDescription('Media 5 name. Additional information: See MEDIA1-NAME.')
media5_short_name = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8, 3, 5, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 9))).setLabel("media5-short-name").setMaxAccess("readwrite")
if mibBuilder.loadTexts: media5_short_name.setStatus('optional')
if mibBuilder.loadTexts: media5_short_name.setDescription('Length restricted version of the media name 5. See MEDIA1-SHORT-NAME for details. Additional information: See MEDIA1-SHORT-NAME.')
media5_page_count = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8, 3, 5, 3), Integer32()).setLabel("media5-page-count").setMaxAccess("readonly")
if mibBuilder.loadTexts: media5_page_count.setStatus('optional')
if mibBuilder.loadTexts: media5_page_count.setDescription('Number of sheets of media 5 that have been printed. See MEDIA1-PAGE-COUNT for details. Additional information: See MEDIA1-PAGE-COUNT.')
media5_engine_media_mode = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8, 3, 5, 4), Integer32()).setLabel("media5-engine-media-mode").setMaxAccess("readwrite")
if mibBuilder.loadTexts: media5_engine_media_mode.setStatus('optional')
if mibBuilder.loadTexts: media5_engine_media_mode.setDescription('The engine processing characteristics that are to be applied to this media. The processing characteristics are device specific. Additional information: See the description for MEDIA1-ENGINE-MEDIA-MODE.')
media6_name = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8, 3, 6, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 24))).setLabel("media6-name").setMaxAccess("readwrite")
if mibBuilder.loadTexts: media6_name.setStatus('optional')
if mibBuilder.loadTexts: media6_name.setDescription('Media 6 name. Additional information: See MEDIA1-NAME.')
media6_short_name = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8, 3, 6, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 9))).setLabel("media6-short-name").setMaxAccess("readwrite")
if mibBuilder.loadTexts: media6_short_name.setStatus('optional')
if mibBuilder.loadTexts: media6_short_name.setDescription('Length restricted version of the media name 6. See MEDIA1-SHORT-NAME for details. Additional information: See MEDIA1-SHORT-NAME.')
media6_page_count = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8, 3, 6, 3), Integer32()).setLabel("media6-page-count").setMaxAccess("readonly")
if mibBuilder.loadTexts: media6_page_count.setStatus('optional')
if mibBuilder.loadTexts: media6_page_count.setDescription('Number of sheets of media 6 that have been printed. See MEDIA1-PAGE-COUNT for details. Additional information: See MEDIA1-PAGE-COUNT.')
media6_engine_media_mode = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8, 3, 6, 4), Integer32()).setLabel("media6-engine-media-mode").setMaxAccess("readwrite")
if mibBuilder.loadTexts: media6_engine_media_mode.setStatus('optional')
if mibBuilder.loadTexts: media6_engine_media_mode.setDescription('The engine processing characteristics that are to be applied to this media. The processing characteristics are device specific. Additional information: See the description for MEDIA1-ENGINE-MEDIA-MODE.')
# Media slots 7 through 21 all expose the same four scalars under
# 1.3.6.1.4.1.11.2.3.9.4.2.1.4.1.8.3.<slot>:
#   .1 name (rw, 1..24 octets)   .2 short-name (rw, 1..9 octets)
#   .3 page-count (ro)           .4 engine-media-mode (rw)
# Generate them in a loop and publish each object under its conventional
# module-level name (media<N>_name, media<N>_short_name, ...) via
# globals() so that later exportSymbols() calls still resolve them.
_media_subtree = (1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8, 3)
for _slot in range(7, 22):
    _name = MibScalar(_media_subtree + (_slot, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 24))).setLabel("media%d-name" % _slot).setMaxAccess("readwrite")
    _short = MibScalar(_media_subtree + (_slot, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 9))).setLabel("media%d-short-name" % _slot).setMaxAccess("readwrite")
    _count = MibScalar(_media_subtree + (_slot, 3), Integer32()).setLabel("media%d-page-count" % _slot).setMaxAccess("readonly")
    _mode = MibScalar(_media_subtree + (_slot, 4), Integer32()).setLabel("media%d-engine-media-mode" % _slot).setMaxAccess("readwrite")
    if mibBuilder.loadTexts:
        # Descriptive texts are only attached when the builder was asked to
        # carry them, matching the generated per-statement guards.
        _name.setStatus('optional')
        _name.setDescription('Media %d name. Additional information: See MEDIA1-NAME.' % _slot)
        _short.setStatus('optional')
        _short.setDescription('Length restricted version of the media name %d. See MEDIA1-SHORT-NAME for details. Additional information: See MEDIA1-SHORT-NAME.' % _slot)
        _count.setStatus('optional')
        _count.setDescription('Number of sheets of media %d that have been printed. See MEDIA1-PAGE-COUNT for details. Additional information: See MEDIA1-PAGE-COUNT.' % _slot)
        _mode.setStatus('optional')
        _mode.setDescription('The engine processing characteristics that are to be applied to this media. The processing characteristics are device specific. Additional information: See the description for MEDIA1-ENGINE-MEDIA-MODE.')
    globals()['media%d_name' % _slot] = _name
    globals()['media%d_short_name' % _slot] = _short
    globals()['media%d_page_count' % _slot] = _count
    globals()['media%d_engine_media_mode' % _slot] = _mode
# OID ...1.4.1.8.4.1: read-only, comma-separated list of the engine media
# modes the device supports (e.g. 'Normal,Rough,Low,Vellum').
_emms_scalar = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8, 4, 1), OctetString())
engine_media_modes_supported1 = _emms_scalar.setLabel("engine-media-modes-supported1").setMaxAccess("readonly")
if mibBuilder.loadTexts:
    engine_media_modes_supported1.setStatus('optional')
    engine_media_modes_supported1.setDescription("The list of engine media modes supported by the device. The modes are each separated by a comma character. An example string would be: 'Normal,Rough,Low,Vellum'. Additional information: The list of engine media modes supported by the device. The modes are each separated by a comma character. An example string would be: 'Normal,Rough,Low,Vellum'.")
# OID ...1.4.1.8.8.1: read-only count of supported media types; per the
# generated text it also bounds the significant bits of MEDIA-NAMES-AVAILABLE.
_mnots_scalar = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8, 8, 1), Integer32())
media_number_of_type_supported = _mnots_scalar.setLabel("media-number-of-type-supported").setMaxAccess("readonly")
if mibBuilder.loadTexts:
    media_number_of_type_supported.setStatus('optional')
    media_number_of_type_supported.setDescription('Indicates the maximum number of supported media types. Additional information: Indicates the number of supported media type. This also indicates which bit in MEDIA-NAMES-AVAILABLE is significant')
# OID ...1.4.1.8.7.1: read-only count of non-HP transparencies printed on;
# reset when the fuser count is zeroed (per the generated description).
_naopc_scalar = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8, 7, 1), Integer32())
non_assured_oht_page_count = _naopc_scalar.setLabel("non-assured-oht-page-count").setMaxAccess("readonly")
if mibBuilder.loadTexts:
    non_assured_oht_page_count.setStatus('optional')
    non_assured_oht_page_count.setDescription('This is a count of the number of invalid (non-HP Laser Jet) transparencies that have been printed on. This value is incremented every time an invalid OHT is printed on It is reset whenever the fuser count is set to 0. Additional information: This is a count of the number of invalid (non HP Laser Jet) transparencies that have been printed on. This value is incremented every time an invalid OHT is printed on. It is reset whenever the fuser count is set to 0.')
# OID ...1.4.1.8.5.2 (rw): west-edge image offset, first printed side only,
# in 300 dpi dots, indexed by PCL5 media-size code per the description text.
media_size_west_edge_first_side_offset = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8, 5, 2), Integer32()).setLabel("media-size-west-edge-first-side-offset").setMaxAccess("readwrite")
if mibBuilder.loadTexts: media_size_west_edge_first_side_offset.setStatus('optional')
if mibBuilder.loadTexts: media_size_west_edge_first_side_offset.setDescription('Returns or sets the number of 300 dpi dots by which the image is shifted relative to the west edge of the medium (see NORTH-EDGE-OFFSET). The adjustment is for the first printed side of the medium only. A positive value moves the image away from the west edge of the medium. A negative value moves the image closer to the west edge of the medium. The value 0 will return the image to its factory default position. Additional information: Returns or sets the number of 300 dpi dots by which the image is shifted relative to the west edge of the medium (see NORTH-EDGE-OFFSET). The adjustment is for the first printed side of the medium only. A positive value moves the image away from the west edge of the medium. A negative value moves the image closer to the west edge of the medium. The value 0 will return the image to its factory default position. The values in the sub array index are from the media size table in the hpmib. This adjustment is done on a paper size by paper size basis. The standard PCL5 codes for paper size are used for the value used in the OID. Please see S_ARRAY_SUB1 for legal ')
# OID ...1.4.1.8.5.3 (rw): west-edge image offset, second printed side only,
# in 300 dpi dots, indexed by PCL5 media-size code; stored in NVRAM per text.
media_size_west_edge_second_side_offset = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8, 5, 3), Integer32()).setLabel("media-size-west-edge-second-side-offset").setMaxAccess("readwrite")
if mibBuilder.loadTexts: media_size_west_edge_second_side_offset.setStatus('optional')
if mibBuilder.loadTexts: media_size_west_edge_second_side_offset.setDescription('Returns or sets the number of 300 dpi dots by which the image is shifted relative to the west edge of the medium (see NORTH-EDGE-OFFSET). The adjustment is for the second printed side of the medium only. A positive value moves the image away from the west edge of the medium. A negative value moves the image closer to the west edge of the medium. The value 0 will return the image to its factory default position. Additional information: Returns or sets the number of 300 dpi dots by which the image is shifted relative to the west edge of the medium (see NORTH-EDGE-OFFSET). The adjustment is for the second printed side of the medium only. A positive value moves the image away from the west edge of the medium. A negative value moves the image closer to the west edge of the medium. The value 0 will return the image to its factory default position. The values in the sub array index are from the media size table in the hpmib. This adjustment is done on a paper size by paper size basis. The standard PCL5 codes for paper size are used for the value used in the OID. Please see S_ARRAY_SUB1 for legal values. The data for this object is stored in NVRAM.')
# OID ...1.4.1.8.5.4 (rw): west-edge image offset in 300 dpi dots, indexed by
# tray number (up to 15 trays per the description); stored in NVRAM per text.
media_size_west_edge_side_offset_by_tray = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8, 5, 4), Integer32()).setLabel("media-size-west-edge-side-offset-by-tray").setMaxAccess("readwrite")
if mibBuilder.loadTexts: media_size_west_edge_side_offset_by_tray.setStatus('optional')
if mibBuilder.loadTexts: media_size_west_edge_side_offset_by_tray.setDescription('Returns or sets the number of 300 dpi dots by which the image is shifted relative to the west edge of the medium (see NORTH-EDGE-OFFSET). A positive value moves the image away from the west edge of the medium. A negative value moves the image closer to the west edge of the medium. The value 0 will return the image to its factory default position. Each OID binding corresponds to a valid tray number for this product. There can be a maximum of 15 trays, a combination of internal and external trays. Products that support other combinations of trays will specify this information in the device pos. The data for this object is stored in NVRAM. Additional information: Returns or sets the number of 300 dpi dots by which the image is shifted relative to the west edge of the medium (see NORTH-EDGE-OFFSET). A positive value moves the image away from the west edge of the medium. A negative value moves the image closer to the west edge of the medium. The value 0 will return the image to its factory default position. Each OID binding corresponds to a valid tray number for this product. There can be a maximum of 15 trays, a combination of internal and external trays. Products that support other combinations of trays will specify this information in the device pos. The data for this object is stored in NVRAM.')
# OID ...1.1.16.1.1.1 (ro): total simplex pages printed in this media size.
# The description documents the click-totals OID scheme shared by the five
# printed-media usage objects below (x = attribute 1..5, y = media-size code).
printed_media_simplex_count = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 16, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 930576247))).setLabel("printed-media-simplex-count").setMaxAccess("readonly")
if mibBuilder.loadTexts: printed_media_simplex_count.setStatus('optional')
if mibBuilder.loadTexts: printed_media_simplex_count.setDescription("Total number of simplex pages printed in this media size. Additional information: The 5 usage (simplex/duplex count, simplex/duplex click charge and paper size total) objects described here and below detail the usage for the printer for each paper size defined in the PCL Implementor's Guide and in the Media Size Table in the hpmib.txt. The OID binding is based on the click attribute and paper size. For example: The format for the OID is as follows: 3.4.1.5.x.y <-----> | | | | \\ / | paper size / \\ PRINTER-CLICK_TOTALS ROOT \\ OID click attribute 1..5 x values are 1..5: 1) simplex count 2) simplex click charge 3) duplex count 4) duplex click charge 5) printer or scanner paper size total (i.e. depends if the root OID is referrring to the printer or scanner). y : paper size as defined in the Media Size Table in the hpmib.txt ")
printed_media_simplex_charge = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 16, 1, 1, 2), OctetString()).setLabel("printed-media-simplex-charge").setMaxAccess("readwrite")
if mibBuilder.loadTexts: printed_media_simplex_charge.setStatus('optional')
if mibBuilder.loadTexts: printed_media_simplex_charge.setDescription('Charge for each simplex page printed in this media size.')
printed_media_duplex_count = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 16, 1, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 930576247))).setLabel("printed-media-duplex-count").setMaxAccess("readonly")
if mibBuilder.loadTexts: printed_media_duplex_count.setStatus('optional')
if mibBuilder.loadTexts: printed_media_duplex_count.setDescription('Total number of duplex pages printed in this media size.')
printed_media_duplex_charge = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 16, 1, 1, 4), OctetString()).setLabel("printed-media-duplex-charge").setMaxAccess("readwrite")
if mibBuilder.loadTexts: printed_media_duplex_charge.setStatus('optional')
if mibBuilder.loadTexts: printed_media_duplex_charge.setDescription('Charge for each duplex page printed in this media size. Additional information: The click charges for duplex printed media.')
printed_media_total_charge = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 16, 1, 1, 5), OctetString()).setLabel("printed-media-total-charge").setMaxAccess("readonly")
if mibBuilder.loadTexts: printed_media_total_charge.setStatus('optional')
if mibBuilder.loadTexts: printed_media_total_charge.setDescription('The total charge for pages printed in this media size.')
printed_media_maximum_pixels_per_page = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 16, 1, 1, 6), Integer32()).setLabel("printed-media-maximum-pixels-per-page").setMaxAccess("readonly")
if mibBuilder.loadTexts: printed_media_maximum_pixels_per_page.setStatus('optional')
if mibBuilder.loadTexts: printed_media_maximum_pixels_per_page.setDescription('The number of pixels required to completely fill a page of this media size. The device POS will specify the resolution at which this pixel count was calculated. Additional information: The number of pixels required to fill a page of a specific media size. The OID binding is the media size you to query for. Only media sizes which the printer supports will be available.')
printed_media_combined_total = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 16, 1, 1, 7), OctetString()).setLabel("printed-media-combined-total").setMaxAccess("readonly")
if mibBuilder.loadTexts: printed_media_combined_total.setStatus('optional')
if mibBuilder.loadTexts: printed_media_combined_total.setDescription('Total number of letter equivalently weighted pages both color and mono combined with this printer. Additional information: The combined total per page size of simplex and duplex color pages plus simplex and duplex mono pages.')
printed_media_dimplex_count = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 16, 1, 1, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 930576247))).setLabel("printed-media-dimplex-count").setMaxAccess("readonly")
if mibBuilder.loadTexts: printed_media_dimplex_count.setStatus('optional')
if mibBuilder.loadTexts: printed_media_dimplex_count.setDescription('Total number of mono dimplex pages printed in this media size. A dimplex page is one that has been printed in duplex mode but the back side is blank. Dimplex pages occur when the printer firmware inserts a blank page in order to complete a duplexed job which is sent to the printer with an odd number of pages.')
usage_printer_total_charge = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 16, 1, 2), OctetString()).setLabel("usage-printer-total-charge").setMaxAccess("readonly")
if mibBuilder.loadTexts: usage_printer_total_charge.setStatus('optional')
if mibBuilder.loadTexts: usage_printer_total_charge.setDescription('Total printer charge for all paper sizes printed.')
usage_average_toner_coverage = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 16, 1, 3), OctetString()).setLabel("usage-average-toner-coverage").setMaxAccess("readonly")
if mibBuilder.loadTexts: usage_average_toner_coverage.setStatus('optional')
if mibBuilder.loadTexts: usage_average_toner_coverage.setDescription('Reports the average toner area coverage of all pages printed over the life of the device. Counts pixels on each page, divides pixel count by pixels possible on a page to give the percentage of coverage, and keeps a rolling average percent weighted by area printed.')
usage_staple_count = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 16, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 930576247))).setLabel("usage-staple-count").setMaxAccess("readonly")
if mibBuilder.loadTexts: usage_staple_count.setStatus('optional')
if mibBuilder.loadTexts: usage_staple_count.setDescription('Total number of staples used.')
usage_instructions_line1 = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 16, 1, 5), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 40))).setLabel("usage-instructions-line1").setMaxAccess("readwrite")
if mibBuilder.loadTexts: usage_instructions_line1.setStatus('optional')
if mibBuilder.loadTexts: usage_instructions_line1.setDescription('The first line of usage instructions for the device user.Appears on Line 1 of the usage page.')
usage_instructions_line2 = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 16, 1, 6), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 40))).setLabel("usage-instructions-line2").setMaxAccess("readwrite")
if mibBuilder.loadTexts: usage_instructions_line2.setStatus('optional')
if mibBuilder.loadTexts: usage_instructions_line2.setDescription('The second line of usage instructions for the device user.Appears on Line 1 of the usage page.')
usage_instructions_line3 = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 16, 1, 7), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 40))).setLabel("usage-instructions-line3").setMaxAccess("readwrite")
if mibBuilder.loadTexts: usage_instructions_line3.setStatus('optional')
if mibBuilder.loadTexts: usage_instructions_line3.setDescription('The third line of usage instructions for the device user.Appears on Line 1 of the usage page.')
usage_instructions_line4 = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 16, 1, 8), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 40))).setLabel("usage-instructions-line4").setMaxAccess("readwrite")
if mibBuilder.loadTexts: usage_instructions_line4.setStatus('optional')
if mibBuilder.loadTexts: usage_instructions_line4.setDescription('The fourth line of usage instructions for the device user.Appears on Line 1 of the usage page.')
printed_modes_usage_total = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 16, 1, 9), Integer32()).setLabel("printed-modes-usage-total").setMaxAccess("readonly")
if mibBuilder.loadTexts: printed_modes_usage_total.setStatus('optional')
if mibBuilder.loadTexts: printed_modes_usage_total.setDescription(' This object reports the total color and mono print modes usage for the life of the printer. This value is reported on the usage page.')
source_tray_usage_total = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 16, 1, 10), Integer32()).setLabel("source-tray-usage-total").setMaxAccess("readonly")
if mibBuilder.loadTexts: source_tray_usage_total.setStatus('optional')
if mibBuilder.loadTexts: source_tray_usage_total.setDescription('This object reports the total source tray usage for the life of the printer. This value is reported on the usage page.')
destination_bin_usage_total = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 16, 1, 11), Integer32()).setLabel("destination-bin-usage-total").setMaxAccess("readonly")
if mibBuilder.loadTexts: destination_bin_usage_total.setStatus('optional')
if mibBuilder.loadTexts: destination_bin_usage_total.setDescription('This object reports the total destination bin usage for the life of the printer. This value is reported on the usage page.')
# --- Per-fuser-mode / per-tray / per-bin counters and marking-agent event
# handling (generated pysnmp MIB objects; description strings are verbatim
# MIB DESCRIPTION clauses and must not be edited).
printed_modes_total_count = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 16, 4, 1, 5), Integer32()).setLabel("printed-modes-total-count").setMaxAccess("readonly")
if mibBuilder.loadTexts: printed_modes_total_count.setStatus('optional')
if mibBuilder.loadTexts: printed_modes_total_count.setDescription('The total count for pages printed in this fuser mode.')
source_tray_usage_count = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 16, 5, 1, 1), Integer32()).setLabel("source-tray-usage-count").setMaxAccess("readonly")
if mibBuilder.loadTexts: source_tray_usage_count.setStatus('optional')
if mibBuilder.loadTexts: source_tray_usage_count.setDescription('Total number of pages printed from this source tray. Additional information: This object will track how many images have been printed with the original source tray as one of the following: 1. Envelope Feeder 2. Manual Feed Tray 3. MP Tray 4. Tray 1 5. Tray 2 6. Tray 3 7. External Tray 8. Other')
destination_bin_usage_count = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 16, 6, 1, 1), Integer32()).setLabel("destination-bin-usage-count").setMaxAccess("readonly")
if mibBuilder.loadTexts: destination_bin_usage_count.setStatus('optional')
if mibBuilder.loadTexts: destination_bin_usage_count.setDescription('Total number of pages printed to this destination bin. Additional information: This object will track how many images have been printed with the original destination bin as one of the following: 1. Face Down Bin 2. Face Up Bin 3. External Bin 4. Other')
# Writable enums (eStop=1 / eCont=2) controlling whether the device stops or
# keeps printing on low/out marking-agent (toner) events.
low_marking_agent_processing = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 5, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("eStop", 1), ("eCont", 2)))).setLabel("low-marking-agent-processing").setMaxAccess("readwrite")
if mibBuilder.loadTexts: low_marking_agent_processing.setStatus('optional')
if mibBuilder.loadTexts: low_marking_agent_processing.setDescription('Returns or changes how the device processes a low marking agent event. If eCont, then the device continues to print. If eStop, then the device stops until a continue event occurs.')
out_marking_agent_processing = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 5, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("eStop", 1), ("eCont", 2)))).setLabel("out-marking-agent-processing").setMaxAccess("readwrite")
if mibBuilder.loadTexts: out_marking_agent_processing.setStatus('optional')
if mibBuilder.loadTexts: out_marking_agent_processing.setDescription('Returns or changes how the device processes an out marking agent event. If eCont, then the device continues to print. If eStop, then the device stops until a continue event occurs.')
# Persistent per-media-size impression counter; the OID suffix selects the
# paper size (table enumerated in the description below).
media_size_count = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 8, 5, 1), Integer32()).setLabel("media-size-count").setMaxAccess("readonly")
if mibBuilder.loadTexts: media_size_count.setStatus('optional')
if mibBuilder.loadTexts: media_size_count.setDescription("The number of impressions printed on sheets of this media size. The device POS should state whether this value is lost across a power cycle or is kept in permanent storage. Additional information: The value of this object is persistent across a power cycle. The index for these objects, the last number of the OID, uniquely identifies the paper size. This value corresponds to the page sizes listed below. These values are also documented in the PCL Implementor's Guide and the PML Master MIB. 1 US-Executive 2 US-Letter 3 US-Legal 15 Statement 10 Foolscap 17 ROC 16K 19 ROC 8K 33 8K 270x390MM 34 16K 195x270MM 35 8K 260x368MM 89 16K 184x260MM 18 JIS Executive 25 ISO and JIS A5 26 ISO and JIS A4 45 JIS B5 65 ISO B5 72 eJapanesePostcardDouble 80 Monarch 81 Commercal-10 90 International DL 91 International C5 100 International B5 101 Custom 32764 AnyCustomSize 32765 AnySize 32767 Unknown Paper Size")
# --- E-Label consumable (cartridge) identity and capacity objects (OID
# subtree ...4.1.10.1.1.x). Per the MIB text, most of these exist only for
# authentic HP cartridges; capacity-units objects exist on any E-Label engine.
consumable_status_cartridge_model = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 10, 1, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 24))).setLabel("consumable-status-cartridge-model").setMaxAccess("readonly")
if mibBuilder.loadTexts: consumable_status_cartridge_model.setStatus('optional')
if mibBuilder.loadTexts: consumable_status_cartridge_model.setDescription('This object is used to read the cartridge model number associated with this consumable. Additional information: This object will only exist for Authentic HP consumables. If the cartridge is deemed to be NonHP, then this object will not exist.')
consumable_status_manufacturing_date = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 10, 1, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 24))).setLabel("consumable-status-manufacturing-date").setMaxAccess("readonly")
if mibBuilder.loadTexts: consumable_status_manufacturing_date.setStatus('optional')
if mibBuilder.loadTexts: consumable_status_manufacturing_date.setDescription("This object is used to report the date on which this consumable was manufactured. The format of the string is 'YYYYMMDD', where YYYY is the year, MM is the month (1-12), and DD is the day (1-31). Additional information: This object will only exist for Authentic HP consumables. If the cartridge is deemed to be NonHP, then this object will not exist.")
consumable_status_serial_number = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 10, 1, 1, 3), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 24))).setLabel("consumable-status-serial-number").setMaxAccess("readonly")
if mibBuilder.loadTexts: consumable_status_serial_number.setStatus('optional')
if mibBuilder.loadTexts: consumable_status_serial_number.setDescription('This object is used to report the serial number for this consumable. Additional information: This object will only exist for Authentic HP consumables. If the cartridge is deemed to be NonHP, then this object will not exist.')
consumable_status_capacity_units = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 10, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("ePagesAt5PercentCoverage", 1), ("e1KPagesAt5PercentCoverage", 2), ("e10KPagesAt5PercentCoverage", 3)))).setLabel("consumable-status-capacity-units").setMaxAccess("readonly")
if mibBuilder.loadTexts: consumable_status_capacity_units.setStatus('optional')
if mibBuilder.loadTexts: consumable_status_capacity_units.setDescription('This object is used to report the usage units used by the CONSUMABLE-STATUS-TOTAL-CAPACITY object. Additional information: This object will only exist on engines that are E-Label capable, but will exist on these engines regardless of the cartridge being Authentic HP or NonHP. This object can be used to ensure the capability of the E-Label feature for a given engine.')
consumable_status_total_capacity = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 10, 1, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setLabel("consumable-status-total-capacity").setMaxAccess("readonly")
if mibBuilder.loadTexts: consumable_status_total_capacity.setStatus('optional')
if mibBuilder.loadTexts: consumable_status_total_capacity.setDescription('This object is used to report the total capacity of a new consumable of this type. The PML object CONSUMABLE-STATUS-CAPACITY-UNITS can be used to determine the units of measure for this PML object. Additional information: This object will only exist for Authentic HP consumables. If the cartridge is deemed to be NonHP, then this object will not exist.')
# CONSUMABLE-STATUS-INFO: read/write collection of consumable status bits
# (reorder flag, toner/developer/drum low-out, waste-toner, notifications).
# Fix: the setDescription() string literal was broken across two physical
# lines (an unterminated single-quoted string -- a SyntaxError); the literal
# is rejoined below with its text preserved verbatim.
consumable_status_info = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 10, 1, 1, 7), OctetString()).setLabel("consumable-status-info").setMaxAccess("readwrite")
if mibBuilder.loadTexts: consumable_status_info.setStatus('optional')
if mibBuilder.loadTexts: consumable_status_info.setDescription('This object is used to read and write the various status flags supported by this consumable. For LaserJet 4100, LaserJet 4550 the collection bits supported include: cMarkedAsReordered - indicates the consumable has been reordered. This is the only bit that can be both read and written. cTonerLowCondition - indicates a toner low condition has occured. This bit in the collection is read-only to PML. cTonerOutCondition - indicates a toner out condition has occured. This bit in the collection is read-only to PML. cDeveloperLowCondition - indicates a developer low condition has occured. This bit in the collection is read-only to PML. cDeveloperOutCondition - indicates a developer out condition has occured. This bit in the collection is read-only to PML. cDrumLowCondition - indicates a drum low condition has occured. This bit in the collection is read-only to PML. cDrumOutCondition - indicates a drum out condition has occured. This bit in the collection is read-only to PML. cWasteTonerFullCondition - indicates a waste toner full condition has occured. This bit in the collection is read-only to PML. cWasteTonerFullWarningCondition - indicates a waste toner full warning condition has occured. This bit in the collection is read-only to PML. cNewConsumable - indicates the consumable is new and has never been used. This bit in the collection is read-only to PML. cTonerLowNotificationSent - inidicates that toner low notification has been sent for this consumable. This bit in the collection is readonly. cTonerOutNotificationSent - inidicates that toner out notification has been sent for this consumable. This bit in the collection is read only. cAnyPartLowCondition - indicates that at least one part of this consumable has reached a low condition. This bit in the collection is read-only to PML. cAnyPartOutCondition - indicates that at least one part of this consumable has reached an out condition. This bit in the collection is read-only to PML. Additional information: This object will only exist for Authentic HP consumables. If the cartridge is deemed to be NonHP, then this object will not exist.')
# --- E-Label consumable history, per-media page counts, usage/engine-usage
# units and counts, drum life, authentication level, and free-form string
# info. Generated pysnmp MIB objects; description strings are verbatim MIB
# DESCRIPTION clauses and must not be edited.
consumable_status_first_install_date = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 10, 1, 1, 8), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 24))).setLabel("consumable-status-first-install-date").setMaxAccess("readonly")
if mibBuilder.loadTexts: consumable_status_first_install_date.setStatus('optional')
if mibBuilder.loadTexts: consumable_status_first_install_date.setDescription("This object is used to report the date on which this consumable was first installed. The format of the string is 'YYYYMMDD'. Where: YYYY is the year. MM is the month (1-12). DD is the day (1-31). The device POS needs to indicate what will be written in the event the printer does not have a real time clock. Additional information: This object will only exist for Authentic HP consumables. If the cartridge is deemed to be NonHP, then this object will not exist. For printers without internal clocks, the date will always be 20000101.")
consumable_status_last_use_date = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 10, 1, 1, 9), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 24))).setLabel("consumable-status-last-use-date").setMaxAccess("readonly")
if mibBuilder.loadTexts: consumable_status_last_use_date.setStatus('optional')
if mibBuilder.loadTexts: consumable_status_last_use_date.setDescription("This object is used to report the date on which this consumable was last used. The format of the string is 'YYYYMMDD'. Where: YYYY is the year. MM is the month (1-12). DD is the day (1-31). The device POS needs to indicate what will be written in the event the printer does not have a real time clock. Additional information: This object will only exist for Authentic HP consumables. If the cartridge is deemed to be NonHP, then this object will not exist. For printers without internal clocks, the date will always be 20000101.")
# Pages printed with this cartridge, bucketed by media size.
consumable_status_page_count_a3_ledger = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 10, 1, 1, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setLabel("consumable-status-page-count-a3-ledger").setMaxAccess("readonly")
if mibBuilder.loadTexts: consumable_status_page_count_a3_ledger.setStatus('optional')
if mibBuilder.loadTexts: consumable_status_page_count_a3_ledger.setDescription('This object is used to report the number of pages that have been printed by this consumable that are of the specified media size - A3 or Ledger. Additional information: This object will only exist for Authentic HP consumables. If the cartridge is deemed to be NonHP, then this object will not exist.')
consumable_status_page_count_legal = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 10, 1, 1, 11), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setLabel("consumable-status-page-count-legal").setMaxAccess("readonly")
if mibBuilder.loadTexts: consumable_status_page_count_legal.setStatus('optional')
if mibBuilder.loadTexts: consumable_status_page_count_legal.setDescription('This object is used to report the number of pages that have been printed by this consumable that are of the specified media size - Legal. Additional information: This object will only exist for Authentic HP consumables. If the cartridge is deemed to be NonHP, then this object will not exist.')
consumable_status_page_count_a4_letter = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 10, 1, 1, 12), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setLabel("consumable-status-page-count-a4-letter").setMaxAccess("readonly")
if mibBuilder.loadTexts: consumable_status_page_count_a4_letter.setStatus('optional')
if mibBuilder.loadTexts: consumable_status_page_count_a4_letter.setDescription('This object is used to report the number of pages that have been printed by this consumable that are of the specified media size - A4 or Letter. Additional information: This object will only exist for Authentic HP consumables. If the cartridge is deemed to be NonHP, then this object will not exist.')
consumable_status_page_count_b5_executive = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 10, 1, 1, 13), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setLabel("consumable-status-page-count-b5-executive").setMaxAccess("readonly")
if mibBuilder.loadTexts: consumable_status_page_count_b5_executive.setStatus('optional')
if mibBuilder.loadTexts: consumable_status_page_count_b5_executive.setDescription('This object is used to report the number of pages that have been printed by this consumable that are of the specified media size - B5 or Executive. Additional information: This object will only exist for Authentic HP consumables. If the cartridge is deemed to be NonHP, then this object will not exist.')
consumable_status_page_count_envelope = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 10, 1, 1, 14), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setLabel("consumable-status-page-count-envelope").setMaxAccess("readonly")
if mibBuilder.loadTexts: consumable_status_page_count_envelope.setStatus('optional')
if mibBuilder.loadTexts: consumable_status_page_count_envelope.setDescription('This object is used to report the number of pages that have been printed by this consumable that are of the specified media size - Envelope. Additional information: This object will only exist for Authentic HP consumables. If the cartridge is deemed to be NonHP, then this object will not exist.')
consumable_status_page_count_xy_other = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 10, 1, 1, 15), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setLabel("consumable-status-page-count-xy-other").setMaxAccess("readonly")
if mibBuilder.loadTexts: consumable_status_page_count_xy_other.setStatus('optional')
if mibBuilder.loadTexts: consumable_status_page_count_xy_other.setDescription('This object is used to report the number of pages that have been printed by this consumable that are of the specified media size - XY or Other. Additional information: This object will only exist for Authentic HP consumables. If the cartridge is deemed to be NonHP, then this object will not exist.')
consumable_status_job_count = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 10, 1, 1, 16), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setLabel("consumable-status-job-count").setMaxAccess("readonly")
if mibBuilder.loadTexts: consumable_status_job_count.setStatus('optional')
if mibBuilder.loadTexts: consumable_status_job_count.setDescription('This object is used to report the number of jobs that have been printed by this consumable. Additional information: This object will only exist for Authentic HP consumables. If the cartridge is deemed to be NonHP, then this object will not exist.')
# Remaining-usage measurement: units enum + count, in both cartridge-reported
# and engine-estimated variants.
consumable_status_usage_units = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 10, 1, 1, 17), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8))).clone(namedValues=NamedValues(("ePixels", 1), ("eTenthsOfGrams", 2), ("eGrams", 3), ("eRotations", 4), ("ePages", 5), ("eImpressions", 6), ("ePercentLifeRemaining", 7), ("eOther", 8)))).setLabel("consumable-status-usage-units").setMaxAccess("readonly")
if mibBuilder.loadTexts: consumable_status_usage_units.setStatus('optional')
if mibBuilder.loadTexts: consumable_status_usage_units.setDescription('This object is used to report the units used to measure the capacity of this consumable. Additional information: This object will only exist on engines that are E-Label capable, but will exist on these engines regardless of the cartridge being Authentic HP or NonHP. This object can be used to ensure the capability of the E-Label feature for a given engine.')
consumable_status_usage_count = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 10, 1, 1, 18), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setLabel("consumable-status-usage-count").setMaxAccess("readonly")
if mibBuilder.loadTexts: consumable_status_usage_count.setStatus('optional')
if mibBuilder.loadTexts: consumable_status_usage_count.setDescription('This object is used to report the number of usage units that remain in this consumable. The units of measurement used by this object can be obtained by querying the CONSUMABLE-STATUS-USAGE-UNITS object. Additional information: This object will only exist for Authentic HP consumables. If the cartridge is deemed to be NonHP, then this object will not exist.')
consumable_status_manufacturer_name = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 10, 1, 1, 19), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 8))).setLabel("consumable-status-manufacturer-name").setMaxAccess("readonly")
if mibBuilder.loadTexts: consumable_status_manufacturer_name.setStatus('optional')
if mibBuilder.loadTexts: consumable_status_manufacturer_name.setDescription('This object is used to report the name of the manufacturer of this consumable. The device POS will specify the set of manufacturers that could be returned. Additional information: This object will only exist for Authentic HP consumables. If the cartridge is deemed to be NonHP, then this object will not exist.')
consumable_status_oem_name = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 10, 1, 1, 20), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 8))).setLabel("consumable-status-oem-name").setMaxAccess("readonly")
if mibBuilder.loadTexts: consumable_status_oem_name.setStatus('optional')
if mibBuilder.loadTexts: consumable_status_oem_name.setDescription("This object is used to report the name of the OEM of this consumable. This object will typically return the string 'HP'. The device POS will specify if other OEM consumables are supported. Additional information: This object will only exist for Authentic HP consumables. If the cartridge is deemed to be NonHP, then this object will not exist. This object should always return 'HP' as the OEM name")
consumable_status_engine_usage_units = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 10, 1, 1, 21), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8))).clone(namedValues=NamedValues(("ePixels", 1), ("eTenthsOfGrams", 2), ("eGrams", 3), ("eRotations", 4), ("ePages", 5), ("eImpressions", 6), ("ePercentLifeRemaining", 7), ("eOther", 8)))).setLabel("consumable-status-engine-usage-units").setMaxAccess("readonly")
if mibBuilder.loadTexts: consumable_status_engine_usage_units.setStatus('optional')
if mibBuilder.loadTexts: consumable_status_engine_usage_units.setDescription('This object is used to report the units used to measure the capacity of this consumable as reported by the Engine. Additional information: This object will only exist on engines that are E-Label capable, but will exist on these engines regardless of the cartridge being Authentic HP or NonHP. This object can be used to ensure the capability of the E-Label feature for a given engine.')
consumable_status_engine_usage_count = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 10, 1, 1, 22), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setLabel("consumable-status-engine-usage-count").setMaxAccess("readonly")
if mibBuilder.loadTexts: consumable_status_engine_usage_count.setStatus('optional')
if mibBuilder.loadTexts: consumable_status_engine_usage_count.setDescription('This object is used to report the number of usage units, estimated by the engine, that remain in this consumable. The units of measurement used by this object can be obtained by querying the CONSUMABLE-STATUS-ENGINE-USAGE-UNITS object. Additional information: This object will only exist for Authentic HP consumables. If the cartridge is deemed to be NonHP, then this object will not exist.')
# Drum life remaining (units enum + count), cartridge authentication level,
# and TECM drum-life-level enum.
consumable_status_drum_life_units = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 10, 1, 1, 38), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8))).clone(namedValues=NamedValues(("ePixels", 1), ("eTenthsOfGrams", 2), ("eGrams", 3), ("eRotations", 4), ("ePages", 5), ("eImpressions", 6), ("ePercentLifeRemaining", 7), ("eOther", 8)))).setLabel("consumable-status-drum-life-units").setMaxAccess("readonly")
if mibBuilder.loadTexts: consumable_status_drum_life_units.setStatus('optional')
if mibBuilder.loadTexts: consumable_status_drum_life_units.setDescription('This object is used to report the units used to measure the drum life remaining. Additional information: This object will only exist on engines that are E-Label capable, but will exist on these engines regardless of the cartridge being Authentic HP or NonHP. This object can be used to ensure the capability of the E-Label feature for a given engine.')
consumable_status_drum_life = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 10, 1, 1, 39), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setLabel("consumable-status-drum-life").setMaxAccess("readonly")
if mibBuilder.loadTexts: consumable_status_drum_life.setStatus('optional')
if mibBuilder.loadTexts: consumable_status_drum_life.setDescription('This object is used to report the number of life units, remaining in the drum. The units of measurement used by this object can be obtained by querying the CONSUMABLE- STATUS-DRUM-LIFE-UNITS object. This object will not exist if this consumable does not contain a drum. Additional information: This object will only exist on engines that are E-Label capable, but will exist on these engines regardless of the cartridge being Authentic HP or NonHP. This object can be used to ensure the capability of the E-Label feature for a given engine.')
consumable_status_authentication = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 10, 1, 1, 40), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("eAuthLevel1", 1), ("eAuthLevel2", 2), ("eAuthLevel3", 3)))).setLabel("consumable-status-authentication").setMaxAccess("readonly")
if mibBuilder.loadTexts: consumable_status_authentication.setStatus('optional')
if mibBuilder.loadTexts: consumable_status_authentication.setDescription('This object returns the authentication level of the elabel on the given supply. The elabel authentication is returned via the appropriate enum. Additional information: There are three levels of authentication of an ELabel cartridge supported. Each ELabel cartridges level of authentication is provided here, via this enumeration.')
consumable_drum_life_level = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 10, 1, 1, 77), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("eDrumLifeLevel1", 1), ("eDrumLifeLevel2", 2), ("eDrumLifeLevel3", 3), ("eDrumLifeLevel4", 4)))).setLabel("consumable-drum-life-level").setMaxAccess("readonly")
if mibBuilder.loadTexts: consumable_drum_life_level.setStatus('optional')
if mibBuilder.loadTexts: consumable_drum_life_level.setDescription('This object is used to get the Drum Life Level of the Drum. This object is for testing purposes only and is controlled by the <<hidden>> object. Additional information: This object returns the drum life level depending on the life phases that are defined for the TECM feature. The Drum Life phase are defined are follows: eDrumLifeLevel1 : 0 to 24% eDrumLifeLevel2 : 25 to 57% eDrumLifeLevel3 : 58 to 91% eDrumLifeLevel4 : 92 to 100% This object is always supported and will return the DRUM LIFE LEVEL set depending on the percentage value of drum life remaining.')
# Writable free-form consumable information string (max 287 octets).
consumable_string_information = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 10, 8, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 287))).setLabel("consumable-string-information").setMaxAccess("readwrite")
if mibBuilder.loadTexts: consumable_string_information.setStatus('optional')
if mibBuilder.loadTexts: consumable_string_information.setDescription('This object is used to read and write the string value that describes the consumable information.')
consumable_string_information_reset = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 10, 8, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1))).clone(namedValues=NamedValues(("ePresetToNVRAM", 1)))).setLabel("consumable-string-information-reset").setMaxAccess("readwrite")
if mibBuilder.loadTexts: consumable_string_information_reset.setStatus('optional')
if mibBuilder.loadTexts: consumable_string_information_reset.setDescription('This object is used to reset (set back to factory default) or read the current status of the corresponding information string. When Set to eResetToDefault, this object can be used to reset the corresponding information object back to its factory default value. Setting this object with an enumerated value of eCustomized has no effect. When a Get operation is performed on this object it will return a value eResetToDefault if still set to its factory value. It will return eCustomized if the corresponding information value has been set to a custom value. Additional information: This object returns ePresetToNVRAM(1) if CONSUMABLE-STRING-INFORMATION is currently set to the default string. It will return eCustomized(2) otherwise. However, we can explicitly set this object only to ePresetToNVRAM(1) and not eCustomized(2).')
# --- Consumable reorder URL, current-state bitfield, and per-supply life usage ---
# NOTE(review): the consumable-current-state description literal below contains an
# embedded control character carried over from the original MIB text; it is part
# of the runtime string and is preserved byte-for-byte.
consumable_reorder_url = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 10, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 64))).setLabel("consumable-reorder-url").setMaxAccess("readwrite")
if mibBuilder.loadTexts: consumable_reorder_url.setStatus('optional')
if mibBuilder.loadTexts: consumable_reorder_url.setDescription('This object is used to read and write the URL that can be used to reorder consumables for this device. This URL is set at the factory but can be updated by a reseller or third party. Additional information: The URL can be up to 64 characters long.')
# Bit-encoded OCTET STRING: one bit per consumable condition (low, out, missing,
# auth level, etc.); see the cXxx flag names in the description text.
consumable_current_state = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 10, 7), OctetString()).setLabel("consumable-current-state").setMaxAccess("readonly")
if mibBuilder.loadTexts: consumable_current_state.setStatus('optional')
if mibBuilder.loadTexts: consumable_current_state.setDescription('This PML object returns the current state of the particular consumable. cAuthLevel1 - Consumable is at Authentication Level 1 cAuthLevel2 - Consumable is at Authentication Level 2 cAuthLevel3 - Consumable is at Authentication Level 3 cGenuineHPUnsupported - Cartridge is GenuineHP intended for another product cDefectiveMemory - Cartridge has a defective memory tag cMissingMemory - Memory tag is missing from the cartridge cLowCondition - Consumable has reached the engine low threshold cOutCondition - Consumable has reached its out threshold cIncorrect - Cartridge inserted is not the correct one cMissing - Consumable is missing from the printer cConfigurableLow - Consumable has reached the configurable low threshold value cStatusArevalid - The status reported on other bits are valid only if the bit is set to 1. If it is 0, the values are invalid. These are the possible states and whenever the consumable is in any of these states, the appropriate bit will be set. The cLowCondition will be set when the consumable reaches the engine low threshold, and cConfigurableLow will be set when the consumable reaches the Configurable Low threshold value. For non-cartridge supplies only cLowCondition, cOutCondition, and cMissing will be supported. Additional information: This object returns the current state of the particular consuma ble. 
cAuthLevel1 - Consumable is at Authentication Level 1 cAuthLevel2 - Consumable is at Authentication Level 2 cAuthLevel3 - Consumable is at Authentication Level 3 cGenuineHPUnsupported - Cartridge is GenuineHP intended for ano ther product cDefectiveMemory - Cartridge has a Defective Memory cMissingMemory - Memory is Missing from the Cartridge cLowCondition - Consumable has reached its low threshold cOutCondition - Consumable has reached its out threshold cIncorrect - Cartridge inserted is not the correct one cMissing - Consumable is Missing from the Printer cStatusAreValid - The Status reported on other bits are valid o nly if this bit is set to 1. If it is 0, the values are invalid. These are the possible states and whenever a Consumable is in a ny of these states, the appropriate bit will be set. For Non Cartridge Supp lies only cLowCondition, cOutCondition and cMissing will be supported.')
# Pages (or estimated pages) remaining for a supply; -2 means "cannot report".
consumable_life_usage_units_remaining = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 10, 5, 1, 1), Integer32()).setLabel("consumable-life-usage-units-remaining").setMaxAccess("readonly")
if mibBuilder.loadTexts: consumable_life_usage_units_remaining.setStatus('optional')
if mibBuilder.loadTexts: consumable_life_usage_units_remaining.setDescription('This object reports the current estimate of the number of usage units that remain before this supply is depleted. An installed supply that cannot report such a number will return a value of -2. It is the reponsibility of the host application to query each supply in order to determine an overall device USAGE-UNITS-REMAINING number - the lowest value returned. The unit of measure for this life estimate is determined by reading the corresponding CONSUMABLE-LIFE-USAGE-UNITS object. Additional information: This object will return the Pages or Estimated Pages remaining for the speciffic supply requested by the leaf node of this object.')
# Units enum for the "remaining" value above, plus the LOW-state threshold.
consumable_life_usage_units = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 10, 5, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("ePagesRemaining", 1), ("eEstimatedPagesRemaining", 2)))).setLabel("consumable-life-usage-units").setMaxAccess("readonly")
if mibBuilder.loadTexts: consumable_life_usage_units.setStatus('optional')
if mibBuilder.loadTexts: consumable_life_usage_units.setDescription('This object reports current usage units in use by the corresponding CONSUMABLE-LIFE-USAGE-UNITS-REMAINING object. Additional information: This object returns the units that CONSUMABLE-LIFE-USAGE-UNITS-REMAINING is returned in. Either ePagesRemaining(1) or eEstimatedPagesRemaining(2).')
consumable_life_low_threshold = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 10, 5, 1, 3), Integer32()).setLabel("consumable-life-low-threshold").setMaxAccess("readonly")
if mibBuilder.loadTexts: consumable_life_low_threshold.setStatus('optional')
if mibBuilder.loadTexts: consumable_life_low_threshold.setDescription('This object is used to report and modify a threshold value indicating the point in the life of a consumable or supply at which a transition to a LOW state will occur.')
# --- Toner/marking-agent coverage statistics (per color plane) ---
# OID prefix ...4.1.11.1.*; all read-only OCTET STRINGs holding real numbers.
# The "sum" and "sum squared" scalars are running accumulators that allow a host
# to compute the mean and variance of per-page coverage over the printer's life.
printer_average_marking_agent_coverage = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 11, 1, 1), OctetString()).setLabel("printer-average-marking-agent-coverage").setMaxAccess("readonly")
if mibBuilder.loadTexts: printer_average_marking_agent_coverage.setStatus('optional')
if mibBuilder.loadTexts: printer_average_marking_agent_coverage.setDescription('This object is used to read the average marking agent coverage for a given color plane. It is a real number that represents percentage full and will read from 0 to 100%. This object obsoletes or replaces USAGE-AVERAGE-TONER-COVERAGE which could only be used for black toner coverage information. Additional information: The average percent of toner coverage for a specific color plane. The OID binding is the color plane. Only color planes which the printer supports will be available. This is only available if a hard disk is installed.')
printer_average_marking_agent_coverage_sum = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 11, 1, 2), OctetString()).setLabel("printer-average-marking-agent-coverage-sum").setMaxAccess("readonly")
if mibBuilder.loadTexts: printer_average_marking_agent_coverage_sum.setStatus('optional')
if mibBuilder.loadTexts: printer_average_marking_agent_coverage_sum.setDescription('This object is used to read the accumulated sum of the percent coverage numbers calculated on a per page basis over the life of the printer. Additional information: The sum of the percent toner coverages for all impressions printed. The OID binding is the color plane. Only color planes which the printer supports will be available.')
printer_average_marking_agent_coverage_sum_squared = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 11, 1, 3), OctetString()).setLabel("printer-average-marking-agent-coverage-sum-squared").setMaxAccess("readonly")
if mibBuilder.loadTexts: printer_average_marking_agent_coverage_sum_squared.setStatus('optional')
if mibBuilder.loadTexts: printer_average_marking_agent_coverage_sum_squared.setDescription('This object is used to read the accumulated sum of the squares of the percent coverage numbers calculated on a per page basis over the life of the printer. Additional information: The sum of the percent toner coverages squared for all impressions printed. The OID binding is the color plane. Only color planes which the printer supports will be available.')
# Conversion factor (e.g. pixels per gram) used for pages-remaining estimation.
printer_average_marking_agent_units_per_gram = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 11, 1, 4), OctetString()).setLabel("printer-average-marking-agent-units-per-gram").setMaxAccess("readonly")
if mibBuilder.loadTexts: printer_average_marking_agent_units_per_gram.setStatus('optional')
if mibBuilder.loadTexts: printer_average_marking_agent_units_per_gram.setDescription('This object is used to read the marking agent units of measure per gram. This is typically a conversion factor for converting pixels of toner to grams. This pixels-per-gram value is used to calculate the estimated number of pages remaining for a given marking agent.')
printer_average_marking_agent_coverage_actual = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 4, 1, 11, 1, 5), OctetString()).setLabel("printer-average-marking-agent-coverage-actual").setMaxAccess("readonly")
if mibBuilder.loadTexts: printer_average_marking_agent_coverage_actual.setStatus('optional')
if mibBuilder.loadTexts: printer_average_marking_agent_coverage_actual.setDescription('This object is used to read the actual average marking agent coverage for a given color plane. It is a real number that represents percent coverage and will read from 0 to 100%. This object will return the same value as PRINTER-AVERAGE-MARKING-AGENT-COVERAGE except under certain conditions which will be specified in the device POS. Additional information: This object will return the historical page coverage based on the specific supply requested. It is not based on the printers historical value')
# Read-write collection bitfield: each bit enables/disables one device or
# Embedded Web Server feature. Note the different OID branch (...1.3.9.1.3).
web_server_security = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 3, 9, 1, 3), OctetString()).setLabel("web-server-security").setMaxAccess("readwrite")
if mibBuilder.loadTexts: web_server_security.setStatus('optional')
if mibBuilder.loadTexts: web_server_security.setDescription('Each collection bit represents a device or Embedded Web Server feature that can be enabled or disabled via this object.')
# --- Remote firmware upgrade (RFU) download scalars ---
# OID prefix ...1.1.18.*: capability flag, expected blackout time, flash write
# counters/limits, download state machine, and the name/version strings that
# management tools (e.g. Web JetAdmin) match against the RFU job header.
firmware_download_write_status_supported = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 18, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("eFalse", 1), ("eTrue", 2)))).setLabel("firmware-download-write-status-supported").setMaxAccess("readonly")
if mibBuilder.loadTexts: firmware_download_write_status_supported.setStatus('optional')
if mibBuilder.loadTexts: firmware_download_write_status_supported.setDescription('This object provides information on whether the printer has the ability to communicate the write-status of the firmware download while the download is taking place.')
firmware_download_write_time = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 18, 2), Integer32()).setLabel("firmware-download-write-time").setMaxAccess("readonly")
if mibBuilder.loadTexts: firmware_download_write_time.setStatus('optional')
if mibBuilder.loadTexts: firmware_download_write_time.setDescription('If the design of the firmware-download implementation does not allow PML interaction during the download process, this value provides an estimation of the time where the printer will not be able to engage in PML communication. Additional information: If the design of the firmware-download implementation does not allow PML interaction during the download process, this value provides an estimation of the time where the printer will not be able to engage in PML communication. The calculated time is expected to be 140 secs. However, this time may increase as high as 280 secs as the flash part nears the maximum flash count.')
firmware_download_write_count = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 18, 3), Integer32()).setLabel("firmware-download-write-count").setMaxAccess("readonly")
if mibBuilder.loadTexts: firmware_download_write_count.setStatus('optional')
if mibBuilder.loadTexts: firmware_download_write_count.setDescription('Tells the number of times that firmware has been downloaded to the flash part. Additional information: Tells the number of times that firmware has been downloaded to the flash part. The default value will vary depending on how many times the firmware is rolled before shipping.')
# Download state machine; the only read-write scalar in this group — a host can
# drive transitions such as eCancelDownload.
firmware_download_current_state = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 18, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))).clone(namedValues=NamedValues(("eIdle", 1), ("eReceivingImage", 2), ("eReceivedImageError", 3), ("eVerifyingImage", 4), ("eVerifiedImageError", 5), ("eWritingImage", 6), ("eWritingImageError", 7), ("eDownloadComplete", 8), ("eOKtoShutDown", 9), ("eCancelDownload", 10), ("eShuttingDown", 11)))).setLabel("firmware-download-current-state").setMaxAccess("readwrite")
if mibBuilder.loadTexts: firmware_download_current_state.setStatus('optional')
if mibBuilder.loadTexts: firmware_download_current_state.setDescription('Provides the current or last reportable state of the firmware download process. The current state may not necessarily be the current state, but could be the post-mortem state.')
firmware_download_maximum_write_count = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 18, 5), Integer32()).setLabel("firmware-download-maximum-write-count").setMaxAccess("readonly")
if mibBuilder.loadTexts: firmware_download_maximum_write_count.setStatus('optional')
if mibBuilder.loadTexts: firmware_download_maximum_write_count.setDescription('Reports the manufacturer specified number of times that firmware can be downloaded to the flash part. A value of -1 means that there is no limit. A value of 0 means that downloading firmware is not permitted by this part. Any other positive integer value corresponds to the number of times that firmware can be downloaded to the flash part. Additional information: Reports the manufacturer specified number of times that firmware can be downloaded to the flash part. A value of -1 means that there is no limit. A value of 0 means that downloading firmware is not permitted by this part. Any other positive integer value corresponds to the number of times that firmware can be downloaded to the flash part. The current maximum write count is 500. This may change as flash technology evolves.')
# Mandatory (not optional) identity strings used to validate an upgrade image.
firmware_download_name = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 18, 6), OctetString()).setLabel("firmware-download-name").setMaxAccess("readonly")
if mibBuilder.loadTexts: firmware_download_name.setStatus('mandatory')
if mibBuilder.loadTexts: firmware_download_name.setDescription('This returns the name of the printer. This should match the name in the header of the upgrade image being sent to the flash part. If the name does not match with the name returned then the image that we are attempting to download does not upgrade the printer firmware. Additional information: This object returns the HP name of the printer. This should match what is in the PJL header of the RFU job Web Jet Admin uses this to ensure the printer is available to be upgraded.')
firmware_download_version = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 18, 7), OctetString()).setLabel("firmware-download-version").setMaxAccess("readonly")
if mibBuilder.loadTexts: firmware_download_version.setStatus('mandatory')
if mibBuilder.loadTexts: firmware_download_version.setDescription('This object will return a string value representing the current revision of firmware that the printer is operating with. This is used to determine if code needs to be upgraded when an firmware bundle comes in, in an upgrade job. Additional information: Web Jet Admin uses the version string that is returned to determine what peices of an RFU bundle need to be upgraded when an RFU job is being built. This objects will return the version string of the printer.')
# --- Upgradable sub-device scalars (...1.1.20.*) ---
# Mirrors the firmware-download group above, but per attached upgradable device
# ("device X"); all read-only except the global remote-upgrade enable switch.
upgradable_devices_write_status_supported = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 20, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("eFalse", 1), ("eTrue", 2)))).setLabel("upgradable-devices-write-status-supported").setMaxAccess("readonly")
if mibBuilder.loadTexts: upgradable_devices_write_status_supported.setStatus('optional')
if mibBuilder.loadTexts: upgradable_devices_write_status_supported.setDescription('This object provides information on whether the upgradable device has the ability to communicate the write-status of the upgrade while the upgrade is taking place.')
upgradable_devices_write_time = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 20, 2), Integer32()).setLabel("upgradable-devices-write-time").setMaxAccess("readonly")
if mibBuilder.loadTexts: upgradable_devices_write_time.setStatus('optional')
if mibBuilder.loadTexts: upgradable_devices_write_time.setDescription('If the design of the device upgrade implementation does not allow PML interaction during the download process, this value provides an estimation of the time where the device will not be able to engage in PML communication. The time returned will depend upon what device is attempting to be upgraded.')
upgradable_devices_write_count = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 20, 3), Integer32()).setLabel("upgradable-devices-write-count").setMaxAccess("readonly")
if mibBuilder.loadTexts: upgradable_devices_write_count.setStatus('optional')
if mibBuilder.loadTexts: upgradable_devices_write_count.setDescription('Tells the number of times that firmware for device X has been downloaded to the flash part. The default value will vary depending on how many times the firmware is rolled before shipping.')
# State machine enum: fewer states than firmware-download (no cancel/shutdown),
# and read-only here.
upgradable_devices_current_state = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 20, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8))).clone(namedValues=NamedValues(("eIdle", 1), ("eReceivedImage", 2), ("eReceivedImageError", 3), ("eVerifiedImage", 4), ("eVerifiedImageError", 5), ("eWritingImage", 6), ("eWritingImageError", 7), ("eUpgradeComplete", 8)))).setLabel("upgradable-devices-current-state").setMaxAccess("readonly")
if mibBuilder.loadTexts: upgradable_devices_current_state.setStatus('optional')
if mibBuilder.loadTexts: upgradable_devices_current_state.setDescription('Provides the current or last reportable state of the device upgrade process. The current state may not necessarily be the current state, but could be the post-mortem state.')
upgradable_devices_max_write_count = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 20, 5), Integer32()).setLabel("upgradable-devices-max-write-count").setMaxAccess("readonly")
if mibBuilder.loadTexts: upgradable_devices_max_write_count.setStatus('optional')
if mibBuilder.loadTexts: upgradable_devices_max_write_count.setDescription('Reports the manufacturer specified number of times that firmware for device X can be downloaded to the flash part. A value of -1 means that there is no limit. A value of 0 means that downloading firmware is not permitted by this part. Any other positive integer value corresponds to the number of times that the firmware for device X can be downloaded to the flash part. The current maximum write count is 500. This may change as flash technology evolves.')
upgradable_devices_name = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 20, 6), OctetString()).setLabel("upgradable-devices-name").setMaxAccess("readonly")
if mibBuilder.loadTexts: upgradable_devices_name.setStatus('mandatory')
if mibBuilder.loadTexts: upgradable_devices_name.setDescription('This returns the name of the upgradable device. This should match the name in the header of the upgrade image being sent to the flash part. If the name does not match with any of the names returned then the device that we are attempting to upgrade is unavailable for upgrading. Additional information: This object returns the HP name of the printer and the upgradable devices. There is one Name object per upgradable device. This should match what is in the PJL header of the RFU job Web Jet Admin uses this to ensure the printer or device is available to be upgraded.')
upgradable_devices_version = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 20, 7), OctetString()).setLabel("upgradable-devices-version").setMaxAccess("readonly")
if mibBuilder.loadTexts: upgradable_devices_version.setStatus('mandatory')
if mibBuilder.loadTexts: upgradable_devices_version.setDescription('This object will return a string value representing the current revision of firmware that device X is operating with. This is used to determine if code needs to be upgraded when a firmware bundle comes in, in an upgrade job. Additional information: Web Jet Admin uses the version string that is returned to determine what peices of an RFU bundle need to be upgraded when an RFU job is being built. This internal object will return the current version string of the printer or device that it corresponds to.')
# Global on/off switch for remote firmware upgrades (applies to all devices).
remote_upgrade_enable = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 3, 9, 4, 2, 1, 1, 20, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("eOff", 1), ("eOn", 2)))).setLabel("remote-upgrade-enable").setMaxAccess("readwrite")
if mibBuilder.loadTexts: remote_upgrade_enable.setStatus('mandatory')
if mibBuilder.loadTexts: remote_upgrade_enable.setDescription('Enables or disables the ability to upgrade the firmware associated with any device that is available to be upgraded. This will be set to on or off at the RFU level in that if this mode is on any available device can be upgraded and if this mode is off none of the devices can be upgraded. There is not an individual mode for each device. Trying to set this to a value other than eOn or eOff will cause an UnsupportedValue error to occur.')
# Register every MIB object defined in this module under the "LJ5200-MIB" name
# so that other pysnmp MIB modules (and the SNMP engine) can import them by
# symbol. This call is auto-generated by pysmi; the keyword ordering is
# arbitrary and carries no meaning.
mibBuilder.exportSymbols("LJ5200-MIB", hold_job_timeout=hold_job_timeout, media4_short_name=media4_short_name, error39_code=error39_code, job_info_attr_10=job_info_attr_10, tray3_media_size_loaded=tray3_media_size_loaded, mio1_type=mio1_type, media3_name=media3_name, held_job_print=held_job_print, media10_name=media10_name, media12_short_name=media12_short_name, error33_time_stamp=error33_time_stamp, job_info_accounting_media_simplex_count=job_info_accounting_media_simplex_count, media_number_of_type_supported=media_number_of_type_supported, consumables_1=consumables_1, error45_code=error45_code, error45_time_stamp=error45_time_stamp, simm2_capacity=simm2_capacity, ram_disk_size=ram_disk_size, default_custom_paper_feed_dim=default_custom_paper_feed_dim, media_size=media_size, error7_time_stamp=error7_time_stamp, consumable_status_info=consumable_status_info, usage_instructions_line2=usage_instructions_line2, media11=media11, current_job_parsing_id=current_job_parsing_id, ram_disk_mode=ram_disk_mode, firmware_download_name=firmware_download_name, file_system_set_system_partition_readonly=file_system_set_system_partition_readonly, consumable_status_drum_life_units=consumable_status_drum_life_units, error5_code=error5_code, model_number=model_number, media9_page_count=media9_page_count, media11_engine_media_mode=media11_engine_media_mode, background_status_msg_line2_part1=background_status_msg_line2_part1, outbin=outbin, processing_subsystem=processing_subsystem, media5_short_name=media5_short_name, printed_modes_usage_total=printed_modes_usage_total, simm2_bank1_type=simm2_bank1_type, error8_code=error8_code, duplex_page_count=duplex_page_count, media16_short_name=media16_short_name, error4_time_stamp=error4_time_stamp, error23_time_stamp=error23_time_stamp, firmware_download_write_status_supported=firmware_download_write_status_supported, media14_page_count=media14_page_count, mio4_model_name=mio4_model_name, 
consumable_life_usage_units_remaining=consumable_life_usage_units_remaining, error21_code=error21_code, simm4_bank1_capacity=simm4_bank1_capacity, job_info_page_count_current_original=job_info_page_count_current_original, simm5_bank=simm5_bank, simm1_bank1_type=simm1_bank1_type, error17_time_stamp=error17_time_stamp, media12=media12, error35=error35, media3_short_name=media3_short_name, held_job_user_name=held_job_user_name, non_assured_oht_page_count=non_assured_oht_page_count, job_info_attr_4=job_info_attr_4, spooler=spooler, intray2=intray2, default_copies=default_copies, media6_short_name=media6_short_name, upgradable_devices_version=upgradable_devices_version, media12_engine_media_mode=media12_engine_media_mode, held_job_job_name=held_job_job_name, error7_code=error7_code, job_info_pages_printed=job_info_pages_printed, menus=menus, usage_printer_total_charge=usage_printer_total_charge, job_input_auto_continue_timeout=job_input_auto_continue_timeout, media19_name=media19_name, control_panel_display_contents_crc=control_panel_display_contents_crc, self_test=self_test, media9_name=media9_name, error49=error49, error33=error33, job_info_accounting_media_size=job_info_accounting_media_size, default_page_protect=default_page_protect, job=job, simm3_capacity=simm3_capacity, media12_name=media12_name, mass_storage_resource_change_counter=mass_storage_resource_change_counter, simm1_bank1=simm1_bank1, usage_instructions_line3=usage_instructions_line3, tray1_media_size_loaded=tray1_media_size_loaded, media3_engine_media_mode=media3_engine_media_mode, job_info_attr_8=job_info_attr_8, error36_code=error36_code, media1_page_count=media1_page_count, consumable_status_page_count_legal=consumable_status_page_count_legal, error37=error37, simm2_bank2_capacity=simm2_bank2_capacity, file_system_set_system_partition_writeable=file_system_set_system_partition_writeable, intray1=intray1, file_system_max_open_files=file_system_max_open_files, media8_name=media8_name, 
status_mass_storage_bd=status_mass_storage_bd, default_vmi=default_vmi, consumable_status_page_count_envelope=consumable_status_page_count_envelope, printer_average_marking_agent_coverage=printer_average_marking_agent_coverage, job_info_accounting_grayscale_impression_count=job_info_accounting_grayscale_impression_count, media4=media4, error40_time_stamp=error40_time_stamp, job_info_accounting_cyan_dots=job_info_accounting_cyan_dots, display=display, simm2_bank2_type=simm2_bank2_type, error1_code=error1_code, job_info_attr_16=job_info_attr_16, simm1=simm1, socket_ping=socket_ping, error20=error20, error38_code=error38_code, outbin1=outbin1, error32_code=error32_code, error42_code=error42_code, simm1_bank2_capacity=simm1_bank2_capacity, error46_code=error46_code, media18_engine_media_mode=media18_engine_media_mode, media5_name=media5_name, job_info_accounting_media_type=job_info_accounting_media_type, printer_accounting=printer_accounting, firmware_download_write_time=firmware_download_write_time, media5_engine_media_mode=media5_engine_media_mode, clearable_warning=clearable_warning, error21_time_stamp=error21_time_stamp, printed_media_duplex_count=printed_media_duplex_count, upgradable_devices_write_time=upgradable_devices_write_time, error44=error44, media6_page_count=media6_page_count, control_panel_button_press=control_panel_button_press, job_info_outcome=job_info_outcome, channel=channel, status_pdl=status_pdl, media14_name=media14_name, error27_code=error27_code, simm3_bank2=simm3_bank2, model_name=model_name, localization_languages_supported=localization_languages_supported, error31_code=error31_code, error46_time_stamp=error46_time_stamp, printer_average=printer_average, simm4_bank2_capacity=simm4_bank2_capacity, job_info_outbins_used=job_info_outbins_used, timestamp=timestamp, pcl_default_font_number=pcl_default_font_number, error14=error14, pcl_total_page_count=pcl_total_page_count, error37_code=error37_code, media7=media7, 
job_info_accounting_yellow_dots=job_info_accounting_yellow_dots, tray2_media_size_loaded=tray2_media_size_loaded, tray3_phd=tray3_phd, media7_name=media7_name, port1_parallel_speed=port1_parallel_speed, tables=tables, background_message1=background_message1, default_media_size=default_media_size, host_application_available_memory=host_application_available_memory, error5=error5, fw_rom_revision=fw_rom_revision, media19_engine_media_mode=media19_engine_media_mode, error40_code=error40_code, usage_instructions_line4=usage_instructions_line4, source_tray_accounting=source_tray_accounting, held_job_security=held_job_security, formatter_serial_number=formatter_serial_number, firmware_download_version=firmware_download_version, print_engine_revision=print_engine_revision, job_info_attr_14=job_info_attr_14, simm4_type=simm4_type, job_output_auto_continue_timeout=job_output_auto_continue_timeout, error4=error4, default_horizontal_black_resolution=default_horizontal_black_resolution, media_info=media_info, error33_code=error33_code, job_info_state=job_info_state, simm=simm, simm2_bank=simm2_bank, error30_time_stamp=error30_time_stamp, default_media_name=default_media_name, media20=media20, media_size_west_edge_side_offset_by_tray=media_size_west_edge_side_offset_by_tray, background_message=background_message, socket_ping_job_events_version=socket_ping_job_events_version, error10_code=error10_code, job_info_attr_1=job_info_attr_1, error41=error41, sleep_mode=sleep_mode, error10_time_stamp=error10_time_stamp, background_message2=background_message2, error39_time_stamp=error39_time_stamp, media9_short_name=media9_short_name, printer_average_marking_agent_coverage_sum_squared=printer_average_marking_agent_coverage_sum_squared, postscript_total_page_count=postscript_total_page_count, date_display=date_display, error24_code=error24_code, settings_job=settings_job, default_custom_paper_xfeed_dim=default_custom_paper_xfeed_dim, media5_page_count=media5_page_count, 
media14_engine_media_mode=media14_engine_media_mode, job_info_physical_outbins_used=job_info_physical_outbins_used, fw_rom_datecode=fw_rom_datecode, consumable_reorder_url=consumable_reorder_url, printer_average_marking_agent_units_per_gram=printer_average_marking_agent_units_per_gram, error24=error24, media10_engine_media_mode=media10_engine_media_mode, DisplayString=DisplayString, upgradable_devices_max_write_count=upgradable_devices_max_write_count, source_tray_usage_total=source_tray_usage_total, settings_pdl=settings_pdl, reprint=reprint, error41_time_stamp=error41_time_stamp, printer_average_marking_agent_coverage_actual=printer_average_marking_agent_coverage_actual, media16_name=media16_name, media6_engine_media_mode=media6_engine_media_mode, consumable_status_page_count_a3_ledger=consumable_status_page_count_a3_ledger, job_info_attr_11=job_info_attr_11, media21_engine_media_mode=media21_engine_media_mode, simm2=simm2, consumable_status_authentication=consumable_status_authentication, media_modes=media_modes, media9_engine_media_mode=media9_engine_media_mode, error_log_clear=error_log_clear, media19_page_count=media19_page_count, error22_time_stamp=error22_time_stamp, job_info_io_source=job_info_io_source, job_info_attr_9=job_info_attr_9, error23_code=error23_code, control_panel_display=control_panel_display, simm5_bank1_type=simm5_bank1_type, input_tray_min_media_xfeed_dim=input_tray_min_media_xfeed_dim, error14_time_stamp=error14_time_stamp, error13_code=error13_code, consumable_status=consumable_status, job_info_attribute=job_info_attribute, settings_io=settings_io, default_bits_per_pixel=default_bits_per_pixel, held_job_info=held_job_info, io_switch=io_switch, tray1_phd=tray1_phd, job_info_attr_12=job_info_attr_12, error11_code=error11_code, destination_bin_usage_total=destination_bin_usage_total, resource_manager=resource_manager, settings_rpc=settings_rpc, error43_code=error43_code, outbins=outbins, consumable_current_state=consumable_current_state, 
default_vertical_black_resolution=default_vertical_black_resolution, outbin1_override_mode=outbin1_override_mode, errorlog=errorlog, media11_page_count=media11_page_count)
mibBuilder.exportSymbols("LJ5200-MIB", error47=error47, control_panel_display_contents_change_counter=control_panel_display_contents_change_counter, job_info_accounting_finishing_options=job_info_accounting_finishing_options, device_configure=device_configure, perm_store_init_occurred=perm_store_init_occurred, media6=media6, error26_code=error26_code, out_marking_agent_processing=out_marking_agent_processing, auto_continue=auto_continue, media17_engine_media_mode=media17_engine_media_mode, marking_agent_density=marking_agent_density, error44_code=error44_code, error22=error22, error15_time_stamp=error15_time_stamp, date_and_time=date_and_time, printed_media_dimplex_count=printed_media_dimplex_count, webserver_proc_sub=webserver_proc_sub, error3_time_stamp=error3_time_stamp, job_info_accounting_color_impression_count=job_info_accounting_color_impression_count, error6_time_stamp=error6_time_stamp, error35_time_stamp=error35_time_stamp, destination_bin_usage_count=destination_bin_usage_count, error29=error29, file_system4=file_system4, media1_engine_media_mode=media1_engine_media_mode, media4_engine_media_mode=media4_engine_media_mode, error14_code=error14_code, consumable_status_first_install_date=consumable_status_first_install_date, mio=mio, localization_countries_supported=localization_countries_supported, job_info_accounting_job_type=job_info_accounting_job_type, simm2_type=simm2_type, error27=error27, job_info_pages_in_original=job_info_pages_in_original, usage_instructions_line1=usage_instructions_line1, error9=error9, error17=error17, error42=error42, media17=media17, media13_engine_media_mode=media13_engine_media_mode, source_subsystem=source_subsystem, upgradable_devices_name=upgradable_devices_name, media16_page_count=media16_page_count, media5=media5, error6=error6, rpc_bind_protocol_address=rpc_bind_protocol_address, media13_short_name=media13_short_name, error25_time_stamp=error25_time_stamp, error26_time_stamp=error26_time_stamp, 
job_info_attr_2=job_info_attr_2, job_info_requested_originals=job_info_requested_originals, consumable_status_capacity_units=consumable_status_capacity_units, ports=ports, pcl_default_font_source=pcl_default_font_source, consumable_status_total_capacity=consumable_status_total_capacity, pml=pml, settings_system=settings_system, settings_prt_eng=settings_prt_eng, error12_time_stamp=error12_time_stamp, settings_marking_agent=settings_marking_agent, consumable_status_serial_number=consumable_status_serial_number, media3=media3, simm1_bank=simm1_bank, simm1_bank2_type=simm1_bank2_type, intray=intray, intrays=intrays, held_job_delete=held_job_delete, input_tray_auto_select=input_tray_auto_select, error31_time_stamp=error31_time_stamp, remote_upgrade_enable=remote_upgrade_enable, printed_media_usage=printed_media_usage, mio1_manufacturing_info=mio1_manufacturing_info, mio4_type=mio4_type, media21_name=media21_name, mio4_manufacturing_info=mio4_manufacturing_info, error42_time_stamp=error42_time_stamp, upgradable_devices_write_status_supported=upgradable_devices_write_status_supported, error28_code=error28_code, settings_file_system=settings_file_system, error18_code=error18_code, error45=error45, error34=error34, consumable_status_oem_name=consumable_status_oem_name, media10=media10, error23=error23, usage_average_toner_coverage=usage_average_toner_coverage, media13_page_count=media13_page_count, energy_star=energy_star, error1_time_stamp=error1_time_stamp, error39=error39, error11=error11, media19_short_name=media19_short_name, test=test, pysmi_continue=pysmi_continue, device_location=device_location, printed_media_simplex_count=printed_media_simplex_count, consumable_status_job_count=consumable_status_job_count, media12_page_count=media12_page_count, simm3=simm3, error9_code=error9_code, job_info_attr_3=job_info_attr_3, usage_staple_count=usage_staple_count, error43=error43, display_status=display_status, io=io, error11_time_stamp=error11_time_stamp, 
overflow_bin=overflow_bin, intray3=intray3, error15=error15, job_info_change_id=job_info_change_id, printed_modes_total_count=printed_modes_total_count, media18_page_count=media18_page_count, simm3_bank1_type=simm3_bank1_type, error34_code=error34_code, consumable_status_engine_usage_units=consumable_status_engine_usage_units, input_tray_max_media_feed_dim=input_tray_max_media_feed_dim, error28=error28, job_info=job_info, consumables_status=consumables_status, error50_time_stamp=error50_time_stamp, held_job_quantity=held_job_quantity, custom_paper_feed_dim=custom_paper_feed_dim, error2_time_stamp=error2_time_stamp, error22_code=error22_code, media1=media1, destination_subsystem=destination_subsystem, simm1_type=simm1_type, tray2_phd=tray2_phd, media17_short_name=media17_short_name, media14=media14, simm3_bank2_capacity=simm3_bank2_capacity, media21_short_name=media21_short_name, simm5_bank2_capacity=simm5_bank2_capacity, error38=error38, print_meter=print_meter, error2_code=error2_code, control_panel_display_graphical_contents=control_panel_display_graphical_contents, error37_time_stamp=error37_time_stamp, port1_parallel_bidirectionality=port1_parallel_bidirectionality, upgradable_devices=upgradable_devices, simm3_bank2_type=simm3_bank2_type, media2_engine_media_mode=media2_engine_media_mode, simm4_capacity=simm4_capacity, simm3_bank1=simm3_bank1, held_job_enable=held_job_enable, media9=media9, consumable_string=consumable_string, error38_time_stamp=error38_time_stamp, printed_media_duplex_charge=printed_media_duplex_charge, low_marking_agent_processing=low_marking_agent_processing, operating_system=operating_system, media_counts=media_counts, job_info_attr_6=job_info_attr_6, firmware_download_current_state=firmware_download_current_state, mass_storage_resources=mass_storage_resources, interface=interface, error32=error32, settings_mass_storage_bd=settings_mass_storage_bd, printed_media_combined_total=printed_media_combined_total, 
custom_paper_xfeed_dim=custom_paper_xfeed_dim, rpc_bound_protocol_address=rpc_bound_protocol_address, simm5_bank1_capacity=simm5_bank1_capacity, media15_name=media15_name, file_system2=file_system2, error15_code=error15_code, error29_time_stamp=error29_time_stamp, media17_page_count=media17_page_count, job_info_name1=job_info_name1, media_size_count=media_size_count, file_system=file_system, simm3_bank1_capacity=simm3_bank1_capacity, media15_short_name=media15_short_name, error16=error16, simm5_bank2=simm5_bank2, media16_engine_media_mode=media16_engine_media_mode, postscript_print_errors=postscript_print_errors, error48_time_stamp=error48_time_stamp, default_lines_per_page=default_lines_per_page, on_off_line=on_off_line, printed_media_simplex_charge=printed_media_simplex_charge, simm5_capacity=simm5_capacity, media1_name=media1_name, maximum_ram_disk_memory=maximum_ram_disk_memory, consumable_status_manufacturer_name=consumable_status_manufacturer_name, printed_media_maximum_pixels_per_page=printed_media_maximum_pixels_per_page, job_info_stage=job_info_stage, printer_average_marking_agent_coverage_sum=printer_average_marking_agent_coverage_sum, simm4=simm4, error40=error40, file_systems=file_systems, default_custom_paper_dim_unit=default_custom_paper_dim_unit, error47_time_stamp=error47_time_stamp, media18=media18, mio1_model_name=mio1_model_name, media8=media8, error21=error21, held_job=held_job, media2_name=media2_name, job_info_attr_7=job_info_attr_7, job_info_accounting_magenta_dots=job_info_accounting_magenta_dots, media15_engine_media_mode=media15_engine_media_mode, mass_storage_block_driver=mass_storage_block_driver, error4_code=error4_code, error12_code=error12_code, error49_time_stamp=error49_time_stamp, active_print_jobs=active_print_jobs, marking_agent_density_setting=marking_agent_density_setting, os_execute_file=os_execute_file, error43_time_stamp=error43_time_stamp, device_name=device_name, media11_short_name=media11_short_name, 
media8_page_count=media8_page_count, held_job_set_queue_size=held_job_set_queue_size, settings_outbin=settings_outbin, simm4_bank=simm4_bank, error28_time_stamp=error28_time_stamp, simm4_bank1=simm4_bank1, error30=error30, media1_short_name=media1_short_name, media15=media15, media3_page_count=media3_page_count, media15_page_count=media15_page_count, settings_intray=settings_intray, time_display=time_display, error27_time_stamp=error27_time_stamp, consumable_status_manufacturing_date=consumable_status_manufacturing_date, consumable_string_information=consumable_string_information, source_tray_usage=source_tray_usage, error17_code=error17_code, media4_page_count=media4_page_count, media_size_west_edge_second_side_offset=media_size_west_edge_second_side_offset, file_system3_initialize_volume=file_system3_initialize_volume, consumables_life=consumables_life, job_info_accounting_media_duplex_count=job_info_accounting_media_duplex_count, engine_self_diagnostic=engine_self_diagnostic, media20_engine_media_mode=media20_engine_media_mode, error13_time_stamp=error13_time_stamp, firmware_download_maximum_write_count=firmware_download_maximum_write_count, simm5_bank2_type=simm5_bank2_type, port1=port1, error16_code=error16_code, error32_time_stamp=error32_time_stamp, device_configure_printer_parameters=device_configure_printer_parameters, mass_storage_resource_changed=mass_storage_resource_changed, media20_page_count=media20_page_count, north_edge_offset=north_edge_offset, device=device, pdl=pdl, id=id, print_engine=print_engine, web_server_security=web_server_security, io_timeout=io_timeout, channelnumberofchannels=channelnumberofchannels, media2_page_count=media2_page_count)
mibBuilder.exportSymbols("LJ5200-MIB", input_tray_min_media_feed_dim=input_tray_min_media_feed_dim, simm3_bank=simm3_bank, channelprinteralert=channelprinteralert, consumable_drum_life_level=consumable_drum_life_level, error24_time_stamp=error24_time_stamp, simm4_bank2=simm4_bank2, media_types=media_types, simm4_bank1_type=simm4_bank1_type, show_address=show_address, media17_name=media17_name, settings_spooler=settings_spooler, media20_name=media20_name, status_system=status_system, error35_code=error35_code, error29_code=error29_code, held_job_pin=held_job_pin, error20_time_stamp=error20_time_stamp, error8_time_stamp=error8_time_stamp, consumable_string_information_reset=consumable_string_information_reset, consumable_status_engine_usage_count=consumable_status_engine_usage_count, file_system_delete_files=file_system_delete_files, error1=error1, job_being_parsed=job_being_parsed, media7_engine_media_mode=media7_engine_media_mode, firmware_download_write_count=firmware_download_write_count, settings_webserver=settings_webserver, media21_page_count=media21_page_count, error18_time_stamp=error18_time_stamp, error8=error8, media6_name=media6_name, page_frame_memory_available=page_frame_memory_available, error7=error7, error31=error31, pdl_pcl=pdl_pcl, simm5_bank1=simm5_bank1, job_input_auto_continue_mode=job_input_auto_continue_mode, error34_time_stamp=error34_time_stamp, media16=media16, media10_short_name=media10_short_name, pdl_postscript=pdl_postscript, job_info_attr_15=job_info_attr_15, job_info_printed_originals=job_info_printed_originals, error6_code=error6_code, printed_media_total_charge=printed_media_total_charge, error5_time_stamp=error5_time_stamp, error19_code=error19_code, job_info_accounting=job_info_accounting, simm5_type=simm5_type, media_names_available=media_names_available, error50=error50, held_job_retention=held_job_retention, status_prt_eng=status_prt_eng, media18_name=media18_name, input_tray_max_media_xfeed_dim=input_tray_max_media_xfeed_dim, 
media2=media2, media14_short_name=media14_short_name, accounting=accounting, mopy_mode=mopy_mode, print_internal_page=print_internal_page, service_id=service_id, remote_procedure_call=remote_procedure_call, consumable_life_usage_units=consumable_life_usage_units, settings_print_media=settings_print_media, media20_short_name=media20_short_name, error48_code=error48_code, media_size_west_edge_first_side_offset=media_size_west_edge_first_side_offset, netPMLmgmt=netPMLmgmt, upgradable_devices_current_state=upgradable_devices_current_state, override_media_name=override_media_name, error3=error3, error47_code=error47_code, media7_short_name=media7_short_name, mio1=mio1, simm4_bank2_type=simm4_bank2_type, marking_agent=marking_agent, consumables=consumables, simm1_capacity=simm1_capacity, file_system3=file_system3, error46=error46, cancel_job=cancel_job, consumable_status_cartridge_model=consumable_status_cartridge_model, error9_time_stamp=error9_time_stamp, asset_number=asset_number, engine_media_modes_supported1=engine_media_modes_supported1, media13=media13, destination_bin_usage=destination_bin_usage, consumable_status_page_count_xy_other=consumable_status_page_count_xy_other, file_system4_initialize_volume=file_system4_initialize_volume, simm1_bank1_capacity=simm1_bank1_capacity, install_date=install_date, media18_short_name=media18_short_name, held_job_control=held_job_control, error49_code=error49_code, destination_bin_accounting=destination_bin_accounting, media10_page_count=media10_page_count, source_tray_usage_count=source_tray_usage_count, error26=error26, error13=error13, error20_code=error20_code, media7_page_count=media7_page_count, print_density=print_density, form_feed=form_feed, pcl_default_font_width=pcl_default_font_width, media4_name=media4_name, media8_short_name=media8_short_name, error2=error2, job_info_attr_5=job_info_attr_5, media13_name=media13_name, printed_modes_accounting=printed_modes_accounting, 
pcl_default_font_height=pcl_default_font_height, simm2_bank1_capacity=simm2_bank1_capacity, printed_modes_usage=printed_modes_usage, media11_name=media11_name, error12=error12, error30_code=error30_code, serial_number=serial_number, job_info_size=job_info_size, consumable_life_low_threshold=consumable_life_low_threshold, error19=error19, job_info_accounting_black_dots=job_info_accounting_black_dots, error25=error25, pjl=pjl, override_media_size=override_media_size, consumable_status_page_count_b5_executive=consumable_status_page_count_b5_executive, error19_time_stamp=error19_time_stamp, simm2_bank2=simm2_bank2, error50_code=error50_code, job_info_name2=job_info_name2, file_system2_initialize_volume=file_system2_initialize_volume, error44_time_stamp=error44_time_stamp, consumable_status_last_use_date=consumable_status_last_use_date, error41_code=error41_code, media19=media19, mio4=mio4, job_info_attr_13=job_info_attr_13, simm2_bank1=simm2_bank1, form_feed_needed=form_feed_needed, error48=error48, consumable_status_usage_units=consumable_status_usage_units, job_info_pages_processed=job_info_pages_processed, error36=error36, collated_originals_support=collated_originals_support, status_rpc=status_rpc, upgradable_devices_write_count=upgradable_devices_write_count, simm1_bank2=simm1_bank2, consumable_status_usage_count=consumable_status_usage_count, consumable_status_page_count_a4_letter=consumable_status_page_count_a4_letter, hp=hp, media21=media21, simm3_type=simm3_type, error16_time_stamp=error16_time_stamp, cold_reset_media_size=cold_reset_media_size, firmware_download=firmware_download, error36_time_stamp=error36_time_stamp, device_system=device_system, error10=error10, print_media=print_media, simm5=simm5, consumable_status_drum_life=consumable_status_drum_life, media8_engine_media_mode=media8_engine_media_mode, error18=error18, background_status_msg_line1_part1=background_status_msg_line1_part1, media2_short_name=media2_short_name, error25_code=error25_code, 
error3_code=error3_code)
| 239.682987 | 10,042 | 0.772772 |
ace34ee7ebe917a8b71983cb38792fdfac9cec52 | 6,974 | py | Python | asposewordscloud/models/requests/insert_run_online_request.py | aspose-words-cloud/aspose-words-cloud-python | 65c7b55fa4aac69b60d41e7f54aed231df285479 | [
"MIT"
] | 14 | 2018-07-15T17:01:52.000Z | 2018-11-29T06:15:33.000Z | asposewordscloud/models/requests/insert_run_online_request.py | aspose-words-cloud/aspose-words-cloud-python | 65c7b55fa4aac69b60d41e7f54aed231df285479 | [
"MIT"
] | 1 | 2018-09-28T12:59:34.000Z | 2019-10-08T08:42:59.000Z | asposewordscloud/models/requests/insert_run_online_request.py | aspose-words-cloud/aspose-words-cloud-python | 65c7b55fa4aac69b60d41e7f54aed231df285479 | [
"MIT"
] | 2 | 2020-12-21T07:59:17.000Z | 2022-02-16T21:41:25.000Z | # coding: utf-8
# -----------------------------------------------------------------------------------
# <copyright company="Aspose" file="insert_run_online_request.py">
# Copyright (c) 2021 Aspose.Words for Cloud
# </copyright>
# <summary>
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# </summary>
# -----------------------------------------------------------------------------------
import json
from six.moves.urllib.parse import quote
from asposewordscloud import *
from asposewordscloud.models import *
from asposewordscloud.models.requests import *
from asposewordscloud.models.responses import *
class InsertRunOnlineRequest(BaseRequestObject):
    """
    Request model for insert_run_online operation.
    Initializes a new instance.
    :param document The document.
    :param paragraph_path The path to the paragraph in the document tree.
    :param run Run data.
    :param load_encoding Encoding that will be used to load an HTML (or TXT) document if the encoding is not specified in HTML.
    :param password Password for opening an encrypted document.
    :param dest_file_name Result path of the document after the operation. If this parameter is omitted then result of the operation will be saved as the source document.
    :param revision_author Initials of the author to use for revisions. If you set this parameter and then make some changes to the document programmatically, save the document and later open the document in MS Word you will see these changes as revisions.
    :param revision_date_time The date and time to use for revisions.
    :param insert_before_node The index of the node. A new Run object will be inserted before the node with the specified node Id.
    """

    def __init__(self, document, paragraph_path, run, load_encoding=None, password=None, dest_file_name=None, revision_author=None, revision_date_time=None, insert_before_node=None):
        # Plain attribute storage; validation happens in create_http_request().
        self.document = document
        self.paragraph_path = paragraph_path
        self.run = run
        self.load_encoding = load_encoding
        self.password = password
        self.dest_file_name = dest_file_name
        self.revision_author = revision_author
        self.revision_date_time = revision_date_time
        self.insert_before_node = insert_before_node

    def create_http_request(self, api_client):
        """Build the transport-level request dict for this operation.

        Validates the three mandatory inputs, renders the URL path, collects
        the optional query parameters and assembles the multipart form body.
        :param api_client: client object supplying serialization helpers and
            configuration (safe path characters, header negotiation).
        :raises ValueError: if document, paragraph_path or run is missing.
        """
        # Fail fast on the mandatory parameters, in the original check order.
        for required in ('document', 'paragraph_path', 'run'):
            if getattr(self, required) is None:
                raise ValueError("Missing the required parameter `{0}` when calling `insert_run_online`".format(required))  # noqa: E501

        path = '/v4.0/words/online/post/{paragraphPath}/runs'
        path_params = {}
        path_params['paragraphPath'] = self.paragraph_path if self.paragraph_path is not None else ''
        collection_formats = {}
        if path_params:
            path_params = api_client.sanitize_for_serialization(path_params)
            path_params = api_client.parameters_to_tuples(path_params, collection_formats)
            for param_name, param_value in path_params:
                # Percent-encode everything except the client's configured safe characters.
                encoded = quote(str(param_value), safe=api_client.configuration.safe_chars_for_path_param)
                path = path.replace('{%s}' % param_name, encoded)
        # Collapse double slashes left behind by empty path parameters.
        path = path.replace('//', '/')

        # Optional query parameters: only the ones that were actually supplied.
        candidates = (
            ('loadEncoding', self.load_encoding),
            ('password', self.password),
            ('destFileName', self.dest_file_name),
            ('revisionAuthor', self.revision_author),
            ('revisionDateTime', self.revision_date_time),
            ('insertBeforeNode', self.insert_before_node),
        )
        query_params = [(key, value) for key, value in candidates if value is not None]

        header_params = {}
        # This operation always uploads its payload as multipart form data.
        header_params['Content-Type'] = api_client.select_header_content_type(['multipart/form-data'])

        form_params = []
        if self.document is not None:
            form_params.append(['document', self.document, 'file'])
        if self.run is not None:
            form_params.append(['run', self.run.to_json(), 'string'])

        return {
            "method": "PUT",
            "path": path,
            "query_params": query_params,
            "header_params": header_params,
            "form_params": form_params,
            "body": None,
            "collection_formats": collection_formats,
            "response_type": 'InsertRunOnlineResponse'
        }

    def get_response_type(self):
        """Name of the response model the API client deserializes into."""
        return 'InsertRunOnlineResponse'

    def deserialize_response(self, api_client, response):
        """Decode the multipart response: part 0 is the model JSON, part 1 the saved file."""
        parts = self.getparts(response)
        model = self.deserialize(json.loads(parts[0].text), RunResponse, api_client)
        saved_file = self.deserialize_file(parts[1].content, parts[1].headers, api_client)
        return InsertRunOnlineResponse(model, saved_file)
| 50.536232 | 255 | 0.67006 |
ace34f09fb8ce056843c00fe658ed94941ca4e69 | 4,327 | py | Python | nlphug/docsimilarity.py | readall/mlgitpod | 91e388f32f4a461153094cfbf1cfdbab1f7eb199 | [
"Apache-2.0"
] | 1 | 2021-07-13T07:13:32.000Z | 2021-07-13T07:13:32.000Z | nlphug/docsimilarity.py | readall/mlgitpod | 91e388f32f4a461153094cfbf1cfdbab1f7eb199 | [
"Apache-2.0"
] | null | null | null | nlphug/docsimilarity.py | readall/mlgitpod | 91e388f32f4a461153094cfbf1cfdbab1f7eb199 | [
"Apache-2.0"
] | null | null | null | import pandas as pd
import numpy as np
import nltk
nltk.download('stopwords', download_dir='/workspace/conda/hugface/nltk_data')
from nltk.corpus import stopwords
import re
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.metrics.pairwise import cosine_similarity
from sklearn.metrics.pairwise import euclidean_distances
from sentence_transformers import SentenceTransformer
from transformers import AutoTokenizer, AutoModel
import torch
# import docx2txt
# # extract text
# text = docx2txt.process("file.docx")
# nltk.download('stopwords', download_dir='/workspace/data/nltk/')
documents = ['Machine learning is the study of computer algorithms that improve automatically through experience.\
Machine learning algorithms build a mathematical model based on sample data, known as training data.\
The discipline of machine learning employs various approaches to teach computers to accomplish tasks \
where no fully satisfactory algorithm is available.',
'Machine learning is closely related to computational statistics, which focuses on making predictions using computers.\
The study of mathematical optimization delivers methods, theory and application domains to the field of machine learning.',
'Machine learning involves computers discovering how they can perform tasks without being explicitly programmed to do so. \
It involves computers learning from data provided so that they carry out certain tasks.',
'Machine learning approaches are traditionally divided into three broad categories, depending on the nature of the "signal"\
or "feedback" available to the learning system: Supervised, Unsupervised and Reinforcement',
'Software engineering is the systematic application of engineering approaches to the development of software.\
Software engineering is a computing discipline.',
'A software engineer creates programs based on logic for the computer to execute. A software engineer has to be more concerned\
about the correctness of the program in all the cases. Meanwhile, a data scientist is comfortable with uncertainty and variability.\
Developing a machine learning application is more iterative and explorative process than software engineering.'
]
# Wrap the raw corpus in a one-column DataFrame so the cleaned text can live
# alongside it under a shared index.
documents_df=pd.DataFrame(documents,columns=['documents'])
# removing special characters and stop words from the text
# (per word: strip non-letters, lowercase, and drop English stop words; note
# the regex substitution is evaluated twice per word — once to filter, once to keep)
stop_words_l=stopwords.words('english')
documents_df['documents_cleaned']=documents_df.documents.apply(lambda x: " ".join(re.sub(r'[^a-zA-Z]',' ',w).lower() for w in x.split() if re.sub(r'[^a-zA-Z]',' ',w).lower() not in stop_words_l) )
#Mean Pooling - Take attention mask into account for correct averaging
def mean_pooling(model_output, attention_mask):
    """Masked mean over the token axis of a transformer's output.

    :param model_output: model forward result; element 0 is assumed to hold
        the per-token embeddings of shape (batch, tokens, hidden), as in
        Hugging Face model outputs — TODO confirm against the caller.
    :param attention_mask: (batch, tokens) tensor with 1 for real tokens and
        0 for padding.
    :return: (batch, hidden) tensor of per-sequence mean embeddings that
        ignore padded positions.
    """
    embeddings = model_output[0]
    # Stretch the mask across the hidden dimension so it weights every feature.
    weights = attention_mask.unsqueeze(-1).expand(embeddings.size()).float()
    weighted_sum = (embeddings * weights).sum(dim=1)
    # Clamp the token counts so fully-padded rows divide by ~0 instead of 0.
    token_counts = weights.sum(dim=1).clamp(min=1e-9)
    return weighted_sum / token_counts
def most_similar(doc_id, similarity_matrix, matrix):
    """Print all documents ranked by similarity to the document at ``doc_id``.

    Relies on the module-level ``documents_df`` DataFrame for the document
    texts.
    :param doc_id: integer row index of the query document.
    :param similarity_matrix: square matrix of pairwise scores between documents.
    :param matrix: 'Cosine Similarity' (higher is more similar) or
        'Euclidean Distance' (lower is more similar).
    :raises ValueError: for any other ``matrix`` value. (Previously an
        unrecognized value fell through both branches and the loop crashed
        with NameError on the unbound ``similar_ix``.)
    """
    print(f'Document: {documents_df.iloc[doc_id]["documents"]}')
    print('\n')
    print(f'Similar Documents using {matrix}:')
    if matrix == 'Cosine Similarity':
        # Highest similarity first.
        similar_ix = np.argsort(similarity_matrix[doc_id])[::-1]
    elif matrix == 'Euclidean Distance':
        # Smallest distance first.
        similar_ix = np.argsort(similarity_matrix[doc_id])
    else:
        raise ValueError(f"Unknown matrix type {matrix!r}: expected 'Cosine Similarity' or 'Euclidean Distance'")
    for ix in similar_ix:
        if ix == doc_id:
            continue  # skip the query document itself
        print('\n')
        print(f'Document: {documents_df.iloc[ix]["documents"]}')
        print(f'{matrix} : {similarity_matrix[doc_id][ix]}')
# Sentences we want sentence embeddings for; later, replace this with text from the job and the resume.
# NOTE(review): `sentences` is never used below — presumably a leftover from an
# embedding example; confirm before removing.
sentences = ['This is an example sentence', 'Each sentence is converted']
# sbert_model = SentenceTransformer('bert-base-nli-mean-tokens')
# Sentence-BERT encoder: produces one dense vector per cleaned document.
sbert_model = SentenceTransformer('paraphrase-mpnet-base-v2')
document_embeddings = sbert_model.encode(documents_df['documents_cleaned'])
# Square pairwise matrices over the corpus: cosine similarity (higher = more
# similar) and Euclidean distance (lower = more similar).
pairwise_similarities = cosine_similarity(document_embeddings)
pairwise_differences = euclidean_distances(document_embeddings)
print(pairwise_similarities)
print("##"*50)
print(pairwise_differences)
print("##"*50)
# Rank every other document against document 0 under each metric.
most_similar(0,pairwise_differences,'Euclidean Distance')
print("##"*50)
most_similar(0,pairwise_similarities,'Cosine Similarity')
print("##"*50)
| 50.313953 | 196 | 0.783684 |
ace34fe441667e2bf67ef1c105b0a8066eda0d36 | 3,331 | py | Python | tests/attention/test_aft_attention.py | idiap/fast-transformers | f22c13716fc748bb21a7b226ada7f7b5f87f867f | [
"MIT"
] | 1,171 | 2020-06-30T01:57:19.000Z | 2022-03-31T15:11:25.000Z | tests/attention/test_aft_attention.py | idiap/fast-transformers | f22c13716fc748bb21a7b226ada7f7b5f87f867f | [
"MIT"
] | 105 | 2020-06-30T14:40:56.000Z | 2022-02-08T16:31:45.000Z | tests/attention/test_aft_attention.py | idiap/fast-transformers | f22c13716fc748bb21a7b226ada7f7b5f87f867f | [
"MIT"
] | 127 | 2020-06-26T09:07:48.000Z | 2022-03-25T06:46:37.000Z | #
# Copyright (c) 2020 Idiap Research Institute, http://www.idiap.ch/
# Written by Angelos Katharopoulos <angelos.katharopoulos@idiap.ch>
#
import os
import time
import unittest
import torch
from fast_transformers.masking import FullMask, LengthMask
from fast_transformers.attention.aft_attention import AFTFullAttention, \
AFTSimpleAttention
class TestAFTAttention(unittest.TestCase):
    """Unit and benchmark tests for the AFT (Attention Free Transformer) attention modules."""
    def _get_inputs(self, N=10, L=5, S=8, H=4, E=32, D=32, device="cpu"):
        """Return random (queries, keys, values) plus unmasked attention/length masks.

        N: batch, L: query length, S: key/value length, H: heads,
        E: query/key feature dim, D: value feature dim.
        """
        return (
            torch.rand(N, L, H, E).to(device),
            torch.rand(N, S, H, E).to(device),
            torch.rand(N, S, H, D).to(device),
            FullMask(L, S, device=device),
            FullMask(N, L, device=device),
            FullMask(N, S, device=device)
        )
    def test_forward(self):
        """Both AFT variants should run a forward pass and return contiguous output."""
        att = AFTFullAttention()
        q, k, v, m1, m2, m3 = self._get_inputs()
        v = att(q, k, v, m1, m2, m3)
        self.assertTrue(v.is_contiguous())
        att = AFTSimpleAttention()
        q, k, v, m1, m2, m3 = self._get_inputs()
        v = att(q, k, v, m1, m2, m3)
        self.assertTrue(v.is_contiguous())
    def test_masking(self):
        """AFTFull accepts an arbitrary attention mask; AFTSimple rejects it
        (ValueError) but accepts a lower-triangular (causal) mask."""
        q, k, v, m1, m2, m3 = self._get_inputs()
        # Random boolean mask: legal for the full variant only.
        m1 = FullMask(torch.rand(5, 8) > 0.5)
        att = AFTFullAttention()
        v = att(q, k, v, m1, m2, m3)
        att = AFTSimpleAttention()
        with self.assertRaises(ValueError):
            v = att(q, k, v, m1, m2, m3)
        # Causal (lower-triangular) mask is supported by the simple variant.
        q, k, v, m1, m2, m3 = self._get_inputs(L=8, S=8)
        m1 = FullMask(torch.tril(torch.ones(8, 8, dtype=torch.bool)))
        v = att(q, k, v, m1, m2, m3)
    @unittest.skipUnless(os.getenv("BENCHMARK_TESTS", ""), "no benchmarks")
    def test_benchmark_cpu(self):
        """Wall-clock CPU benchmark; only runs when BENCHMARK_TESTS is set."""
        q, k, v, m1, m2, m3 = self._get_inputs(L=256, S=256, E=64, D=64)
        att_full = AFTFullAttention()
        att_simple = AFTSimpleAttention()
        for name, att in zip(["full", "simple"], [att_full, att_simple]):
            # warmup the cache
            for i in range(10):
                v_new = att(q, k, v, m1, m2, m3)
            # measure
            start = time.time()
            for i in range(10):
                v_new = att(q, k, v, m1, m2, m3)
            end = time.time()
            print("AFT", name, "CPU Time taken:", (end-start)*1000, "(ms)")
    @unittest.skipUnless(torch.cuda.is_available(), "no CUDA capable device")
    @unittest.skipUnless(os.getenv("BENCHMARK_TESTS", ""), "no benchmarks")
    def test_benchmark_gpu(self):
        """CUDA-event benchmark; needs a GPU and BENCHMARK_TESTS set.
        NOTE(review): only AFTFullAttention is moved to CUDA — presumably
        AFTSimpleAttention holds no parameters; verify against the module."""
        q, k, v, m1, m2, m3 = self._get_inputs(L=256, S=256, E=64, D=64,
                                               device="cuda")
        att_full = AFTFullAttention().cuda()
        att_simple = AFTSimpleAttention()
        for name, att in zip(["full", "simple"], [att_full, att_simple]):
            # warmup the caches
            for i in range(10):
                v_new = att(q, k, v, m1, m2, m3)
            # measure
            start = torch.cuda.Event(enable_timing=True)
            end = torch.cuda.Event(enable_timing=True)
            start.record()
            for i in range(10):
                v_new = att(q, k, v, m1, m2, m3)
            end.record()
            torch.cuda.synchronize()
            print("AFT", name, "GPU time taken:", start.elapsed_time(end), "(ms)")
if __name__ == "__main__":
    # Run the whole test case when the module is executed directly.
    unittest.main()
| 33.989796 | 82 | 0.552086 |
ace352068c698a4c7aa5051208d625060f5a7325 | 1,520 | py | Python | alembic/versions/eb5ad7587e4e_refactor_permissions.py | Yashvi-Sharma/skyportal | 38f0c7b93f36e8a93e3934a2d9999c305a6b16ac | [
"BSD-3-Clause"
] | null | null | null | alembic/versions/eb5ad7587e4e_refactor_permissions.py | Yashvi-Sharma/skyportal | 38f0c7b93f36e8a93e3934a2d9999c305a6b16ac | [
"BSD-3-Clause"
] | null | null | null | alembic/versions/eb5ad7587e4e_refactor_permissions.py | Yashvi-Sharma/skyportal | 38f0c7b93f36e8a93e3934a2d9999c305a6b16ac | [
"BSD-3-Clause"
] | null | null | null | """refactor permissions
Revision ID: eb5ad7587e4e
Revises: 5f2ba5579a79
Create Date: 2020-12-07 20:02:36.221787
"""
from alembic import op
import sqlalchemy as sa
from baselayer.app.models import JoinModel
# revision identifiers, used by Alembic.
revision = 'eb5ad7587e4e'
down_revision = '5f2ba5579a79'
branch_labels = None
depends_on = None
# This migration adds an integer primary key to the join tables
# The affected join tables are discovered dynamically: every subclass of
# JoinModel is migrated by upgrade()/downgrade() below.
mapped_classes = JoinModel.__subclasses__()
def upgrade():
    """Add a surrogate integer primary key to every join table.

    The previous composite primary key (the column pair) is kept as a
    unique index.  The new ``id`` key is not referenced by any foreign
    key, so it can be dropped again safely on downgrade.
    """
    for mapped_class in mapped_classes:
        table_name = mapped_class.__tablename__
        index_name = f'{table_name}_forward_ind'
        column_names = getattr(mapped_class, index_name).columns.keys()
        op.execute(f'ALTER TABLE {table_name} DROP CONSTRAINT "{table_name}_pkey"')
        op.add_column(table_name, sa.Column('id', sa.Integer, primary_key=True))
        op.create_index(index_name, table_name, column_names, unique=True)
def downgrade():
    """Restore the composite 2-column primary key on every join table and
    drop the surrogate ``id`` column added by :func:`upgrade`."""
    for mapped_class in mapped_classes:
        table_name = mapped_class.__tablename__
        index_name = f'{table_name}_forward_ind'
        column_names = getattr(mapped_class, index_name).columns.keys()
        op.drop_index(index_name)
        op.drop_column(table_name, 'id')
        op.create_primary_key(f'{table_name}_pkey', table_name, column_names)
| 32.340426 | 77 | 0.715789 |
ace35210124caaaff22168fe286027299ffcfd15 | 9,335 | py | Python | halo_bian/bian/util.py | halo-framework/halo-bian | 38fe577bf3e5e0fe57f5e3eb5809d609e3f880ac | [
"MIT"
] | null | null | null | halo_bian/bian/util.py | halo-framework/halo-bian | 38fe577bf3e5e0fe57f5e3eb5809d609e3f880ac | [
"MIT"
] | null | null | null | halo_bian/bian/util.py | halo-framework/halo-bian | 38fe577bf3e5e0fe57f5e3eb5809d609e3f880ac | [
"MIT"
] | null | null | null |
import logging
import json
from flask_filter.schemas import FilterSchema
from halo_app.app.cmd_assembler import CmdAssemblerFactory
from halo_app.const import OPType
from halo_app.app.command import DictHaloCommand
from halo_app.app.request import AbsHaloRequest
from halo_app.classes import AbsBaseClass
from halo_app.logs import log_json
from halo_app.reflect import Reflect
from halo_bian.bian.bian import ActionTerms, FunctionalPatterns, BehaviorQualifierType
from halo_bian.bian.app.command import DictBianCommand
from halo_bian.bian.domain.event import AbsBianEvent
from halo_bian.bian.app.context import BianContext, BianCtxFactory
from halo_bian.bian.exceptions import IllegalActionTermException, IllegalBQException, BehaviorQualifierNameException, \
FunctionalPatternNameException, BianRequestActionException, ActionTermFailException
from halo_bian.bian.app.request import BianCommandRequest, BianEventRequest, BianQueryRequest
from halo_app.settingsx import settingsx
from halo_bian.bian.app.query import BianQuery
from halo_app.app.query_filters import Filter
settings = settingsx()  # Application settings accessor (halo_app.settingsx).
logger = logging.getLogger(__name__)  # Module-level logger.
class BianUtil(AbsBaseClass):
    """Static helpers translating raw request variables into BIAN request
    objects and resolving BIAN naming conventions (action terms, behavior
    qualifiers, functional patterns)."""

    @classmethod
    def create_bian_request(cls,bian_context:BianContext, usecase_id:str, vars:dict,action: ActionTerms=None,op_type:OPType=OPType.COMMAND) -> AbsHaloRequest:
        """Build a BianCommandRequest or BianQueryRequest from *vars*.

        The action term is taken from *action* or derived from the
        use-case id prefix; RETRIEVE is routed to the query path unless
        settings.COMMANDS_ONLY forces command mode.
        Raises IllegalActionTermException for an unknown action term.
        """
        logger.debug("in bian_validate_req " + str(action) + " vars=" + str(vars))
        if action:
            action_term = action
        else:
            # Derive the action term from the use-case id prefix, e.g. "retrieve_...".
            action_term = cls.set_action(usecase_id)
        if action_term not in ActionTerms.ops:
            raise IllegalActionTermException(action_term)
        if action_term == ActionTerms.RETRIEVE:
            # RETRIEVE is normally served by the query path...
            op_type = OPType.QUERY
            if settings.COMMANDS_ONLY:
                # ...unless the deployment handles everything as commands.
                op_type = OPType.COMMAND
        sd_reference_id = None
        cr_reference_id = None
        behavior_qualifier_type = None  # NOTE(review): assigned but never used below.
        behavior_qualifier = None
        bq_reference_id = None
        sub_qualifiers = None
        collection_filter = None
        body = None
        if "sd_reference_id" in vars:
            sd_reference_id = vars["sd_reference_id"]
        if "cr_reference_id" in vars:
            cr_reference_id = vars["cr_reference_id"]
        if "behavior_qualifier" in vars:
            # Validates the qualifier name against the configured qualifier set.
            behavior_qualifier = cls.get_behavior_qualifier(action_term, vars["behavior_qualifier"])
        if "bq_reference_id" in vars:
            bq_reference_id = vars["bq_reference_id"]
            #behavior_qualifier = cls.get_behavior_qualifier_from_path(action_term,bq_reference_id)
        if "sbq_reference_id" in vars:
            # NOTE(review): get_sub_qualifiers is not defined in this class;
            # confirm it is provided by a subclass, otherwise this raises
            # AttributeError at runtime.
            sub_qualifiers = cls.get_sub_qualifiers(behavior_qualifier, vars)
        if "collection_filter" in vars:
            collection_filter = vars["collection_filter"]
        if "body" in vars:
            body = vars["body"]
        if op_type == OPType.COMMAND:
            #bian_command = DictBianCommand(method_id, vars,action_term)
            cmd_assembler = CmdAssemblerFactory.get_assembler_by_method_id(usecase_id)
            bian_command = cmd_assembler.write_cmd_for_method(usecase_id, vars,action_term)
            bian_command.action_term = action_term
            request = BianCommandRequest(bian_context,bian_command,action_term,sd_reference_id=sd_reference_id, cr_reference_id=cr_reference_id, bq_reference_id=bq_reference_id, behavior_qualifier=behavior_qualifier,body=body,sub_qualifiers=sub_qualifiers)
        else:
            bian_query = BianQuery(usecase_id, vars,action_term)
            request = BianQueryRequest(bian_context,bian_query,action_term,sd_reference_id=sd_reference_id, cr_reference_id=cr_reference_id, bq_reference_id=bq_reference_id, behavior_qualifier=behavior_qualifier,collection_filter=collection_filter,sub_qualifiers=sub_qualifiers)
        return request

    @classmethod
    def get_behavior_qualifier(cls, op, bq_name):
        """Return *bq_name* if it matches a configured behavior qualifier
        (dashes/spaces normalized to underscores); raise IllegalBQException
        otherwise.  *op* is currently unused."""
        bqt_obj = cls.get_behavior_qualifier_type()
        for bq_id in bqt_obj.keys():
            bq_obj = bqt_obj.get(bq_id)
            if bq_obj.name == bq_name.strip().replace("-","_").replace(" ","_"):
                return bq_name
        raise IllegalBQException(bq_name)

    @classmethod
    def get_behavior_qualifier_type(cls):
        """Return the configured BehaviorQualifierType instance, or raise if
        no BEHAVIOR_QUALIFIER is defined in the settings."""
        if settings.BEHAVIOR_QUALIFIER:
            return cls.get_bq_obj()
        else:
            raise BehaviorQualifierNameException("missing Behavior Qualifier definition")

    @classmethod
    def get_bq_obj(cls):
        """Instantiate the behavior qualifier class associated with the
        configured functional pattern."""
        if settings.FUNCTIONAL_PATTERN:
            functional_pattern = settings.FUNCTIONAL_PATTERN
        else:
            raise FunctionalPatternNameException("missing Functional Pattern definition")
        # patterns maps pattern name -> (..., qualifier class name, ...).
        bq_class = FunctionalPatterns.patterns[functional_pattern][1]
        bq_obj = cls.init_bq(bq_class)
        return bq_obj

    @classmethod
    def init_bq(cls, bq_class_name):
        """Reflectively instantiate the behavior qualifier type, either from
        the dotted path in settings.BEHAVIOR_QUALIFIER_TYPE or from the
        default module halo_bian.bian.bian."""
        if settings.BEHAVIOR_QUALIFIER_TYPE:
            # Split "package.module.ClassName" at the last dot.
            k = settings.BEHAVIOR_QUALIFIER_TYPE.rfind(".")
            module_name = settings.BEHAVIOR_QUALIFIER_TYPE[:k]
            class_name = settings.BEHAVIOR_QUALIFIER_TYPE[k+1:]
        else:
            module_name = "halo_bian.bian.bian"
            class_name = bq_class_name
        return Reflect.do_instantiate(module_name, class_name, BehaviorQualifierType,settings.BEHAVIOR_QUALIFIER,settings.SUB_QUALIFIER)

    @classmethod
    def set_action(cls,method_id:str)->ActionTerms:
        """Map a use-case/method id prefix ("retrieve_", "create_", ...) to
        its BIAN action term; raise IllegalActionTermException otherwise."""
        if method_id.startswith("retrieve_"):
            return ActionTerms.RETRIEVE
        if method_id.startswith("control_"):
            return ActionTerms.CONTROL
        if method_id.startswith("request_"):
            return ActionTerms.REQUEST
        if method_id.startswith("initiate_"):
            return ActionTerms.INITIATE
        if method_id.startswith("execute_"):
            return ActionTerms.EXECUTE
        if method_id.startswith("capture_"):
            return ActionTerms.CAPTURE
        if method_id.startswith("create_"):
            return ActionTerms.CREATE
        if method_id.startswith("evaluate_"):
            return ActionTerms.EVALUATE
        if method_id.startswith("exchange_"):
            return ActionTerms.EXCHANGE
        if method_id.startswith("grant_"):
            return ActionTerms.GRANT
        if method_id.startswith("provide_"):
            return ActionTerms.PROVIDE
        if method_id.startswith("register_"):
            return ActionTerms.REGISTER
        if method_id.startswith("update_"):
            return ActionTerms.UPDATE
        if method_id.startswith("notify_"):
            return ActionTerms.NOTIFY
        raise IllegalActionTermException(method_id)

    @classmethod
    def get_headers(cls, response):
        """Extract from the request context the headers that are declared
        for this method in settings.METHOD_HEADERS."""
        headers = {}
        if response:
            if response.request:
                if settings.METHOD_HEADERS and response.request.method_id in settings.METHOD_HEADERS:
                    method_headers = settings.METHOD_HEADERS[response.request.method_id]
                    for h in response.request.context.keys():
                        val = response.request.context.get(h)
                        if h in method_headers:
                            headers[h] = val
        return headers

    @classmethod
    def get_flask_filters(cls,collection_filter):
        """Parse a JSON collection filter (single object or list) with
        flask_filter's FilterSchema and convert it to app-level filters.
        Errors are logged and re-raised."""
        filter_schema = FilterSchema()
        try:
            collection_filter_json = json.loads(collection_filter)
            # A top-level "field" key means a single filter, not a list.
            if "field" in collection_filter_json:
                many = False
            else:
                many = True
            filters = filter_schema.load(collection_filter_json, many=many)
            if not many:
                filters = [filters]
            return cls.get_app_filters(filters)
        except Exception as e: # {'_schema': ['Invalid input type.']}
            logger.error("Encountered a request error in filter: " + str(e))
            raise e

    @classmethod
    def get_app_filters(cls,collection_filters):
        """Convert flask_filter filter objects into halo_app Filter objects."""
        arr = []
        for f in collection_filters:
            filter = Filter(f.field, f.OP, f.value)
            arr.append(filter)
        return arr

    @classmethod
    def get_bian_vars(cls,vars):
        """Return a sanitized copy of *vars* keeping only the BIAN-relevant
        keys, with collection filters parsed into filter objects."""
        varsx = {}
        if 'collection_filter' in vars or 'queryparams' in vars:
            try:
                if 'collection_filter' in vars:
                    item = vars['collection_filter']
                    if item:
                        collection_filter = cls.get_flask_filters(vars['collection_filter'])
                        varsx['collection_filter'] = collection_filter
                if 'queryparams' in vars:
                    item = vars['queryparams']
                    if item:
                        queryparams = cls.get_flask_filters(vars['queryparams'])
                        varsx['queryparams'] = queryparams
            except Exception as e:
                raise e
        if 'sd_reference_id' in vars:
            varsx['sd_reference_id'] = vars['sd_reference_id']
        if 'cr_reference_id' in vars:
            varsx['cr_reference_id'] = vars['cr_reference_id']
        if 'bq_reference_id' in vars:
            varsx['bq_reference_id'] = vars['bq_reference_id']
        if 'behavior_qualifier' in vars:
            varsx['behavior_qualifier'] = vars['behavior_qualifier']
        if 'body' in vars:
            varsx['body'] = vars['body']
        # NOTE(review): the trailing "| 44.033019 | 278 | 0.665024 |" below is
        # dataset-extraction residue fused onto the last line, not code.
        return varsx | 44.033019 | 278 | 0.665024 |
ace353c458bb75f1fa05344a4d4220b8d0538425 | 538 | py | Python | linear_regression_readline/code/code_length_10000_negative1/lr_ds10.py | rodonguyen/vres_code_2021 | cb49d941db4dfc5137e887b195f403fb4262cfd8 | [
"MIT"
] | null | null | null | linear_regression_readline/code/code_length_10000_negative1/lr_ds10.py | rodonguyen/vres_code_2021 | cb49d941db4dfc5137e887b195f403fb4262cfd8 | [
"MIT"
] | null | null | null | linear_regression_readline/code/code_length_10000_negative1/lr_ds10.py | rodonguyen/vres_code_2021 | cb49d941db4dfc5137e887b195f403fb4262cfd8 | [
"MIT"
] | null | null | null | from sklearn import linear_model
# Load n (x, y) pairs from the CSV file.  As in the original script, the
# feature vector for each sample is [x, y] and the target is y.
x_values = []
y_values = []
n = 10000
with open('./dataset_length_10000_negative1/dataset_10.csv') as f:
    for _ in range(n):
        x_text, y_text = f.readline().replace('\n', '').split(',')
        x_val = float(x_text)
        y_val = float(y_text)
        x_values.append([x_val, y_val])
        y_values.append(y_val)

# Create linear regression object and train it on the loaded samples.
regression = linear_model.LinearRegression()
regression.fit(x_values, y_values)
print("Coefficients: \n", regression.coef_)
| 24.454545 | 67 | 0.64684 |
ace354f1257c1872a360b82e0bb3518a607a93e3 | 855 | py | Python | chatette/parsing/line_count_file_wrapper.py | ziligy/Chatette | 014c0b0a991bf66cb69fc6a69e0f6c298974eec9 | [
"MIT"
] | 1 | 2022-01-25T17:13:32.000Z | 2022-01-25T17:13:32.000Z | chatette/parsing/line_count_file_wrapper.py | ziligy/Chatette | 014c0b0a991bf66cb69fc6a69e0f6c298974eec9 | [
"MIT"
] | null | null | null | chatette/parsing/line_count_file_wrapper.py | ziligy/Chatette | 014c0b0a991bf66cb69fc6a69e0f6c298974eec9 | [
"MIT"
] | 1 | 2021-06-01T08:35:40.000Z | 2021-06-01T08:35:40.000Z | """
Module `chatette.parsing.line_count_file_wrapper`.
Contains a wrapper of `io.File` that keeps track of the line it is currently reading.
"""
import io
from chatette.utils import cast_to_unicode
class LineCountFileWrapper(object):
    """
    A wrapper of `io.File` that keeps track of the line number it is reading.
    Usable either directly or as a context manager (``with`` statement).
    """
    def __init__(self, filepath, mode='r'):
        # Keep the (unicode) path around, mirroring file objects' `name`.
        self.name = cast_to_unicode(filepath)
        self.f = io.open(filepath, mode)
        self.line_nb = 0

    def close(self):
        return self.f.close()

    def closed(self):
        # Kept as a method (not a property) to preserve the historical interface.
        return self.f.closed

    def readline(self):
        # Incremented on every call, including reads past EOF returning ''.
        self.line_nb += 1
        return self.f.readline()

    # to allow using in 'with' statements
    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Fix: the original closed the underlying file twice (self.f.close()
        # followed by self.close()); closing once through close() suffices.
        self.close()
| 22.5 | 77 | 0.633918 |
ace355aa1a12227313c772f8a8b237dbf0423ac5 | 6,397 | py | Python | satchmo/apps/shipping/modules/fedex_web_services/config.py | predatell/satchmo | 6ced1f845aadec240c7e433c3cbf4caca96e0d92 | [
"BSD-3-Clause"
] | null | null | null | satchmo/apps/shipping/modules/fedex_web_services/config.py | predatell/satchmo | 6ced1f845aadec240c7e433c3cbf4caca96e0d92 | [
"BSD-3-Clause"
] | null | null | null | satchmo/apps/shipping/modules/fedex_web_services/config.py | predatell/satchmo | 6ced1f845aadec240c7e433c3cbf4caca96e0d92 | [
"BSD-3-Clause"
] | null | null | null | """
Satchmo Fedex Module that uses Fedex Web Services via the SOAP protocol.
This is based on several people's work.
- https://github.com/jcartmell/python-fedex which was forked from
- https://github.com/gtaylor/python-fedex
- Some inspiration drawn from fedex shipping module by Chris Laux
- Some was drawn from the python-fedex examples
This requires 2 additional modules to work:
Python Fedex and the Python SOAP module suds.
http://code.google.com/p/python-fedex/
https://fedorahosted.org/suds/
Both can be installed easily:
pip install fedex
pip install suds
All values based on July 2011 Fedex Developer Guide
"""
from django.utils.translation import ugettext_lazy as _
from livesettings.values import StringValue,ConfigurationGroup,BooleanValue,MultipleStringValue
from livesettings.functions import config_register_list,config_get
# Register this shipping backend as a selectable choice of the SHIPPING group.
SHIP_MODULES = config_get('SHIPPING', 'MODULES')
SHIP_MODULES.add_choice(('shipping.modules.fedex_web_services', 'FEDEX (fedex_web_services)'))
# Configuration group shown only when this module is the selected backend.
SHIPPING_GROUP = ConfigurationGroup('shipping.modules.fedex_web_services',
    _('FedEx Web Services Shipping Settings'),
    requires = SHIP_MODULES,
    requiresvalue='shipping.modules.fedex_web_services',
    ordering = 101
    )
config_register_list(
    # --- FedEx account / authentication values (provided by FedEx) ---
    StringValue(SHIPPING_GROUP,
        'METER_NUMBER',
        description=_('FedEx Meter Number'),
        help_text=_('Meter Number provided by FedEx.'),
        default=''),
    StringValue(SHIPPING_GROUP,
        'ACCOUNT',
        description=_('FedEx Account Number'),
        help_text=_('FedEx Account Number.'),
        default=''),
    StringValue(SHIPPING_GROUP,
        'AUTHENTICATION_KEY',
        description=_('FedEx Authentication Key'),
        help_text=_('FedEx Authentication Key.'),
        default=''),
    StringValue(SHIPPING_GROUP,
        'AUTHENTICATION_PASSWORD',
        description=_('FedEx Authentication Password'),
        help_text=_('FedEx Authentication Password.'),
        default=''),
    StringValue(SHIPPING_GROUP,
        'SHIPPER_REGION',
        description=_('The region you are sending your package from.'),
        help_text=_('i.e. the region the package leaves from.'),
        choices = (
            ('APAC', 'APAC'),
            ('CA', 'CA'),
            ('EMEA', 'EMEA'),
            ('LAC', 'LAC'),
            ('US', 'US'),
        ),
        default = 'US',
    ),
    # --- Service and packaging options (July 2011 Fedex Developer Guide) ---
    MultipleStringValue(SHIPPING_GROUP,
        'SHIPPING_CHOICES',
        description=_('FedEx Shipping Choices Available to customers.'),
        choices = (
            ('EUROPE_FIRST_INTERNATIONAL_PRIORITY', 'Europe First International Priority'),
            ('FEDEX_1_DAY_FREIGHT', 'Fedex 1 Day Freight'),
            ('FEDEX_2_DAY', 'Fedex 2 Day'),
            ('FEDEX_2_DAY_FREIGHT', 'Fedex 2 Day Freight'),
            ('FEDEX_3_DAY_FREIGHT', 'Fedex 3 Day Freight'),
            ('FEDEX_EXPRESS_SAVER', 'Fedex Express Saver'),
            ('FEDEX_GROUND', 'Fedex Ground'),
            ('FIRST_OVERNIGHT', 'First Overnight'),
            ('GROUND_HOME_DELIVERY', 'Ground Home Delivery'),
            ('INTERNATIONAL_ECONOMY', 'International Economy'),
            ('INTERNATIONAL_ECONOMY_FREIGHT', 'International Economy Freight'),
            ('INTERNATIONAL_FIRST', 'International First'),
            ('INTERNATIONAL_PRIORITY', 'International Priority'),
            ('INTERNATIONAL_PRIORITY_FREIGHT', 'International Priority Freight'),
            ('PRIORITY_OVERNIGHT', 'Priority Overnight'),
            ('SMART_POST', 'Smart Post'),
            ('STANDARD_OVERNIGHT', 'Standard Overnight'),
            ('FEDEX_FREIGHT', 'Fedex Freight'),
            ('FEDEX_NATIONAL_FREIGHT', 'Fedex National Freight'),
            ('INTERNATIONAL_GROUND', 'International Ground'),
        ),
        default = 'FEDEX_GROUND'
    ),
    StringValue(SHIPPING_GROUP,
        'PACKAGING',
        description = _('Type of container/package used to ship product.'),
        choices = (
            ('YOUR_PACKAGING','YOUR_PACKAGING'),
            ('FEDEX_10KG_BOX','FEDEX_10KG_BOX'),
            ('FEDEX_25KG_BOX','FEDEX_25KG_BOX'),
            ('FEDEX_BOX','FEDEX_BOX'),
            ('FEDEX_ENVELOPE','FEDEX_ENVELOPE'),
            ('FEDEX_PAK','FEDEX_PAK'),
            ('FEDEX_TUBE','FEDEX_TUBE'),
        ),
        default = 'YOUR_PACKAGING',
    ),
    # --- Weight defaults and box handling ---
    StringValue(SHIPPING_GROUP,
        'DEFAULT_ITEM_WEIGHT',
        description = _("Default/Minimum Item Weight"),
        help_text = _("The default weight for items which lack a defined weight and the minimum an item is allowed to be, enter a positive value."),
        default = '0.5',
    ),
    StringValue(SHIPPING_GROUP,
        'DEFAULT_WEIGHT_UNITS',
        description = _("Default weight units"),
        choices = (
            ('LB','LB'),
            ('KG','KG'),
        ),
        default = "LB"
    ),
    BooleanValue(SHIPPING_GROUP,
        'SINGLE_BOX',
        description=_("Single Box?"),
        help_text=_("Use just one box and ship by weight? If no then every item will be sent in its own box."),
        default=True
    ),
    StringValue(SHIPPING_GROUP,
        'DROPOFF_TYPE',
        description = _("The method used to give the package to Fedex."),
        choices = (
            ('REGULAR_PICKUP','REGULAR_PICKUP'),
            ('BUSINESS_SERVICE_CENTER','BUSINESS_SERVICE_CENTER'),
            ('DROP_BOX','DROP_BOX'),
            ('REQUEST_COURIER','REQUEST_COURIER'),
            ('STATION', 'STATION'),
        ),
        help_text = _("Most users will keep the default Regular Pickup."),
        default = 'REGULAR_PICKUP',
    ),
    # --- Debugging / environment switches ---
    BooleanValue(SHIPPING_GROUP,
        'VERBOSE_LOG',
        description=_("Verbose logs"),
        help_text=_("Send the entire request and response to the log - for debugging help when setting up FedEx."),
        default=False),
    BooleanValue(SHIPPING_GROUP,
        'TEST_SERVER',
        description=_("Use test server?"),
        help_text=_("Check if you want to use the fedex test servers rather than the production server."),
        default=True
    ),
)
| 38.536145 | 148 | 0.597468 |
ace356270cedcb5e4bbb41a2d8869ec365a727eb | 446 | py | Python | django_api/models.py | KrishnaChandrapati/django_api1 | 3ce95318301c8d1b885041a3de1fae3b1fe52a73 | [
"MIT"
] | null | null | null | django_api/models.py | KrishnaChandrapati/django_api1 | 3ce95318301c8d1b885041a3de1fae3b1fe52a73 | [
"MIT"
] | null | null | null | django_api/models.py | KrishnaChandrapati/django_api1 | 3ce95318301c8d1b885041a3de1fae3b1fe52a73 | [
"MIT"
] | null | null | null | from django.db import models
class ItemList(models.Model):
    """A simple item record exposed through the API."""
    # Explicit auto-incrementing surrogate key (Django would add one implicitly).
    id = models.AutoField(primary_key=True)
    # NOTE(review): status/type look like enumerations but declare no choices;
    # their semantics cannot be confirmed from this file.
    status = models.IntegerField(default=0)
    type = models.IntegerField(default=0)
    # NOTE(review): max_length has no effect on TextField (only CharField
    # enforces it at the database level); confirm whether CharField was intended.
    name = models.TextField(max_length=40, null=False)
    city = models.TextField(max_length=40, null=False)

    def __str__(self):
        # Objects are displayed by their primary key.
        return str(self.id)

    class Meta:
        verbose_name = 'ItemList'
        verbose_name_plural = 'ItemLists'
| 26.235294 | 54 | 0.690583 |
ace3566c54e82aca562e1a8ec966f4706a53e9cb | 30,984 | py | Python | survol/lib_wbem.py | rchateauneu/survol | ba66d3ec453b2d9dd3a8dabc6d53f71aa9ba8c78 | [
"BSD-3-Clause"
] | 9 | 2017-10-05T23:36:23.000Z | 2021-08-09T15:40:03.000Z | survol/lib_wbem.py | rchateauneu/survol | ba66d3ec453b2d9dd3a8dabc6d53f71aa9ba8c78 | [
"BSD-3-Clause"
] | 21 | 2018-01-02T09:33:03.000Z | 2018-08-27T11:09:52.000Z | survol/lib_wbem.py | rchateauneu/survol | ba66d3ec453b2d9dd3a8dabc6d53f71aa9ba8c78 | [
"BSD-3-Clause"
] | 4 | 2018-06-23T09:05:45.000Z | 2021-01-22T15:36:50.000Z | import re
import os
import sys
import socket
import logging
import pywbem # Might be pywbem or python3-pywbem.
import lib_util
import lib_kbase
import lib_properties
import lib_common
import lib_credentials
################################################################################
# TODO: Build a moniker with cimom added at the beginning.
def WbemAllNamespacesUrl(srvr):
    """Return the URL of the script listing all namespaces of a WBEM server."""
    namespaces_script = '/namespaces_wbem.py'
    return lib_util.ScriptizeCimom(namespaces_script, "", srvr)
def BuildWbemNamespaceClass(wbem_namespace, entity_type):
    """Return (namespace, class, "namespace:class") for a WBEM class.

    Normally the class should be checked against the cimom; for the moment
    it is assumed to exist.  An empty namespace is left out of the
    combined path.
    """
    full_class_path = "{}:{}".format(wbem_namespace, entity_type) if wbem_namespace else entity_type
    return wbem_namespace, entity_type, full_class_path
def BuildWbemMoniker(hostname, namespac="", class_nam=""):
    """Return a WBEM moniker prefix "host/namespace:Class." or, when no
    namespace is given, "host/Class.".

    Leaving the namespace optional also helps when the class is common to
    WBEM, WMI and Survol.
    """
    if not namespac:
        return "%s/%s." % (hostname, class_nam)
    return "%s/%s:%s." % (hostname, namespac, class_nam)
# TODO: Build a moniker with cimom added at the beginning. Must check if really useful.
def NamespaceUrl(nskey, cimom_url, class_nam=""):
    """Return the Survol URL for a WBEM namespace, optionally with a class."""
    moniker = BuildWbemMoniker(cimom_url, nskey, class_nam)
    return lib_util.EntityUrlFromMoniker(moniker, True, True)
def ClassUrl(nskey, cimom_url, class_nam):
    """Return the Survol URL for a WBEM class in a given namespace."""
    moniker = BuildWbemMoniker(cimom_url, nskey, class_nam)
    return lib_util.EntityUrlFromMoniker(moniker, True)
def WbemBuildMonikerPath(entity_namespace, entity_type, entity_id):
    """Return "namespace:Class.entity_id" (namespace omitted when empty)."""
    _, _, full_class_path = BuildWbemNamespaceClass(entity_namespace, entity_type)
    return full_class_path + "." + entity_id
def WbemInstanceUrl(entity_namespace, entity_type, entity_id, cimom_srv):
    """Return the Survol URL of a WBEM instance.

    The moniker looks like:
    'https://jdd:test@acme.com:5959/cimv2:Win32_SoftwareFeature.Name="Havana",ProductName="Havana",Version="1.0"'
    """
    moniker_path = WbemBuildMonikerPath(entity_namespace, entity_type, entity_id)
    if moniker_path is None:
        return None
    full_moniker = "%s/%s" % (cimom_srv, moniker_path)
    # An empty entity id means the URL describes the class, not an instance.
    return lib_util.EntityUrlFromMoniker(full_moniker, entity_id == "")
def WbemGetClassKeys(wbem_name_space, wbem_class, cimom_srv):
    """Return the key property names of a WBEM class, or None on any error.

    This is used if the key is not given for an entity.  The result could
    be stored in a cache for better performance.
    """
    try:
        connection = WbemConnection(cimom_srv)
        return WbemGetClassKeysFromConnection(wbem_name_space, wbem_class, connection)
    except Exception as exc:
        logging.warning("WbemGetClassKeys %s %s %s: Caught:%s", cimom_srv, wbem_name_space, wbem_class, str(exc))
        return None
def WbemGetClassKeysFromConnection(wbem_name_space, wbem_class, wbem_cnnct):
    """Return the property names of *wbem_class* through an open connection.

    Example:
        conn = pywbem.WBEMConnection("http://192.168.1.88:5988", ('pegasus', 'toto'))
        conn.GetClass("CIM_MediaPresent", namespace="root/cimv2")
    See https://pywbem.github.io/pywbem/doc/0.8.4/doc/pywbem.cim_operations.WBEMConnection-class.html#GetClass
    """
    # LocalOnly=False: do not restrict to properties defined locally on the class.
    # IncludeQualifiers=False: qualifiers are not needed to list the properties.
    klass_definition = wbem_cnnct.GetClass(
        wbem_class,
        namespace=wbem_name_space,
        LocalOnly=False,
        IncludeQualifiers=False)
    return klass_definition.properties.keys()
################################################################################
# TODO: Make SLP work properly.
def slp_wbem_services():
    """Yield WBEM services discovered with SLP (Service Location Protocol).

    Each service is yielded as a dict with keys "name", "url" (starting
    with "http:" or "https:") and "rest" (trailing attributes).
    Raises an Exception on an unparsable line or when slptool fails.
    """
    filter = "wbem"
    # Fixed: the command used to be overridden by a hard-coded Windows path
    # ("C:/Program Files (x86)/OpenSLP/slptool.exe"), explicitly flagged
    # "FIX THIS" as debugging leftover.  slptool must be in the PATH.
    cmd = 'slptool findsrvs service:' + filter
    stream = os.popen(cmd)
    # Typical output line: service:ftp.smallbox://192.168.100.1:21,65535
    for line in stream:
        match_obj = re.match(r'service:([^:]*):/?/?([^,]*)(.*)', line, re.M | re.I)
        if not match_obj:
            raise Exception("Invalid line " + line)
        yield {
            "name": match_obj.group(1),  # e.g. "wbem"
            "url": match_obj.group(2),   # Starts with "http:" or "https:"
            "rest": match_obj.group(3)}
    resu = stream.close()
    if resu is not None:
        raise Exception("Error running " + cmd)
# TODO: Alternate methods to discover WBEM servers:
# TODO: - Ping machines with WBEM port numbers 5988 and 5989.
# TODO: Will be stored in the cache filled with SLP discovery, with credentials.
# http://192.168.1.83:5988 index Namespaces
# https://192.168.1.83:5989 index Namespaces
# TODO: It could use SLP.
# TODO: No need to return all WBEM servers.
# TODO: Emulate the protocol with Jquery and Javascript, if it is HTTP.
# But for that, we would need a WBEM server sending Access-Control-Allow-Origin header.
def WbemServersList():
    """Return [(hostname, url)] for every WBEM server found in the credentials."""
    logging.debug("WbemServersList")
    servers = []
    # Credential keys are cimom URLs such as "http://192.168.1.83:5988".
    for wbem_url in lib_credentials.get_credentials_names("WBEM"):
        host_name = lib_util.survol_urlparse(wbem_url).hostname
        if host_name:
            servers.append((host_name, wbem_url))
    return servers
def HostnameToWbemServer(hostname):
    """Return the WBEM server (cimom URL) of a machine.

    The stored credentials are searched for a cimom whose host matches the
    name or its resolved IP address; otherwise a default URL on port 5988
    is built.
    """
    # TODO: This should prefer port 5988 over 5989 which does not work with pywbem anyway.
    ip_address = lib_util.EntHostToIpReally(hostname)
    for wbem_url in lib_credentials.get_credentials_names("WBEM"):
        # wbem_url = "http://192.168.1.83:5988"
        credential_host = lib_util.survol_urlparse(wbem_url).hostname
        if credential_host in (hostname, ip_address):
            return wbem_url
    # No credential found: build a default cimom URL.
    return "http://" + ip_address + ":5988"
################################################################################
def GetWbemUrls(entity_host, entity_namespace, entity_type, entity_id):
    """This returns a list of URLs. entity_type can be None.

    Every WBEM server known from the credentials is considered (optionally
    restricted to entity_host) and a (url, server_hostname) pair is built,
    pointing either to the namespaces list (empty entity_type) or to the
    instance itself.
    """
    logging.debug("GetWbemUrls h=%s ns=%s t=%s i=%s",entity_host, entity_namespace, entity_type, entity_id)
    wbem_urls_list = []
    # FIXME: entity_namespace might have a wrong separator, slash or backslash.
    # TODO: Should check that the WBEM class exists in the server ?
    for wbem_server in WbemServersList():
        # wbem_server=(u'vps516494.ovh.net', u'http://vps516494.ovh.net:5988')
        # If no host specified, returns everything.
        if entity_host:
            # Case-insensitive comparison of host names.
            # wbem_server[1].lower()=vps516494.ovh.net entity_host.lower()=http://vps516494.ovh.net:5988
            if entity_host.lower() != wbem_server[0].lower():
                continue
        logging.debug("GetWbemUrls found wbem_server=%s", str(wbem_server))
        the_cimom = wbem_server[1]
        # TODO: When running from cgiserver.py, and if QUERY_STRING is finished by a dot ".", this dot
        # TODO: is removed. Workaround: Any CGI variable added after.
        # TODO: Also: Several slashes "/" are merged into one.
        # TODO: Example: "xid=http://192.168.1.83:5988/." becomes "xid=http:/192.168.1.83:5988/"
        # TODO: Replace by "xid=http:%2F%2F192.168.1.83:5988/."
        # Maybe a bad collapsing of URL ?
        the_cimom = lib_credentials.key_url_cgi_encode(the_cimom)
        if entity_type == "":
            # TODO: This should rather display all classes for this namespace.
            wbem_url = WbemAllNamespacesUrl(the_cimom)
        else:
            # Unique script for all types of entities.
            # TODO: Pass the cimom as a host !!!
            wbem_url = WbemInstanceUrl(entity_namespace, entity_type, entity_id, the_cimom)
        if wbem_url is None:
            continue
        wbem_urls_list.append((wbem_url, wbem_server[0]))
    return wbem_urls_list
def GetWbemUrlsTyped(entity_host, name_space, entity_type, entity_id):
    """This also takes into account the entity type.
    If this is a CIM_ComputerSystem, it tries to connect to its WBEM server.
    This code is not really mature, but it does not harm."""
    # When displaying the WBEM of a computer, this attempts to point to the server of this distant machine.
    # The coding of another machine looks dodgy but is simply a CIM path.
    if entity_type == 'CIM_ComputerSystem':
        # TODO: hostId="Unknown-30-b5-c2-02-0c-b5-2" does not work.
        # This return the WBEM servers associated to this machine.
        if entity_id:
            # Tries to extract the host from the string "Key=Val,Name=xxxxxx,Key=Val"
            # BEWARE: Some arguments should be decoded.
            xid_host = {sp[0]:sp[1] for sp in [ss.split("=") for ss in entity_id.split(",")]}["Name"]
            wbem_urls_list = GetWbemUrls(xid_host, name_space, entity_type, entity_id)
        else:
            # No entity id: fall back to the current machine.
            # NOTE(review): the ".home" suffix is hard-coded -- confirm it is
            # valid outside of the author's network.
            host_alt = lib_util.currentHostname
            wbem_urls_list = GetWbemUrls(host_alt, name_space, entity_type, "Name=" + host_alt + ".home")
    else:
        # This returns the current url server of the current machine.
        wbem_urls_list = GetWbemUrls(entity_host, name_space, entity_type, entity_id)
    return wbem_urls_list
def WbemConnection(cgi_url):
    """Create a pywbem connection to the given cimom URL.

    For the moment, it cannot connect to https:
    https://github.com/Napsty/check_esxi_hardware/issues/7
    """
    credentials = lib_credentials.GetCredentials("WBEM", cgi_url)
    logging.debug("WbemConnection creden=%s", str(credentials))
    # Beware: a wrong username/password is only detected at the first data access.
    return pywbem.WBEMConnection(cgi_url, credentials)
def WbemGetClassObj(conn_wbem, entity_type, wbem_namespace):
    """Fetch the CIM class definition with its qualifiers, or None on any error."""
    try:
        return conn_wbem.GetClass(
            entity_type, namespace=wbem_namespace, LocalOnly=False, IncludeQualifiers=True)
    except Exception:
        return None
################################################################################
def WbemClassDescrFromClass(wbem_klass):
    """Return the 'Description' qualifier value of a CIM class.

    On any failure (typically a missing qualifier) a "Caught:..." string
    is returned instead, so the caller always gets displayable text.
    """
    try:
        return wbem_klass.qualifiers['Description'].value
    except Exception as exc:
        return "Caught:" + str(exc)
def WbemClassDescription(conn_wbem, entity_type, wbem_namespace):
    """Return the description text of a WBEM class, or None if it cannot be fetched."""
    try:
        klass = conn_wbem.GetClass(
            entity_type, namespace=wbem_namespace, LocalOnly=False, IncludeQualifiers=True)
    except Exception:
        return None
    return WbemClassDescrFromClass(klass)
################################################################################
# TODO: Should remove duplicate code.
def NamespacesEnumeration(conn):
    """
    Different brokers have different CIM classes, that can be used to
    enumerate namespaces. And those may be nested under miscellaneous
    namespaces. This method tries all known combinations and returns
    first non-empty list of namespace instance names.
    @return (interopns, nsclass, nsinsts)
    where
        interopns is a instance name of namespace holding namespace CIM
        class
        nsclass is a name of class used to enumerate namespaces
        nsinsts is a list of all instance names of nsclass
    """
    nsclasses = ['CIM_Namespace', '__Namespace']
    namespaces = ['root/cimv2', 'Interop', 'interop', 'root', 'root/interop']
    interopns = None
    nsclass = None
    nsinsts = []
    # Try every (class, namespace) combination until one enumeration succeeds
    # and returns a non-empty instance list.
    for icls in range(len(nsclasses)):
        for ins in range(len(namespaces)):
            try:
                nsins = namespaces[ins]
                nsinsts = conn.EnumerateInstanceNames(nsclasses[icls], namespace=nsins)
                interopns = nsins
                nsclass = nsclasses[icls]
            except Exception as exc:
                # NOTE(review): assumes a pywbem CIMError whose args[0] is the
                # CIM status code -- other exception types would mismatch here.
                arg = exc.args
                if arg[0] in [pywbem.CIM_ERR_INVALID_NAMESPACE,
                        pywbem.CIM_ERR_NOT_SUPPORTED,
                        pywbem.CIM_ERR_INVALID_CLASS]:
                    continue
                else:
                    # Caught local variable 'url_' referenced before assignment
                    raise
        if len(nsinsts) > 0:
            break
    return interopns, nsclass, nsinsts
def EnumNamespacesCapabilities(conn):
    """Return a dict mapping namespace name to the number of provider
    capabilities registered for it (OpenPegasus only; empty dict otherwise)."""
    interopns, _, nsinsts = NamespacesEnumeration(conn)
    nslist = [inst['Name'].strip('/') for inst in nsinsts]
    if interopns not in nslist:
        # Pegasus didn't get the memo that namespaces aren't hierarchical.
        # This will fall apart if there exists a namespace <interopns>/<interopns>.
        # Maybe we should check the Server: HTTP header instead.
        nslist = [interopns + '/' + subns for subns in nslist]
        nslist.append(interopns)
    nslist.sort()
    if 'root/PG_InterOp' in nslist or 'root/interop' in nslist:
        nsd = dict([(x, 0) for x in nslist])
        # Strangely, this returns zero for 'root/PG_InterOp' even though the
        # class 'PG_ProviderCapabilities' has instances. Maybe it is
        # hard-coded, and no provider is needed for this class?
        caps = conn.EnumerateInstances('PG_ProviderCapabilities',
                                       namespace='root/PG_InterOp' if 'root/PG_InterOp' in nslist else 'root/interop',
                                       PropertyList=['Namespaces'])
        for cap in caps:
            for _ns in cap['Namespaces']:
                try:
                    nsd[_ns] += 1
                except KeyError:
                    # Capability refers to a namespace we did not enumerate.
                    pass
    else:
        nsd = {}
    return nsd
def GetCapabilitiesForInstrumentation(conn, nam_spac):
    """Return the names of classes instrumented (backed by a provider) in nam_spac.

    Tries the two interop namespace spellings used by different brokers.

    @param conn: open WBEM connection.
    @param nam_spac: namespace whose instrumented classes are wanted.
    @return list of class names.
    @raise the last CIM error if no interop namespace could be queried.
    """
    caps = None
    last_error = AssertionError("No interop namespace found")
    for interopns in ('root/PG_InterOp', 'root/interop'):
        try:
            caps = conn.EnumerateInstances(
                ClassName='PG_ProviderCapabilities',
                namespace=interopns,
                PropertyList=['Namespaces', 'ClassName'])
            break
        except Exception as exc:
            logging.error("GetCapabilitiesForInstrumentation exc=%s", str(exc))
            arg = exc.args
            # Anything other than "namespace does not exist" is unexpected.
            if arg[0] != pywbem.CIM_ERR_INVALID_NAMESPACE:
                raise
            # BUG FIX: previously stored exc.args (a tuple); "raise last_error"
            # on a tuple is a TypeError under Python 3. Keep the exception
            # object itself so it can be re-raised.
            last_error = exc
    else:
        # Neither interop namespace worked: re-raise the last failure.
        raise last_error
    resu = []
    for cap in caps:
        if nam_spac in cap['Namespaces']:
            resu.append(cap['ClassName'])
    return resu
###################################################
def EnumerateInstrumentedClasses(conn, nam_spac):
    """
    Enumerates only those class names that are instrumented (there
    is a provider under the broker implementing its interface).

    Returns a dict mapping a class name (or the sentinel '.') to the list
    of its direct subclass names.
    """
    # Cache of class names already fetched from the broker in this call.
    fetched_classes = []
    def get_class(conn, cname):
        """Obtain class from broker and store it in cache."""
        fetched_classes.append(cname)
        return conn.GetClass(ClassName=cname,
                             LocalOnly=True, PropertyList=[],
                             IncludeQualifiers=False, IncludeClassOrigin=False)
    # Sentinel key used for root classes (those without a superclass).
    start_class = '.'
    # NOTE(review): GetCapabilitiesForInstrumentation returns a list of
    # class-name strings, yet the loop below indexes cap['Namespaces'] and
    # cap['ClassName'] as if cap were an instance object - confirm which
    # contract is intended; as written this loop looks broken.
    caps = GetCapabilitiesForInstrumentation(conn, nam_spac)
    deep_dict = {start_class:[]}
    for cap in caps:
        if nam_spac not in cap['Namespaces']:
            continue
        if cap['ClassName'] in fetched_classes:
            continue
        klass = get_class(conn, cap['ClassName'])
        # Attach the class under its superclass (or under the root sentinel).
        if klass.superclass is None:
            deep_dict[start_class].append(klass.classname)
        else:
            try:
                deep_dict[klass.superclass].append(klass.classname)
            except KeyError:
                deep_dict[klass.superclass] = [klass.classname]
        # Walk up the inheritance chain so every ancestor appears in the tree.
        while klass.superclass is not None:
            if klass.superclass in fetched_classes:
                break
            klass = get_class(conn,klass.superclass)
            if klass.superclass is None and klass.superclass not in deep_dict[start_class]:
                deep_dict[start_class].append(klass.classname)
            elif klass.superclass in deep_dict:
                if klass.classname not in deep_dict[klass.superclass]:
                    deep_dict[klass.superclass].append( klass.classname)
                break
            else:
                deep_dict[klass.superclass] = [klass.classname]
    return deep_dict
###################################################
def GetClassesTree(conn, the_nam_space):
    """Enumerate all classes of a namespace as an inheritance tree.

    Returns a plain dict mapping each superclass name (None for root
    classes) to the list of its direct subclass objects.
    """
    enum_kwargs = {
        'DeepInheritance': True,
        'LocalOnly': True,
        'IncludeQualifiers': False,
        'IncludeClassOrigin': False,
    }
    logging.debug("GetClassesTree theNamSpace=%s", the_nam_space)
    all_classes = conn.EnumerateClasses(namespace=the_nam_space, **enum_kwargs)
    logging.debug("GetClassesTree klasses %d elements", len(all_classes))
    tree_classes = dict()
    for one_class in all_classes:
        logging.debug("klass=%s super=%s", one_class.classname, one_class.superclass)
        # Group each class under its superclass name; a plain dict (not a
        # defaultdict) is kept so callers' KeyError handling still applies.
        tree_classes.setdefault(one_class.superclass, []).append(one_class)
    logging.debug("GetClassesTree tree_classes %d elements", len(tree_classes))
    return tree_classes
###################################################
def MakeInstrumentedRecu(in_tree_class, out_tree_class, topclass_nam, the_nam_spac, instr_cla):
    """Fills out_tree_class with the subtree of classes that are
    instrumented (i.e. have a provider), walking in_tree_class recursively
    from topclass_nam."""
    try:
        if topclass_nam in instr_cla:
            # Instrumented nodes always appear as keys, even with no children.
            out_tree_class[topclass_nam] = []
        for sub_class in in_tree_class[topclass_nam]:
            sub_class_name = sub_class.classname
            MakeInstrumentedRecu(in_tree_class, out_tree_class, sub_class_name, the_nam_spac, instr_cla)
            # Keep a child if it is instrumented itself or if it carries
            # instrumented descendants (then it is already a key of the output).
            if sub_class_name in instr_cla or sub_class_name in out_tree_class:
                try:
                    out_tree_class[topclass_nam].append(sub_class)
                except KeyError:
                    out_tree_class[topclass_nam] = [sub_class]
    except KeyError:
        # topclass_nam has no subclasses in in_tree_class.
        pass
def GetClassesTreeInstrumented(conn, the_nam_space):
    """This builds a dictionary indexed by class names, and the values are lists
    of class objects which are the subclasses of the key class. The root class
    name is None. Only instrumented classes (with a provider) are kept."""
    logging.debug("GetClassesTreeInstrumented theNamSpace=%s", the_nam_space)
    try:
        in_tree_class = GetClassesTree(conn, the_nam_space)
        out_tree_class = dict()
        instr_cla = GetCapabilitiesForInstrumentation(conn, the_nam_space)
        MakeInstrumentedRecu(in_tree_class, out_tree_class, None, the_nam_space, instr_cla)
    except Exception as exc:
        # NOTE(review): if ErrorMessageHtml returns instead of exiting,
        # out_tree_class may be unbound below - confirm its contract.
        lib_common.ErrorMessageHtml("Instrumented classes: ns=" + the_nam_space + " Caught:" + str(exc))
    logging.debug("After MakeInstrumentedRecu out_tree_class = %d elements", len(out_tree_class))
    # print("out_tree_class="+str(out_tree_class)+"<br>")
    return out_tree_class
def ValidClassWbem(class_name):
    """Tells if this class of our ontology may exist in a WBEM server,
    whatever the namespace is, based on its schema prefix."""
    prefix = class_name.split("_")[0]
    logging.debug("lib_wbem.ValidClassWbem className=%s tp_prefix=%s", class_name, prefix)
    # "CIM" is the DMTF standard schema.
    # "PG" is Open Pegasus: http://www.opengroup.org/subjectareas/management/openpegasus
    # "LMI" is OpenLmi: http://www.openlmi.org/
    return prefix in ["CIM", "PG", "LMI"]
# This must return the label of an url "entity_wmi.py".
# For example, the name of a process when the PID (Handle) is given.
# Due to performance problems, consider using a cache.
# Or a default value for some "expensive" classes.
def EntityToLabelWbem(namSpac, entity_type_NoNS, entity_id, entity_host):
    """Placeholder: should return the label of an "entity_wmi.py" URL, e.g. the
    process name for a given PID. Currently always returns None (see the
    performance note above about caching)."""
    return None
def WbemLocalConnection():
    """By default, current machine. However, WBEM does not give the possibility
    to connect to the local server with the host set to None, so the machine
    hostname is resolved explicitly and turned into a CIMOM URL."""
    machine_name = socket.gethostname()
    # NOTE: eager %-formatting here; lazy logging args would be more idiomatic.
    logging.info("WbemLocalConnection machine_name=%s" % machine_name)
    cimom_url = HostnameToWbemServer(machine_name)
    wbem_connection = WbemConnection(cimom_url)
    return wbem_connection
def extract_specific_ontology_wbem():
    """This returns an abstract ontology, which is later transformed into RDFS.
    It connects to the local WBEM server; a CIMOM URL looks like
    cimomUrl="http://192.168.1.83:5988" or "http://mymachine:5988"."""
    wbem_connection = WbemLocalConnection()
    return _extract_wbem_ontology_from_connection(wbem_connection)
def _extract_wbem_ontology_from_connection(wbem_connection):
    """Build (map_classes, map_attributes) describing the WBEM class model.

    map_classes: class name -> {"base_class", "class_description"}.
    map_attributes: key name -> {"predicate_type", "predicate_description",
    "predicate_domain" (list of class names using that key)}.
    """
    map_classes = {}
    map_attributes = {}
    logging.info("_extract_wbem_ontology_from_connection: Getting class tree.")
    # Note: Survol assumes this namespace everywhere.
    wbem_name_space = 'root/cimv2'
    class_tree = GetClassesTree(wbem_connection, the_nam_space=wbem_name_space)
    for super_class_name in class_tree:
        class_array = class_tree[super_class_name]
        for class_object in class_array:
            class_name = class_object.classname
            logging.debug("class_name=%s", class_name)
            if super_class_name:
                top_class_name = super_class_name
                concat_class_name = super_class_name + "." + class_name
            else:
                # Root class: no superclass prefix.
                top_class_name = ""
                concat_class_name = class_name
            # NOTE(review): map_classes is keyed by the bare class_name while
            # predicate_domain below stores concat_class_name - confirm this
            # asymmetry is intended.
            # map_classes[concat_class_name] = {
            map_classes[class_name] = {
                "base_class": top_class_name,
                "class_description": "Class WBEM %s" % concat_class_name}
            # TODO: Do not return all keys !!!
            class_keys = WbemGetClassKeysFromConnection(wbem_name_space, class_name, wbem_connection)
            for key_name in class_keys:
                # The same key might exist for several classes.
                try:
                    key_attributes = map_attributes[key_name]
                except KeyError:
                    key_attributes = {
                        "predicate_type": "survol_string",
                        "predicate_description": "Attribute WBEM %s" % key_name,
                        "predicate_domain": []}
                    map_attributes[key_name] = key_attributes
                assert isinstance(concat_class_name, str)
                key_attributes["predicate_domain"].append(concat_class_name)
    return map_classes, map_attributes
def WbemKeyValues(key_value_items, display_none_values=False):
    """Convert WBEM (key, value) pairs into a dict of RDF property -> literal node.

    This is conceptually similar to WmiKeyValues.

    @param key_value_items: iterable of (name, value) pairs from a CIM instance.
    @param display_none_values: if True, None values are rendered as the
        literal "None"; if False they are omitted from the result.
    @return dict mapping lib_properties property nodes to literal nodes.
    """
    dict_key_values = {}
    for wbem_key_name, wbem_value_literal in key_value_items:
        wbem_property = lib_properties.MakeProp(wbem_key_name)
        if isinstance(wbem_value_literal, lib_util.scalar_data_types):
            wbem_value_node = lib_util.NodeLiteral(wbem_value_literal)
        elif isinstance(wbem_value_literal, tuple):
            # Multi-valued CIM properties arrive as tuples.
            wbem_value_node = lib_util.NodeLiteral(" ; ".join(wbem_value_literal))
        elif wbem_value_literal is None:
            if not display_none_values:
                # BUG FIX: the original fell through to the assignment below
                # with wbem_value_node unset, causing an UnboundLocalError on
                # the first iteration (or reusing the previous loop's node).
                # Skipping the pair matches the intent of the flag.
                continue
            wbem_value_node = lib_util.NodeLiteral("None")
        else:
            # Unknown type: keep a debug-friendly textual representation.
            wbem_value_node = lib_util.NodeLiteral(
                "type=" + str(type(wbem_value_literal)) + ":" + str(wbem_value_literal))
        dict_key_values[wbem_property] = wbem_value_node
    return dict_key_values
class WbemSparqlCallbackApi:
    """This is used to execute a Sparql query on WBEM objects.

    Each Callback* method implements one step of Sparql evaluation against
    the local WBEM server (selection, association, type enumeration).
    """
    def __init__(self):
        # Current host and default namespace.
        self.m_wbem_connection = WbemLocalConnection()
        # Lazily-filled cache of available classes (see commented CallbackTypes).
        self.m_classes = None
    # Note: The class CIM_DataFile with the property Name triggers the exception message:
    # "CIMError: 7: CIM_ERR_NOT_SUPPORTED: No provider or repository defined for class"
    def CallbackSelect(self, grph, class_name, predicate_prefix, filtered_where_key_values):
        """Run a WQL query built from the filter key/values and yield
        (object_path, dict_key_values) for each matching instance,
        also feeding the triples into grph."""
        logging.info("WbemSparqlCallbackApi.CallbackSelect class_name=%s where_key_values=%s", class_name, filtered_where_key_values)
        assert class_name
        # This comes from such a Sparql triple: " ?variable rdf:type rdf:type"
        if class_name == "type":
            return
        wbem_query = lib_util.SplitMonikToWQL(filtered_where_key_values, class_name)
        logging.debug("WbemSparqlCallbackApi.CallbackSelect wbem_query=%s", wbem_query)
        wbem_objects = self.m_wbem_connection.ExecQuery("WQL", wbem_query, "root/cimv2")
        # This returns a list of CIMInstance.
        for one_wbem_object in wbem_objects:
            # one_wbem_object is a pywbem CIMInstance (mapping-like: items,
            # iteritems, path, classname, ...); one_wbem_object.path is a
            # CIMInstanceName exposing to_wbem_uri(), namespace, keybindings.
            object_path = one_wbem_object.path.to_wbem_uri()
            # Example:
            # u'//vps516494.ovh.net/root/cimv2:PG_UnixProcess.CSName="vps516494.localdomain",Handle="1",OSCreationClassName="CIM_OperatingSystem",CreationClassName="PG_UnixProcess",CSCreationClassName="CIM_UnitaryComputerSystem",OSName="Fedora"'
            logging.debug("object.path=%s", object_path)
            dict_key_values = WbemKeyValues(one_wbem_object.iteritems())
            dict_key_values[lib_kbase.PredicateIsDefinedBy] = lib_util.NodeLiteral("WBEM")
            # Add it again, so the original Sparql query will work.
            dict_key_values[lib_kbase.PredicateSeeAlso] = lib_util.NodeLiteral("WBEM")
            dict_key_values[lib_kbase.PredicateType] = lib_properties.MakeProp(class_name)
            logging.debug("dict_key_values=%s", dict_key_values)
            lib_util.PathAndKeyValuePairsToRdf(grph, object_path, dict_key_values)
            yield (object_path, dict_key_values)
    def CallbackAssociator(
            self,
            grph,
            result_class_name,
            predicate_prefix,
            associator_key_name,
            subject_path):
        """Yield instances associated to subject_path through the
        associator class associator_key_name. DEPRECATED."""
        logging.critical("THIS IS DEPRECATED")
        logging.info("WbemSparqlCallbackApi.CallbackAssociator subject_path=%s result_class_name=%s associator_key_name=%s",
                subject_path,
                result_class_name,
                associator_key_name)
        assert subject_path
        # https://pywbem.readthedocs.io/en/latest/client/operations.html#pywbem.WBEMConnection.Associators
        instances_associators = self.m_wbem_connection.Associators(
            ObjectName=subject_path,
            AssocClass=associator_key_name,
            ResultClass=None, # ResultClass=result_class_name,
            Role=None,
            ResultRole=None,
            IncludeQualifiers=None,
            IncludeClassOrigin=None,
            PropertyList=None)
        for one_instance in instances_associators:
            print("Instance=", one_instance)
            yield one_instance
    def CallbackTypes(self, grph, see_also, where_key_values):
        """This returns the available types"""
        raise NotImplementedError("CallbackTypes: Not implemented yet")
        # # Data stored in a cache for later use.
        # if self.m_classes == None:
        #     self.m_classes = self.m_wbem_connection.classes
        #
        # for one_class_name in self.m_classes:
        #     class_path = "WbemClass:" + one_class_name
        #
        #     dict_key_values = {}
        #     dict_key_values[lib_kbase.PredicateIsDefinedBy] = lib_util.NodeLiteral("WBEM")
        #     # Add it again, so the original Sparql query will work.
        #     dict_key_values[lib_kbase.PredicateSeeAlso] = lib_util.NodeLiteral("WBEM")
        #     dict_key_values[lib_kbase.PredicateType] = lib_kbase.PredicateType
        #     dict_key_values[lib_util.NodeLiteral("Name")] = lib_util.NodeLiteral(one_class_name)
        #
        #     class_node = lib_util.NodeUrl(class_path)
        #
        #     if grph:
        #         grph.add((class_node, lib_kbase.PredicateType, lib_kbase.PredicateType))
        #
        #     yield class_path, dict_key_values
    def CallbackTypeTree(self, grph, see_also, class_name, associator_subject):
        """Should enumerate the subclass tree of class_name. Not implemented."""
        raise NotImplementedError("CallbackTypeTree: Not implemented yet")
| 43.394958 | 779 | 0.649432 |
ace3586e89f58110593b9f1af77558c329cdb4af | 7,227 | py | Python | tests/integration/cattletest/core/test_host.py | moul/cattle | d682921b45fce95c0886c2a8a95e7e8345d30521 | [
"Apache-2.0"
] | 1 | 2021-04-24T08:16:38.000Z | 2021-04-24T08:16:38.000Z | tests/integration/cattletest/core/test_host.py | moul/cattle | d682921b45fce95c0886c2a8a95e7e8345d30521 | [
"Apache-2.0"
] | 1 | 2022-01-21T23:49:42.000Z | 2022-01-21T23:49:42.000Z | tests/integration/cattletest/core/test_host.py | moul/cattle | d682921b45fce95c0886c2a8a95e7e8345d30521 | [
"Apache-2.0"
] | null | null | null | from common_fixtures import * # NOQA
def test_host_deactivate(super_client, new_context):
    """Deactivating a host must leave its agent active."""
    host = new_context.host
    agent = super_client.reload(host).agent()
    assert host.state == 'active'
    agent = super_client.wait_success(agent)
    assert agent.state == 'active'
    host = super_client.wait_success(host.deactivate())
    assert host.state == 'inactive'
    agent = super_client.wait_success(agent)
    assert agent.state == 'active'
def test_host_deactivate_two_hosts(super_client, new_context):
    """Deactivating one of two hosts that share an agent keeps the agent active."""
    host = new_context.host
    agent = super_client.reload(host).agent()
    assert host.state == 'active'
    agent = super_client.wait_success(agent)
    assert agent.state == 'active'
    # Create another host using the same agent
    other_host = super_client.create_host(agentId=agent.id)
    other_host = super_client.wait_success(other_host)
    assert other_host.state == 'active'
    assert other_host.agentId == agent.id
    host = super_client.wait_success(host.deactivate())
    assert host.state == 'inactive'
    agent = super_client.wait_success(agent)
    assert agent.state == 'active'
def test_host_activate(super_client, new_context):
    """A deactivated host can be re-activated; the agent stays active throughout."""
    host = new_context.host
    agent = super_client.reload(host).agent()
    assert host.state == 'active'
    agent = super_client.wait_success(agent)
    assert agent.state == 'active'
    host = super_client.wait_success(host.deactivate())
    assert host.state == 'inactive'
    agent = super_client.wait_success(agent)
    assert agent.state == 'active'
    host = super_client.wait_success(host.activate())
    assert host.state == 'active'
    agent = super_client.wait_success(agent)
    assert agent.state == 'active'
def test_host_purge(super_client, new_context):
    """Purging a host cascades removal to its agent, physical host,
    containers and their volumes."""
    account_id = new_context.project.id
    image_uuid = 'sim:{}'.format(random_num())
    host = new_context.host
    phy_host = super_client.reload(host).physicalHost()
    agent = super_client.reload(host).agent()
    assert host.state == 'active'
    agent = super_client.wait_success(agent)
    assert agent.state == 'active'
    # Two running containers pinned to the host under test.
    c1 = super_client.create_container(accountId=account_id,
                                       imageUuid=image_uuid,
                                       requestedHostId=host.id)
    c1 = super_client.wait_success(c1)
    assert c1.state == 'running'
    c2 = super_client.create_container(accountId=account_id,
                                       imageUuid=image_uuid,
                                       requestedHostId=host.id)
    c2 = super_client.wait_success(c2)
    assert c2.state == 'running'
    # Deactivate, delete and purge the host.
    host = super_client.wait_success(host.deactivate())
    host = super_client.wait_success(super_client.delete(host))
    assert host.state == 'removed'
    assert host.removed is not None
    agent = super_client.wait_success(host.agent())
    assert agent.state == 'removed'
    host = super_client.wait_success(host.purge())
    assert host.state == 'purged'
    phy_host = super_client.wait_success(phy_host)
    assert phy_host.state == 'removed'
    # Containers on the purged host are removed too.
    c1 = super_client.wait_success(c1)
    assert c1.removed is not None
    assert c1.state == 'removed'
    c2 = super_client.wait_success(c2)
    assert c2.removed is not None
    assert c2.state == 'removed'
    # Purging a container also removes (and allows purging) its volumes.
    c1 = super_client.wait_success(c1.purge())
    assert c1.state == 'purged'
    volume = super_client.wait_success(c1.volumes()[0])
    assert volume.state == 'removed'
    volume = super_client.wait_success(volume.purge())
    assert volume.state == 'purged'
def test_host_container_actions_inactive(new_context):
    """Containers on an inactive host can still be stopped and started."""
    host = new_context.host
    client = new_context.client
    c = new_context.create_container()
    host = client.wait_success(host.deactivate())
    assert host.state == 'inactive'
    c = client.wait_success(c.stop())
    assert c.state == 'stopped'
    c = client.wait_success(c.start())
    assert c.state == 'running'
def test_host_create_container_inactive(new_context):
    """Scheduling a new container fails when the only host is inactive."""
    client = new_context.client
    host = new_context.host
    host = client.wait_success(host.deactivate())
    assert host.state == 'inactive'
    c = new_context.create_container_no_success()
    assert c.transitioning == 'error'
def test_host_create_container_requested_inactive(new_context):
    """Explicitly requesting an inactive host for a new container fails."""
    client = new_context.client
    host = new_context.host
    host = client.wait_success(host.deactivate())
    assert host.state == 'inactive'
    c = new_context.create_container_no_success(requestedHostId=host.id)
    assert c.transitioning == 'error'
def test_host_agent_state(super_client, new_context):
    """The host's agentState mirrors the agent state through
    deactivate/activate cycles, while the host itself stays active."""
    agent = super_client.reload(new_context.host).agent()
    agent = super_client.wait_success(agent)
    assert agent.state == 'active'
    agent = super_client.wait_success(agent.deactivate())
    host = new_context.client.reload(new_context.host)
    assert host.state == 'active'
    assert agent.state == 'inactive'
    assert agent.state == host.agentState
    agent = super_client.wait_success(agent.activate())
    host = new_context.client.reload(new_context.host)
    assert host.state == 'active'
    assert agent.state == 'active'
    assert agent.state == host.agentState
def test_host_remove(super_client, new_context):
    """Removing a host cascades removal to its storage pool, agent, agent
    account, physical host, registration key, and instances."""
    client = new_context.client
    container = new_context.create_container()
    host = super_client.reload(new_context.host)
    pool = find_one(host.storagePools)
    agent = host.agent()
    agent_account = agent.account()
    phy_host = host.physicalHost()
    key = find_one(super_client.list_register, key=agent.data.registrationKey)
    instances = host.instances()
    # The created container plus its companion instance.
    assert len(instances) == 2
    # Everything starts out active.
    assert container.state == 'running'
    assert host.state == 'active'
    assert pool.state == 'active'
    assert agent.state == 'active'
    assert agent_account.state == 'active'
    assert phy_host.state == 'active'
    assert key.state == 'active'
    assert key.secretKey is not None
    host = client.wait_success(host.deactivate())
    assert host.state == 'inactive'
    host = client.wait_success(client.delete(host))
    assert host.state == 'removed'
    # Dependent resources follow the host into the removed state.
    agent = super_client.wait_success(agent)
    assert agent.state == 'removed'
    pool = super_client.wait_success(pool)
    assert pool.state == 'removed'
    phy_host = super_client.wait_success(phy_host)
    assert phy_host.state == 'removed'
    key = super_client.wait_success(key)
    assert key.state == 'removed'
    agent_account = super_client.wait_success(agent_account)
    assert agent_account.state == 'removed'
    container = super_client.wait_success(container)
    assert container.state == 'removed'
    for c in instances:
        c = super_client.wait_success(c)
        assert c.state == 'removed'
def test_host_dockersocket(context, client):
    """An active host exposes a docker socket with a signed token and URL."""
    host = client.reload(context.host)
    dockersocket = host.dockersocket()
    assert dockersocket.token.index('.') > 0
    assert '/v1/dockersocket/' in dockersocket.url
def test_host_dockersocket_inactive(context, client):
    """The docker socket is still reachable on an inactive host."""
    host = client.wait_success(context.host.deactivate())
    dockersocket = host.dockersocket()
    assert dockersocket.token.index('.') > 0
    assert '/v1/dockersocket/' in dockersocket.url
| 31.017167 | 78 | 0.695863 |
ace358e6bfafd31a18250bfa8091622940fc75d5 | 4,126 | py | Python | google/ads/googleads/v7/services/services/ad_group_asset_service/transports/base.py | tridge-hq/google-ads-python | 7c61e0705063324b973a6bdf1d83193c032a9012 | [
"Apache-2.0"
] | null | null | null | google/ads/googleads/v7/services/services/ad_group_asset_service/transports/base.py | tridge-hq/google-ads-python | 7c61e0705063324b973a6bdf1d83193c032a9012 | [
"Apache-2.0"
] | null | null | null | google/ads/googleads/v7/services/services/ad_group_asset_service/transports/base.py | tridge-hq/google-ads-python | 7c61e0705063324b973a6bdf1d83193c032a9012 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import abc
import typing
import pkg_resources
from google import auth
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
from google.auth import credentials # type: ignore
from google.ads.googleads.v7.resources.types import ad_group_asset
from google.ads.googleads.v7.services.types import ad_group_asset_service
# Advertise the installed google-ads-googleads version in the client-info
# header; fall back to a version-less ClientInfo when the distribution is
# not installed (e.g. running from a source checkout).
try:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
        gapic_version=pkg_resources.get_distribution(
            "google-ads-googleads",
        ).version,
    )
except pkg_resources.DistributionNotFound:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
class AdGroupAssetServiceTransport(metaclass=abc.ABCMeta):
    """Abstract transport class for AdGroupAssetService.

    Concrete subclasses (e.g. gRPC) must implement the two RPC properties
    below; this base class handles host normalization, default credentials,
    and method wrapping.
    """

    # OAuth scope required by the Google Ads API.
    AUTH_SCOPES = ("https://www.googleapis.com/auth/adwords",)

    def __init__(
        self,
        *,
        host: str = "googleads.googleapis.com",
        credentials: credentials.Credentials = None,
        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
    ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                 The hostname to connect to.
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
        """
        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
        if ":" not in host:
            host += ":443"
        self._host = host

        # If no credentials are provided, then determine the appropriate
        # defaults from the environment (ADC).
        if credentials is None:
            credentials, _ = auth.default(scopes=self.AUTH_SCOPES)

        # Save the credentials.
        self._credentials = credentials

        # Lifted into its own function so it can be stubbed out during tests.
        self._prep_wrapped_messages(client_info)

    def _prep_wrapped_messages(self, client_info):
        # Precompute the gapic-wrapped methods (adds retries/timeouts/metadata).
        self._wrapped_methods = {
            self.get_ad_group_asset: gapic_v1.method.wrap_method(
                self.get_ad_group_asset,
                default_timeout=None,
                client_info=client_info,
            ),
            self.mutate_ad_group_assets: gapic_v1.method.wrap_method(
                self.mutate_ad_group_assets,
                default_timeout=None,
                client_info=client_info,
            ),
        }

    @property
    def get_ad_group_asset(
        self,
    ) -> typing.Callable[
        [ad_group_asset_service.GetAdGroupAssetRequest],
        ad_group_asset.AdGroupAsset,
    ]:
        # Abstract RPC: fetch a single AdGroupAsset resource.
        raise NotImplementedError

    @property
    def mutate_ad_group_assets(
        self,
    ) -> typing.Callable[
        [ad_group_asset_service.MutateAdGroupAssetsRequest],
        ad_group_asset_service.MutateAdGroupAssetsResponse,
    ]:
        # Abstract RPC: create/update/remove AdGroupAsset resources.
        raise NotImplementedError


__all__ = ("AdGroupAssetServiceTransport",)
| 35.264957 | 78 | 0.667959 |
ace35b2045a143fd22d66813c175a446b3dd518e | 1,369 | py | Python | Image Filtering/Embossed_Laplacian.py | Siddharth2016/PyCV | a9fd93d98b1e39edc5c715b60e6b11ccf094f92c | [
"MIT"
] | 2 | 2019-08-28T21:26:56.000Z | 2019-11-29T13:59:42.000Z | Image Filtering/Embossed_Laplacian.py | Siddharth2016/PyCV | a9fd93d98b1e39edc5c715b60e6b11ccf094f92c | [
"MIT"
] | null | null | null | Image Filtering/Embossed_Laplacian.py | Siddharth2016/PyCV | a9fd93d98b1e39edc5c715b60e6b11ccf094f92c | [
"MIT"
] | 1 | 2018-07-18T17:02:16.000Z | 2018-07-18T17:02:16.000Z | """
Email: siddharthchandragzb@gmail.com
"""
import cv2
import numpy as np
from scipy import ndimage
import imutils
class Filters:
    """Simple OpenCV image-filter demos: emboss, Laplacian edge overlay,
    and median blur, all applied to one image loaded at construction."""
    def __init__(self, imageName):
        # Load in color, resize, and keep a grayscale copy for the Laplacian.
        self.image = cv2.imread(imageName, cv2.IMREAD_COLOR)
        self.image = imutils.resize(self.image, 300, 600)
        self.grayimage = cv2.cvtColor(self.image, cv2.COLOR_BGR2GRAY)
        # 3x3 convolution kernel producing an emboss effect.
        self.kernelemboss = np.array([[-2, -1, 0],
                                      [-1, 1, 1],
                                      [0, 1, 2]])
    def Embossed(self):
        """Return the embossed version of the color image."""
        emboss = cv2.filter2D(self.image, -1, self.kernelemboss)
        return emboss
    def Laplace(self):
        """Return the color image darkened along Laplacian edges (sketch-like)."""
        # NOTE(review): the grayscale image is passed as the dst argument,
        # so self.grayimage is overwritten in place - confirm this is intended.
        laplace = cv2.Laplacian(self.grayimage, cv2.CV_8U, self.grayimage,
                                ksize = 7)
        # Scale each channel down where edges are strong.
        inverseAlpha = (1.0/255)*(255 - laplace)
        channels = cv2.split(self.image)
        for channel in channels:
            channel[:] = channel*inverseAlpha
        laplace = cv2.merge(channels)
        return laplace
    def MedianBlur(self):
        """Return the color image smoothed with a 5x5 median filter."""
        blur = cv2.medianBlur(self.image, ksize = 5)
        return blur
if __name__ == "__main__":
    # Demo: apply the three filters to a sample image and show the results
    # until a key is pressed.
    I = Filters("3.jpg")
    emb = I.Embossed()
    med = I.MedianBlur()
    lap = I.Laplace()
    cv2.imshow("Embossed", emb)
    cv2.imshow("Laplacian", lap)
    cv2.imshow("Median Blur", med)
    cv2.waitKey(0)
    cv2.destroyAllWindows()
| 27.38 | 74 | 0.573411 |
ace35da7ff32cb6bfe86db81127c0bdaf54569a5 | 5,327 | py | Python | aimpoint_mon/update_aimpoint_data.py | sot/aimpoint_mon | af211c6d1c1858847beec58e2a170168e61ed17f | [
"BSD-2-Clause"
] | 1 | 2020-05-18T05:07:07.000Z | 2020-05-18T05:07:07.000Z | aimpoint_mon/update_aimpoint_data.py | sot/aimpoint_mon | af211c6d1c1858847beec58e2a170168e61ed17f | [
"BSD-2-Clause"
] | 14 | 2015-10-29T15:17:52.000Z | 2021-02-04T11:42:13.000Z | aimpoint_mon/update_aimpoint_data.py | sot/aimpoint_mon | af211c6d1c1858847beec58e2a170168e61ed17f | [
"BSD-2-Clause"
] | null | null | null | #!/usr/bin/env python
import os
import re
import argparse
import tables
from pathlib import Path
import pickle
import numpy as np
from Chandra.Time import DateTime
from astropy.table import Table, Column, vstack
from astropy.time import Time
from mica.archive import asp_l1
from Ska.DBI import DBI
from mica.common import MICA_ARCHIVE
import pyyaks.logger
# Set up logging
loglevel = pyyaks.logger.INFO
logger = pyyaks.logger.get_logger(name='update_aimpoint_data', level=loglevel,
format="%(asctime)s %(message)s")
def get_opt(argv=None):
    """Parse command-line options for the aimpoint-drift updater.

    :param argv: optional list of argument strings; defaults to sys.argv[1:]
        (the new parameter is backward compatible and eases testing).
    :returns: argparse.Namespace with data_root, start, stop, dt.
    """
    parser = argparse.ArgumentParser(description='Get aimpoint drift data '
                                                 'from aspect solution files')
    parser.add_argument("--data-root",
                        default=".",
                        help="Root directory for asol and index files")
    parser.add_argument("--start",
                        help="Start time for processing (default=stop - 30 days)")
    parser.add_argument("--stop",
                        help="Processing stop date (default=NOW)")
    parser.add_argument("--dt",
                        # BUG FIX: type=float was missing, so a user-supplied
                        # --dt arrived as a str and broke `dt * 1000` downstream.
                        type=float,
                        default=1.0,
                        help="Sample delta time (ksec, default=1.0)")
    return parser.parse_args(argv)
def get_asol(obsid, asol_files, dt):
    """Read and concatenate aspect solution files for one obsid, resampled
    at an even ``dt`` (ksec) spacing, with an ``obsid`` column prepended.

    :param obsid: observation id, stored in every output row.
    :param asol_files: list of ASPSOL FITS file paths.
    :param dt: sample spacing in kiloseconds.
    :returns: astropy Table with columns obsid, time, dy, dz, dtheta.
    """
    logger.info('Reading...\n{}'.format('\n'.join(asol_files)))
    asols = [Table.read(asol_file) for asol_file in asol_files]
    # Check to see if the asol files have raw columns ( >= DS 10.8.3)
    has_raws = ['ady' in asol.colnames for asol in asols]
    if np.any(has_raws) and not np.all(has_raws):
        raise ValueError("Some asol files have raw cols and some do not")
    # Reduce to just the columns needed by the tool
    if np.any(has_raws):
        cols = ('time', 'ady', 'adz', 'adtheta')
    else:
        cols = ('time', 'dy', 'dz', 'dtheta')
    asols = [asol[cols] for asol in asols]
    asol = vstack(asols, metadata_conflicts='silent')
    # And rename any raw columns to use the old names
    if np.any(has_raws):
        asol.rename_column('ady', 'dy')
        asol.rename_column('adz', 'dz')
        asol.rename_column('adtheta', 'dtheta')
    # Resample: pick times every dt ksec between the 10th-from-start and
    # 10th-from-end samples (avoids edge artifacts), then take the nearest
    # following row for each.
    t0, t1 = asol['time'][[10, -10]]
    n_times = 2 + int((t1 - t0) // (dt * 1000))
    times = np.linspace(t0, t1, n_times)
    idx = np.searchsorted(asol['time'], times)
    asol = asol[idx]
    # Strip byte-order characters from dtypes for portable HDF5 storage.
    asol = Table([col.astype(col.dtype.str[1:]) for col in asol.columns.values()])
    asol.add_column(Column([obsid] * len(asol), name='obsid'), index=0)
    return asol
def add_asol_to_h5(filename, asol):
    """Append the rows of ``asol`` to the HDF5 table ``/data`` in ``filename``,
    creating the (zlib-compressed) table on first use.

    :param filename: path to the HDF5 archive file.
    :param asol: astropy Table of resampled aspect-solution samples.
    """
    asol = asol.as_array()
    with tables.open_file(filename, mode='a',
                          filters=tables.Filters(complevel=5, complib='zlib')) as h5:
        try:
            logger.info('Appending {} records to {}'.format(len(asol), filename))
            h5.root.data.append(asol)
        except tables.NoSuchNodeError:
            logger.info('Creating {}'.format(filename))
            # Consistency fix: use the PyTables >= 3.0 snake-case API
            # (create_table), matching the open_file call above; the
            # camel-case createTable alias is deprecated and removed in
            # newer PyTables releases.
            h5.create_table(h5.root, 'data', asol, "Aimpoint drift", expectedrows=1e6)
        h5.root.data.flush()
def main():
    """Update the aimpoint archive: find obsids in the requested time range,
    resample each new obsid's aspect solution into the HDF5 archive, and
    re-export the archive as FITS."""
    # Get options
    opt = get_opt()
    stop = DateTime(opt.stop)
    start = stop - 10 if (opt.start is None) else DateTime(opt.start)
    # NOTE(review): typo "Processsing" in this log string (runtime text,
    # left unchanged here).
    logger.info('Processsing from {} to {}'.format(start.date, stop.date))
    # Define file names
    h5_file = os.path.join(opt.data_root, 'aimpoint_asol_values.h5')
    # Get obsids in date range
    mica_obspar_db = os.path.join(MICA_ARCHIVE, 'obspar', 'archfiles.db3')
    with DBI(dbi='sqlite', server=mica_obspar_db) as db:
        obs = db.fetchall('select obsid, tstart from archfiles where tstart > {}'
                          ' and tstart < {}'
                          .format(start.secs, stop.secs))
    # Get unique obsids and then sort by tstart
    idx = np.unique(obs['obsid'], return_index=True)[1]
    obs = Table(obs[idx])
    obs.sort('tstart')
    obs['datestart'] = Time(obs['tstart'], format='cxcsec').yday
    obs.pprint(max_lines=-1)
    # Dict of obsid => list of ASPSOL files. This keeps track of obsids that have
    # been processed.
    obsid_file = Path(opt.data_root) / 'aimpoint_obsid_index.pkl'
    if obsid_file.exists():
        obsid_index = pickle.load(open(obsid_file, 'rb'))
    else:
        obsid_index = {}
    # Go through obsids and either process or skip
    for obsid in obs['obsid']:
        if str(obsid) in obsid_index:
            logger.info('Skipping obsid {} - already in archive'.format(obsid))
            continue
        logger.info('Processing obsid {}'.format(obsid))
        asol_files = sorted(asp_l1.get_files(obsid=obsid, content='ASPSOL'))
        if not asol_files:
            logger.info('Skipping obsid {} - no asol files'.format(obsid))
            continue
        asol = get_asol(obsid, asol_files, opt.dt)
        add_asol_to_h5(h5_file, asol)
        # Persist the index after every obsid so an interrupted run can resume.
        obsid_index[str(obsid)] = asol_files
        pickle.dump(obsid_index, open(obsid_file, 'wb'))
        logger.info('File {} updated'.format(h5_file))
        logger.info('File {} updated'.format(obsid_file))
    # Write out to FITS
    fits_file = re.sub(r'\.h5$', '.fits', h5_file)
    dat = Table.read(h5_file, path='data')
    dat.meta.clear()
    dat.write(fits_file, overwrite=True)
    logger.info('File {} updated'.format(fits_file))
if __name__ == '__main__':
main()
| 34.590909 | 85 | 0.620237 |
ace35de01266b2cdf1a9b4d8c91075358bf4e02e | 49,053 | py | Python | examples/copr/copr_cli/main.py | hroncok/argparse-manpage | 42460d51d2c00974f8ed112e2da6e11fdb3efc8f | [
"Apache-2.0"
] | 20 | 2018-09-13T02:05:50.000Z | 2021-11-08T11:42:45.000Z | examples/copr/copr_cli/main.py | hroncok/argparse-manpage | 42460d51d2c00974f8ed112e2da6e11fdb3efc8f | [
"Apache-2.0"
] | 31 | 2018-01-22T14:52:24.000Z | 2022-03-21T16:07:58.000Z | examples/copr/copr_cli/main.py | hroncok/argparse-manpage | 42460d51d2c00974f8ed112e2da6e11fdb3efc8f | [
"Apache-2.0"
] | 15 | 2018-03-08T19:54:53.000Z | 2022-03-08T10:06:51.000Z | #!/usr/bin/python3
# -*- coding: UTF-8 -*-
import os
import re
import subprocess
import argparse
import sys
import datetime
import time
import six
import simplejson
from collections import defaultdict
import logging
if six.PY2:
from urlparse import urlparse
else:
from urllib.parse import urlparse
if sys.version_info < (2, 7):
    class NullHandler(logging.Handler):
        """Backport of logging.NullHandler for Python < 2.7."""
        def emit(self, record):
            # Intentionally discard every record.
            pass
else:
from logging import NullHandler
log = logging.getLogger(__name__)
log.addHandler(NullHandler())
from copr import CoprClient
from copr.client.responses import CoprResponse
import copr.exceptions as copr_exceptions
from .util import ProgressBar
from .build_config import MockProfile
import pkg_resources
# Maps the CLI string choices ("on"/"off") to booleans; a missing option
# (None) means "not specified -- keep the server-side default".
ON_OFF_MAP = {
    'on': True,
    'off': False,
    None: None,
}

# Printed to stderr when the copr config file is absent or broken; any
# operation that requires credentials will subsequently fail.
no_config_warning = """
================= WARNING: =======================
File '{0}' is missing or incorrect.
See documentation: man copr-cli.
Any operation requiring credentials will fail!
==================================================
"""
class Commands(object):
    def __init__(self, config):
        """Initialize the CLI command handler.

        :param config: path to an alternative copr configuration file,
            or None to use the default (~/.config/copr)
        """
        self.config = config

        try:
            self.client = CoprClient.create_from_file_config(config)
        except (copr_exceptions.CoprNoConfException,
                copr_exceptions.CoprConfigException):
            # Missing/broken config: warn and fall back to an unauthenticated
            # client so read-only operations still work.
            sys.stderr.write(no_config_warning.format(config or "~/.config/copr"))
            self.client = CoprClient(
                copr_url=u"http://copr.fedoraproject.org",
                no_config=True
            )
def requires_api_auth(func):
""" Decorator that checks config presence
"""
def wrapper(self, args):
if self.client.no_config:
sys.stderr.write("Error: Operation requires api authentication\n")
sys.exit(6)
return func(self, args)
wrapper.__doc__ = func.__doc__
wrapper.__name__ = func.__name__
return wrapper
def check_username_presence(func):
""" Decorator that checks if username was provided
"""
def wrapper(self, args):
if self.client.no_config and args.username is None:
sys.stderr.write(
"Error: Operation requires username\n"
"Pass username to command or create `~/.config/copr`\n")
sys.exit(6)
if args.username is None and self.client.username is None:
sys.stderr.write(
"Error: Operation requires username\n"
"Pass username to command or add it to `~/.config/copr`\n")
sys.exit(6)
return func(self, args)
wrapper.__doc__ = func.__doc__
wrapper.__name__ = func.__name__
return wrapper
    def _watch_builds(self, build_ids):
        """Poll build status every 30s until all builds reach a final state.

        Prints a line whenever any build changes status.  Raises
        CoprBuildException when at least one build fails (or reports an
        unknown status); a KeyboardInterrupt silently stops watching -- the
        builds keep running server-side.

        :param build_ids: list of build IDs
        """
        print("Watching build(s): (this may be safely interrupted)")

        prevstatus = defaultdict(lambda: None)
        failed_ids = []

        watched = set(build_ids)
        done = set()

        try:
            while watched != done:
                for build_id in watched:
                    if build_id in done:
                        continue
                    build_details = self.client.get_build_details(build_id)

                    if build_details.output != "ok":
                        errmsg = " Build {1}: Unable to get build status: {0}". \
                            format(build_details.error, build_id)
                        raise copr_exceptions.CoprRequestException(errmsg)

                    now = datetime.datetime.now()
                    if prevstatus[build_id] != build_details.status:
                        prevstatus[build_id] = build_details.status
                        print(" {0} Build {2}: {1}".format(
                            now.strftime("%H:%M:%S"),
                            build_details.status, build_id))

                    # Remember failures but keep watching the remaining builds.
                    if build_details.status in ["failed"]:
                        failed_ids.append(build_id)
                    if build_details.status in ["succeeded", "skipped",
                                                "failed", "canceled"]:
                        done.add(build_id)
                    if build_details.status == "unknown":
                        raise copr_exceptions.CoprBuildException(
                            "Unknown status.")

                if watched == done:
                    break

                time.sleep(30)

            # Report all failures at once, after everything has finished.
            if failed_ids:
                raise copr_exceptions.CoprBuildException(
                    "Build(s) {0} failed.".format(
                        ", ".join(str(x) for x in failed_ids)))

        except KeyboardInterrupt:
            pass
def action_whoami(self, args):
"""
Simply print out the current user as defined in copr config.
"""
print(self.client.username)
    @requires_api_auth
    def action_build(self, args):
        """ Method called when the 'build' action has been selected by the
        user.

        Uploads local SRPM files (with a progress bar) or passes URLs
        through unchanged, then submits one build per package.

        :param args: argparse arguments provided by the user
        """
        self.client.authentication_check()

        bar = None
        progress_callback = None
        builds = []

        for pkg in args.pkgs:
            if os.path.exists(pkg):
                bar = ProgressBar(max=os.path.getsize(pkg))

                # Closure over the current `bar`; consumed in the same loop
                # iteration by process_build.
                # NOTE(review): bar/progress_callback are not reset for a
                # subsequent URL-only package -- confirm this is intended.
                # pylint: disable=function-redefined
                def progress_callback(monitor):
                    bar.next(n=8192)

                print('Uploading package {0}'.format(pkg))

            data = {
                "pkgs": [pkg],
                "progress_callback": progress_callback,
            }
            builds.append(self.process_build(args, self.client.create_new_build, data, bar=bar))

        return builds
@requires_api_auth
def action_build_pypi(self, args):
"""
Method called when the 'buildpypi' action has been selected by the user.
:param args: argparse arguments provided by the user
"""
username, copr = parse_name(args.copr)
data = {
"pypi_package_name": args.packagename,
"pypi_package_version": args.packageversion,
"python_versions": args.pythonversions,
}
return self.process_build(args, self.client.create_new_build_pypi, data)
@requires_api_auth
def action_build_tito(self, args):
"""
Method called when the 'buildtito' action has been selected by the user.
:param args: argparse arguments provided by the user
"""
data = {
"git_url": args.git_url,
"git_dir": args.git_dir,
"git_branch": args.git_branch,
"tito_test": args.tito_test,
}
return self.process_build(args, self.client.create_new_build_tito, data)
@requires_api_auth
def action_build_mock(self, args):
"""
Method called when the 'build-mock' action has been selected by the user.
:param args: argparse arguments provided by the user
"""
data = {
"scm_type": args.scm_type,
"scm_url": args.scm_url,
"scm_branch": args.scm_branch,
"spec": args.spec,
}
return self.process_build(args, self.client.create_new_build_mock, data)
@requires_api_auth
def action_build_rubygems(self, args):
"""
Method called when the 'buildgem' action has been selected by the user.
:param args: argparse arguments provided by the user
"""
data = {"gem_name": args.gem_name}
return self.process_build(args, self.client.create_new_build_rubygems, data)
@requires_api_auth
def action_build_distgit(self, args):
"""
Method called when the 'buildfedpkg' action has been selected by the user.
:param args: argparse arguments provided by the user
"""
data = {"clone_url": args.clone_url, "branch": args.branch}
return self.process_build(args, self.client.create_new_build_distgit, data)
    def process_build(self, args, build_function, data, bar=None):
        """Submit a build and report/watch the outcome.

        :param args: argparse namespace (reads copr, chroots, memory,
            timeout, background, nowait)
        :param build_function: bound client method that creates the build
        :param data: extra keyword arguments forwarded to `build_function`
        :param bar: optional ProgressBar to finish once the upload is done
        """
        username, copr = parse_name(args.copr)

        result = build_function(username=username, projectname=copr, chroots=args.chroots, memory=args.memory,
                                timeout=args.timeout, background=args.background, **data)
        if bar:
            bar.finish()

        if result.output != "ok":
            sys.stderr.write(result.error + "\n")
            return
        print(result.message)

        build_ids = [bw.build_id for bw in result.builds_list]
        print("Created builds: {0}".format(" ".join(map(str, build_ids))))

        # Block until the builds finish unless --nowait was requested.
        if not args.nowait:
            self._watch_builds(build_ids)
    @requires_api_auth
    def action_create(self, args):
        """ Method called when the 'create' action has been selected by the
        user.

        Creates a new project from the parsed CLI options and prints the
        API response message.

        :param args: argparse arguments provided by the user
        """
        username, copr = parse_name(args.name)
        result = self.client.create_project(
            username=username, projectname=copr, description=args.description,
            instructions=args.instructions, chroots=args.chroots,
            repos=args.repos, initial_pkgs=args.initial_pkgs,
            disable_createrepo=args.disable_createrepo,
            unlisted_on_hp=ON_OFF_MAP[args.unlisted_on_hp],
            enable_net=ON_OFF_MAP[args.enable_net],
            persistent=args.persistent,
            auto_prune=ON_OFF_MAP[args.auto_prune]
        )
        print(result.message)
    @requires_api_auth
    def action_modify_project(self, args):
        """ Method called when the 'modify' action has been selected by the
        user.

        :param args: argparse arguments provided by the user
        """
        username, copr = parse_name(args.name)
        # NOTE(review): unlike the sibling actions, the response message is
        # never printed and `result` is unused -- confirm this is intended.
        result = self.client.modify_project(
            username=username, projectname=copr,
            description=args.description, instructions=args.instructions,
            repos=args.repos, disable_createrepo=args.disable_createrepo,
            unlisted_on_hp=ON_OFF_MAP[args.unlisted_on_hp],
            enable_net=ON_OFF_MAP[args.enable_net],
            auto_prune=ON_OFF_MAP[args.auto_prune],
            chroots=args.chroots,
        )
@requires_api_auth
def action_delete(self, args):
""" Method called when the 'delete' action has been selected by the
user.
:param args: argparse arguments provided by the user
"""
username, copr = parse_name(args.copr)
result = self.client.delete_project(username=username, projectname=copr)
print(result.message)
@requires_api_auth
def action_fork(self, args):
""" Method called when the 'fork' action has been selected by the
user.
:param args: argparse arguments provided by the user
"""
username, copr = parse_name(args.dst)
result = self.client.fork_project(source=args.src, username=username, projectname=copr, confirm=args.confirm)
print(result.message)
def action_mock_config(self, args):
""" Method called when the 'list' action has been selected by the
user.
:param args: argparse arguments provided by the user
"""
username = self.client.username
project = args.project.split("/")
if len(project) != 2:
args.project = username + "/" + args.project
result = self.client.get_build_config(args.project, args.chroot)
if result.output != "ok":
sys.stderr.write(result.error + "\n")
sys.stderr.write("Un-expected data returned, please report this issue\n")
print(MockProfile(result.build_config))
    @check_username_presence
    def action_list(self, args):
        """ Method called when the 'list' action has been selected by the
        user.

        :param args: argparse arguments provided by the user
        """
        username = args.username or self.client.username
        result = self.client.get_projects_list(username)
        # import ipdb; ipdb.set_trace()
        if result.output != "ok":
            sys.stderr.write(result.error + "\n")
            sys.stderr.write("Un-expected data returned, please report this issue\n")
            # NOTE(review): execution falls through to the loop below even on
            # error -- confirm whether a `return` is missing here.
        elif not result.projects_list:
            sys.stderr.write("No copr retrieved for user: '{0}'\n".format(username))
            return

        for prj in result.projects_list:
            print(prj)
def action_status(self, args):
result = self.client.get_build_details(args.build_id)
print(result.status)
    def action_download_build(self, args):
        """Download built results via wget ('download-build' action).

        Fetches the result directory of each (selected) chroot into
        ``args.dest/<chroot>``.
        """
        result = self.client.get_build_details(args.build_id)
        # NOTE(review): os.path.split() always returns a 2-tuple, so base_len
        # is constantly 2 here -- verify whether the intent was the number of
        # path components of the results URL.
        base_len = len(os.path.split(result.results))

        for chroot, url in result.results_by_chroot.items():
            if args.chroots and chroot not in args.chroots:
                continue

            cmd = "wget -r -nH --no-parent --reject 'index.html*'".split(' ')
            cmd.extend(['-P', os.path.join(args.dest, chroot)])
            cmd.extend(['--cut-dirs', str(base_len + 4)])
            cmd.append(url)
            subprocess.call(cmd)
@requires_api_auth
def action_cancel(self, args):
""" Method called when the 'cancel' action has been selected by the
user.
:param args: argparse arguments provided by the user
"""
result = self.client.cancel_build(args.build_id)
print(result.status)
    def action_watch_build(self, args):
        """Watch one or more builds until completion ('watch-build' action)."""
        self._watch_builds(args.build_id)
def action_delete_build(self, args):
result = self.client.delete_build(args.build_id)
print(result.status)
#########################################################
### Chroot actions ###
#########################################################
@requires_api_auth
def action_edit_chroot(self, args):
""" Method called when the 'edit-chroot' action has been selected by the
user.
:param args: argparse arguments provided by the user
"""
owner, copr, chroot = parse_chroot_path(args.coprchroot)
result = self.client.edit_chroot(
ownername=owner, projectname=copr, chrootname=chroot,
upload_comps=args.upload_comps, delete_comps=args.delete_comps,
packages=args.packages, repos=args.repos
)
print(result.message)
def action_get_chroot(self, args):
""" Method called when the 'get-chroot' action has been selected by the
user.
:param args: argparse arguments provided by the user
"""
owner, copr, chroot = parse_chroot_path(args.coprchroot)
result = self.client.get_chroot(
ownername=owner, projectname=copr, chrootname=chroot
)
print(simplejson.dumps(result.chroot, indent=4, sort_keys=True, for_json=True))
#########################################################
### Package actions ###
#########################################################
@requires_api_auth
def action_add_or_edit_package_tito(self, args):
ownername, projectname = parse_name(args.copr)
data = {
"package_name": args.name,
"git_url": args.git_url,
"git_dir": args.git_dir,
"git_branch": args.git_branch,
"tito_test": ON_OFF_MAP[args.tito_test],
"webhook_rebuild": ON_OFF_MAP[args.webhook_rebuild],
}
if args.create:
result = self.client.add_package_tito(ownername=ownername, projectname=projectname, **data)
else:
result = self.client.edit_package_tito(ownername=ownername, projectname=projectname, **data)
print(result.message)
@requires_api_auth
def action_add_or_edit_package_pypi(self, args):
ownername, projectname = parse_name(args.copr)
data = {
"package_name": args.name,
"pypi_package_name": args.packagename,
"pypi_package_version": args.packageversion,
"python_versions": args.pythonversions,
"webhook_rebuild": ON_OFF_MAP[args.webhook_rebuild],
}
if args.create:
result = self.client.add_package_pypi(ownername=ownername, projectname=projectname, **data)
else:
result = self.client.edit_package_pypi(ownername=ownername, projectname=projectname, **data)
print(result.message)
@requires_api_auth
def action_add_or_edit_package_mockscm(self, args):
ownername, projectname = parse_name(args.copr)
data = {
"package_name": args.name,
"scm_type": args.scm_type,
"scm_url": args.scm_url,
"scm_branch": args.scm_branch,
"spec": args.spec,
"webhook_rebuild": ON_OFF_MAP[args.webhook_rebuild],
}
if args.create:
result = self.client.add_package_mockscm(ownername=ownername, projectname=projectname, **data)
else:
result = self.client.edit_package_mockscm(ownername=ownername, projectname=projectname, **data)
print(result.message)
@requires_api_auth
def action_add_or_edit_package_rubygems(self, args):
ownername, projectname = parse_name(args.copr)
data = {
"package_name": args.name,
"gem_name": args.gem_name,
"webhook_rebuild": ON_OFF_MAP[args.webhook_rebuild],
}
if args.create:
result = self.client.add_package_rubygems(ownername=ownername, projectname=projectname, **data)
else:
result = self.client.edit_package_rubygems(ownername=ownername, projectname=projectname, **data)
print(result.message)
def action_list_packages(self, args):
ownername, projectname = parse_name(args.copr)
data = {
"with_latest_build": args.with_latest_build,
"with_latest_succeeded_build": args.with_latest_succeeded_build,
"with_all_builds": args.with_all_builds,
}
result = self.client.get_packages_list(ownername=ownername, projectname=projectname, **data)
print(simplejson.dumps(result.packages_list, indent=4, sort_keys=True, for_json=True))
def action_list_package_names(self, args):
ownername, projectname = parse_name(args.copr)
result = self.client.get_packages_list(ownername=ownername, projectname=projectname)
for package in result.packages_list:
print(package.name)
def action_get_package(self, args):
ownername, projectname = parse_name(args.copr)
data = {
"pkg_name": args.name,
"with_latest_build": args.with_latest_build,
"with_latest_succeeded_build": args.with_latest_succeeded_build,
"with_all_builds": args.with_all_builds,
}
result = self.client.get_package(ownername=ownername, projectname=projectname, **data)
print(simplejson.dumps(result.package, indent=4, sort_keys=True, for_json=True))
def action_delete_package(self, args):
ownername, projectname = parse_name(args.copr)
data = { "pkg_name": args.name }
result = self.client.delete_package(ownername=ownername, projectname=projectname, **data)
print(result.message)
def action_reset_package(self, args):
ownername, projectname = parse_name(args.copr)
data = { "pkg_name": args.name }
result = self.client.reset_package(ownername=ownername, projectname=projectname, **data)
print(result.message)
def action_build_package(self, args):
ownername, projectname = parse_name(args.copr)
data = {
"pkg_name": args.name,
"chroots": args.chroots,
#"memory": args.memory,
"timeout": args.timeout
}
result = self.client.build_package(ownername=ownername, projectname=projectname, **data)
if result.output != "ok":
sys.stderr.write(result.error + "\n")
return
print(result.message)
build_ids = [bw.build_id for bw in result.builds_list]
print("Created builds: {0}".format(" ".join(map(str, build_ids))))
if not args.nowait:
self._watch_builds(build_ids)
    def action_build_module(self, args):
        """
        Build module via Copr MBS

        Reads the modulemd either from a local YAML file (--yaml) or passes
        the URL (--url) through, then submits it to the given project.
        """
        ownername, projectname = parse_name(args.copr or "")
        # NOTE(review): the file handle opened here is never explicitly
        # closed; it is handed to the client for upload -- confirm the
        # client closes it.
        modulemd = open(args.yaml, "rb") if args.yaml else args.url
        response = self.client.build_module(modulemd, ownername, projectname)
        print(response.message if response.output == "ok" else response.error)
def version():
    """Return the installed copr-cli version string.

    During 'setup.py build' (BUILD_MANPAGES_RUNNING set) pkg_resources
    metadata is unavailable, so a placeholder is returned; the manual page
    reads the real version directly from setuptools anyway.
    """
    if 'BUILD_MANPAGES_RUNNING' not in os.environ:
        return pkg_resources.require('copr-cli')[0].version
    return 'fake'
def setup_parser():
"""
Set the main arguments.
"""
#########################################################
### General options ###
#########################################################
parser = argparse.ArgumentParser(prog="copr", epilog="dummy text")
# General connection options
parser.add_argument("--debug", dest="debug", action="store_true",
help="Enable debug output")
parser.add_argument("--config", dest="config",
help="Path to an alternative configuration file")
parser.add_argument("--version", action="version",
version="%(prog)s version " + version())
subparsers = parser.add_subparsers()
#########################################################
### Project options ###
#########################################################
parser_whoami = subparsers.add_parser(
"whoami",
help="Print username that the client authenticates with against copr-frontend"
)
parser_whoami.set_defaults(func="action_whoami")
# create the parser for the "list" command
parser_list = subparsers.add_parser(
"list",
help="List all the copr of the "
"provided "
)
parser_list.add_argument(
"username", metavar="username|@groupname", nargs="?",
help="The username or @groupname that you would like to "
"list the coprs of (defaults to current user)"
)
parser_list.set_defaults(func="action_list")
parser_mock_config = subparsers.add_parser(
"mock-config",
help="Get the mock profile (similar to koji mock-config)"
)
parser_mock_config.add_argument(
"project",
help="Expected format is <user>/<project>, <group>/<project> (including '@') or <project> (name of project you own)."
)
parser_mock_config.add_argument(
"chroot",
help="chroot id, e.g. 'fedora-rawhide-x86_64'"
)
parser_mock_config.set_defaults(func="action_mock_config")
# create the parser for the "create" command
parser_create = subparsers.add_parser("create", help="Create a new copr")
parser_create.add_argument("name",
help="The name of the copr to create")
parser_create.add_argument("--chroot", dest="chroots", action="append",
help="Chroot to use for this copr")
parser_create.add_argument("--repo", dest="repos", action="append",
help="Repository to add to this copr")
parser_create.add_argument("--initial-pkgs", dest="initial_pkgs",
action="append",
help="List of packages URL to build in this "
"new copr")
parser_create.add_argument("--description",
help="Description of the copr")
parser_create.add_argument("--instructions",
help="Instructions for the copr")
parser_create.add_argument("--disable_createrepo",
help="Disable metadata auto generation")
parser_create.add_argument("--enable-net", choices=["on", "off"], default="off",
help="If net should be enabled for builds in this project (default is off)")
parser_create.add_argument("--unlisted-on-hp", choices=["on", "off"],
help="The project will not be shown on COPR home page")
parser_create.add_argument("--persistent", action="store_true",
help="Project and its builds will be undeletable. This option can only be specified by a COPR admin.")
parser_create.add_argument("--auto-prune", choices=["on", "off"], default="on",
help="If auto-deletion of project's obsoleted builds should be enabled (default is on).\
This option can only be specified by a COPR admin.")
parser_create.set_defaults(func="action_create")
# create the parser for the "modify_project" command
parser_modify = subparsers.add_parser("modify", help="Modify existing copr")
parser_modify.add_argument("name", help="The name of the copr to modify")
parser_modify.add_argument("--chroot", dest="chroots", action="append",
help="Chroot to use for this copr")
parser_modify.add_argument("--description",
help="Description of the copr")
parser_modify.add_argument("--instructions",
help="Instructions for the copr")
parser_modify.add_argument("--repo", dest="repos", action="append",
help="Repository to add to this copr")
parser_modify.add_argument("--disable_createrepo",
help="Disable metadata auto generation")
parser_modify.add_argument("--enable-net", choices=["on", "off"],
help="If net should be enabled for builds in this project (default is \"don't change\")")
parser_modify.add_argument("--unlisted-on-hp", choices=["on", "off"],
help="The project will not be shown on COPR home page")
parser_modify.add_argument("--auto-prune", choices=["on", "off"],
help="If auto-deletion of project's obsoleted builds should be enabled.\
This option can only be specified by a COPR admin.")
parser_modify.set_defaults(func="action_modify_project")
# create the parser for the "delete" command
parser_delete = subparsers.add_parser("delete", help="Deletes the entire project")
parser_delete.add_argument("copr", help="Name of your project to be deleted.")
parser_delete.set_defaults(func="action_delete")
# create the parser for the "fork" command
parser_delete = subparsers.add_parser("fork", help="Fork the project and builds in it")
parser_delete.add_argument("src", help="Which project should be forked")
parser_delete.add_argument("dst", help="Name of the new project")
parser_delete.add_argument("--confirm", action="store_true", help="Confirm forking into existing project")
parser_delete.set_defaults(func="action_fork")
#########################################################
### Source-type related options ###
#########################################################
parser_tito_args_parent = argparse.ArgumentParser(add_help=False)
parser_tito_args_parent.add_argument("--git-url", metavar="URL", dest="git_url", required=True,
help="URL to a project managed by Tito")
parser_tito_args_parent.add_argument("--git-dir", metavar="DIRECTORY", dest="git_dir",
help="Relative path from Git root to directory containing .spec file")
parser_tito_args_parent.add_argument("--git-branch", metavar="BRANCH", dest="git_branch",
help="Git branch that you want to build from")
parser_tito_args_parent.add_argument("--test", dest="tito_test", choices=["on", "off"],
help="Build the last commit instead of the last release tag")
parser_pypi_args_parent = argparse.ArgumentParser(add_help=False)
parser_pypi_args_parent.add_argument("--pythonversions", nargs="*", type=int, metavar="VERSION", default=[3, 2],
help="For what Python versions to build (by default: 3 2)")
parser_pypi_args_parent.add_argument("--packageversion", metavar = "PYPIVERSION",
help="Version of the PyPI package to be built (by default latest)")
parser_pypi_args_parent.add_argument("--packagename", required=True, metavar="PYPINAME",
help="Name of the PyPI package to be built, required.")
parser_mockscm_args_parent = argparse.ArgumentParser(add_help=False)
parser_mockscm_args_parent.add_argument("--scm-type", metavar="TYPE", dest="scm_type", choices=["git", "svn"], default="git",
help="specify versioning tool, default is 'git'")
parser_mockscm_args_parent.add_argument("--scm-url", metavar="URL", dest="scm_url",
help="url to a project versioned by Git or SVN, required")
parser_mockscm_args_parent.add_argument("--scm-branch", metavar="BRANCH", dest="scm_branch", help="")
parser_mockscm_args_parent.add_argument("--spec", dest="spec", metavar="FILE",
help="relative path from SCM root to .spec file, required")
parser_rubygems_args_parent = argparse.ArgumentParser(add_help=False)
parser_rubygems_args_parent.add_argument("--gem", metavar="GEM", dest="gem_name",
help="Specify gem name")
parser_distgit_args_parent = argparse.ArgumentParser(add_help=False)
parser_distgit_args_parent.add_argument("--clone-url", metavar="URL", dest="clone_url", required=True,
help="Specify clone url for the distgit repository")
parser_distgit_args_parent.add_argument("--branch", metavar="BRANCH", dest="branch",
help="Specify branch to be used")
#########################################################
### Build options ###
#########################################################
# parent parser for the builds commands below
parser_build_parent = argparse.ArgumentParser(add_help=False)
parser_build_parent.add_argument("copr",
help="The copr repo to build the package in. Can be just name of project or even in format username/project or @groupname/project.")
parser_build_parent.add_argument("--memory", dest="memory",
help="")
parser_build_parent.add_argument("--timeout", dest="timeout",
help="")
parser_build_parent.add_argument("--nowait", action="store_true", default=False,
help="Don't wait for build")
parser_build_parent.add_argument("-r", "--chroot", dest="chroots", action="append",
help="If you don't need this build for all the project's chroots. You can use it several times for each chroot you need.")
parser_build_parent.add_argument("--background", dest="background", action="store_true", default=False,
help="Mark the build as a background job. It will have lesser priority than regular builds.")
# create the parser for the "build" (url/upload) command
parser_build = subparsers.add_parser("build", parents=[parser_build_parent],
help="Build packages to a specified copr")
parser_build.add_argument("pkgs", nargs="+",
help="filename of SRPM or URL of packages to build")
parser_build.set_defaults(func="action_build")
# create the parser for the "buildpypi" command
parser_build_pypi = subparsers.add_parser("buildpypi", parents=[parser_pypi_args_parent, parser_build_parent],
help="Build PyPI package to a specified copr")
parser_build_pypi.set_defaults(func="action_build_pypi")
# create the parser for the "buildgem" command
parser_build_rubygems = subparsers.add_parser("buildgem", parents=[parser_rubygems_args_parent, parser_build_parent],
help="Build gem from rubygems.org to a specified copr")
parser_build_rubygems.set_defaults(func="action_build_rubygems")
# create the parser for the "buildfedpkg" command
parser_build_distgit = subparsers.add_parser("buildfedpkg", parents=[parser_distgit_args_parent, parser_build_parent],
help="Build package from pkgs.fedoraproject.org")
parser_build_distgit.set_defaults(func="action_build_distgit")
# create the parser for the "buildtito" command
parser_build_tito = subparsers.add_parser("buildtito", parents=[parser_tito_args_parent, parser_build_parent],
help="submit a build from Git repository via Tito to a specified copr")
parser_build_tito.set_defaults(func="action_build_tito")
# create the parser for the "buildmock" command
parser_build_mock = subparsers.add_parser("buildmock", parents=[parser_mockscm_args_parent, parser_build_parent],
help="submit a build from SCM repository via Mock to a specified copr")
parser_build_mock.set_defaults(func="action_build_mock")
# create the parser for the "status" command
parser_status = subparsers.add_parser("status", help="Get build status of build specified by its ID")
parser_status.add_argument("build_id", help="Build ID", type=int)
parser_status.set_defaults(func="action_status")
# create the parser for the "download-build" command
parser_download_build = subparsers.add_parser("download-build", help="Fetches built packages")
parser_download_build.add_argument("build_id", help="Build ID")
parser_download_build.add_argument("-r", "--chroot", dest="chroots", action="append",
help="Select chroots to fetch")
parser_download_build.add_argument("--dest", "-d", dest="dest",
help="Base directory to store packages", default=".")
parser_download_build.set_defaults(func="action_download_build")
# create the parser for the "cancel" command
parser_cancel = subparsers.add_parser("cancel", help="Cancel build specified by its ID")
parser_cancel.add_argument("build_id", help="Build ID")
parser_cancel.set_defaults(func="action_cancel")
# create the parser for the "watch-build" command
parser_watch = subparsers.add_parser("watch-build",
help="Watch status and progress of build(s)"
" specified by their ID")
parser_watch.add_argument("build_id", nargs="+",
help="Build ID", type=int)
parser_watch.set_defaults(func="action_watch_build")
# create the parser for the "delete-build" command
parser_delete = subparsers.add_parser("delete-build",
help="Delete build specified by its ID")
parser_delete.add_argument("build_id", help="Build ID", type=int)
parser_delete.set_defaults(func="action_delete_build")
#########################################################
### Chroot options ###
#########################################################
parser_edit_chroot = subparsers.add_parser("edit-chroot", help="Edit chroot of a project")
parser_edit_chroot.add_argument("coprchroot", help="Path to a project chroot as owner/project/chroot or project/chroot")
parser_edit_chroot_comps_group = parser_edit_chroot.add_mutually_exclusive_group()
parser_edit_chroot_comps_group.add_argument("--upload-comps", metavar="FILEPATH",
help="filepath to the comps.xml file to be uploaded")
parser_edit_chroot_comps_group.add_argument("--delete-comps", action="store_true",
help="deletes already existing comps.xml for the chroot")
parser_edit_chroot.add_argument("--packages",
help="space separated string of package names to be added to buildroot")
parser_edit_chroot.add_argument("--repos",
help="space separated string of additional repo urls for chroot")
parser_edit_chroot.set_defaults(func="action_edit_chroot")
parser_get_chroot = subparsers.add_parser("get-chroot", help="Get chroot of a project")
parser_get_chroot.add_argument("coprchroot", help="Path to a project chroot as owner/project/chroot or project/chroot")
parser_get_chroot.set_defaults(func="action_get_chroot")
#########################################################
### Package options ###
#########################################################
# package edit/create parent
parser_add_or_edit_package_parent = argparse.ArgumentParser(add_help=False)
parser_add_or_edit_package_parent.add_argument("--name",
help="Name of the package to be edited or created",
metavar="PKGNAME", required=True)
parser_add_or_edit_package_parent.add_argument("copr",
help="The copr repo for the package. Can be just name of project or even in format username/project or @groupname/project.")
parser_add_or_edit_package_parent.add_argument("--webhook-rebuild",
choices=["on", "off"], help="Enable auto-rebuilding.")
# Tito edit/create
parser_add_package_tito = subparsers.add_parser("add-package-tito",
help="Creates a new Tito package",
parents=[parser_tito_args_parent, parser_add_or_edit_package_parent])
parser_add_package_tito.set_defaults(func="action_add_or_edit_package_tito", create=True)
parser_edit_package_tito = subparsers.add_parser("edit-package-tito",
help="Edits an existing Tito package",
parents=[parser_tito_args_parent, parser_add_or_edit_package_parent])
parser_edit_package_tito.set_defaults(func="action_add_or_edit_package_tito", create=False)
# PyPI edit/create
parser_add_package_pypi = subparsers.add_parser("add-package-pypi",
help="Creates a new PyPI package",
parents=[parser_pypi_args_parent, parser_add_or_edit_package_parent])
parser_add_package_pypi.set_defaults(func="action_add_or_edit_package_pypi", create=True)
parser_edit_package_pypi = subparsers.add_parser("edit-package-pypi",
help="Edits an existing PyPI package",
parents=[parser_pypi_args_parent, parser_add_or_edit_package_parent])
parser_edit_package_pypi.set_defaults(func="action_add_or_edit_package_pypi", create=False)
# MockSCM edit/create
parser_add_package_mockscm = subparsers.add_parser("add-package-mockscm",
help="Creates a new Mock-SCM package",
parents=[parser_mockscm_args_parent, parser_add_or_edit_package_parent])
parser_add_package_mockscm.set_defaults(func="action_add_or_edit_package_mockscm", create=True)
parser_edit_package_mockscm = subparsers.add_parser("edit-package-mockscm",
help="Edits an existing Mock-SCM package",
parents=[parser_mockscm_args_parent, parser_add_or_edit_package_parent])
parser_edit_package_mockscm.set_defaults(func="action_add_or_edit_package_mockscm", create=False)
# Rubygems edit/create
parser_add_package_rubygems = subparsers.add_parser("add-package-rubygems",
help="Creates a new RubyGems package",
parents=[parser_rubygems_args_parent, parser_add_or_edit_package_parent])
parser_add_package_rubygems.set_defaults(func="action_add_or_edit_package_rubygems", create=True)
parser_edit_package_rubygems = subparsers.add_parser("edit-package-rubygems",
help="Edits a new RubyGems package",
parents=[parser_rubygems_args_parent, parser_add_or_edit_package_parent])
parser_edit_package_rubygems.set_defaults(func="action_add_or_edit_package_rubygems", create=False)
# package listing
parser_list_packages = subparsers.add_parser("list-packages",
help="Returns list of packages in the given copr")
parser_list_packages.add_argument("copr",
help="The copr repo to list the packages of. Can be just name of project or even in format owner/project.")
parser_list_packages.add_argument("--with-latest-build", action="store_true",
help="Also display data related to the latest build for the package.")
parser_list_packages.add_argument("--with-latest-succeeded-build", action="store_true",
help="Also display data related to the latest succeeded build for the package.")
parser_list_packages.add_argument("--with-all-builds", action="store_true",
help="Also display data related to the builds for the package.")
parser_list_packages.set_defaults(func="action_list_packages")
# package names listing
parser_list_package_names = subparsers.add_parser("list-package-names",
help="Returns list of package names in the given copr")
parser_list_package_names.add_argument("copr",
help="The copr repo to list the packages of. Can be just name of project or even in format owner/project.")
parser_list_package_names.set_defaults(func="action_list_package_names")
# single package fetching
parser_get_package = subparsers.add_parser("get-package",
help="Returns package of the given name in the given copr")
parser_get_package.add_argument("copr",
help="The copr repo to list the packages of. Can be just name of project or even in format owner/project.")
parser_get_package.add_argument("--name",
help="Name of a single package to be displayed",
metavar="PKGNAME", required=True)
parser_get_package.add_argument("--with-latest-build", action="store_true",
help="Also display data related to the latest build for each package.")
parser_get_package.add_argument("--with-latest-succeeded-build", action="store_true",
help="Also display data related to the latest succeeded build for each package.")
parser_get_package.add_argument("--with-all-builds", action="store_true",
help="Also display data related to the builds for each package.")
parser_get_package.set_defaults(func="action_get_package")
# package deletion
parser_delete_package = subparsers.add_parser("delete-package",
help="Deletes the specified package")
parser_delete_package.add_argument("copr",
help="The copr repo to list the packages of. Can be just name of project or even in format owner/project.")
parser_delete_package.add_argument("--name",
help="Name of a package to be deleted",
metavar="PKGNAME", required=True)
parser_delete_package.set_defaults(func="action_delete_package")
# package reseting
parser_reset_package = subparsers.add_parser("reset-package",
help="Resets (clears) default source of the specified package")
parser_reset_package.add_argument("copr",
help="The copr repo to list the packages of. Can be just name of project or even in format owner/project.")
parser_reset_package.add_argument("--name",
help="Name of a package to be reseted",
metavar="PKGNAME", required=True)
parser_reset_package.set_defaults(func="action_reset_package")
# package building
parser_build_package = subparsers.add_parser("build-package", parents=[parser_build_parent],
help="Builds the package from its default source")
parser_build_package.add_argument("--name",
help="Name of a package to be built",
metavar="PKGNAME", required=True)
parser_build_package.set_defaults(func="action_build_package")
# module building
parser_build_module = subparsers.add_parser("build-module", help="Builds a given module in Copr")
parser_build_module.add_argument("copr", help="The copr repo to list the packages of. Can be just name of project or even in format owner/project.", nargs="?")
parser_build_module_mmd_source = parser_build_module.add_mutually_exclusive_group(required=True)
parser_build_module_mmd_source.add_argument("--url", help="SCM with modulemd file in yaml format")
parser_build_module_mmd_source.add_argument("--yaml", help="Path to modulemd file in yaml format")
parser_build_module.set_defaults(func="action_build_module")
return parser
def parse_name(name):
    """Split "owner/project" into (owner, project).

    Returns (None, name) unchanged when there is no owner prefix
    (no slash, or an empty segment before the first slash).
    """
    owner, slash, remainder = name.partition("/")
    if slash and owner:
        return owner, remainder
    return None, name
def parse_chroot_path(path):
    """Split "[owner/]copr/chroot" into (owner, copr, chroot).

    owner is None when the optional prefix is absent; returns None when
    the path does not contain at least "copr/chroot".
    """
    match = re.match(r"(([^/]+)/)?([^/]+)/(.*)", path)
    return match.group(2, 3, 4) if match else None
def enable_debug():
    # Reconfigure the root logger for verbose DEBUG output including the
    # source path/line of each record; `log` is this module's logger
    # (defined elsewhere in the file).
    logging.basicConfig(
        level=logging.DEBUG,
        format='[%(asctime)s] {%(pathname)s:%(lineno)d} %(levelname)s - %(message)s',
        datefmt='%H:%M:%S'
    )
    log.debug("# Debug log enabled #")
def main(argv=sys.argv[1:]):
    """Command-line entry point: parse arguments and dispatch the action.

    Exit codes: 1 interrupt/request error, 2 bad argument value,
    3 generic copr error, 4 build error, 5 unknown server response.
    """
    try:
        # Set up parser for global args
        parser = setup_parser()
        # Parse the commandline
        arg = parser.parse_args(argv)
        if arg.debug:
            enable_debug()
        # Idiom fix: was `if not "func" in arg`.  No subcommand selected:
        # show usage instead of crashing on the missing attribute.
        if "func" not in arg:
            parser.print_help()
            return
        commands = Commands(arg.config)
        # Dispatch to the Commands method named by the selected subparser.
        getattr(commands, arg.func)(arg)
    except KeyboardInterrupt:
        sys.stderr.write("\nInterrupted by user.")
        sys.exit(1)
    except copr_exceptions.CoprBuildException as e:
        sys.stderr.write("\nBuild error: {0}\n".format(e))
        sys.exit(4)
    except copr_exceptions.CoprUnknownResponseException as e:
        sys.stderr.write("\nError: {0}\n".format(e))
        sys.exit(5)
    except copr_exceptions.CoprRequestException as e:
        sys.stderr.write("\nSomething went wrong:")
        sys.stderr.write("\nError: {0}\n".format(e))
        sys.exit(1)
    except argparse.ArgumentTypeError as e:
        sys.stderr.write("\nError: {0}".format(e))
        sys.exit(2)
    except copr_exceptions.CoprException as e:
        # CoprException is the base class, so it must be listed after the
        # more specific copr exception handlers above.
        sys.stderr.write("\nError: {0}\n".format(e))
        sys.exit(3)
if __name__ == "__main__":
    # Script entry point.
    main()
| 45.758396 | 175 | 0.600575 |
ace35e0698b42ad728fc39b43376756574423c0f | 3,071 | py | Python | build/sdk/sdk_common.py | allansrc/fuchsia | a2c235b33fc4305044d496354a08775f30cdcf37 | [
"BSD-2-Clause"
] | 5 | 2022-01-10T20:22:17.000Z | 2022-01-21T20:14:17.000Z | build/sdk/sdk_common.py | allansrc/fuchsia | a2c235b33fc4305044d496354a08775f30cdcf37 | [
"BSD-2-Clause"
] | 2 | 2021-09-19T21:55:09.000Z | 2021-12-19T03:34:53.000Z | build/sdk/sdk_common.py | allansrc/fuchsia | a2c235b33fc4305044d496354a08775f30cdcf37 | [
"BSD-2-Clause"
] | 1 | 2021-08-23T11:33:57.000Z | 2021-08-23T11:33:57.000Z | # Copyright 2018 The Fuchsia Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import collections
import functools
import json
class File(object):
'''Wrapper class for file definitions.'''
def __init__(self, json):
self.source = json['source']
self.destination = json['destination']
def __str__(self):
return '{%s <-- %s}' % (self.destination, self.source)
@functools.total_ordering
class Atom(object):
    '''Wrapper class for atom data, adding convenience methods.

    Equality and hashing are based on the GN label; ordering (via
    total_ordering) is based on the atom id.
    '''

    def __init__(self, json):
        self.json = json
        self.id = json['id']
        self.metadata = json['meta']
        self.label = json['gn-label']
        self.category = json['category']
        self.deps = json['deps']
        self.files = [File(f) for f in json['files']]
        self.type = json['type']

    def __str__(self):
        return str(self.id)

    def __hash__(self):
        return hash(self.label)

    def __eq__(self, other):
        return self.label == other.label

    def __ne__(self, other):
        # BUG FIX: the original evaluated `not __eq__(self, other)`, which
        # looks up a nonexistent global __eq__ and raised NameError the
        # first time two atoms were compared with !=.
        return not self.__eq__(other)

    def __lt__(self, other):
        return self.id < other.id
def gather_dependencies(manifests):
    '''Extracts the set of all required atoms from the given manifests, as well
    as the set of names of all the direct dependencies.
    '''
    direct_deps = set()
    atoms = set()
    for manifest_path in (manifests or []):
        with open(manifest_path, 'r') as manifest_file:
            dep_manifest = json.load(manifest_file)
        direct_deps.update(dep_manifest['ids'])
        atoms.update(Atom(a) for a in dep_manifest['atoms'])
    return (direct_deps, atoms)
def detect_collisions(atoms):
    '''Detects name collisions in a given atom list.

    Prints the GN labels of every group of atoms sharing an id; returns
    True when at least one collision exists.
    '''
    by_id = collections.defaultdict(list)
    for atom in atoms:
        by_id[atom.id].append(atom)
    has_collisions = False
    for atom_id, group in by_id.items():
        if len(group) == 1:
            continue
        has_collisions = True
        print('Targets sharing the SDK id %s:' % atom_id)
        for label in (a.label for a in group):
            print(' - %s' % label)
    return has_collisions
# Publication categories, ordered from most restrictive to most public;
# list position is the category's ordinal.
CATEGORIES = [
    'excluded',
    'experimental',
    'internal',
    'cts',
    'partner',
    'public',
]


def index_for_category(category):
    """Return the ordinal of `category`, raising on unknown values."""
    if category not in CATEGORIES:
        raise Exception('Unknown SDK category "%s"' % category)
    return CATEGORIES.index(category)
def detect_category_violations(category, atoms):
    '''Detects mismatches in publication categories.

    Returns True when any atom's category is more restrictive than the
    requested publication category, printing each offender.
    '''
    category_index = index_for_category(category)
    has_violations = False
    for atom in atoms:
        if index_for_category(atom.category) >= category_index:
            continue
        has_violations = True
        print(
            '%s has publication level %s, incompatible with %s' %
            (atom, atom.category, category))
    return has_violations
| 27.176991 | 79 | 0.624552 |
ace35e9e38a8e2f6aad4149aae164c7c6cbb549d | 1,059 | py | Python | loginpage.py | andrewM202/social-media-app | ae179841c07a15772e967be994f84e62e494edfa | [
"MIT"
] | 1 | 2021-09-12T11:33:01.000Z | 2021-09-12T11:33:01.000Z | loginpage.py | andrewM202/social-media-app | ae179841c07a15772e967be994f84e62e494edfa | [
"MIT"
] | 1 | 2021-04-23T18:35:18.000Z | 2021-04-23T18:35:18.000Z | loginpage.py | andrewM202/social-media-app | ae179841c07a15772e967be994f84e62e494edfa | [
"MIT"
] | null | null | null | from flask import Blueprint, request, Flask, render_template, redirect
from flask_login import current_user, login_user, logout_user
import flask
from models import db, userInformation, login
bp = Blueprint("loginpage", __name__)
@bp.route("/login", methods = ['POST', 'GET'])
def loginroute():
""" Login route for social media app """
if current_user.is_authenticated:
return redirect("/")
if request.method == "POST":
email = request.form['user-email']
user = userInformation.query.filter_by(email = email).first()
if user is not None and user.check_password(request.form['user-password']):
login_user(user)
# Send in the login message. loggedin variable so index.html knows not to display certain nav links if user is logged in
return render_template("index.html")
return render_template("login.html")
@bp.route("/logout", methods = ['POST', 'GET'])
def logout():
""" Route for logging out the user """
logout_user()
return redirect("/")
| 31.147059 | 132 | 0.667611 |
ace35eb8b2489f439e7f818730c249097d64afb4 | 1,499 | py | Python | lib/django-1.5/django/http/utils.py | MiCHiLU/google_appengine_sdk | 3da9f20d7e65e26c4938d2c4054bc4f39cbc5522 | [
"Apache-2.0"
] | 26 | 2015-01-20T08:02:38.000Z | 2020-06-10T04:57:41.000Z | lib/django-1.5/django/http/utils.py | MiCHiLU/google_appengine_sdk | 3da9f20d7e65e26c4938d2c4054bc4f39cbc5522 | [
"Apache-2.0"
] | 4 | 2016-02-28T05:53:54.000Z | 2017-01-03T07:39:50.000Z | lib/django-1.5/django/http/utils.py | MiCHiLU/google_appengine_sdk | 3da9f20d7e65e26c4938d2c4054bc4f39cbc5522 | [
"Apache-2.0"
] | 13 | 2016-02-28T00:14:23.000Z | 2021-05-03T15:47:36.000Z | """
Functions that modify an HTTP request or response in some way.
"""
# This group of functions is run as part of the response handling, after
# everything else, including all response middleware. Think of them as
# "compulsory response middleware". Be careful about what goes here, because
# it's a little fiddly to override this behavior, so they should be truly
# universally applicable.
def fix_location_header(request, response):
    """
    Rewrite any relative Location header into an absolute URI, as required
    by RFC 2616, section 14.30.  Response-constructing code is free to use
    relative paths; this hook expands them.  Responses without a Location
    header, or requests without a resolvable host, pass through untouched.
    """
    if 'Location' not in response:
        return response
    if not request.get_host():
        return response
    response['Location'] = request.build_absolute_uri(response['Location'])
    return response
def conditional_content_removal(request, response):
    """
    Strip the response body for HEAD requests and for 1xx, 204 and 304
    responses, per RFC 2616, section 4.3.  Content-Length is forced to '0'
    only for the status-code case, matching the historical behaviour.
    """
    def _empty_body():
        # Clear whichever body attribute this response type carries.
        if response.streaming:
            response.streaming_content = []
        else:
            response.content = ''

    status = response.status_code
    if 100 <= status < 200 or status in (204, 304):
        _empty_body()
        response['Content-Length'] = '0'
    if request.method == 'HEAD':
        _empty_body()
    return response
| 35.690476 | 79 | 0.689793 |
ace35f13e65cfe6dc1cc698468fbe54d22cb3f66 | 7,562 | py | Python | lib/misc.py | hlzhang109/DDG | eb03042430b16f5d88f0083872aa6f872418d871 | [
"MIT"
] | 9 | 2022-01-23T07:32:31.000Z | 2022-03-28T21:36:36.000Z | lib/misc.py | hlzhang109/DDG | eb03042430b16f5d88f0083872aa6f872418d871 | [
"MIT"
] | 5 | 2022-02-19T06:54:30.000Z | 2022-03-31T14:55:54.000Z | lib/misc.py | hlzhang109/DDG | eb03042430b16f5d88f0083872aa6f872418d871 | [
"MIT"
] | null | null | null | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
"""
Things that don't belong anywhere else
"""
import hashlib
import json
import os
import sys
from shutil import copyfile
import lib.augmentations as augmentations
import numpy as np
import torch
import tqdm
from collections import Counter
from sklearn.metrics import confusion_matrix
def make_weights_for_balanced_classes(dataset):
    """Return one sampling weight per example so classes are equally likely.

    Each example's weight is 1 / (class_count * n_classes), i.e. every
    class contributes equal total probability mass.
    """
    class_per_example = [int(y) for _, y in dataset]
    counts = Counter(class_per_example)
    n_classes = len(counts)
    weight_per_class = {
        label: 1 / (freq * n_classes) for label, freq in counts.items()
    }
    weights = torch.zeros(len(dataset))
    for idx, label in enumerate(class_per_example):
        weights[idx] = weight_per_class[label]
    return weights
def pdb():
    # Restore the real stdout (training code may have redirected it, e.g.
    # via Tee) before dropping into the interactive debugger.
    sys.stdout = sys.__stdout__
    import pdb  # local import; this function deliberately shadows the module name
    print("Launching PDB, enter 'n' to step to parent function.")
    pdb.set_trace()
def seed_hash(*args):
    """
    Derive an integer hash from all args, for use as a random seed.
    """
    digest = hashlib.md5(str(args).encode("utf-8")).hexdigest()
    return int(digest, 16) % (2**31)
def print_separator():
    """Print an 80-character horizontal rule."""
    line = "=" * 80
    print(line)
def print_row(row, colwidth=10, latex=False):
    """Print `row` as fixed-width columns, optionally as a LaTeX table row.

    Floats are formatted to 10 decimals; every cell is padded/truncated to
    `colwidth` characters.
    """
    if latex:
        sep, end_ = " & ", "\\\\"
    else:
        sep, end_ = " ", ""

    def format_val(x):
        if np.issubdtype(type(x), np.floating):
            x = "{:.10f}".format(x)
        return str(x).ljust(colwidth)[:colwidth]

    # NOTE: `end_` is intentionally a second positional argument to print()
    # (so it is preceded by a space), matching the original behaviour.
    print(sep.join(format_val(x) for x in row), end_)
class _SplitDataset(torch.utils.data.Dataset):
"""Used by split_dataset"""
def __init__(self, underlying_dataset, keys):
super(_SplitDataset, self).__init__()
self.underlying_dataset = underlying_dataset
self.keys = keys
def __getitem__(self, key):
return self.underlying_dataset[self.keys[key]]
def __len__(self):
return len(self.keys)
def split_dataset(dataset, n, seed=0):
    """
    Return a pair of datasets corresponding to a random split of the given
    dataset, with n datapoints in the first dataset and the rest in the last,
    using the given random seed
    """
    assert n <= len(dataset)
    keys = list(range(len(dataset)))
    np.random.RandomState(seed).shuffle(keys)
    return _SplitDataset(dataset, keys[:n]), _SplitDataset(dataset, keys[n:])
def random_pairs_of_minibatches(minibatches):
    """Pair up a random permutation of the minibatches with its rotation.

    Each pair is truncated to the shorter batch so both halves have the
    same number of examples.
    """
    perm = torch.randperm(len(minibatches)).tolist()
    count = len(minibatches)
    pairs = []
    for i in range(count):
        j = (i + 1) % count  # wrap the last element around to the first
        first = minibatches[perm[i]]
        second = minibatches[perm[j]]
        min_n = min(len(first[0]), len(second[0]))
        pairs.append(((first[0][:min_n], first[1][:min_n]),
                      (second[0][:min_n], second[1][:min_n])))
    return pairs
def sample_tuple_of_minibatches(minibatches, device):
    # NOTE(review): this function appears to be broken/dead code and is left
    # byte-identical pending clarification of intent:
    #   * the re-sample line inside the while loop unpacks three values into
    #     two names (ValueError at runtime),
    #   * tuples.append(...) is called with two arguments (TypeError),
    #   * minibatches is indexed with a numpy array (pos_ind[0]).
    # Build one domain label per example, matching each minibatch's size.
    disc_labels = torch.cat([
        torch.full((x.shape[0], ), i, dtype=torch.int64, device=device)
        for i, (x, y) in enumerate(minibatches)
    ])
    perm = torch.randperm(len(minibatches)).tolist()
    tuples = []
    labels = np.array([minibatches[i][1] for i in range(len(minibatches))])
    for i in range(len(minibatches)):
        x, y, d = minibatches[i][0], minibatches[i][1], disc_labels[i]
        x_n, y_n, d_n = minibatches[perm[i]][0], minibatches[perm[i]][1], disc_labels[perm[i]]
        # Walk the permutation until a different class label is found.
        while y_n == y:
            i = perm[i]
            # BUG: three values unpacked into two names below.
            x_n, y_n = minibatches[perm[i]][0], minibatches[perm[i]][1], disc_labels[perm[i]]
        pos_ind = np.argwhere(labels == y); pos_n_ind = np.where(labels == y_n)
        x_p, x_np = minibatches[pos_ind[0]][0], minibatches[pos_n_ind[0]][0]
        # BUG: list.append takes a single argument.
        tuples.append((x, y, d, x_p), (x_n, y_n, d_n, x_np))
    return tuples
def plot_confusion(matrix):
    # TODO: unimplemented placeholder for confusion-matrix plotting.
    pass
def accuracy(network, loader, weights, device, args=None, step=None, is_ddg=False):
    """Compute the (optionally example-weighted) accuracy of `network`.

    The network is put into eval mode for the duration and restored to
    train mode afterwards.  `loader` may yield (x, y) or (x, y, extra)
    batches; only the first two elements are used, which unifies the two
    previously-duplicated is_ddg branches (`is_ddg`, `args` and `step` are
    retained for interface compatibility).  `weights` is either None
    (uniform) or a 1-D tensor consumed in loader order.  Assumes the
    loader is non-empty (otherwise this divides by zero, as before).
    """
    correct = 0
    total = 0
    weights_offset = 0
    network.eval()
    with torch.no_grad():
        for batch in loader:
            x = batch[0].to(device)
            y = batch[1].to(device)
            p = network.predict(x)
            if weights is None:
                batch_weights = torch.ones(len(x))
            else:
                batch_weights = weights[weights_offset: weights_offset + len(x)]
                weights_offset += len(x)
            batch_weights = batch_weights.to(device)
            if p.size(1) == 1:
                # Single-logit binary head: threshold at zero.
                correct += (p.gt(0).eq(y).float() * batch_weights.view(-1, 1)).sum().item()
            else:
                correct += (p.argmax(1).eq(y).float() * batch_weights).sum().item()
            total += batch_weights.sum().item()
    network.train()
    return correct / total
class Tee:
    """Duplicate writes to both the current stdout and a log file."""

    def __init__(self, fname, mode="a"):
        self.stdout = sys.stdout
        self.file = open(fname, mode)

    def write(self, message):
        for stream in (self.stdout, self.file):
            stream.write(message)
        self.flush()

    def flush(self):
        for stream in (self.stdout, self.file):
            stream.flush()
# Configure the augmentation ops for 224x224 inputs (ImageNet-style resolution).
augmentations.IMAGE_SIZE = 224
def aug(image, preprocess):
    """Perform AugMix augmentations and compute mixture.
    Args:
        image: PIL.Image input image
        preprocess: Preprocessing function which should return a torch tensor.
    Returns:
        mixed: Augmented and mixed image.
    """
    aug_list = augmentations.augmentations
    # AugMix hyperparameters: 3 parallel chains, random depth (1-3), severity 1.
    mixture_width = 3
    mixture_depth = -1
    aug_severity = 1
    # Convex mixing weights for the chains and the clean/augmented blend.
    ws = np.float32(
        np.random.dirichlet([1] * mixture_width))
    m = np.float32(np.random.beta(1, 1))
    mix = torch.zeros_like(preprocess(image))
    for i in range(mixture_width):
        image_aug = image.copy()
        # mixture_depth <= 0 means sample a random chain depth in [1, 3].
        depth = mixture_depth if mixture_depth > 0 else np.random.randint(
            1, 4)
        for _ in range(depth):
            op = np.random.choice(aug_list)
            image_aug = op(image_aug, aug_severity)
        # Preprocessing commutes since all coefficients are convex
        mix += ws[i] * preprocess(image_aug)
    mixed = (1 - m) * preprocess(image) + m * mix
    return mixed
def Augmix(x, preprocess, no_jsd):
    # With the JSD consistency loss disabled, a single augmented view suffices;
    # otherwise return (clean, aug1, aug2) for the Jensen-Shannon term.
    if no_jsd:
        return aug(x, preprocess)
    else:
        return preprocess(x), aug(x, preprocess), aug(x, preprocess)
ace360268540a63bfe5f579a8f0362c3b3a5d41a | 5,113 | py | Python | govtrack/migrations/0001_initial.py | Joeyrsp/climate-emergency-declarations | 115981c478b1b8a36419893026d695c87a884174 | [
"MIT"
] | null | null | null | govtrack/migrations/0001_initial.py | Joeyrsp/climate-emergency-declarations | 115981c478b1b8a36419893026d695c87a884174 | [
"MIT"
] | 6 | 2019-09-02T12:27:21.000Z | 2020-05-10T00:31:37.000Z | govtrack/migrations/0001_initial.py | Joeyrsp/climate-emergency-declarations | 115981c478b1b8a36419893026d695c87a884174 | [
"MIT"
] | 4 | 2019-08-25T07:16:45.000Z | 2020-03-10T10:14:12.000Z | # Generated by Django 2.2.3 on 2019-07-16 00:36
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial auto-generated schema for the govtrack app (Django 2.2.3).

    Creates Country, Node, NodeType and the Government child table
    (multi-table inheritance from Node via node_ptr), then wires up the
    Node foreign keys.  Do not hand-edit an applied migration; create a
    new migration instead.
    """

    initial = True

    dependencies = []

    operations = [
        # Country: top-level geographic entity.
        migrations.CreateModel(
            name="Country",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("name", models.CharField(max_length=36)),
                ("region", models.CharField(max_length=36)),
                ("population", models.PositiveIntegerField(default=0)),
                ("country_code", models.CharField(max_length=3)),
            ],
        ),
        # Node: a governing/administrative area within a country.
        migrations.CreateModel(
            name="Node",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("name", models.CharField(max_length=64)),
                ("area", models.CharField(blank=True, max_length=36, null=True)),
                (
                    "population",
                    models.PositiveIntegerField(blank=True, default=0, null=True),
                ),
                ("reference_links", models.TextField(blank=True, null=True)),
                ("comment_public", models.TextField(blank=True, null=True)),
                ("comment_private", models.TextField(blank=True, null=True)),
                ("is_governing", models.BooleanField(default=True)),
                ("sort_name", models.CharField(blank=True, max_length=64, null=True)),
                ("count_population", models.SmallIntegerField(default=0)),
                (
                    "country",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        to="govtrack.Country",
                    ),
                ),
            ],
        ),
        # Government: multi-table-inheritance child of Node carrying the
        # climate-emergency declaration status.
        migrations.CreateModel(
            name="Government",
            fields=[
                (
                    "node_ptr",
                    models.OneToOneField(
                        auto_created=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        parent_link=True,
                        primary_key=True,
                        serialize=False,
                        to="govtrack.Node",
                    ),
                ),
                (
                    "status",
                    models.CharField(
                        choices=[
                            ("D", "Declared"),
                            ("N", "Non-declared"),
                            ("P", "In Progress"),
                        ],
                        default="N",
                        max_length=1,
                    ),
                ),
                (
                    "date_declared",
                    models.DateField(
                        blank=True, null=True, verbose_name="date declared"
                    ),
                ),
                ("declaration_links", models.TextField(blank=True, null=True)),
            ],
            bases=("govtrack.node",),
        ),
        # NodeType: per-country hierarchy of administrative levels.
        migrations.CreateModel(
            name="NodeType",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("name", models.CharField(max_length=64)),
                ("level", models.PositiveSmallIntegerField()),
                ("count_population", models.BooleanField(default=True)),
                (
                    "country",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        to="govtrack.Country",
                    ),
                ),
                (
                    "parent",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        to="govtrack.NodeType",
                    ),
                ),
            ],
        ),
        # Self/typed links on Node, added after NodeType exists.
        migrations.AddField(
            model_name="node",
            name="nodetype",
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE, to="govtrack.NodeType"
            ),
        ),
        migrations.AddField(
            model_name="node",
            name="parent",
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE, to="govtrack.Node"
            ),
        ),
    ]
| 34.782313 | 86 | 0.399961 |
ace361a6b809aaf1877410c734198db3e1e4b11f | 5,929 | py | Python | layerindex/tools/import_project.py | WindRiver-OpenSourceLabs/layerindex-web | a7820077b0a2c7ad36c934e7c1e11f19fb336081 | [
"MIT"
] | null | null | null | layerindex/tools/import_project.py | WindRiver-OpenSourceLabs/layerindex-web | a7820077b0a2c7ad36c934e7c1e11f19fb336081 | [
"MIT"
] | null | null | null | layerindex/tools/import_project.py | WindRiver-OpenSourceLabs/layerindex-web | a7820077b0a2c7ad36c934e7c1e11f19fb336081 | [
"MIT"
] | 1 | 2017-09-27T17:09:29.000Z | 2017-09-27T17:09:29.000Z | #!/usr/bin/python3
# Import a project into the database.
# This will scan through the directories in a project and find any layer and
# call import_layer.
#
#
# Copyright (C) 2016 Wind River Systems
# Author: Liam R. Howlett <liam.howlett@windriver.com>
#
# Licensed under the MIT license, see COPYING.MIT for details
from urllib.parse import urlparse
import logging
import optparse
import os, fnmatch
import sys
sys.path.insert(0, os.path.realpath(os.path.join(os.path.dirname(__file__), '..')))
import import_layer
import update
import utils
class ImportProject:
    """Scan a project tree for layers and import each into the layer index.

    Walks the given directory looking for ``conf/layer.conf`` markers,
    imports the core layer first (so other layers can depend on it) and
    runs update.py, then imports the remaining layers via import_layer.
    """

    logger = utils.logger_create('ProjectIndexImport')

    def find_layers(self, path):
        """Return every ``.../conf`` directory under `path` holding a layer.conf."""
        self.logger.debug("finding layer..")
        result = []
        for root, _, files in os.walk(path, followlinks=True):
            for _ in fnmatch.filter(files, 'layer.conf'):
                # Only a conf/layer.conf marks a layer; ignore stray copies.
                if not root.endswith('conf'):
                    continue
                self.logger.debug("Found %s" % root)
                result.append(root)
        return result

    def main(self):
        """Entry point: parse options, find layers, import core then the rest."""
        parser = optparse.OptionParser(
            usage="""
    %prog [options] [directory]""")
        parser.add_option("-d", "--debug",
                help="Enable debug output",
                action="store_const", const=logging.DEBUG,
                dest="loglevel", default=logging.INFO)
        parser.add_option("-n", "--dry-run",
                help="Don't write any data back to the database",
                action="store_true", dest="dryrun")
        self.options, args = parser.parse_args(sys.argv)
        self.logger.setLevel(self.options.loglevel)
        if len(args) == 1:
            print("Please provide a directory.")
            sys.exit(1)
        install_dir = args[1]
        lc_list = self.find_layers(install_dir)
        core_layer = self.add_core(lc_list)
        if core_layer:
            lc_list.remove(core_layer)
        for layer in lc_list:
            self.add_layer(layer)

    def add_layer(self, layer):
        """Resolve a layer's git remote/branch and invoke import_layer on it.

        Returns 0 on success, 1 on failure (and logs the reason).
        """
        self.logger.debug("Processing layer %s" % layer)
        try:
            git_dir = utils.runcmd("git rev-parse --show-toplevel", destdir=layer, logger=self.logger)
        except Exception as e:
            self.logger.error("Cannot get root dir for layer %s: %s - Skipping." % (layer, str(e)))
            return 1
        layer_name = layer.split('/')[-2]
        layer_subdir = None
        # If the layer lives below the git top-level, import it as a subdir.
        if os.path.basename(git_dir) != layer_name:
            layer_subdir = layer_name
        layer_name = self.get_layer_name(layer)
        # BUG FIX: git_url and actual_branch could be referenced before
        # assignment (UnboundLocalError) when the first "git config" call
        # failed; initialise them up front and skip failed iterations.
        git_url = None
        actual_branch = None
        # Follow up to three levels of local mirror indirection.
        for _ in range(3):
            remote = utils.runcmd("git remote", destdir=git_dir, logger=self.logger)
            if not remote:
                self.logger.warning("Cannot find remote git for %s" % layer_name)
                return 1
            try:
                git_url = utils.runcmd("git config --get remote.%s.url" % remote, destdir=git_dir, logger=self.logger)
            except Exception as e:
                self.logger.info("Cannot get remote.%s.url for git dir %s: %s" % (remote, git_dir, str(e)))
                continue
            if not os.path.exists(git_url):
                # Assume this is remote.
                self.logger.debug("Found git url = %s" % git_url)
                remote_branch = utils.runcmd( "git rev-parse --abbrev-ref --symbolic-full-name @\{u\}", destdir=git_dir, logger=self.logger)
                if remote_branch.startswith(remote):
                    actual_branch = remote_branch[len(remote) + 1:]
                    break
            self.logger.debug("Iterating to find git url into %s" % git_dir)
            git_dir = git_url
        if not git_url:
            self.logger.warning("Cannot find layer %s git url" % layer)
            return 1
        # Build the import_layer.py argv and dispatch to its main().
        cmd = ['import_layer.py']
        if self.options.loglevel == logging.DEBUG:
            cmd.append("-d")
        if layer_subdir:
            cmd.append("-s")
            cmd.append(layer_subdir)
        if actual_branch:
            cmd.append("-a")
            cmd.append(actual_branch)
        cmd.append(git_url)
        cmd.append(layer_name)
        prefix = "Calling"
        if self.options.dryrun:
            prefix = "Would Call"
        self.logger.info("%s import_layer.main with %s for dir %s" % (prefix, str(cmd), layer))
        sys.argv = cmd
        if not self.options.dryrun:
            try:
                import_layer.main()
            except SystemExit as see:
                return see.code
        return 0

    def get_layer_name(self, layerconfdir):
        """Return BBLAYERS_LAYERINDEX_NAME from layer.conf, else the dir name."""
        layer_name = layerconfdir.split('/')[-2]
        self.logger.debug('getting layer %s' % layerconfdir)
        layer_conf = os.path.join(layerconfdir, 'layer.conf')
        if os.path.isfile(layer_conf):
            with open(layer_conf) as conf:
                for line in conf:
                    if 'BBLAYERS_LAYERINDEX_NAME' in line:
                        layer_name = line.split('=')[1].strip(' "\n')
        return layer_name

    def add_core(self, layers):
        """Import the core layer (settings.CORE_LAYER_NAME) first, if present.

        Returns the core layer's path so the caller can drop it from the
        remaining list, or None when no core layer was found.
        """
        utils.setup_django()
        core = None
        import settings
        for layer in layers:
            layer_name = self.get_layer_name(layer)
            if layer_name == settings.CORE_LAYER_NAME:
                if self.add_layer(layer):
                    self.logger.info('Failed to add core layer\n')
                core = layer
                self.update()
                break
        return core

    def update(self):
        """Run ../update.py to refresh the index; returns 1 on SystemExit."""
        update_py = os.path.realpath(os.path.join(os.path.dirname(__file__), '../update.py'))
        cmd = [update_py]
        if self.options.loglevel == logging.DEBUG:
            cmd.append("-d")
        sys.argv = cmd
        self.logger.info("update")
        if not self.options.dryrun:
            try:
                update.main()
            except SystemExit:
                return 1
        return 0
if __name__ == "__main__":
    # Script entry point.
    x = ImportProject()
    x.main()
| 31.041885 | 140 | 0.567212 |
ace361d73e583c9602d416465fb29c2ff633b273 | 2,242 | py | Python | pydens/classifiers/xgboost.py | zkurtz/pydens | 0a38020daa745621e47602b4f2583b76d60b6591 | [
"MIT"
] | 6 | 2019-05-06T15:05:20.000Z | 2021-06-29T07:20:35.000Z | pydens/classifiers/xgboost.py | zkurtz/pydens | 0a38020daa745621e47602b4f2583b76d60b6591 | [
"MIT"
] | 1 | 2019-04-23T18:39:28.000Z | 2019-05-05T14:38:58.000Z | pydens/classifiers/xgboost.py | zkurtz/pydens | 0a38020daa745621e47602b4f2583b76d60b6591 | [
"MIT"
] | 3 | 2019-06-23T22:05:05.000Z | 2022-02-01T13:34:49.000Z | import copy
import os
import pandas as pd
import pickle
from psutil import cpu_count
import tempfile
from time import time
import warnings
import xgboost as xgb
from .base import AbstractLearner
class Xgbm(AbstractLearner):
    """Boosted-tree binary classifier wrapper.

    NOTE(review): despite the module's xgboost import, several methods
    reference LightGBM-style APIs (``lgb.train``, ``feature_importance``)
    and undefined helpers -- this class appears to be an unfinished port
    from a LightGBM wrapper; see per-method notes below.
    """

    def __init__(self, params=None, verbose=False):
        super().__init__(params, verbose)
        # NOTE(review): default_params() defines 'nrounds', not
        # 'num_boost_round'; this pop raises KeyError unless the caller
        # supplies 'num_boost_round' explicitly -- confirm intended key.
        self.nround = self.params.pop('num_boost_round')

    def default_params(self):
        # Default hyperparameters, presumably merged into self.params by
        # the AbstractLearner base class (not visible here -- verify).
        return {
            'task': 'train',
            'boosting_type': 'gbdt',
            'objective': 'binary',
            'learning_rate': 0.1,
            'max_depth': 6,
            'verbose': -1,
            'nrounds': 60,
            'nthreads': cpu_count(logical=False)
        }

    def as_xgb_data(self, data):
        # NOTE(review): xgboost exposes xgb.DMatrix, not xgb.Dataset, and
        # _parse_categoricals is not defined in this class -- verify.
        self._parse_categoricals()
        return xgb.Dataset(
            data.X,
            data.y
        )

    def train(self, data):
        '''
        :param data: a pydens.data.Data instance
        '''
        t0 = time()
        # NOTE(review): as_lgb_data is not defined on this class and `lgb`
        # is not imported in this module; this method cannot run as written.
        ld = self.as_lgb_data(data)
        self.bst = lgb.train(
            params=copy.deepcopy(self.params),
            train_set=ld,
            num_boost_round=self.nround,
            verbose_eval=False
        )
        tdiff = str(round(time() - t0))
        self.vp('Xgboost training took ' + tdiff + ' seconds')

    def predict(self, X):
        # Raw booster predictions (probabilities for a binary objective).
        return self.bst.predict(X)

    def freeze(self):
        ''' Attach self.bst as a binary attribute
        This is necessary to be able to preserve by-reference internals during a
        serialization-unserialization cycle
        '''
        assert self.bst is not None
        _, filename = tempfile.mkstemp()
        self.bst.save_model(filename)
        with open(filename, 'rb') as file:
            self.bst_binary = file.read()
        os.remove(filename)

    def thaw(self):
        ''' Unserialize self.bst_binary '''
        # NOTE(review): freeze() stores save_model() output but thaw() uses
        # pickle.loads -- these formats do not match; verify the round-trip.
        assert hasattr(self, 'bst_binary')
        assert self.bst_binary is not None
        self.bst = pickle.loads(self.bst_binary)

    def importance(self):
        # NOTE(review): self.features is never set in this class (base class
        # presumably provides it); feature_importance(importance_type=...) is
        # the LightGBM Booster API -- verify against the actual booster type.
        return pd.DataFrame({
            'feature': self.features,
            'gain': self.bst.feature_importance(importance_type='gain')
        }).sort_values('gain', ascending=False
        ).reset_index(drop=True)
| 27.341463 | 80 | 0.58207 |
ace36289a44f1d18d647cda481cfb7fe43a6d330 | 1,826 | py | Python | utils/gcm/test.py | floyd-fuh/mitra | bd741fdbac24f9c1e7a8b7b7c07a72188d22c374 | [
"MIT"
] | 1 | 2020-11-13T08:20:26.000Z | 2020-11-13T08:20:26.000Z | utils/gcm/test.py | floyd-fuh/mitra | bd741fdbac24f9c1e7a8b7b7c07a72188d22c374 | [
"MIT"
] | null | null | null | utils/gcm/test.py | floyd-fuh/mitra | bd741fdbac24f9c1e7a8b7b7c07a72188d22c374 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import sys
import binascii
import struct
from Crypto.Util.number import long_to_bytes,bytes_to_long
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
from cryptography.hazmat.backends import default_backend
def gcm_decrypt(_key,_nonce,_ctxt,_tag,_ad):
decryptor = Cipher(
algorithms.AES(_key),
modes.GCM(_nonce,_tag),
backend=default_backend()
).decryptor()
decryptor.authenticate_additional_data(_ad)
pt = decryptor.update(_ctxt) + decryptor.finalize()
return pt
if __name__=='__main__':
fname = sys.argv[1]
with open(fname, "rb") as f:
lines = f.readlines()
for line in lines:
line = line.strip()
l = line.split(b": ")
vars()[l[0].decode("utf-8")] = l[1].strip().decode("utf-8")
for v in ["key1", "key2", "adata", "nonce", "ciphertext", "tag"]:
vars()[v] = binascii.unhexlify(vars()[v])
assert not key1 == key2
plaintxt1 = gcm_decrypt(key1, nonce, ciphertext, tag, adata)
plaintxt2 = gcm_decrypt(key2, nonce, ciphertext, tag, adata)
assert not plaintxt1 == plaintxt2
success = False
try:
invalidkey = b'\x07'*16
plaintxt1 = gcm_decrypt(invalidkey, nonce, ciphertext, tag, adata)
except Exception:
success = True
if not success:
print("Decryption with other key failed didn't fail as expected")
exts = exts.split(" ")[-2:]
with open("output1.%s" % exts[0], "wb") as salfile:
salfile.write(plaintxt1)
with open("output2.%s" % exts[1], "wb") as pixfile:
pixfile.write(plaintxt2)
print("key1:", key1.rstrip(b"\0"))
print("key1:", key2.rstrip(b"\0"))
print("ad:", adata.rstrip(b"\0"))
print("nonce:", bytes_to_long(nonce))
print("tag:", binascii.hexlify(tag))
print("Success!")
print()
print("plaintext1:", binascii.hexlify(plaintxt1[:16]),"...")
print("plaintext2:", binascii.hexlify(plaintxt2[:16]),"...")
| 26.852941 | 76 | 0.694962 |
ace3633496240eda8e97668f702fa0d0e3522c74 | 10,223 | py | Python | docs/source/conf.py | jackhamburger/llvmlite | 2871ce0a4b09075c8890ccc2c4ca452dbeea2a98 | [
"BSD-2-Clause"
] | null | null | null | docs/source/conf.py | jackhamburger/llvmlite | 2871ce0a4b09075c8890ccc2c4ca452dbeea2a98 | [
"BSD-2-Clause"
] | null | null | null | docs/source/conf.py | jackhamburger/llvmlite | 2871ce0a4b09075c8890ccc2c4ca452dbeea2a98 | [
"BSD-2-Clause"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# llvmlite documentation build configuration file, created by
# sphinx-quickstart on Wed Apr 29 14:18:42 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shlex
from datetime import datetime
# READTHEDOCS is set by the Read the Docs build environment; settings
# further down in this file branch on it.
on_rtd = os.environ.get('READTHEDOCS') == 'True'
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# Make the in-tree llvmlite package (two levels up) importable so the
# version lookup below and autodoc can import it.
sys.path.append(os.path.abspath(os.path.join('..', '..')))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.intersphinx',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'llvmlite'
copyright = '2015, Continuum Analytics'
author = 'Continuum Analytics'
if on_rtd:
    # RTD replaces the last update date. So we need to hack it in here.
    # NOTE(review): datetime.utcnow() returns a naive UTC timestamp; fine
    # for a display string, though datetime.now(timezone.utc) is the
    # modern spelling.
    copyright += '. Last updated on {}'.format(datetime.utcnow().strftime('%b %d, %Y'))
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
import llvmlite
# The short X.Y version (strip any "-<suffix>" from the full version).
version = llvmlite.__version__.split('-', 1)[0]
# The full version, including alpha/beta/rc tags.
release = llvmlite.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# on_rtd is whether we are on readthedocs.org
# NOTE(review): on_rtd was already computed near the top of this file with
# a slightly different expression; this recomputation is redundant (both
# evaluate identically in practice) and could be dropped.
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd:
    # only import and set the theme if we're building docs locally
    # otherwise, readthedocs.org uses their theme by default, so no need to specify it
    import sphinx_rtd_theme
    html_theme = 'sphinx_rtd_theme'
    html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'h', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'r', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'llvmlitedoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'llvmlite.tex', 'llvmlite Documentation',
'Continuum Analytics', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'llvmlite', 'llvmlite Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'llvmlite', 'llvmlite Documentation',
author, 'llvmlite', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {
'python': ('https://docs.python.org/3', None),
'llvm': ('http://llvm.org/releases/9.0.0/docs', None),
}
| 32.351266 | 87 | 0.717011 |
ace3634b867f7b189013ed6648ee0fe52454acf4 | 4,667 | py | Python | tfx/components/bulk_inferrer/component.py | htahir1/tfx | 6528292918bf746fb5e143a8e1b276a29bbdfe8f | [
"Apache-2.0"
] | null | null | null | tfx/components/bulk_inferrer/component.py | htahir1/tfx | 6528292918bf746fb5e143a8e1b276a29bbdfe8f | [
"Apache-2.0"
] | null | null | null | tfx/components/bulk_inferrer/component.py | htahir1/tfx | 6528292918bf746fb5e143a8e1b276a29bbdfe8f | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""TFX BulkInferrer component definition."""
from typing import Any, Dict, Optional, Text, Union
from tfx import types
from tfx.components.bulk_inferrer import executor
from tfx.dsl.components.base import base_beam_component
from tfx.dsl.components.base import executor_spec
from tfx.proto import bulk_inferrer_pb2
from tfx.types import standard_artifacts
from tfx.types.standard_component_specs import BulkInferrerSpec
class BulkInferrer(base_beam_component.BaseBeamComponent):
  """Batch-inference TFX component for unlabelled examples.

  BulkInferrer takes an examples channel plus a (possibly blessed) model and
  runs inference over the selected data, writing PredictionLog protos to an
  external location, or re-materialized examples when an
  `output_example_spec` is provided.  Inference is performed on the
  validated model.

  ## Example
  ```
  # Uses BulkInferrer to inference on examples.
  bulk_inferrer = BulkInferrer(
      examples=example_gen.outputs['examples'],
      model=trainer.outputs['model'])
  ```

  Component `outputs` contains:
   - `inference_result`: Channel of type `standard_artifacts.InferenceResult`
                         holding the inference results (absent when
                         `output_example_spec` is given).
   - `output_examples`: Channel of type `standard_artifacts.Examples` holding
                        the output examples (present only when
                        `output_example_spec` is given).

  See [the BulkInferrer
  guide](https://www.tensorflow.org/tfx/guide/bulkinferrer) for more details.
  """

  SPEC_CLASS = BulkInferrerSpec
  EXECUTOR_SPEC = executor_spec.BeamExecutorSpec(executor.Executor)

  def __init__(
      self,
      examples: types.Channel,
      model: Optional[types.Channel] = None,
      model_blessing: Optional[types.Channel] = None,
      data_spec: Optional[Union[bulk_inferrer_pb2.DataSpec, Dict[Text,
                                                                 Any]]] = None,
      model_spec: Optional[Union[bulk_inferrer_pb2.ModelSpec,
                                 Dict[Text, Any]]] = None,
      output_example_spec: Optional[Union[bulk_inferrer_pb2.OutputExampleSpec,
                                          Dict[Text, Any]]] = None):
    """Construct an BulkInferrer component.

    Args:
      examples: A Channel of type `standard_artifacts.Examples`, usually
        produced by an ExampleGen component. _required_
      model: A Channel of type `standard_artifacts.Model`, usually produced by
        a Trainer component.
      model_blessing: A Channel of type `standard_artifacts.ModelBlessing`,
        usually produced by a ModelValidator component.
      data_spec: bulk_inferrer_pb2.DataSpec describing data selection; when a
        field is a RuntimeParameter, pass a dict with the DataSpec proto's
        field names instead.
      model_spec: bulk_inferrer_pb2.ModelSpec describing the model
        specification; when a field is a RuntimeParameter, pass a dict with
        the ModelSpec proto's field names instead.
      output_example_spec: bulk_inferrer_pb2.OutputExampleSpec; supply it to
        make BulkInferrer emit examples rather than inference results.  When
        a field is a RuntimeParameter, pass a dict with the OutputExampleSpec
        proto's field names instead.
    """
    # Exactly one of the two output channels is populated: examples when an
    # output_example_spec is given, otherwise raw inference results.
    output_examples = None
    inference_result = None
    if output_example_spec:
      output_examples = types.Channel(type=standard_artifacts.Examples)
    else:
      inference_result = types.Channel(type=standard_artifacts.InferenceResult)
    super(BulkInferrer, self).__init__(
        spec=BulkInferrerSpec(
            examples=examples,
            model=model,
            model_blessing=model_blessing,
            # Unset specs default to empty protos so the executor always
            # receives a well-formed message.
            data_spec=data_spec or bulk_inferrer_pb2.DataSpec(),
            model_spec=model_spec or bulk_inferrer_pb2.ModelSpec(),
            output_example_spec=output_example_spec,
            inference_result=inference_result,
            output_examples=output_examples))
| 43.212963 | 80 | 0.717592 |
ace3637d16ca685ad0e4649094357bb81e79c01f | 1,249 | py | Python | h2o-py/tests/testdir_algos/gbm/pyunit_DEPRECATED_weights_gammaGBM.py | huamichaelchen/h2o-3 | 2b52f2240652a1c73c1708762248c0773d0c073e | [
"Apache-2.0"
] | null | null | null | h2o-py/tests/testdir_algos/gbm/pyunit_DEPRECATED_weights_gammaGBM.py | huamichaelchen/h2o-3 | 2b52f2240652a1c73c1708762248c0773d0c073e | [
"Apache-2.0"
] | null | null | null | h2o-py/tests/testdir_algos/gbm/pyunit_DEPRECATED_weights_gammaGBM.py | huamichaelchen/h2o-3 | 2b52f2240652a1c73c1708762248c0773d0c073e | [
"Apache-2.0"
] | null | null | null | import sys
sys.path.insert(1,"../../../")
import h2o
from tests import pyunit_utils
def weights_gamma():
    """Weighted gamma-distribution GBM regression check.

    Trains a gamma GBM on the moppe insurance data using ``antskad`` as the
    observation-weight column, then compares the model's init_f and the
    prediction min/max/mean against reference values (from the R ``gbm``
    run shown in the comments below) within a relative tolerance.
    """
    htable = h2o.upload_file(pyunit_utils.locate("smalldata/gbm_test/moppe.csv"))
    htable["premiekl"] = htable["premiekl"].asfactor()
    htable["moptva"] = htable["moptva"].asfactor()
    htable["zon"] = htable["zon"]
    # Reference R code that produced the expected numbers asserted below:
    #gg = gbm(formula = medskad ~ premiekl + moptva + zon,data = table.1.2,distribution = "gamma", weights = table.1.2$antskad ,
    #         n.trees = 20,interaction.depth = 1,n.minobsinnode = 1,shrinkage = 1,bag.fraction = 1,train.fraction = 1)
    #pr = predict(gg,newdata = table.1.2,type = "response")
    #htable= as.h2o(table.1.2,destination_frame = "htable")
    hh = h2o.gbm(x=htable[0:3],y=htable["medskad"],training_frame=htable,distribution="gamma",weights_column="antskad",
                 ntrees=20,max_depth=1,min_rows=1,learn_rate=1)
    ph = hh.predict(htable)
    # Relative-tolerance comparisons against the R reference values.
    assert abs(8.804447-hh._model_json['output']['init_f']) < 1e-6*8.804447
    assert abs(3751.01-ph[0].min()) < 1e-4*3751.01
    assert abs(15298.87-ph[0].max()) < 1e-4*15298.87
    assert abs(8121.98-ph[0].mean()[0]) < 1e-4*8121.98
if __name__ == "__main__":
pyunit_utils.standalone_test(weights_gamma)
else:
weights_gamma()
| 36.735294 | 128 | 0.661329 |
ace363943b6d4b99212338317714efcd29d66866 | 2,223 | py | Python | docsrc/source/conf.py | codenamenadja/python-ddd | 8f82c493715e31a9aa6f6ab68aebc942b9dcd8aa | [
"MIT"
] | null | null | null | docsrc/source/conf.py | codenamenadja/python-ddd | 8f82c493715e31a9aa6f6ab68aebc942b9dcd8aa | [
"MIT"
] | null | null | null | docsrc/source/conf.py | codenamenadja/python-ddd | 8f82c493715e31a9aa6f6ab68aebc942b9dcd8aa | [
"MIT"
] | null | null | null | # Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
# Make the project package (two levels up) importable for autodoc.
sys.path.insert(0, os.path.abspath('../..'))
# NOTE(review): raising the recursion limit is a workaround, presumably for
# deep autodoc/import chains -- confirm it is still needed.
sys.setrecursionlimit(1500)
# -- Project information -----------------------------------------------------
project = 'documentation to DDD'
copyright = '2019, junehan'
author = 'junehan'
# The full version, including alpha/beta/rc tags
release = '0.1a'
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
# Sphinx extensions: sphinxmark (page watermark, switched on just below)
# plus the stock autodoc/intersphinx/viewcode helpers.
extensions = [
    'sphinxmark',
    'sphinx.ext.autosummary',
    'sphinx.ext.doctest',
    'sphinx.ext.autodoc',
    'sphinx.ext.intersphinx',
    'sphinx.ext.ifconfig',
    'sphinx.ext.viewcode',
    'sphinx.ext.githubpages',
]
sphinxmark_enable = True
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
| 34.2 | 79 | 0.65632 |
ace3646436913177ede4e5c8591f14ecc9f7987f | 1,323 | py | Python | blog/views.py | jesusmgg/jesusmg.net | 2a50eb7575eab5983311ef8fddb0ddeecb01b572 | [
"Unlicense"
] | null | null | null | blog/views.py | jesusmgg/jesusmg.net | 2a50eb7575eab5983311ef8fddb0ddeecb01b572 | [
"Unlicense"
] | null | null | null | blog/views.py | jesusmgg/jesusmg.net | 2a50eb7575eab5983311ef8fddb0ddeecb01b572 | [
"Unlicense"
] | null | null | null | from django.core.paginator import Paginator, PageNotAnInteger, EmptyPage
from django.shortcuts import render, redirect
from blog.models import Post
from common.views import get_redirected
def blog_view(request):
    """Render the blog home page.

    Shows published posts, newest first, optionally narrowed by the
    ``filter_search`` (title substring) and ``filter_category``
    (category-name substring) query parameters, paginated 10 per page.
    """
    posts = Post.objects.filter(published=True).order_by('-date')

    filter_search = request.GET.get('filter_search')
    filter_category = request.GET.get('filter_category')

    # A missing parameter (None) and an empty string are both falsy, so
    # plain truthiness replaces the explicit `!= '' and is not None` checks.
    if filter_search:
        posts = posts.filter(title__icontains=filter_search)
    if filter_category:
        posts = posts.filter(category__name__icontains=filter_category)

    paginator = Paginator(posts, 10)
    page = request.GET.get('page')
    try:
        posts = paginator.page(page)
    except PageNotAnInteger:
        # If page is not an integer, deliver first page.
        posts = paginator.page(1)
    except EmptyPage:
        # If page is out of range (e.g. 9999), deliver last page of results.
        posts = paginator.page(paginator.num_pages)

    return render(request, 'blog/home.html', {'posts': posts})
def post_view(request, slug, id):
post, post_url = get_redirected(Post, {'pk': id}, {'slug': slug})
if post_url:
return redirect(post_url)
return render(request, 'blog/post.html', {'post': post, })
| 32.268293 | 76 | 0.695389 |
ace364d0547c2c22e5a3eff8fc250c47ab24e6d1 | 309,781 | py | Python | pysnmp-with-texts/HUAWEI-HQOS-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 8 | 2019-05-09T17:04:00.000Z | 2021-06-09T06:50:51.000Z | pysnmp-with-texts/HUAWEI-HQOS-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 4 | 2019-05-31T16:42:59.000Z | 2020-01-31T21:57:17.000Z | pysnmp-with-texts/HUAWEI-HQOS-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 10 | 2019-04-30T05:51:36.000Z | 2022-02-16T03:33:41.000Z | #
# PySNMP MIB module HUAWEI-HQOS-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/HUAWEI-HQOS-MIB
# Produced by pysmi-0.3.4 at Wed May 1 13:44:54 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, OctetString, Integer = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "OctetString", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
SingleValueConstraint, ConstraintsUnion, ValueSizeConstraint, ValueRangeConstraint, ConstraintsIntersection = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ConstraintsUnion", "ValueSizeConstraint", "ValueRangeConstraint", "ConstraintsIntersection")
entPhysicalIndex, entPhysicalName = mibBuilder.importSymbols("ENTITY-MIB", "entPhysicalIndex", "entPhysicalName")
hwDatacomm, = mibBuilder.importSymbols("HUAWEI-MIB", "hwDatacomm")
InterfaceIndex, = mibBuilder.importSymbols("IF-MIB", "InterfaceIndex")
ObjectGroup, NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "ObjectGroup", "NotificationGroup", "ModuleCompliance")
Unsigned32, Bits, TimeTicks, MibScalar, MibTable, MibTableRow, MibTableColumn, ModuleIdentity, iso, Counter32, MibIdentifier, NotificationType, ObjectIdentity, Counter64, Gauge32, Integer32, IpAddress = mibBuilder.importSymbols("SNMPv2-SMI", "Unsigned32", "Bits", "TimeTicks", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "ModuleIdentity", "iso", "Counter32", "MibIdentifier", "NotificationType", "ObjectIdentity", "Counter64", "Gauge32", "Integer32", "IpAddress")
DisplayString, TextualConvention, RowStatus = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention", "RowStatus")
hwHQOS = ModuleIdentity((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132))
hwHQOS.setRevisions(('2014-08-04 15:58', '2014-07-23 15:58', '2014-06-03 14:55', '2014-05-06 19:15', '2013-04-24 14:43', '2014-03-25 14:33', '2013-11-20 14:43', '2013-09-30 14:43', '2013-09-16 16:09', '2013-07-29 14:43', '2013-04-10 14:43', '2014-08-04 15:58', '2014-12-22 15:58',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
if mibBuilder.loadTexts: hwHQOS.setRevisionsDescriptions(('MOD hwhqosUserQueueShapeAllTrafficEntry', 'ADD TABLE', 'DELETE hwhqosFlowQueueCfgWeightPercentageValue OF hwhqosFlowQueueCfgEntry', 'DELETE CIR LEAF OF HQOS CONFIG NODE', 'MOD hwhqosProfileName NODE', 'ADD IF USER QUEUE NODE', 'MOD VALUE-RANGE', 'ADD ALARM NODE', 'ADD ALARM NODE', 'ADD ALARM TABLE', 'ADD ALARM NODE', 'ADD ALARM NODE', 'ADD ALARM NODE',))
if mibBuilder.loadTexts: hwHQOS.setLastUpdated('201408041558Z')
if mibBuilder.loadTexts: hwHQOS.setOrganization('Huawei Technologies Co.,Ltd.')
if mibBuilder.loadTexts: hwHQOS.setContactInfo("Huawei Industrial Base Bantian, Longgang Shenzhen 518129 People's Republic of China Website: http://www.huawei.com Email: support@huawei.com ")
if mibBuilder.loadTexts: hwHQOS.setDescription('mib of Hierarchy Quality Of Service module the huawei-hqos-mib is only defined about statistic information now. ')
class CosType(TextualConvention, Integer32):
    """Textual convention for a class-of-service value.

    An Integer32 restricted to the eight CoS code points
    be(1), af1(2), af2(3), af3(4), af4(5), ef(6), cs6(7), cs7(8).
    """
    # Human-readable text from the MIB's TEXTUAL-CONVENTION clause.
    description = ' BE(1) AF1(2) AF2(3) AF3(4) AF4(5) EF(6) CS6(7) CS7(8) '
    status = 'current'
    # Constrain the base Integer32 to exactly the eight enumerated values.
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8))
    namedValues = NamedValues(("be", 1), ("af1", 2), ("af2", 3), ("af3", 4), ("af4", 5), ("ef", 6), ("cs6", 7), ("cs7", 8))
# ---------------------------------------------------------------------------
# hwhqosStat subtree (…132.1): container for all HQoS statistics tables below.
# ---------------------------------------------------------------------------
hwhqosStat = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1))
# Table …132.1.1: per-interface HQoS queue statistics, indexed by
# (ifIndex, direction, userLayer1, userLayer2, queueIndex). Per the entry
# description, absent index levels are filled with the max value 2147483647.
hwhqosIfStatTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 1), )
if mibBuilder.loadTexts: hwhqosIfStatTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosIfStatTable.setDescription("Table of Hierarchy QoS's statistic information.")
hwhqosIfStatEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 1, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosIfIndex"), (0, "HUAWEI-HQOS-MIB", "hwhqosDirection"), (0, "HUAWEI-HQOS-MIB", "hwhqosUserLayer1"), (0, "HUAWEI-HQOS-MIB", "hwhqosUserLayer2"), (0, "HUAWEI-HQOS-MIB", "hwhqosQueueIndex"))
if mibBuilder.loadTexts: hwhqosIfStatEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosIfStatEntry.setDescription("The table have multilevel index if don't have some or other index. please fill the MAX value 2147483647 for example : MA52 don't have statistic of user's queue, but have statistic of user. please set QueueIndex the MAX value 2147483647. ")
# --- Index columns (sub-ids 1-5); not-accessible per SMI table convention ---
hwhqosIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647)))
if mibBuilder.loadTexts: hwhqosIfIndex.setStatus('current')
if mibBuilder.loadTexts: hwhqosIfIndex.setDescription('The value of this object identifies the index of an interface. The interface can be a physical interface or a logical interface. ')
hwhqosDirection = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 255))).clone(namedValues=NamedValues(("inbound", 1), ("outbound", 2), ("absent", 255))))
if mibBuilder.loadTexts: hwhqosDirection.setStatus('current')
if mibBuilder.loadTexts: hwhqosDirection.setDescription('The value of this object identifies the incoming and outgoing directions. The object can be set to 0 when the direction is not specified. ')
hwhqosUserLayer1 = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 1, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647)))
if mibBuilder.loadTexts: hwhqosUserLayer1.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserLayer1.setDescription('The value of this object identifies the outer identifier of a user group. ')
hwhqosUserLayer2 = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 1, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647)))
if mibBuilder.loadTexts: hwhqosUserLayer2.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserLayer2.setDescription('The value of this object identifies the outer identifier of a user. ')
hwhqosQueueIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 1, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647)))
if mibBuilder.loadTexts: hwhqosQueueIndex.setStatus('current')
if mibBuilder.loadTexts: hwhqosQueueIndex.setDescription('The value of this object identifies the queue index of a user. Each user has multiple queues and the index indicates the queues for each user. Each user of the NE40E has eight queues. If this object does not exist, you can set it to 0. ')
# --- Read-only Counter64 statistics (sub-ids 6-11) ---
hwhqosQueueForwardPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 1, 1, 6), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosQueueForwardPackets.setStatus('current')
if mibBuilder.loadTexts: hwhqosQueueForwardPackets.setDescription('The value of this object identifies the packets forwarded in the queue. If this object is not supported, fill 0. ')
hwhqosQueueForwardBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 1, 1, 7), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosQueueForwardBytes.setStatus('current')
if mibBuilder.loadTexts: hwhqosQueueForwardBytes.setDescription('The value of this object identifies the bytes forwarded in the queue. If this object is not supported, fill 0. ')
hwhqosQueueDropPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 1, 1, 8), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosQueueDropPackets.setStatus('current')
if mibBuilder.loadTexts: hwhqosQueueDropPackets.setDescription('This object indicates the number of packets discarded from the queue. If this object is not supported, fill 0. ')
hwhqosQueueDropBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 1, 1, 9), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosQueueDropBytes.setStatus('current')
if mibBuilder.loadTexts: hwhqosQueueDropBytes.setDescription('This object indicates the number of bytes discarded from the queue. If this object is not supported, fill 0. ')
hwhqosQueueRemarkPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 1, 1, 10), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosQueueRemarkPackets.setStatus('current')
if mibBuilder.loadTexts: hwhqosQueueRemarkPackets.setDescription('RemarkPackets number of queue. if not support, please fill 0. ')
hwhqosQueueRemarkBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 1, 1, 11), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosQueueRemarkBytes.setStatus('current')
if mibBuilder.loadTexts: hwhqosQueueRemarkBytes.setDescription('RemarkBytes number of queue. if not support, please fill 0. ')
# --- Writable clear-counters trigger (sub-id 12) ---
hwhqosSetZero = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 1, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1))).clone(namedValues=NamedValues(("setZero", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwhqosSetZero.setStatus('current')
if mibBuilder.loadTexts: hwhqosSetZero.setDescription('This object indicates that the statistics are cleared.')
# --- Read-only rate gauges in pps/bps (sub-ids 13-16) ---
hwhqosQueueForwardPacketRate = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 1, 1, 13), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosQueueForwardPacketRate.setStatus('current')
if mibBuilder.loadTexts: hwhqosQueueForwardPacketRate.setDescription('This object indicates the packet forwarding rate in the queue, in pps.')
hwhqosQueueForwardByteRate = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 1, 1, 14), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosQueueForwardByteRate.setStatus('current')
if mibBuilder.loadTexts: hwhqosQueueForwardByteRate.setDescription('This object indicates the byte forwarding rate in the queue, in bps.')
hwhqosQueueDropPacketRate = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 1, 1, 15), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosQueueDropPacketRate.setStatus('current')
if mibBuilder.loadTexts: hwhqosQueueDropPacketRate.setDescription('This object indicates the packet discarding rate in the queue, in pps.')
hwhqosQueueDropByteRate = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 1, 1, 16), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosQueueDropByteRate.setStatus('current')
if mibBuilder.loadTexts: hwhqosQueueDropByteRate.setDescription('This object indicates the byte discarding rate in the queue, in bps.')
# ---------------------------------------------------------------------------
# Table …132.1.2: HQoS statistics per ATM PVC, indexed by
# (ifIndex, VPI, VCI, direction, userLayer1, userLayer2, queueIndex).
# Per the entry description, absent index levels are filled with 2147483647.
# ---------------------------------------------------------------------------
hwhqosAtmPvcStatTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 2), )
if mibBuilder.loadTexts: hwhqosAtmPvcStatTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosAtmPvcStatTable.setDescription("Table of Hierarchy QoS's statistic information.")
hwhqosAtmPvcStatEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 2, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosAtmPvcIfIndex"), (0, "HUAWEI-HQOS-MIB", "hwhqosAtmPvcVPI"), (0, "HUAWEI-HQOS-MIB", "hwhqosAtmPvcVCI"), (0, "HUAWEI-HQOS-MIB", "hwhqosAtmPvcDirection"), (0, "HUAWEI-HQOS-MIB", "hwhqosAtmPvcUserLayer1"), (0, "HUAWEI-HQOS-MIB", "hwhqosAtmPvcUserLayer2"), (0, "HUAWEI-HQOS-MIB", "hwhqosAtmPvcQueueIndex"))
if mibBuilder.loadTexts: hwhqosAtmPvcStatEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosAtmPvcStatEntry.setDescription("The table have multilevel index if don't have some or other index. please fill the MAX value 2147483647 for example : MA52 don't have statistic of user's queue, but have statistic of user. please set QueueIndex the MAX value 2147483647. ")
# --- Index columns (sub-ids 1-7) ---
hwhqosAtmPvcIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647)))
if mibBuilder.loadTexts: hwhqosAtmPvcIfIndex.setStatus('current')
if mibBuilder.loadTexts: hwhqosAtmPvcIfIndex.setDescription('ATM interfaceindex. ')
hwhqosAtmPvcVPI = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 2, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647)))
if mibBuilder.loadTexts: hwhqosAtmPvcVPI.setStatus('current')
if mibBuilder.loadTexts: hwhqosAtmPvcVPI.setDescription('VPI NUMBER. ')
hwhqosAtmPvcVCI = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 2, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647)))
if mibBuilder.loadTexts: hwhqosAtmPvcVCI.setStatus('current')
if mibBuilder.loadTexts: hwhqosAtmPvcVCI.setDescription('VCI NUMBER. ')
hwhqosAtmPvcDirection = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 2, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 255))).clone(namedValues=NamedValues(("inbound", 1), ("outbound", 2), ("absent", 255))))
if mibBuilder.loadTexts: hwhqosAtmPvcDirection.setStatus('current')
if mibBuilder.loadTexts: hwhqosAtmPvcDirection.setDescription("Direction: inbound;outbound. if don't have, please fill 255. ")
hwhqosAtmPvcUserLayer1 = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 2, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647)))
if mibBuilder.loadTexts: hwhqosAtmPvcUserLayer1.setStatus('current')
if mibBuilder.loadTexts: hwhqosAtmPvcUserLayer1.setDescription("Usergroupid: just the Usergroup's configure sequence Usergroupname is identifier in Hierarchy QoS. ")
hwhqosAtmPvcUserLayer2 = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 2, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647)))
if mibBuilder.loadTexts: hwhqosAtmPvcUserLayer2.setStatus('current')
if mibBuilder.loadTexts: hwhqosAtmPvcUserLayer2.setDescription("Userid: just the User's configure sequence Username is identifier in Hierarchy QoS. ")
hwhqosAtmPvcQueueIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 2, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647)))
if mibBuilder.loadTexts: hwhqosAtmPvcQueueIndex.setStatus('current')
if mibBuilder.loadTexts: hwhqosAtmPvcQueueIndex.setDescription("index of user's queue. 8031: everyuser have 4 queues MA52: everyuser have 8 queues 8090: everyuser have 8 queues if don't have, please fill 0. ")
# --- Read-only Counter64 statistics (sub-ids 8-13) ---
hwhqosAtmPvcQueueForwardPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 2, 1, 8), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosAtmPvcQueueForwardPackets.setStatus('current')
if mibBuilder.loadTexts: hwhqosAtmPvcQueueForwardPackets.setDescription('ForwardPackets number of queue. if not support, please fill 0. ')
hwhqosAtmPvcQueueForwardBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 2, 1, 9), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosAtmPvcQueueForwardBytes.setStatus('current')
if mibBuilder.loadTexts: hwhqosAtmPvcQueueForwardBytes.setDescription('ForwardBytes number of queue. if not support, please fill 0. ')
hwhqosAtmPvcQueueDropPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 2, 1, 10), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosAtmPvcQueueDropPackets.setStatus('current')
if mibBuilder.loadTexts: hwhqosAtmPvcQueueDropPackets.setDescription('DropPackets number of queue. if not support, please fill 0. ')
hwhqosAtmPvcQueueDropBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 2, 1, 11), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosAtmPvcQueueDropBytes.setStatus('current')
if mibBuilder.loadTexts: hwhqosAtmPvcQueueDropBytes.setDescription('DropBytes number of queue. if not support, please fill 0. ')
hwhqosAtmPvcQueueRemarkPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 2, 1, 12), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosAtmPvcQueueRemarkPackets.setStatus('current')
if mibBuilder.loadTexts: hwhqosAtmPvcQueueRemarkPackets.setDescription('RemarkPackets number of queue. if not support, please fill 0. ')
hwhqosAtmPvcQueueRemarkBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 2, 1, 13), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosAtmPvcQueueRemarkBytes.setStatus('current')
if mibBuilder.loadTexts: hwhqosAtmPvcQueueRemarkBytes.setDescription('RemarkBytes number of queue. if not support, please fill 0. ')
# ---------------------------------------------------------------------------
# Table …132.1.3: per-port CoS queue configuration (scheduling arithmetic,
# weight, shaping, WRED template, PBS), indexed by (ifIndex, cosValue).
# Column sub-ids jump from 2 to 11 and end at 51 — the gaps come from the
# MIB definition's own numbering, not from missing code here.
# ---------------------------------------------------------------------------
hwhqosPortQueueTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 3), )
if mibBuilder.loadTexts: hwhqosPortQueueTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosPortQueueTable.setDescription('Table of configuration about a port-queue.')
hwhqosPortQueueEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 3, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosPortQueueIfIndex"), (0, "HUAWEI-HQOS-MIB", "hwhqosPortQueueCosValue"))
if mibBuilder.loadTexts: hwhqosPortQueueEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosPortQueueEntry.setDescription('Information about configuration of an interface cos-queue.')
# --- Index columns (sub-ids 1-2) ---
hwhqosPortQueueIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 3, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosPortQueueIfIndex.setStatus('current')
if mibBuilder.loadTexts: hwhqosPortQueueIfIndex.setDescription('The value of this object identifies the index of a physical port.The value ranges from 0 to 2147483647.')
hwhqosPortQueueCosValue = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 3, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8))).clone(namedValues=NamedValues(("portqueueBE", 1), ("portqueueAF1", 2), ("portqueueAF2", 3), ("portqueueAF3", 4), ("portqueueAF4", 5), ("portqueueEF", 6), ("portqueueCS6", 7), ("portqueueCS7", 8)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosPortQueueCosValue.setStatus('current')
if mibBuilder.loadTexts: hwhqosPortQueueCosValue.setDescription('The value of this object identifies the queue type.The value can be any of the following: portqueueBE(1) portqueueAF1(2) portqueueAF2(3) portqueueAF3(4) portqueueAF4(5) portqueueEF(6) portqueueCS6(7) portqueueCS7(8) ')
# --- Read-create configuration columns (sub-ids 11-16) ---
hwhqosPortQueueArithmetic = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 3, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("portqueuePQ", 1), ("portqueueWFQ", 2), ("portqueueLPQ", 3)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosPortQueueArithmetic.setStatus('current')
if mibBuilder.loadTexts: hwhqosPortQueueArithmetic.setDescription('The value of this object identifies the queue scheduling arithmetic. The value can be any of the following: portqueuePQ(1) portqueueWFQ(2) portqueueLPQ(3)')
hwhqosPortQueueWeightValue = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 3, 1, 12), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosPortQueueWeightValue.setStatus('current')
if mibBuilder.loadTexts: hwhqosPortQueueWeightValue.setDescription('The value of this object identifies the weight of the Weighted Fair Queuing (WFQ) scheduling arithmetic.')
hwhqosPortQueueShaValue = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 3, 1, 13), Unsigned32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosPortQueueShaValue.setStatus('current')
if mibBuilder.loadTexts: hwhqosPortQueueShaValue.setDescription('The value of this object identifies the bandwidth allocated to the queue.')
hwhqosPortQueueShaPercent = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 3, 1, 14), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosPortQueueShaPercent.setStatus('current')
if mibBuilder.loadTexts: hwhqosPortQueueShaPercent.setDescription('The value of this object identifies the proportion of the queue bandwidth to the bandwidth of the port.')
hwhqosPortQueueWredName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 3, 1, 15), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 31))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosPortQueueWredName.setStatus('current')
if mibBuilder.loadTexts: hwhqosPortQueueWredName.setDescription('This object indicates the name of the Weighted Random Early Detection (WRED) template. The object is a string of 1 to 31 characters.')
hwhqosPortQueuePbsValue = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 3, 1, 16), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 262144))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosPortQueuePbsValue.setStatus('current')
if mibBuilder.loadTexts: hwhqosPortQueuePbsValue.setDescription('The object specifies the value of the port-queue specific queue pbs.')
# --- Row status for row creation/deletion (sub-id 51) ---
hwhqosPortQueueRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 3, 1, 51), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosPortQueueRowStatus.setStatus('current')
if mibBuilder.loadTexts: hwhqosPortQueueRowStatus.setDescription('This object indicates the row status.')
# ---------------------------------------------------------------------------
# Table …132.1.4: WRED template configuration, indexed by template name.
# For each drop-precedence color (green/yellow/red) there is a low limit,
# high limit, and discard percentage; sub-ids 11-19 plus RowStatus at 51.
# ---------------------------------------------------------------------------
hwhqosWredTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 4), )
if mibBuilder.loadTexts: hwhqosWredTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosWredTable.setDescription('Table of configuration about a wred template.')
hwhqosWredEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 4, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosWredName"))
if mibBuilder.loadTexts: hwhqosWredEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosWredEntry.setDescription('Information about configuration of the value of wred color .')
# --- Index column: template name, 1-31 octets (sub-id 1) ---
hwhqosWredName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 4, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 31)))
if mibBuilder.loadTexts: hwhqosWredName.setStatus('current')
if mibBuilder.loadTexts: hwhqosWredName.setDescription('The value of this object identifies the name of the WRED template. The object is a string of 1 to 31 characters.')
# --- Green thresholds and discard percentage (sub-ids 11-13) ---
hwhqosWredGreenLowLimit = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 4, 1, 11), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosWredGreenLowLimit.setStatus('current')
if mibBuilder.loadTexts: hwhqosWredGreenLowLimit.setDescription('The value of this object identifies the lower threshold of the green port queue.')
hwhqosWredGreenHighLimit = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 4, 1, 12), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosWredGreenHighLimit.setStatus('current')
if mibBuilder.loadTexts: hwhqosWredGreenHighLimit.setDescription('The value of this object identifies the upper threshold of the green port queue.')
hwhqosWredGreenDiscardPercent = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 4, 1, 13), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosWredGreenDiscardPercent.setStatus('current')
if mibBuilder.loadTexts: hwhqosWredGreenDiscardPercent.setDescription('The value of this object identifies the packet loss ratio between the upper threshold and the lower threshold of the green port queue.')
# --- Yellow thresholds and discard percentage (sub-ids 14-16) ---
hwhqosWredYellowLowLimit = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 4, 1, 14), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosWredYellowLowLimit.setStatus('current')
if mibBuilder.loadTexts: hwhqosWredYellowLowLimit.setDescription('The value of this object identifies the lower threshold of the yellow port queue.')
hwhqosWredYellowHighLimit = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 4, 1, 15), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosWredYellowHighLimit.setStatus('current')
if mibBuilder.loadTexts: hwhqosWredYellowHighLimit.setDescription('The value of this object identifies the upper threshold of the yellow port queue.')
hwhqosWredYellowDiscardPercent = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 4, 1, 16), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosWredYellowDiscardPercent.setStatus('current')
if mibBuilder.loadTexts: hwhqosWredYellowDiscardPercent.setDescription('The value of this object identifies the packet loss ratio between the upper threshold and the lower threshold of the yellow port queue.')
# --- Red thresholds and discard percentage (sub-ids 17-19) ---
hwhqosWredRedLowLimit = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 4, 1, 17), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosWredRedLowLimit.setStatus('current')
if mibBuilder.loadTexts: hwhqosWredRedLowLimit.setDescription('The value of this object identifies the lower threshold of the red port queue.')
hwhqosWredRedHighLimit = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 4, 1, 18), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosWredRedHighLimit.setStatus('current')
if mibBuilder.loadTexts: hwhqosWredRedHighLimit.setDescription('The value of this object identifies the upper threshold of the red port queue.')
hwhqosWredRedDiscardPercent = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 4, 1, 19), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosWredRedDiscardPercent.setStatus('current')
if mibBuilder.loadTexts: hwhqosWredRedDiscardPercent.setDescription('The value of this object identifies the packet loss ratio between the upper threshold and the lower threshold of the red port queue.')
# --- Row status for row creation/deletion (sub-id 51) ---
hwhqosWredRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 4, 1, 51), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosWredRowStatus.setStatus('current')
if mibBuilder.loadTexts: hwhqosWredRowStatus.setDescription('This object indicates the row status.')
# ---------------------------------------------------------------------------
# Table …132.1.5: per-interface CoS queue statistics, indexed by
# (ifIndex, queueIndex be..cs7, direction). Four read-only Counter64 columns.
# InterfaceIndex is imported from IF-MIB earlier in this generated module.
# ---------------------------------------------------------------------------
hwhqosIfQueueStatTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 5), )
if mibBuilder.loadTexts: hwhqosIfQueueStatTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosIfQueueStatTable.setDescription('Interface queue statistic table.')
hwhqosIfQueueStatEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 5, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosIfQueueStatIfIndex"), (0, "HUAWEI-HQOS-MIB", "hwhqosIfQueueStatQueueIndex"), (0, "HUAWEI-HQOS-MIB", "hwhqosIfQueueStatDirection"))
if mibBuilder.loadTexts: hwhqosIfQueueStatEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosIfQueueStatEntry.setDescription('Interface Queue statistic table entry.')
# --- Index columns (sub-ids 1-3) ---
hwhqosIfQueueStatIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 5, 1, 1), InterfaceIndex())
if mibBuilder.loadTexts: hwhqosIfQueueStatIfIndex.setStatus('current')
if mibBuilder.loadTexts: hwhqosIfQueueStatIfIndex.setDescription('Interface index. ')
hwhqosIfQueueStatQueueIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 5, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8))).clone(namedValues=NamedValues(("be", 1), ("af1", 2), ("af2", 3), ("af3", 4), ("af4", 5), ("ef", 6), ("cs6", 7), ("cs7", 8))))
if mibBuilder.loadTexts: hwhqosIfQueueStatQueueIndex.setStatus('current')
if mibBuilder.loadTexts: hwhqosIfQueueStatQueueIndex.setDescription('Index number of queues with priority. The values and meanings are as follows: 1 be 2 af1 3 af2 4 af3 5 af4 6 ef 7 cs6 8 cs7 ')
hwhqosIfQueueStatDirection = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 5, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("inbound", 1), ("outbound", 2))))
if mibBuilder.loadTexts: hwhqosIfQueueStatDirection.setStatus('current')
if mibBuilder.loadTexts: hwhqosIfQueueStatDirection.setDescription('Direction: inbound 1,outbound 2')
# --- Read-only Counter64 statistics (sub-ids 4-7) ---
hwhqosIfQueueStatForwardPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 5, 1, 4), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosIfQueueStatForwardPackets.setStatus('current')
if mibBuilder.loadTexts: hwhqosIfQueueStatForwardPackets.setDescription('Number of forwarded packets.')
hwhqosIfQueueStatForwardBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 5, 1, 5), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosIfQueueStatForwardBytes.setStatus('current')
if mibBuilder.loadTexts: hwhqosIfQueueStatForwardBytes.setDescription('Number of forwarded bytes.')
hwhqosIfQueueStatDropPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 5, 1, 6), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosIfQueueStatDropPackets.setStatus('current')
if mibBuilder.loadTexts: hwhqosIfQueueStatDropPackets.setDescription('Number of discarded packets.')
hwhqosIfQueueStatDropBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 5, 1, 7), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosIfQueueStatDropBytes.setStatus('current')
if mibBuilder.loadTexts: hwhqosIfQueueStatDropBytes.setDescription('Number of discarded bytes.')
# ---------------------------------------------------------------------------
# Table …132.1.6: user-queue statistics, indexed by
# (statType interface/mactunel/userclassifier, nameString, direction,
#  queueIndex be..cs7/total). Counter columns, a writable reset trigger,
# the last-reset timestamp, and a periodic drop counter.
# ---------------------------------------------------------------------------
hwhqosUserQueueStatTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 6), )
if mibBuilder.loadTexts: hwhqosUserQueueStatTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueStatTable.setDescription('User queue statistic table.')
hwhqosUserQueueStatEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 6, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosUserQueueStatType"), (0, "HUAWEI-HQOS-MIB", "hwhqosUserQueueStatNameString"), (0, "HUAWEI-HQOS-MIB", "hwhqosUserQueueStatDirection"), (0, "HUAWEI-HQOS-MIB", "hwhqosUserQueueStatQueueIndex"))
if mibBuilder.loadTexts: hwhqosUserQueueStatEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueStatEntry.setDescription('User Queue statistic table entry')
# --- Index columns (sub-ids 1-4) ---
hwhqosUserQueueStatType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 6, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("interface", 1), ("mactunel", 2), ("userclassifier", 3))))
if mibBuilder.loadTexts: hwhqosUserQueueStatType.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueStatType.setDescription('Types of statistic: interface(1), mactunel(2), userclassifier(3) ')
hwhqosUserQueueStatNameString = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 6, 1, 2), OctetString())
if mibBuilder.loadTexts: hwhqosUserQueueStatNameString.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueStatNameString.setDescription('Name character string: If the statistic is based on mac-tunnel, this field is the name of the mac-tunnel. If the statistic is based on user classification, this field is the name of the user classification. If the statistic is based on an interface, this field is the name of the interface. ')
hwhqosUserQueueStatDirection = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 6, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("inbound", 1), ("outbound", 2))))
if mibBuilder.loadTexts: hwhqosUserQueueStatDirection.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueStatDirection.setDescription('Direction: If the statistic is based on user classification, the direction is divided to upstream (1) and downstream (2). If the Statistic is based on mac-tunnel, the direction is applied only on the downstream (2).')
hwhqosUserQueueStatQueueIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 6, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9))).clone(namedValues=NamedValues(("be", 1), ("af1", 2), ("af2", 3), ("af3", 4), ("af4", 5), ("ef", 6), ("cs6", 7), ("cs7", 8), ("total", 9))))
if mibBuilder.loadTexts: hwhqosUserQueueStatQueueIndex.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueStatQueueIndex.setDescription('Index number of the queues. The values and meanings are as follows: 1 be 2 af1 3 af2 4 af3 5 af4 6 ef 7 cs6 8 cs7 9 total ')
# --- Read-only Counter64 statistics (sub-ids 5-8) ---
hwhqosUserQueueStatForwardPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 6, 1, 5), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserQueueStatForwardPackets.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueStatForwardPackets.setDescription('The number of packets that pass through.')
hwhqosUserQueueStatForwardBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 6, 1, 6), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserQueueStatForwardBytes.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueStatForwardBytes.setDescription('The number of bytes that pass through.')
hwhqosUserQueueStatDropPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 6, 1, 7), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserQueueStatDropPackets.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueStatDropPackets.setDescription('The number of discarded packets.')
hwhqosUserQueueStatDropBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 6, 1, 8), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserQueueStatDropBytes.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueStatDropBytes.setDescription('The number of discarded bytes.')
# --- Writable counter-reset trigger and related read-only columns (9-11) ---
hwhqosUserQueueStatReset = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 6, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1))).clone(namedValues=NamedValues(("reset", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwhqosUserQueueStatReset.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueStatReset.setDescription('Counter resetting. If the value is 1, the object resets the statistics through the set operation. It is no of use to access the value of this object. ')
hwhqosUserQueueStatLastResetTime = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 6, 1, 10), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserQueueStatLastResetTime.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueStatLastResetTime.setDescription('The time stamp that the counter is reset last.')
hwhqosUserQueueStatPerDropPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 6, 1, 11), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserQueueStatPerDropPackets.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueStatPerDropPackets.setDescription('The number of discarded packets in a certain period.')
# === hwhqosUserGroupQueueStatTable (1.3.6.1.4.1.2011.5.25.132.1.7) ===
# Read-only statistics per user-group queue, indexed by (group name,
# direction). All counters are 64-bit; column .7 is a reset trigger.
hwhqosUserGroupQueueStatTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 7), )
if mibBuilder.loadTexts: hwhqosUserGroupQueueStatTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueStatTable.setDescription('User group queue statistic table.')
# Row indexed by hwhqosUserGroupQueueStatGroupName + ...StatDirection.
hwhqosUserGroupQueueStatEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 7, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueStatGroupName"), (0, "HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueStatDirection"))
if mibBuilder.loadTexts: hwhqosUserGroupQueueStatEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueStatEntry.setDescription('user group queue statistic table entry')
# Index column .1: user-group name (no max-access set; index column).
hwhqosUserGroupQueueStatGroupName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 7, 1, 1), OctetString())
if mibBuilder.loadTexts: hwhqosUserGroupQueueStatGroupName.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueStatGroupName.setDescription('User group name. ')
# Index column .2: direction enum ("inbount" spelling comes from the MIB
# itself — do not "fix" it here or the module diverges from the MIB).
hwhqosUserGroupQueueStatDirection = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 7, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("inbount", 1), ("outbound", 2))))
if mibBuilder.loadTexts: hwhqosUserGroupQueueStatDirection.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueStatDirection.setDescription('Direction: upstream (1), downstream (2);')
# Columns .3-.6: forwarded/dropped packet and byte counters.
hwhqosUserGroupQueueForwardPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 7, 1, 3), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserGroupQueueForwardPackets.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueForwardPackets.setDescription('The number of packets that pass through.')
hwhqosUserGroupQueueForwardBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 7, 1, 4), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserGroupQueueForwardBytes.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueForwardBytes.setDescription('The number of bytes that pass through.')
hwhqosUserGroupQueueDropPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 7, 1, 5), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserGroupQueueDropPackets.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueDropPackets.setDescription('The number of discarded packets.')
hwhqosUserGroupQueueDropBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 7, 1, 6), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserGroupQueueDropBytes.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueDropBytes.setDescription('The number of discarded bytes.')
# Column .7: reset trigger; column .8: last-reset timestamp.
hwhqosUserGroupQueueStatReset = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 7, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1))).clone(namedValues=NamedValues(("reset", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwhqosUserGroupQueueStatReset.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueStatReset.setDescription('Counter resetting. If the value is reset(1), the object resets the statistics through the set operation. It is no of use to access the value of this object.')
hwhqosUserGroupQueueStatLastResetTime = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 7, 1, 8), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserGroupQueueStatLastResetTime.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueStatLastResetTime.setDescription('The time stamp that the counter is reset last.')
# === hwVPNHQoSTunnelStatisticsTable (1.3.6.1.4.1.2011.5.25.132.1.8) ===
# Per-VPN tunnel QoS statistics, indexed by (tunnel ifIndex, VPN type,
# VPN name). Read-only counters for passed/dropped bytes and packets.
hwVPNHQoSTunnelStatisticsTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 8), )
if mibBuilder.loadTexts: hwVPNHQoSTunnelStatisticsTable.setStatus('current')
if mibBuilder.loadTexts: hwVPNHQoSTunnelStatisticsTable.setDescription('VPN qos tunnel statistic table.')
hwVPNHQoSTunnelStatisticsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 8, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwVPNHQoSTunnelIfIndex"), (0, "HUAWEI-HQOS-MIB", "hwVPNHQoSVPNType"), (0, "HUAWEI-HQOS-MIB", "hwVPNHQoSVPNValue"))
if mibBuilder.loadTexts: hwVPNHQoSTunnelStatisticsEntry.setStatus('current')
if mibBuilder.loadTexts: hwVPNHQoSTunnelStatisticsEntry.setDescription('VPN qos tunnel statistic table entry: L3VPN,VPLS and VLL are all supported. ')
# Index columns .1-.3: interface index, VPN type (0..3), VPN instance name.
hwVPNHQoSTunnelIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 8, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwVPNHQoSTunnelIfIndex.setStatus('current')
if mibBuilder.loadTexts: hwVPNHQoSTunnelIfIndex.setDescription('VPN Tunnel interface index.')
hwVPNHQoSVPNType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 8, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 3))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwVPNHQoSVPNType.setStatus('current')
if mibBuilder.loadTexts: hwVPNHQoSVPNType.setDescription('VPN Type: Tunnel (0), L3VPN (1), VPLS (2), VLL(3);')
hwVPNHQoSVPNValue = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 8, 1, 3), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwVPNHQoSVPNValue.setStatus('current')
if mibBuilder.loadTexts: hwVPNHQoSVPNValue.setDescription('Name of VPN Instance.')
# Data columns .4-.7: pass/drop byte and packet counters (Counter64).
hwVPNHQoSPassBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 8, 1, 4), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwVPNHQoSPassBytes.setStatus('current')
if mibBuilder.loadTexts: hwVPNHQoSPassBytes.setDescription('The number of bytes that pass through.')
hwVPNHQoSPassPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 8, 1, 5), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwVPNHQoSPassPackets.setStatus('current')
if mibBuilder.loadTexts: hwVPNHQoSPassPackets.setDescription('The number of packets that pass through.')
hwVPNHQoSDropPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 8, 1, 6), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwVPNHQoSDropPackets.setStatus('current')
if mibBuilder.loadTexts: hwVPNHQoSDropPackets.setDescription('The number of discarded packets.')
hwVPNHQoSDropBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 8, 1, 7), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwVPNHQoSDropBytes.setStatus('current')
if mibBuilder.loadTexts: hwVPNHQoSDropBytes.setDescription(' The number of discarded bytes.')
# === hwhqosTunnelStatisticsTable (1.3.6.1.4.1.2011.5.25.132.1.9) ===
# Per-tunnel, per-CoS statistics, indexed by (tunnel ifIndex, CoS type,
# VPN type, VPN name). Includes byte/packet rate columns (.9/.10).
hwhqosTunnelStatisticsTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 9), )
if mibBuilder.loadTexts: hwhqosTunnelStatisticsTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosTunnelStatisticsTable.setDescription('Tunnel statistic table.')
hwhqosTunnelStatisticsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 9, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosTunnelIfIndex"), (0, "HUAWEI-HQOS-MIB", "hwhqosTunnelCosType"), (0, "HUAWEI-HQOS-MIB", "hwhqosTunnelVPNType"), (0, "HUAWEI-HQOS-MIB", "hwhqosTunnelVPNName"))
if mibBuilder.loadTexts: hwhqosTunnelStatisticsEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosTunnelStatisticsEntry.setDescription('Tunnel statistic table entry: L3VPN,VPLS and VLL are all supported. ')
# Index columns .1-.4.
hwhqosTunnelIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 9, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosTunnelIfIndex.setStatus('current')
if mibBuilder.loadTexts: hwhqosTunnelIfIndex.setDescription('Tunnel interface index.')
# CosType is a textual convention defined elsewhere in this module.
hwhqosTunnelCosType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 9, 1, 2), CosType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosTunnelCosType.setStatus('current')
if mibBuilder.loadTexts: hwhqosTunnelCosType.setDescription('BE(1) AF1(2) AF2(3) AF3(4) AF4(5) EF(6) CS6(7) CS7(8)')
hwhqosTunnelVPNType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 9, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 3))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosTunnelVPNType.setStatus('current')
if mibBuilder.loadTexts: hwhqosTunnelVPNType.setDescription('VPN Type: Tunnel (0), L3VPN (1), VPLS (2), VLL(3);')
hwhqosTunnelVPNName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 9, 1, 4), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 31))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosTunnelVPNName.setStatus('current')
if mibBuilder.loadTexts: hwhqosTunnelVPNName.setDescription('Name of VPN Instance.')
# Data columns .5-.8: pass/drop counters.
hwhqosTunnelPassBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 9, 1, 5), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosTunnelPassBytes.setStatus('current')
if mibBuilder.loadTexts: hwhqosTunnelPassBytes.setDescription('The number of bytes that pass through.')
hwhqosTunnelPassPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 9, 1, 6), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosTunnelPassPackets.setStatus('current')
if mibBuilder.loadTexts: hwhqosTunnelPassPackets.setDescription('The number of packets that pass through.')
hwhqosTunnelDropBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 9, 1, 7), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosTunnelDropBytes.setStatus('current')
if mibBuilder.loadTexts: hwhqosTunnelDropBytes.setDescription(' The number of discarded bytes.')
hwhqosTunnelDropPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 9, 1, 8), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosTunnelDropPackets.setStatus('current')
if mibBuilder.loadTexts: hwhqosTunnelDropPackets.setDescription('The number of discarded packets.')
# Rate columns .9-.10: Bps / pps of passed traffic.
hwhqosTunnelPassedByteRate = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 9, 1, 9), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosTunnelPassedByteRate.setStatus('current')
if mibBuilder.loadTexts: hwhqosTunnelPassedByteRate.setDescription('Rate of bytes passed of enqueue. Unit: Bps')
hwhqosTunnelPassPacketRate = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 9, 1, 10), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosTunnelPassPacketRate.setStatus('current')
if mibBuilder.loadTexts: hwhqosTunnelPassPacketRate.setDescription('Rate of packets passed of enqueue. Unit: pps')
# === hwhqosProfileTable (1.3.6.1.4.1.2011.5.25.132.1.10) ===
# QoS profile definitions, indexed by profile name; rows are created and
# destroyed via the RowStatus column (.50).
hwhqosProfileTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 10), )
if mibBuilder.loadTexts: hwhqosProfileTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileTable.setDescription(' hwhqosProfileTable ')
hwhqosProfileEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 10, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosProfileName"))
if mibBuilder.loadTexts: hwhqosProfileEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileEntry.setDescription(' hwhqosProfileEntry ')
# Index column .1: profile name (1..63 octets); also reused as an index
# by the suppression/CAR/user-queue profile tables below.
hwhqosProfileName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 10, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 63))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosProfileName.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileName.setDescription('The value of this object identifies the name of a QoS profile.')
hwhqosProfileDescription = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 10, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 63))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosProfileDescription.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileDescription.setDescription(' hwhqosProfileDescription ')
hwhqosProfileRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 10, 1, 50), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosProfileRowStatus.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileRowStatus.setDescription('This object indicates the row status.')
# === hwhqosProfileSuppressionTable (1.3.6.1.4.1.2011.5.25.132.1.11) ===
# Broadcast/multicast/unknown-unicast suppression settings per profile,
# indexed by (profile name, direction, suppression type).
hwhqosProfileSuppressionTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 11), )
if mibBuilder.loadTexts: hwhqosProfileSuppressionTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileSuppressionTable.setDescription(' hwhqosProfileSuppressionTable ')
hwhqosProfileSuppressionEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 11, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosProfileName"), (0, "HUAWEI-HQOS-MIB", "hwhqosSuppressionDirection"), (0, "HUAWEI-HQOS-MIB", "hwhqosSuppressionType"))
if mibBuilder.loadTexts: hwhqosProfileSuppressionEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileSuppressionEntry.setDescription(' hwhqosProfileSuppressionEntry ')
hwhqosSuppressionDirection = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 11, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("inbound", 1), ("outbound", 2), ("inout", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosSuppressionDirection.setStatus('current')
if mibBuilder.loadTexts: hwhqosSuppressionDirection.setDescription('This object indicates the direction where packets are suppressed.')
# "unkonwnUnicast" spelling mirrors the source MIB; leave as-is.
hwhqosSuppressionType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 11, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("broadcast", 1), ("multicast", 2), ("unkonwnUnicast", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosSuppressionType.setStatus('current')
if mibBuilder.loadTexts: hwhqosSuppressionType.setDescription('This object indicates the suppression type.')
# CIR (100..10000000) and CBS (100..33554432) configuration values.
hwhqosSuppressionCirValue = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 11, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(100, 10000000))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosSuppressionCirValue.setStatus('current')
if mibBuilder.loadTexts: hwhqosSuppressionCirValue.setDescription('The value of this object identifies the CIR.')
hwhqosSuppressionCbsValue = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 11, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(100, 33554432))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosSuppressionCbsValue.setStatus('current')
if mibBuilder.loadTexts: hwhqosSuppressionCbsValue.setDescription('The value of this object identifies the CBS.')
hwhqosSuppressionRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 11, 1, 50), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosSuppressionRowStatus.setStatus('current')
if mibBuilder.loadTexts: hwhqosSuppressionRowStatus.setDescription('This object indicates the row status. Currently, three row statuses are supported: Active, CreateAndGo, and Destroy.')
# === hwhqosProfileCarTable (1.3.6.1.4.1.2011.5.25.132.1.12) ===
# Per-profile CAR (committed access rate) configuration, indexed by
# (profile name, direction): CIR/PIR/CBS/PBS plus color actions.
hwhqosProfileCarTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 12), )
if mibBuilder.loadTexts: hwhqosProfileCarTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileCarTable.setDescription(' hwhqosProfileCarTable ')
hwhqosProfileCarEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 12, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosProfileName"), (0, "HUAWEI-HQOS-MIB", "hwhqosProfileCarDirection"))
if mibBuilder.loadTexts: hwhqosProfileCarEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileCarEntry.setDescription(' hwhqosProfileCarEntry ')
hwhqosProfileCarDirection = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 12, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("inbound", 1), ("outbound", 2), ("inout", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosProfileCarDirection.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileCarDirection.setDescription('This object indicates the direction where traffic is policed.')
# Token-bucket parameters (.2-.5), each a full-range Unsigned32.
hwhqosProfileCarCirValue = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 12, 1, 2), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosProfileCarCirValue.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileCarCirValue.setDescription('The value of this object identifies the CIR.')
hwhqosProfileCarPirValue = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 12, 1, 3), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosProfileCarPirValue.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileCarPirValue.setDescription('The value of this object identifies the PIR.')
hwhqosProfileCarCbsValue = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 12, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosProfileCarCbsValue.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileCarCbsValue.setDescription('The value of this object identifies the CBS.')
hwhqosProfileCarPbsValue = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 12, 1, 5), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosProfileCarPbsValue.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileCarPbsValue.setDescription('The value of this object identifies the PBS.')
# Color actions (.6-.8): green/yellow default pass(1), red default discard(2)
# — defaults set via .clone(...).
hwhqosProfileCarGreenAction = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 12, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("pass", 1), ("discard", 2))).clone(1)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosProfileCarGreenAction.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileCarGreenAction.setDescription('This object indicates the actions (pass and discard) taken on the packets marked in green.')
hwhqosProfileCarYellowAction = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 12, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("pass", 1), ("discard", 2))).clone(1)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosProfileCarYellowAction.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileCarYellowAction.setDescription('This object indicates the actions (pass and discard) taken on the packets marked in yellow.')
hwhqosProfileCarRedAction = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 12, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("pass", 1), ("discard", 2))).clone(2)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosProfileCarRedAction.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileCarRedAction.setDescription('This object indicates the actions (pass and discard) taken on the packets marked in red.')
hwhqosProfileCarRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 12, 1, 50), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosProfileCarRowStatus.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileCarRowStatus.setDescription('This object indicates the row status. Currently, three row statuses are supported: Active, CreateAndGo, and Destroy.')
# === hwhqosProfileUserQueueTable (1.3.6.1.4.1.2011.5.25.132.1.13) ===
# Per-profile user-queue scheduling config, indexed by (profile name,
# direction): CIR/PIR plus references to flow-queue / mapping / group /
# service templates by name.
hwhqosProfileUserQueueTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 13), )
if mibBuilder.loadTexts: hwhqosProfileUserQueueTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileUserQueueTable.setDescription(' hwhqosProfileUserQueueTable ')
hwhqosProfileUserQueueEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 13, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosProfileName"), (0, "HUAWEI-HQOS-MIB", "hwhqosProfileUserQueueDirection"))
if mibBuilder.loadTexts: hwhqosProfileUserQueueEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileUserQueueEntry.setDescription(' hwhqosProfileUserQueueEntry ')
hwhqosProfileUserQueueDirection = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 13, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("inbound", 1), ("outbound", 2), ("inout", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosProfileUserQueueDirection.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileUserQueueDirection.setDescription('This object indicates the direction where a user queue is scheduled.')
# CIR/PIR accept -1, 0, or 16..10000000 (the -1/0 values presumably mean
# "unset"/special per the MIB — TODO confirm against the MIB text).
hwhqosProfileUserQueueCirValue = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 13, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(-1, -1), ValueRangeConstraint(0, 0), ValueRangeConstraint(16, 10000000), ))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosProfileUserQueueCirValue.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileUserQueueCirValue.setDescription('The value of this object identifies the guaranteed bandwidth of a user queue.')
hwhqosProfileUserQueuePirValue = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 13, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(-1, -1), ValueRangeConstraint(0, 0), ValueRangeConstraint(16, 10000000), ))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosProfileUserQueuePirValue.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileUserQueuePirValue.setDescription('The value of this object identifies the PIR of a user queue.')
# Template name references (.4-.7), each 1..31 octets.
hwhqosProfileUserQueueFlowQueueName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 13, 1, 4), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 31))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosProfileUserQueueFlowQueueName.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileUserQueueFlowQueueName.setDescription('The value of this object identifies the name of a flow queue template.')
hwhqosProfileUserQueueMappingName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 13, 1, 5), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 31))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosProfileUserQueueMappingName.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileUserQueueMappingName.setDescription('The value of this object identifies the name of a flow queue mapping object.')
hwhqosProfileUserQueueGroupName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 13, 1, 6), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 31))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosProfileUserQueueGroupName.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileUserQueueGroupName.setDescription('The value of this object identifies the name of a user group queue.')
hwhqosProfileUserQueueServiceTemplateName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 13, 1, 7), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 31))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosProfileUserQueueServiceTemplateName.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileUserQueueServiceTemplateName.setDescription('The value of this object identifies the name of a user-defined service template.')
hwhqosProfileUserQueueRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 13, 1, 50), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosProfileUserQueueRowStatus.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileUserQueueRowStatus.setDescription('This object indicates the row status. Currently, three row statuses are supported: Active, CreateAndGo, and Destroy.')
# === hwhqosProfileApplyTable (1.3.6.1.4.1.2011.5.25.132.1.14) ===
# Binds a QoS profile to an interface, indexed by (ifIndex, direction,
# PE VLAN id, CE VLAN id).
hwhqosProfileApplyTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 14), )
if mibBuilder.loadTexts: hwhqosProfileApplyTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileApplyTable.setDescription(' hwhqosProfileApplyTable ')
hwhqosProfileApplyEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 14, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosProfileInterfaceIndex"), (0, "HUAWEI-HQOS-MIB", "hwhqosProfileApplyDirection"), (0, "HUAWEI-HQOS-MIB", "hwhqosProfileApplyPevid"), (0, "HUAWEI-HQOS-MIB", "hwhqosProfileApplyCevid"))
if mibBuilder.loadTexts: hwhqosProfileApplyEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileApplyEntry.setDescription(' hwhqosProfileApplyEntry ')
hwhqosProfileInterfaceIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 14, 1, 1), InterfaceIndex()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosProfileInterfaceIndex.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileInterfaceIndex.setDescription('This object indicates the interface index.')
hwhqosProfileApplyDirection = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 14, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("inbound", 1), ("outbound", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosProfileApplyDirection.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileApplyDirection.setDescription('This object indicates the direction where a QoS profile is applied.')
# VLAN id index columns (.3-.4), 0..4094.
hwhqosProfileApplyPevid = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 14, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4094))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosProfileApplyPevid.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileApplyPevid.setDescription('The value of this object identifies the VLAN ID of a specified PE.')
hwhqosProfileApplyCevid = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 14, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4094))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosProfileApplyCevid.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileApplyCevid.setDescription('The value of this object identifies the VLAN ID of a specified CE.')
hwhqosProfileApplyName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 14, 1, 5), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 63))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosProfileApplyName.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileApplyName.setDescription('The value of this object identifies the name of a QoS profile.')
# Column .6: identifier selecting how packets map to queues; default none(1).
hwhqosProfileApplyIdentifier = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 14, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("none", 1), ("vlanId", 2), ("ceVid", 3), ("peVid", 4), ("peCeVid", 5))).clone(1)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosProfileApplyIdentifier.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileApplyIdentifier.setDescription('This object indicates the queue where packets enter.')
hwhqosGroupName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 14, 1, 7), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 31))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosGroupName.setStatus('current')
if mibBuilder.loadTexts: hwhqosGroupName.setDescription('The value of this object identifies the name of a shared group of QoS profile instances.')
hwhqosProfileApplyRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 14, 1, 50), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosProfileApplyRowStatus.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileApplyRowStatus.setDescription('This object indicates the row status. Currently, three row statuses are supported: Active, CreateAndGo, and Destroy.')
# === hwhqosFlowMappingTable (1.3.6.1.4.1.2011.5.25.132.1.15) ===
# Registry of flow-queue mapping objects, indexed by mapping name; the
# per-CoS mapping details live in hwhqosFlowMappingCfgTable (.16).
hwhqosFlowMappingTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 15), )
if mibBuilder.loadTexts: hwhqosFlowMappingTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosFlowMappingTable.setDescription(' hwhqosFlowMappingTable ')
hwhqosFlowMappingEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 15, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosFlowMappingName"))
if mibBuilder.loadTexts: hwhqosFlowMappingEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosFlowMappingEntry.setDescription(' hwhqosFlowMappingEntry ')
hwhqosFlowMappingName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 15, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 31))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosFlowMappingName.setStatus('current')
if mibBuilder.loadTexts: hwhqosFlowMappingName.setDescription('The value of this object identifies the name of a flow queue mapping object.')
hwhqosFlowMappingRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 15, 1, 50), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosFlowMappingRowStatus.setStatus('current')
if mibBuilder.loadTexts: hwhqosFlowMappingRowStatus.setDescription('This object indicates the row status. Currently, three row statuses are supported: Active, CreateAndGo, and Destroy.')
# === hwhqosFlowMappingCfgTable (1.3.6.1.4.1.2011.5.25.132.1.16) ===
# Per-mapping CoS translation (class queue CoS -> port queue CoS),
# indexed by (mapping name, class-queue CoS value).
hwhqosFlowMappingCfgTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 16), )
if mibBuilder.loadTexts: hwhqosFlowMappingCfgTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosFlowMappingCfgTable.setDescription(' hwhqosFlowMappingCfgTable ')
hwhqosFlowMappingCfgEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 16, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosFlowMappingName"), (0, "HUAWEI-HQOS-MIB", "hwhqosFolwMappingCfgQueueCosValue"))
if mibBuilder.loadTexts: hwhqosFlowMappingCfgEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosFlowMappingCfgEntry.setDescription(' hwhqosFlowMappingCfgEntry ')
# "Folw" (sic) is the identifier as defined in the source MIB; renaming it
# here would break the correspondence with the MIB and the index binding.
hwhqosFolwMappingCfgQueueCosValue = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 16, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8))).clone(namedValues=NamedValues(("be", 1), ("af1", 2), ("af2", 3), ("af3", 4), ("af4", 5), ("ef", 6), ("cs6", 7), ("cs7", 8)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosFolwMappingCfgQueueCosValue.setStatus('current')
if mibBuilder.loadTexts: hwhqosFolwMappingCfgQueueCosValue.setDescription('This object indicates the service type of a class queue.')
hwhqosFlowMappingCfgPortQueueCosValue = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 16, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8))).clone(namedValues=NamedValues(("be", 1), ("af1", 2), ("af2", 3), ("af3", 4), ("af4", 5), ("ef", 6), ("cs6", 7), ("cs7", 8)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosFlowMappingCfgPortQueueCosValue.setStatus('current')
if mibBuilder.loadTexts: hwhqosFlowMappingCfgPortQueueCosValue.setDescription('This object indicates the service type of a port queue.')
hwhqosFlowMappingCfgRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 16, 1, 50), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosFlowMappingCfgRowStatus.setStatus('current')
if mibBuilder.loadTexts: hwhqosFlowMappingCfgRowStatus.setDescription('This object indicates the row status. Currently, three row statuses are supported: Active, CreateAndGo, and Destroy.')
# === hwhqosFlowQueueTable (1.3.6.1.4.1.2011.5.25.132.1.17) ===
# Registry of flow-queue templates, indexed by template name; per-CoS
# scheduling parameters live in hwhqosFlowQueueCfgTable (.18).
hwhqosFlowQueueTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 17), )
if mibBuilder.loadTexts: hwhqosFlowQueueTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosFlowQueueTable.setDescription(' hwhqosFlowQueueTable ')
hwhqosFlowQueueEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 17, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosFlowQueueName"))
if mibBuilder.loadTexts: hwhqosFlowQueueEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosFlowQueueEntry.setDescription(' hwhqosFlowQueueEntry ')
hwhqosFlowQueueName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 17, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 31))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosFlowQueueName.setStatus('current')
if mibBuilder.loadTexts: hwhqosFlowQueueName.setDescription('The value of this object identifies the name of a flow queue template.')
hwhqosFlowQueueRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 17, 1, 50), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosFlowQueueRowStatus.setStatus('current')
if mibBuilder.loadTexts: hwhqosFlowQueueRowStatus.setDescription('This object indicates the row status. Currently, three row statuses are supported: Active, CreateAndGo, and Destroy.')
# --- hwhqosFlowQueueCfgTable (OID ...2011.5.25.132.1.18) ---
# Per-CoS configuration rows of a flow queue template: scheduling mode, WFQ
# weight, shaping (absolute or percentage), WRED template and PBS.
# Indexed by (template name, CoS value).
hwhqosFlowQueueCfgTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 18), )
if mibBuilder.loadTexts: hwhqosFlowQueueCfgTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosFlowQueueCfgTable.setDescription(' hwhqosFlowQueueCfgTable ')
hwhqosFlowQueueCfgEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 18, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosFlowQueueName"), (0, "HUAWEI-HQOS-MIB", "hwhqosFlowQueueCfgCosValue"))
if mibBuilder.loadTexts: hwhqosFlowQueueCfgEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosFlowQueueCfgEntry.setDescription(' hwhqosFlowQueueCfgEntry ')
# Index column: CoS of the flow queue (be/af1..af4/ef/cs6/cs7 -> 1..8).
hwhqosFlowQueueCfgCosValue = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 18, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8))).clone(namedValues=NamedValues(("be", 1), ("af1", 2), ("af2", 3), ("af3", 4), ("af4", 5), ("ef", 6), ("cs6", 7), ("cs7", 8)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosFlowQueueCfgCosValue.setStatus('current')
if mibBuilder.loadTexts: hwhqosFlowQueueCfgCosValue.setDescription('This object indicates the configured priority of a flow queue.')
# Scheduling mode: pq (strict priority), wfq, or lpq.
hwhqosFlowQueueCfgType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 18, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("pq", 1), ("wfq", 2), ("lpq", 3)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosFlowQueueCfgType.setStatus('current')
if mibBuilder.loadTexts: hwhqosFlowQueueCfgType.setDescription('This object indicates the scheduling mode of a flow queue.')
# WFQ weight 1..100; the extra single value 2147483647 (INT32 max) in the
# union presumably encodes "not configured" -- TODO confirm against agent behavior.
hwhqosFlowQueueCfgWeightValue = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 18, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(1, 100), ValueRangeConstraint(2147483647, 2147483647), ))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosFlowQueueCfgWeightValue.setStatus('current')
if mibBuilder.loadTexts: hwhqosFlowQueueCfgWeightValue.setDescription('The value of this object identifies the WFQ scheduling weight.')
hwhqosFlowQueueCfgShapingValue = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 18, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(8, 4294967295))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosFlowQueueCfgShapingValue.setStatus('current')
if mibBuilder.loadTexts: hwhqosFlowQueueCfgShapingValue.setDescription('The value of this object identifies the shaping rate, namely, the configured interface bandwidth. The value of this object is equal to the PIR value.')
hwhqosFlowQueueCfgShapingPercentageValue = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 18, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 100), ValueRangeConstraint(2147483647, 2147483647), ))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosFlowQueueCfgShapingPercentageValue.setStatus('current')
if mibBuilder.loadTexts: hwhqosFlowQueueCfgShapingPercentageValue.setDescription('The value of this object identifies the percentage of the shaping rate, that is, the percentage of the traffic-shaping bandwidth to the interface bandwidth.')
# Name of the WRED object applied to this flow queue (see hwhqosFlowWredTable).
hwhqosFlowQueueCfgWredName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 18, 1, 6), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 31))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosFlowQueueCfgWredName.setStatus('current')
if mibBuilder.loadTexts: hwhqosFlowQueueCfgWredName.setDescription('The value of this object identifies the WRED object used by a flow queue.')
hwhqosFlowQueueCfgPbsValue = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 18, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 4194304))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosFlowQueueCfgPbsValue.setStatus('current')
if mibBuilder.loadTexts: hwhqosFlowQueueCfgPbsValue.setDescription('The object specifies the value of the flow-queue specific queue pbs.')
hwhqosFlowQueueCfgRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 18, 1, 50), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosFlowQueueCfgRowStatus.setStatus('current')
if mibBuilder.loadTexts: hwhqosFlowQueueCfgRowStatus.setDescription('This object indicates the row status. Currently, three row statuses are supported: Active, CreateAndGo, and Destroy.')
# --- hwhqosFlowWredTable (OID ...2011.5.25.132.1.20's sibling, .1.19) ---
# Registry of WRED template objects, indexed by WRED name; per-color
# thresholds live in hwhqosFlowWredColorTable below.
hwhqosFlowWredTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 19), )
if mibBuilder.loadTexts: hwhqosFlowWredTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosFlowWredTable.setDescription(' hwhqosFlowWredTable ')
hwhqosFlowWredEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 19, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosFlowWredName"))
if mibBuilder.loadTexts: hwhqosFlowWredEntry.setStatus('current')
# Fixed copy-paste defect: the description previously read ' hwhqosFlowQueueEntry ',
# the name of a different table's entry. Every other entry in this module
# describes itself by its own name.
if mibBuilder.loadTexts: hwhqosFlowWredEntry.setDescription(' hwhqosFlowWredEntry ')
# Index column: WRED object name (1..31 octets, read-only).
hwhqosFlowWredName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 19, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 31))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosFlowWredName.setStatus('current')
# NOTE(review): this description ("flow queue template") looks copy-pasted from
# hwhqosFlowQueueName as well; left unchanged pending confirmation against the
# vendor MIB text.
if mibBuilder.loadTexts: hwhqosFlowWredName.setDescription('The value of this object identifies the name of a flow queue template.')
hwhqosFlowWredRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 19, 1, 50), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosFlowWredRowStatus.setStatus('current')
if mibBuilder.loadTexts: hwhqosFlowWredRowStatus.setDescription('This object indicates the row status. Currently, three row statuses are supported: Active, CreateAndGo, and Destroy.')
# --- hwhqosFlowWredColorTable (OID ...2011.5.25.132.1.20) ---
# Per-color WRED parameters (low/high thresholds and discard percentage)
# of a WRED object, indexed by (WRED name, color).
hwhqosFlowWredColorTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 20), )
if mibBuilder.loadTexts: hwhqosFlowWredColorTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosFlowWredColorTable.setDescription(' hwhqosFlowWredColorTable ')
hwhqosFlowWredColorEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 20, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosFlowWredName"), (0, "HUAWEI-HQOS-MIB", "hwhqosFlowWredColor"))
if mibBuilder.loadTexts: hwhqosFlowWredColorEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosFlowWredColorEntry.setDescription(' hwhqosFlowWredColorEntry ')
# Index column: packet color (green/yellow/red -> 1..3).
hwhqosFlowWredColor = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 20, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("green", 1), ("yellow", 2), ("red", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosFlowWredColor.setStatus('current')
if mibBuilder.loadTexts: hwhqosFlowWredColor.setDescription('This object indicates the color for marking packets.')
# Thresholds/percentages default to 100 (the .clone(100) DEFVAL).
hwhqosFlowWredColorLowlimitPercentage = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 20, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 100)).clone(100)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosFlowWredColorLowlimitPercentage.setStatus('current')
if mibBuilder.loadTexts: hwhqosFlowWredColorLowlimitPercentage.setDescription('The value of this object identifies the percentage of the WRED lower threshold, that is, the percentage of the WRED lower threshold to the flow queue length. When the average queue length reaches this number, packets are discarded.')
hwhqosFlowWredColorHighlimitPercentage = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 20, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 100)).clone(100)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosFlowWredColorHighlimitPercentage.setStatus('current')
if mibBuilder.loadTexts: hwhqosFlowWredColorHighlimitPercentage.setDescription('The value of this object identifies the percentage of the WRED upper threshold, that is, the percentage of the WRED upper threshold to the flow queue length. When the average queue length reaches this number, packets are discarded.')
hwhqosFlowWredColorDiscardPercentage = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 20, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 100)).clone(100)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosFlowWredColorDiscardPercentage.setStatus('current')
if mibBuilder.loadTexts: hwhqosFlowWredColorDiscardPercentage.setDescription('The value of this object identifies the percentage of the packets that are discarded by WRED.')
hwhqosFlowWredColorRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 20, 1, 50), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosFlowWredColorRowStatus.setStatus('current')
if mibBuilder.loadTexts: hwhqosFlowWredColorRowStatus.setDescription('This object indicates the row status. Currently, three row statuses are supported: Active, CreateAndGo, and Destroy.')
# --- hwhqosUserGroupQueueTable (OID ...2011.5.25.132.1.21) ---
# User group queue objects, indexed by name; each row may pin the queue
# to a slot (0 = the whole range allowed by the 0|1..8 constraint union).
hwhqosUserGroupQueueTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 21), )
if mibBuilder.loadTexts: hwhqosUserGroupQueueTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueTable.setDescription(' hwhqosUserGroupQueueTable ')
hwhqosUserGroupQueueEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 21, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueName"))
if mibBuilder.loadTexts: hwhqosUserGroupQueueEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueEntry.setDescription(' hwhqosUserGroupQueueEntry ')
# Index column: user group queue name (1..31 octets, read-only).
hwhqosUserGroupQueueName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 21, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 31))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserGroupQueueName.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueName.setDescription('The value of this object identifies the name of a user group queue.')
# Slot number: 0 or 1..8 (0 presumably means "any/all slots" -- TODO confirm).
hwhqosUserGroupQueueSlotNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 21, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(1, 8), ))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosUserGroupQueueSlotNumber.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueSlotNumber.setDescription('The value of this object identifies the number of the slot where a user group queue object is located.')
hwhqosUserGroupQueueRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 21, 1, 50), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosUserGroupQueueRowStatus.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueRowStatus.setDescription('This object indicates the row status. Currently, three row statuses are supported: Active, CreateAndGo, and Destroy.')
# --- hwhqosUserGroupQueueShapingTable (OID ...2011.5.25.132.1.22) ---
# Per-direction shaping (PIR) and PBS of a user group queue, indexed by
# (group queue name, direction).
hwhqosUserGroupQueueShapingTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 22), )
if mibBuilder.loadTexts: hwhqosUserGroupQueueShapingTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueShapingTable.setDescription(' hwhqosUserGroupQueueShapingTable ')
hwhqosUserGroupQueueShapingEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 22, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueName"), (0, "HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueShapingDirection"))
if mibBuilder.loadTexts: hwhqosUserGroupQueueShapingEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueShapingEntry.setDescription(' hwhqosUserGroupQueueShapingEntry ')
# Index column: traffic direction (inbound=1, outbound=2).
hwhqosUserGroupQueueShapingDirection = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 22, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("inbound", 1), ("outbound", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserGroupQueueShapingDirection.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueShapingDirection.setDescription('This object indicates the direction where a user group queue is scheduled.')
# PIR in kbit/s (66..10000000).
hwhqosUserGroupQueueShapingValue = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 22, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(66, 10000000))).setUnits('Kbps').setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosUserGroupQueueShapingValue.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueShapingValue.setDescription('The value of this object identifies the PIR of a user group queue.')
# Peak burst size in bytes (64..2097152).
hwhqosUserGroupQueuePbsValue = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 22, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(64, 2097152))).setUnits('bytes').setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosUserGroupQueuePbsValue.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueuePbsValue.setDescription(' The object specifies the value of the user group queue pbs. ')
hwhqosUserGroupQueueShapingRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 22, 1, 50), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosUserGroupQueueShapingRowStatus.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueShapingRowStatus.setDescription('This object indicates the row status. Currently, three row statuses are supported: Active, CreateAndGo, and Destroy.')
# --- hwhqosUserQueueTable (OID ...2011.5.25.132.1.23) ---
# User queue applied on an interface: CIR/PIR plus references (by name) to a
# flow queue template, flow mapping, user group queue and service template.
# Indexed by (ifIndex, direction).
hwhqosUserQueueTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 23), )
if mibBuilder.loadTexts: hwhqosUserQueueTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueTable.setDescription(' hwhqosUserQueueTable ')
hwhqosUserQueueEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 23, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosUserQueueInterfaceIndex"), (0, "HUAWEI-HQOS-MIB", "hwhqosUserQueueDirection"))
if mibBuilder.loadTexts: hwhqosUserQueueEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueEntry.setDescription(' hwhqosUserQueueEntry ')
# Index column: interface index of the interface carrying the user queue.
hwhqosUserQueueInterfaceIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 23, 1, 1), InterfaceIndex()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserQueueInterfaceIndex.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueInterfaceIndex.setDescription('This object indicates the interface index.')
# Index column: traffic direction (inbound=1, outbound=2).
hwhqosUserQueueDirection = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 23, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("inbound", 1), ("outbound", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserQueueDirection.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueDirection.setDescription('This object indicates the direction where a user queue is scheduled.')
# CIR/PIR in kbit/s: 0 or 16..1000000 (the 0-only range is a distinct allowed value).
hwhqosUserQueueCirValue = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 23, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(16, 1000000), ))).setUnits('Kbps').setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosUserQueueCirValue.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueCirValue.setDescription('The value of this object identifies the guaranteed bandwidth of a user queue.')
hwhqosUserQueuePirValue = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 23, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(16, 1000000), ))).setUnits('Kbps').setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosUserQueuePirValue.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueuePirValue.setDescription('The value of this object identifies the PIR of a user queue.')
# Name references into the template tables defined earlier in this module.
hwhqosUserQueueFlowQueueName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 23, 1, 5), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 31))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosUserQueueFlowQueueName.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueFlowQueueName.setDescription('The value of this object identifies the flow queue template that is applied.')
hwhqosUserQueueFlowMappingName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 23, 1, 6), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 31))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosUserQueueFlowMappingName.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueFlowMappingName.setDescription('The value of this object identifies the flow queue mapping object that is applied.')
hwhqosUserQueueGroupName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 23, 1, 7), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 31))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosUserQueueGroupName.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueGroupName.setDescription('The value of this object identifies the name of the user group queue that is applied.')
hwhqosUserQueueServiceTemplateName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 23, 1, 8), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 31))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosUserQueueServiceTemplateName.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueServiceTemplateName.setDescription('The value of this object identifies the name of the service template that is applied.')
hwhqosUserQueueRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 23, 1, 50), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosUserQueueRowStatus.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueRowStatus.setDescription('This object indicates the row status. Currently, three row statuses are supported: Active, CreateAndGo, and Destroy.')
# --- hwhqosBehaviorUserQueueTable (OID ...2011.5.25.132.1.24) ---
# User-queue parameters attached to a traffic behavior (not to an interface):
# same CIR/PIR + template-name columns as hwhqosUserQueueTable, but indexed
# by the behavior name and with a wider PIR/CIR upper bound (10000000).
hwhqosBehaviorUserQueueTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 24), )
if mibBuilder.loadTexts: hwhqosBehaviorUserQueueTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosBehaviorUserQueueTable.setDescription(' hwBehaviorhqosUserQueueTable ')
hwhqosBehaviorUserQueueEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 24, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosBehaviorName"))
if mibBuilder.loadTexts: hwhqosBehaviorUserQueueEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosBehaviorUserQueueEntry.setDescription(' hwhqosBehaviorUserQueueEntry ')
# Index column: flow behavior name (1..31 octets, read-only).
hwhqosBehaviorName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 24, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 31))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosBehaviorName.setStatus('current')
if mibBuilder.loadTexts: hwhqosBehaviorName.setDescription('The value of this object identifies the name of a flow behavior.')
hwhqosBehaviorCirValue = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 24, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(16, 10000000), ))).setUnits('Kbps').setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosBehaviorCirValue.setStatus('current')
if mibBuilder.loadTexts: hwhqosBehaviorCirValue.setDescription('The value of this object identifies the guaranteed bandwidth of a flow user queue.')
hwhqosBehaviorPirValue = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 24, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(16, 10000000), ))).setUnits('Kbps').setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosBehaviorPirValue.setStatus('current')
if mibBuilder.loadTexts: hwhqosBehaviorPirValue.setDescription('The value of this object identifies the PIR of a flow user queue.')
# Name references into the template tables defined earlier in this module.
hwhqosBehaviorFlowQueueName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 24, 1, 4), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 31))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosBehaviorFlowQueueName.setStatus('current')
if mibBuilder.loadTexts: hwhqosBehaviorFlowQueueName.setDescription('The value of this object identifies the flow queue template that is applied.')
hwhqosBehaviorFlowMappingName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 24, 1, 5), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 31))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosBehaviorFlowMappingName.setStatus('current')
if mibBuilder.loadTexts: hwhqosBehaviorFlowMappingName.setDescription('The value of this object identifies the flow queue mapping object that is applied.')
hwhqosBehaviorGroupName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 24, 1, 6), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 31))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosBehaviorGroupName.setStatus('current')
if mibBuilder.loadTexts: hwhqosBehaviorGroupName.setDescription('The value of this object identifies the name of the user group queue that is applied.')
hwhqosBehaviorServiceTemplateName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 24, 1, 7), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 31))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosBehaviorServiceTemplateName.setStatus('current')
if mibBuilder.loadTexts: hwhqosBehaviorServiceTemplateName.setDescription('The value of this object identifies the name of the service template that is applied.')
hwhqosBehaviorUserQueueRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 24, 1, 50), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosBehaviorUserQueueRowStatus.setStatus('current')
if mibBuilder.loadTexts: hwhqosBehaviorUserQueueRowStatus.setDescription('This object indicates the row status. Currently, three row statuses are supported: Active, CreateAndGo, and Destroy.')
# --- hwhqosBandwidthTable (OID ...2011.5.25.132.1.25) ---
# Bandwidth configured on a trunk interface, indexed by ifIndex.
hwhqosBandwidthTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 25), )
if mibBuilder.loadTexts: hwhqosBandwidthTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosBandwidthTable.setDescription(' hwhqosBandwidthTable ')
hwhqosBandwidthEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 25, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosBandwidthInterfaceIndex"))
if mibBuilder.loadTexts: hwhqosBandwidthEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosBandwidthEntry.setDescription(' hwhqosBandwidthEntry ')
# Index column: interface index of the trunk interface.
hwhqosBandwidthInterfaceIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 25, 1, 1), InterfaceIndex()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosBandwidthInterfaceIndex.setStatus('current')
if mibBuilder.loadTexts: hwhqosBandwidthInterfaceIndex.setDescription('This object indicates the name of a trunk interface.')
# Bandwidth in Mbit/s (0..10000), DEFVAL 1000 via .clone(1000).
hwhqosBandwidthValue = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 25, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 10000)).clone(1000)).setUnits('Mbps').setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosBandwidthValue.setStatus('current')
if mibBuilder.loadTexts: hwhqosBandwidthValue.setDescription('The value of this object identifies the bandwidth of a trunk interface.')
hwhqosBandwidthRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 25, 1, 50), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosBandwidthRowStatus.setStatus('current')
if mibBuilder.loadTexts: hwhqosBandwidthRowStatus.setDescription('This object indicates the row status. Currently, three row statuses are supported: Active, CreateAndGo, and Destroy.')
# --- hwhqosServiceTemplateTable (OID ...2011.5.25.132.1.26) ---
# User-defined service templates, indexed by template name, each bound to a slot.
hwhqosServiceTemplateTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 26), )
if mibBuilder.loadTexts: hwhqosServiceTemplateTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosServiceTemplateTable.setDescription(' hwhqosServiceTemplateTable ')
hwhqosServiceTemplateEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 26, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosServiceTemplateName"))
if mibBuilder.loadTexts: hwhqosServiceTemplateEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosServiceTemplateEntry.setDescription(' hwhqosServiceTemplateEntry ')
# Index column: service template name (1..31 octets, read-only).
hwhqosServiceTemplateName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 26, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 31))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosServiceTemplateName.setStatus('current')
if mibBuilder.loadTexts: hwhqosServiceTemplateName.setDescription('The value of this object identifies the name of a user-defined service template.')
hwhqosSlotNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 26, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 8))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosSlotNumber.setStatus('current')
if mibBuilder.loadTexts: hwhqosSlotNumber.setDescription('The value of this object identifies the number of the slot where a user-defined service template is located.')
hwhqosServiceTemplateRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 26, 1, 50), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosServiceTemplateRowStatus.setStatus('current')
if mibBuilder.loadTexts: hwhqosServiceTemplateRowStatus.setDescription('This object indicates the row status. Currently, three row statuses are supported: Active, CreateAndGo, and Destroy.')
# --- hwhqosNetworkHeaderLengthTable (OID ...2011.5.25.132.1.27) ---
# Packet-loss compensation (network header length adjustment, may be
# negative) of a service template, indexed by (template name, direction).
hwhqosNetworkHeaderLengthTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 27), )
if mibBuilder.loadTexts: hwhqosNetworkHeaderLengthTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosNetworkHeaderLengthTable.setDescription(' hwhqosNetworkHeaderLengthTable ')
hwhqosNetworkHeaderLengthEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 27, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosServiceTemplateName"), (0, "HUAWEI-HQOS-MIB", "hwhqosNetworkHeaderLengthDirection"))
if mibBuilder.loadTexts: hwhqosNetworkHeaderLengthEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosNetworkHeaderLengthEntry.setDescription(' hwhqosNetworkHeaderLengthEntry ')
# Index column: traffic direction (inbound=1, outbound=2).
hwhqosNetworkHeaderLengthDirection = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 27, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("inbound", 1), ("outbound", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosNetworkHeaderLengthDirection.setStatus('current')
if mibBuilder.loadTexts: hwhqosNetworkHeaderLengthDirection.setDescription('This object indicates the direction where the packet loss compensation of a service template is configured.')
# Signed adjustment length, -63..63 (note: object name uses "NetWork" capitalization).
hwhqosNetWorkHeaderLengthValue = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 27, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-63, 63))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosNetWorkHeaderLengthValue.setStatus('current')
if mibBuilder.loadTexts: hwhqosNetWorkHeaderLengthValue.setDescription('The value of this object identifies the accuracy of the length for the packet loss compensation of a service template.')
hwhqosNetWorkHeaderLengthRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 27, 1, 50), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosNetWorkHeaderLengthRowStatus.setStatus('current')
if mibBuilder.loadTexts: hwhqosNetWorkHeaderLengthRowStatus.setDescription('This object indicates the row status. Currently, three row statuses are supported: Active, CreateAndGo, and Destroy.')
# --- hwhqosServiceTemplateApplyTable (OID ...2011.5.25.132.1.28) ---
# Binds a service template (by name) to an interface, indexed by ifIndex.
hwhqosServiceTemplateApplyTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 28), )
if mibBuilder.loadTexts: hwhqosServiceTemplateApplyTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosServiceTemplateApplyTable.setDescription(' hwhqosServiceTemplateApplyTable ')
hwhqosServiceTemplateApplyEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 28, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosServiceTemplateApplyInterfaceIndex"))
if mibBuilder.loadTexts: hwhqosServiceTemplateApplyEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosServiceTemplateApplyEntry.setDescription(' hwhqosServiceTemplateApplyEntry ')
# Index column: interface index of the interface the template is applied to.
hwhqosServiceTemplateApplyInterfaceIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 28, 1, 1), InterfaceIndex()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosServiceTemplateApplyInterfaceIndex.setStatus('current')
if mibBuilder.loadTexts: hwhqosServiceTemplateApplyInterfaceIndex.setDescription('This object indicates the name of the interface.')
hwhqosApplyServiceTemplateName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 28, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 31))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosApplyServiceTemplateName.setStatus('current')
if mibBuilder.loadTexts: hwhqosApplyServiceTemplateName.setDescription('The value of this object identifies the name of a user-defined service template.')
hwhqosServiceTemplateApplyRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 28, 1, 50), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosServiceTemplateApplyRowStatus.setStatus('current')
if mibBuilder.loadTexts: hwhqosServiceTemplateApplyRowStatus.setDescription('This object indicates the row status. Currently, three row statuses are supported: Active, CreateAndGo, and Destroy.')
hwhqosProfileUserQueueStatisticsTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 29), )
if mibBuilder.loadTexts: hwhqosProfileUserQueueStatisticsTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileUserQueueStatisticsTable.setDescription(' hwhqosProfileUserQueueStatisticsTable ')
hwhqosProfileUserQueueStatisticsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 29, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosProfileUserQueueStatisticsInterfaceIndex"), (0, "HUAWEI-HQOS-MIB", "hwhqosProfileUserQueueStatisticsDirection"), (0, "HUAWEI-HQOS-MIB", "hwhqosProfileUserQueueStatisticsPevid"), (0, "HUAWEI-HQOS-MIB", "hwhqosProfileUserQueueStatisticsCevid"), (0, "HUAWEI-HQOS-MIB", "hwhqosProfileUserQueueStatisticsSlotNumber"), (0, "HUAWEI-HQOS-MIB", "hwhqosProfileUserQueueStatisticsQueueIndex"))
if mibBuilder.loadTexts: hwhqosProfileUserQueueStatisticsEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileUserQueueStatisticsEntry.setDescription(' hwhqosProfileUserQueueStatisticsEntry ')
hwhqosProfileUserQueueStatisticsInterfaceIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 29, 1, 1), InterfaceIndex()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosProfileUserQueueStatisticsInterfaceIndex.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileUserQueueStatisticsInterfaceIndex.setDescription('This object indicates the interface index.')
hwhqosProfileUserQueueStatisticsDirection = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 29, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("inbound", 1), ("outbound", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosProfileUserQueueStatisticsDirection.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileUserQueueStatisticsDirection.setDescription('This object indicates the direction where a QoS profile is applied.')
hwhqosProfileUserQueueStatisticsPevid = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 29, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4094))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosProfileUserQueueStatisticsPevid.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileUserQueueStatisticsPevid.setDescription('The value of this object identifies the PE VID (outer tag value) of a QoS profile.')
# --- hwhqosProfileUserQueueStatisticsTable (…132.1.29): remaining index and data columns ---
# Index columns: CE VID, slot number and queue priority of the user queue being queried.
hwhqosProfileUserQueueStatisticsCevid = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 29, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4094))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosProfileUserQueueStatisticsCevid.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileUserQueueStatisticsCevid.setDescription('The value of this object identifies the CE VID (inner tag value) or the VLAN ID of a QoS profile.')
hwhqosProfileUserQueueStatisticsSlotNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 29, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 128))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosProfileUserQueueStatisticsSlotNumber.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileUserQueueStatisticsSlotNumber.setDescription('The value of this object identifies the number of the slot where the interface for applying a QoS profile is located.')
# Queue priority enum: be/af1..af4/ef/cs6/cs7 plus total(9) for the aggregate of all queues.
hwhqosProfileUserQueueStatisticsQueueIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 29, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9))).clone(namedValues=NamedValues(("be", 1), ("af1", 2), ("af2", 3), ("af3", 4), ("af4", 5), ("ef", 6), ("cs6", 7), ("cs7", 8), ("total", 9)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosProfileUserQueueStatisticsQueueIndex.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileUserQueueStatisticsQueueIndex.setDescription('This object indicates the priority of the user queue whose statistics are queried.')
# Read-create control column; writing reset(1) presumably clears the row's counters — TODO confirm agent behavior.
hwhqosProfileUserQueueStatisticsReset = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 29, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1))).clone(namedValues=NamedValues(("reset", 1)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosProfileUserQueueStatisticsReset.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileUserQueueStatisticsReset.setDescription('This object indicates the resetting of statistics.')
# Cumulative pass/drop counters (Counter64, packets and bytes).
hwhqosProfileUserQueueStatisticsPassPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 29, 1, 8), Counter64()).setUnits('packets').setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosProfileUserQueueStatisticsPassPackets.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileUserQueueStatisticsPassPackets.setDescription('This object indicates the number of passed packets.')
hwhqosProfileUserQueueStatisticsPassBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 29, 1, 9), Counter64()).setUnits('bytes').setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosProfileUserQueueStatisticsPassBytes.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileUserQueueStatisticsPassBytes.setDescription('This object indicates the number of passed bytes.')
hwhqosProfileUserQueueStatisticsDropPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 29, 1, 10), Counter64()).setUnits('packets').setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosProfileUserQueueStatisticsDropPackets.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileUserQueueStatisticsDropPackets.setDescription('This object indicates the number of discarded packets.')
hwhqosProfileUserQueueStatisticsDropBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 29, 1, 11), Counter64()).setUnits('bytes').setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosProfileUserQueueStatisticsDropBytes.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileUserQueueStatisticsDropBytes.setDescription('This object indicates the number of discarded bytes.')
# Instantaneous pass/drop rates (pps and bps).
hwhqosProfileUserQueueStatisticsPassPacketsRate = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 29, 1, 12), Counter64()).setUnits('pps').setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosProfileUserQueueStatisticsPassPacketsRate.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileUserQueueStatisticsPassPacketsRate.setDescription('This object indicates the packet pass rate.')
hwhqosProfileUserQueueStatisticsPassBytesRate = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 29, 1, 13), Counter64()).setUnits('bps').setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosProfileUserQueueStatisticsPassBytesRate.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileUserQueueStatisticsPassBytesRate.setDescription('This object indicates the byte pass rate.')
hwhqosProfileUserQueueStatisticsDropPacketsRate = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 29, 1, 14), Counter64()).setUnits('pps').setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosProfileUserQueueStatisticsDropPacketsRate.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileUserQueueStatisticsDropPacketsRate.setDescription('This object indicates the packet discarding rate.')
hwhqosProfileUserQueueStatisticsDropBytesRate = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 29, 1, 15), Counter64()).setUnits('bps').setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosProfileUserQueueStatisticsDropBytesRate.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileUserQueueStatisticsDropBytesRate.setDescription('This object indicates the byte discarding rate.')
hwhqosProfileUserQueueStatisticsTrafficTowardsThisInterface = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 29, 1, 16), Counter64()).setUnits('bps').setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosProfileUserQueueStatisticsTrafficTowardsThisInterface.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileUserQueueStatisticsTrafficTowardsThisInterface.setDescription('This object indicates the traffic which towards this interface.')
# Configured CIR/PIR in kbps; valid values are 0 or 16..4294967294 (the 1..15 gap comes from the device's minimum rate).
hwhqosProfileUserQueueStatisticsConfiguredCir = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 29, 1, 17), Unsigned32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(16, 4294967294), ))).setUnits('kbps').setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosProfileUserQueueStatisticsConfiguredCir.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileUserQueueStatisticsConfiguredCir.setDescription('This object indicates the configured CIR.')
hwhqosProfileUserQueueStatisticsConfiguredPir = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 29, 1, 18), Unsigned32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(16, 4294967294), ))).setUnits('kbps').setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosProfileUserQueueStatisticsConfiguredPir.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileUserQueueStatisticsConfiguredPir.setDescription('This object indicates the configured PIR.')
# --- hwhqosProfileCarStatisticsTable (…132.1.30): CAR/suppression statistics of a QoS profile ---
# Indexed by interface, direction, PE/CE VID, statistics type and slot number.
hwhqosProfileCarStatisticsTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 30), )
if mibBuilder.loadTexts: hwhqosProfileCarStatisticsTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileCarStatisticsTable.setDescription(' hwhqosProfileCarStatisticsTable ')
hwhqosProfileCarStatisticsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 30, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosProfileCarStatisticsInterfaceIndex"), (0, "HUAWEI-HQOS-MIB", "hwhqosProfileCarStatisticsDirection"), (0, "HUAWEI-HQOS-MIB", "hwhqosProfileCarStatisticsPevid"), (0, "HUAWEI-HQOS-MIB", "hwhqosProfileCarStatisticsCevid"), (0, "HUAWEI-HQOS-MIB", "hwhqosProfileCarStatisticsType"), (0, "HUAWEI-HQOS-MIB", "hwhqosProfileCarStatisticsSlotNumber"))
if mibBuilder.loadTexts: hwhqosProfileCarStatisticsEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileCarStatisticsEntry.setDescription(' hwhqosProfileCarStatisticsEntry ')
# Index columns.
hwhqosProfileCarStatisticsInterfaceIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 30, 1, 1), InterfaceIndex()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosProfileCarStatisticsInterfaceIndex.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileCarStatisticsInterfaceIndex.setDescription('This object indicates the interface index.')
hwhqosProfileCarStatisticsDirection = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 30, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("inbound", 1), ("outbound", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosProfileCarStatisticsDirection.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileCarStatisticsDirection.setDescription('This object indicates the direction where a QoS profile is applied.')
hwhqosProfileCarStatisticsPevid = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 30, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4094))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosProfileCarStatisticsPevid.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileCarStatisticsPevid.setDescription('The value of this object identifies the PE VID (outer tag value) of a QoS profile.')
hwhqosProfileCarStatisticsCevid = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 30, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4094))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosProfileCarStatisticsCevid.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileCarStatisticsCevid.setDescription('The value of this object identifies the CE VID (inner tag value) or the VLAN ID of a QoS profile.')
# Statistics type selects which counter set is queried: CAR or one of the traffic-suppression kinds.
hwhqosProfileCarStatisticsType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 30, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("car", 1), ("broadcastSuppression", 2), ("multicastSuppression", 3), ("unknownUnicastSuppression", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosProfileCarStatisticsType.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileCarStatisticsType.setDescription('This object indicates the type of the statistics on the CAR or suppression to be queried.')
hwhqosProfileCarStatisticsSlotNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 30, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 128))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosProfileCarStatisticsSlotNumber.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileCarStatisticsSlotNumber.setDescription('The value of this object identifies the number of the slot where the interface for applying a QoS profile is located.')
# Read-create control column; writing reset(1) presumably clears the row's counters — TODO confirm agent behavior.
hwhqosProfileCarStatisticsReset = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 30, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1))).clone(namedValues=NamedValues(("reset", 1)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosProfileCarStatisticsReset.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileCarStatisticsReset.setDescription('This object indicates the resetting of statistics.')
# Cumulative pass/drop counters and their rates (Counter64).
hwhqosProfileCarStatisticsPassPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 30, 1, 8), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosProfileCarStatisticsPassPackets.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileCarStatisticsPassPackets.setDescription('This object indicates the number of passed packets.')
hwhqosProfileCarStatisticsPassBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 30, 1, 9), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosProfileCarStatisticsPassBytes.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileCarStatisticsPassBytes.setDescription('This object indicates the number of passed bytes.')
hwhqosProfileCarStatisticsDropPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 30, 1, 10), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosProfileCarStatisticsDropPackets.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileCarStatisticsDropPackets.setDescription('This object indicates the number of discarded packets.')
hwhqosProfileCarStatisticsDropBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 30, 1, 11), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosProfileCarStatisticsDropBytes.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileCarStatisticsDropBytes.setDescription('This object indicates the number of discarded bytes.')
hwhqosProfileCarStatisticsPassPacketsRate = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 30, 1, 12), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosProfileCarStatisticsPassPacketsRate.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileCarStatisticsPassPacketsRate.setDescription('This object indicates the packet pass rate.')
hwhqosProfileCarStatisticsPassBytesRate = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 30, 1, 13), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosProfileCarStatisticsPassBytesRate.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileCarStatisticsPassBytesRate.setDescription('This object indicates the byte pass rate.')
hwhqosProfileCarStatisticsDropPacketsRate = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 30, 1, 14), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosProfileCarStatisticsDropPacketsRate.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileCarStatisticsDropPacketsRate.setDescription('This object indicates the packet discarding rate.')
hwhqosProfileCarStatisticsDropBytesRate = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 30, 1, 15), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosProfileCarStatisticsDropBytesRate.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileCarStatisticsDropBytesRate.setDescription('This object indicates the byte discarding rate.')
# --- hwhqosUserQueueStatisticsTable (…132.1.31): per-user-queue statistics ---
# Indexed by interface, direction, slot number and queue priority.
hwhqosUserQueueStatisticsTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 31), )
if mibBuilder.loadTexts: hwhqosUserQueueStatisticsTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueStatisticsTable.setDescription(' hwhqosUserQueueStatisticsTable ')
hwhqosUserQueueStatisticsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 31, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosUserQueueStatisticsInterfaceIndex"), (0, "HUAWEI-HQOS-MIB", "hwhqosUserQueueStatisticsDirection"), (0, "HUAWEI-HQOS-MIB", "hwhqosUserQueueStatisticsSlotNumber"), (0, "HUAWEI-HQOS-MIB", "hwhqosUserQueueStatisticsQueueIndex"))
if mibBuilder.loadTexts: hwhqosUserQueueStatisticsEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueStatisticsEntry.setDescription(' hwhqosUserQueueStatisticsEntry ')
# Index columns.
hwhqosUserQueueStatisticsInterfaceIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 31, 1, 1), InterfaceIndex()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserQueueStatisticsInterfaceIndex.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueStatisticsInterfaceIndex.setDescription('This object indicates the interface index.')
hwhqosUserQueueStatisticsDirection = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 31, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("inbound", 1), ("outbound", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserQueueStatisticsDirection.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueStatisticsDirection.setDescription('This object indicates the direction where a user queue is applied.')
hwhqosUserQueueStatisticsSlotNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 31, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 128))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserQueueStatisticsSlotNumber.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueStatisticsSlotNumber.setDescription('The value of this object identifies the number of the slot where the interface for applying a user queue is located.')
# Queue priority enum: be/af1..af4/ef/cs6/cs7 plus total(9) for the aggregate of all queues.
hwhqosUserQueueStatisticsQueueIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 31, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9))).clone(namedValues=NamedValues(("be", 1), ("af1", 2), ("af2", 3), ("af3", 4), ("af4", 5), ("ef", 6), ("cs6", 7), ("cs7", 8), ("total", 9)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserQueueStatisticsQueueIndex.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueStatisticsQueueIndex.setDescription('This object indicates the priority of a user queue.')
# Read-create control column; writing reset(1) presumably clears the row's counters — TODO confirm agent behavior.
hwhqosUserQueueStatisticsReset = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 31, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1))).clone(namedValues=NamedValues(("reset", 1)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosUserQueueStatisticsReset.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueStatisticsReset.setDescription('This object indicates the resetting of statistics.')
# Cumulative pass/drop counters and their rates (Counter64).
hwhqosUserQueueStatisticsPassPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 31, 1, 6), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserQueueStatisticsPassPackets.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueStatisticsPassPackets.setDescription('This object indicates the number of passed packets.')
hwhqosUserQueueStatisticsPassBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 31, 1, 7), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserQueueStatisticsPassBytes.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueStatisticsPassBytes.setDescription('This object indicates the number of passed bytes.')
hwhqosUserQueueStatisticsDropPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 31, 1, 8), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserQueueStatisticsDropPackets.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueStatisticsDropPackets.setDescription('This object indicates the number of discarded packets.')
hwhqosUserQueueStatisticsDropBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 31, 1, 9), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserQueueStatisticsDropBytes.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueStatisticsDropBytes.setDescription('This object indicates the number of discarded bytes.')
hwhqosUserQueueStatisticsPassPacketsRate = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 31, 1, 10), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserQueueStatisticsPassPacketsRate.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueStatisticsPassPacketsRate.setDescription('This object indicates the packet pass rate.')
hwhqosUserQueueStatisticsPassBytesRate = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 31, 1, 11), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserQueueStatisticsPassBytesRate.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueStatisticsPassBytesRate.setDescription('This object indicates the byte pass rate.')
hwhqosUserQueueStatisticsDropPacketsRate = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 31, 1, 12), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserQueueStatisticsDropPacketsRate.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueStatisticsDropPacketsRate.setDescription('This object indicates the packet discarding rate.')
hwhqosUserQueueStatisticsDropBytesRate = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 31, 1, 13), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserQueueStatisticsDropBytesRate.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueStatisticsDropBytesRate.setDescription('This object indicates the byte discarding rate.')
# --- hwhqosUserQueueClassifierStatisticsTable (…132.1.32): per-classifier user-queue statistics ---
# Indexed by interface, direction, classifier name, slot number and queue priority.
hwhqosUserQueueClassifierStatisticsTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 32), )
if mibBuilder.loadTexts: hwhqosUserQueueClassifierStatisticsTable.setStatus('current')
# Fixed copy-paste error: the description previously echoed the sibling table's name
# (' hwhqosUserQueueStatisticsTable ') instead of this table's own name.
if mibBuilder.loadTexts: hwhqosUserQueueClassifierStatisticsTable.setDescription(' hwhqosUserQueueClassifierStatisticsTable ')
hwhqosUserQueueClassifierStatisticsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 32, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosUserQueueClassifierStatisticsInterfaceIndex"), (0, "HUAWEI-HQOS-MIB", "hwhqosUserQueueClassifierStatisticsDirection"), (0, "HUAWEI-HQOS-MIB", "hwhqosUserQueueClassifierStatisticsClassifierName"), (0, "HUAWEI-HQOS-MIB", "hwhqosUserQueueClassifierStatisticsSlotNumber"), (0, "HUAWEI-HQOS-MIB", "hwhqosUserQueueClassifierStatisticsQueueIndex"))
if mibBuilder.loadTexts: hwhqosUserQueueClassifierStatisticsEntry.setStatus('current')
# Fixed copy-paste error: same correction for the entry description.
if mibBuilder.loadTexts: hwhqosUserQueueClassifierStatisticsEntry.setDescription(' hwhqosUserQueueClassifierStatisticsEntry ')
# Index columns.
hwhqosUserQueueClassifierStatisticsInterfaceIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 32, 1, 1), InterfaceIndex()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserQueueClassifierStatisticsInterfaceIndex.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueClassifierStatisticsInterfaceIndex.setDescription('This object indicates the interface index.')
hwhqosUserQueueClassifierStatisticsDirection = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 32, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("inbound", 1), ("outbound", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserQueueClassifierStatisticsDirection.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueClassifierStatisticsDirection.setDescription('This object indicates the direction where a policy is applied.')
# Class template name, 1..31 octets.
hwhqosUserQueueClassifierStatisticsClassifierName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 32, 1, 3), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 31))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserQueueClassifierStatisticsClassifierName.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueClassifierStatisticsClassifierName.setDescription('The value of this object identifies the name of a class template.')
hwhqosUserQueueClassifierStatisticsSlotNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 32, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 128))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserQueueClassifierStatisticsSlotNumber.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueClassifierStatisticsSlotNumber.setDescription('The value of this object identifies the number of the slot where the interface for applying a policy is located.')
# Queue priority enum: be/af1..af4/ef/cs6/cs7 plus total(9) for the aggregate of all queues.
hwhqosUserQueueClassifierStatisticsQueueIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 32, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9))).clone(namedValues=NamedValues(("be", 1), ("af1", 2), ("af2", 3), ("af3", 4), ("af4", 5), ("ef", 6), ("cs6", 7), ("cs7", 8), ("total", 9)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserQueueClassifierStatisticsQueueIndex.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueClassifierStatisticsQueueIndex.setDescription('This object indicates the priority of a user queue.')
# Read-create control column; writing reset(1) presumably clears the row's counters — TODO confirm agent behavior.
hwhqosUserQueueClassifierStatisticsReset = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 32, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1))).clone(namedValues=NamedValues(("reset", 1)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosUserQueueClassifierStatisticsReset.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueClassifierStatisticsReset.setDescription('This object indicates the resetting of statistics.')
# Cumulative pass/drop counters and their rates (Counter64).
hwhqosUserQueueClassifierStatisticsPassPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 32, 1, 7), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserQueueClassifierStatisticsPassPackets.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueClassifierStatisticsPassPackets.setDescription('This object indicates the number of passed packets.')
hwhqosUserQueueClassifierStatisticsPassBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 32, 1, 8), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserQueueClassifierStatisticsPassBytes.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueClassifierStatisticsPassBytes.setDescription('This object indicates the number of passed bytes.')
hwhqosUserQueueClassifierStatisticsDropPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 32, 1, 9), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserQueueClassifierStatisticsDropPackets.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueClassifierStatisticsDropPackets.setDescription('This object indicates the number of discarded packets.')
hwhqosUserQueueClassifierStatisticsDropBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 32, 1, 10), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserQueueClassifierStatisticsDropBytes.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueClassifierStatisticsDropBytes.setDescription('This object indicates the number of discarded bytes.')
hwhqosUserQueueClassifierStatisticsPassPacketsRate = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 32, 1, 11), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserQueueClassifierStatisticsPassPacketsRate.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueClassifierStatisticsPassPacketsRate.setDescription('This object indicates the packet pass rate.')
hwhqosUserQueueClassifierStatisticsPassBytesRate = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 32, 1, 12), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserQueueClassifierStatisticsPassBytesRate.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueClassifierStatisticsPassBytesRate.setDescription('This object indicates the byte pass rate.')
hwhqosUserQueueClassifierStatisticsDropPacketsRate = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 32, 1, 13), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserQueueClassifierStatisticsDropPacketsRate.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueClassifierStatisticsDropPacketsRate.setDescription('This object indicates the packet discarding rate.')
hwhqosUserQueueClassifierStatisticsDropBytesRate = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 32, 1, 14), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserQueueClassifierStatisticsDropBytesRate.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueClassifierStatisticsDropBytesRate.setDescription('This object indicates the byte discarding rate.')
# --- hwhqosUserGroupQueueStatisticsTable (…132.1.33): user-group-queue statistics ---
# Indexed by group name, direction, slot number and queue priority.
# NOTE(review): several setDescription strings below merely echo the object name
# (placeholders from the source MIB) — the comments here supply the missing context.
hwhqosUserGroupQueueStatisticsTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 33), )
if mibBuilder.loadTexts: hwhqosUserGroupQueueStatisticsTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueStatisticsTable.setDescription(' hwhqosUserGroupQueueStatisticsTable ')
hwhqosUserGroupQueueStatisticsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 33, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueStatisticsGroupName"), (0, "HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueStatisticsDirection"), (0, "HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueStatisticsSlotNumber"), (0, "HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueStatisticsQueueIndex"))
if mibBuilder.loadTexts: hwhqosUserGroupQueueStatisticsEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueStatisticsEntry.setDescription(' hwhqosUserGroupQueueStatisticsEntry ')
# Index columns: group name (1..31 octets), direction, slot, queue priority.
hwhqosUserGroupQueueStatisticsGroupName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 33, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 31))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserGroupQueueStatisticsGroupName.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueStatisticsGroupName.setDescription('The value of this object identifies the name of a user group queue.')
hwhqosUserGroupQueueStatisticsDirection = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 33, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("inbound", 1), ("outbound", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserGroupQueueStatisticsDirection.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueStatisticsDirection.setDescription('This object indicates the direction where a user group queue template is applied.')
hwhqosUserGroupQueueStatisticsSlotNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 33, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 128))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserGroupQueueStatisticsSlotNumber.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueStatisticsSlotNumber.setDescription('The value of this object identifies the number of the slot where a user group queue template is applied.')
# Queue priority enum: be/af1..af4/ef/cs6/cs7 plus total(9) for the aggregate of all queues.
hwhqosUserGroupQueueStatisticsQueueIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 33, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9))).clone(namedValues=NamedValues(("be", 1), ("af1", 2), ("af2", 3), ("af3", 4), ("af4", 5), ("ef", 6), ("cs6", 7), ("cs7", 8), ("total", 9)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserGroupQueueStatisticsQueueIndex.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueStatisticsQueueIndex.setDescription(' hwhqosUserGroupQueueStatisticsQueueIndex ')
# Read-create control column; writing reset(1) presumably clears the row's counters — TODO confirm agent behavior.
hwhqosUserGroupQueueStatisticsReset = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 33, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1))).clone(namedValues=NamedValues(("reset", 1)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosUserGroupQueueStatisticsReset.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueStatisticsReset.setDescription('This object indicates the resetting of statistics.')
# Cumulative pass/drop counters and their rates (Counter64).
hwhqosUserGroupQueueStatisticsPassPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 33, 1, 6), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserGroupQueueStatisticsPassPackets.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueStatisticsPassPackets.setDescription('This object indicates the number of passed packets.')
hwhqosUserGroupQueueStatisticsPassBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 33, 1, 7), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserGroupQueueStatisticsPassBytes.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueStatisticsPassBytes.setDescription('This object indicates the number of passed bytes.')
hwhqosUserGroupQueueStatisticsDropPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 33, 1, 8), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserGroupQueueStatisticsDropPackets.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueStatisticsDropPackets.setDescription('This object indicates the number of discarded packets.')
hwhqosUserGroupQueueStatisticsDropBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 33, 1, 9), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserGroupQueueStatisticsDropBytes.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueStatisticsDropBytes.setDescription('This object indicates the number of discarded bytes.')
hwhqosUserGroupQueueStatisticsPassPacketsRate = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 33, 1, 10), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserGroupQueueStatisticsPassPacketsRate.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueStatisticsPassPacketsRate.setDescription(' hwhqosUserGroupQueueStatisticsPassPacketsRate.')
hwhqosUserGroupQueueStatisticsPassBytesRate = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 33, 1, 11), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserGroupQueueStatisticsPassBytesRate.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueStatisticsPassBytesRate.setDescription(' hwhqosUserGroupQueueStatisticsPassBytesRate.')
hwhqosUserGroupQueueStatisticsDropPacketsRate = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 33, 1, 12), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserGroupQueueStatisticsDropPacketsRate.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueStatisticsDropPacketsRate.setDescription(' hwhqosUserGroupQueueStatisticsDropPacketsRate.')
hwhqosUserGroupQueueStatisticsDropBytesRate = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 33, 1, 13), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserGroupQueueStatisticsDropBytesRate.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueStatisticsDropBytesRate.setDescription(' hwhqosUserGroupQueueStatisticsDropBytesRate.')
# --- hwhqosFlowQueueShaperTable (…132.1.34): per-flow-queue shaper enable flags ---
# Indexed by hwhqosFlowQueueName; one enable(1)/disable(2) column per queue priority.
# NOTE(review): all setDescription texts here are the literal placeholder 'Description.'
# carried over from the source MIB — the comments below supply the missing context.
hwhqosFlowQueueShaperTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 34), )
if mibBuilder.loadTexts: hwhqosFlowQueueShaperTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosFlowQueueShaperTable.setDescription('Description.')
hwhqosFlowQueueShaperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 34, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosFlowQueueName"))
if mibBuilder.loadTexts: hwhqosFlowQueueShaperEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosFlowQueueShaperEntry.setDescription('Description.')
# Shaper switch for the BE queue.
hwhqosFlowQueueShaperBE = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 34, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosFlowQueueShaperBE.setStatus('current')
if mibBuilder.loadTexts: hwhqosFlowQueueShaperBE.setDescription('Description.')
# Shaper switch for the AF1 queue.
hwhqosFlowQueueShaperAF1 = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 34, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosFlowQueueShaperAF1.setStatus('current')
if mibBuilder.loadTexts: hwhqosFlowQueueShaperAF1.setDescription('Description.')
# Shaper switch for the AF2 queue.
hwhqosFlowQueueShaperAF2 = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 34, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosFlowQueueShaperAF2.setStatus('current')
if mibBuilder.loadTexts: hwhqosFlowQueueShaperAF2.setDescription('Description.')
# Shaper switch for the AF3 queue.
hwhqosFlowQueueShaperAF3 = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 34, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosFlowQueueShaperAF3.setStatus('current')
if mibBuilder.loadTexts: hwhqosFlowQueueShaperAF3.setDescription('Description.')
# Shaper switch for the AF4 queue.
hwhqosFlowQueueShaperAF4 = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 34, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosFlowQueueShaperAF4.setStatus('current')
if mibBuilder.loadTexts: hwhqosFlowQueueShaperAF4.setDescription('Description.')
hwhqosFlowQueueShaperEF = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 34, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosFlowQueueShaperEF.setStatus('current')
if mibBuilder.loadTexts: hwhqosFlowQueueShaperEF.setDescription('Description.')
hwhqosFlowQueueShaperCS6 = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 34, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosFlowQueueShaperCS6.setStatus('current')
if mibBuilder.loadTexts: hwhqosFlowQueueShaperCS6.setDescription('Description.')
hwhqosFlowQueueShaperCS7 = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 34, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosFlowQueueShaperCS7.setStatus('current')
if mibBuilder.loadTexts: hwhqosFlowQueueShaperCS7.setDescription('Description.')
hwhqosFlowQueueShaperValue = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 34, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 10000000))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosFlowQueueShaperValue.setStatus('current')
if mibBuilder.loadTexts: hwhqosFlowQueueShaperValue.setDescription('Description.')
hwhqosFlowQueueShaperRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 34, 1, 50), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosFlowQueueShaperRowStatus.setStatus('current')
if mibBuilder.loadTexts: hwhqosFlowQueueShaperRowStatus.setDescription('Description.')
hwhqosWredQueueDepthTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 35), )
if mibBuilder.loadTexts: hwhqosWredQueueDepthTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosWredQueueDepthTable.setDescription('Description.')
hwhqosWredQueueDepthEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 35, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosWredQueueDepthType"), (0, "HUAWEI-HQOS-MIB", "hwhqosWredQueueDepthWredName"))
if mibBuilder.loadTexts: hwhqosWredQueueDepthEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosWredQueueDepthEntry.setDescription('Description.')
hwhqosWredQueueDepthType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 35, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("flowwred", 1), ("portwred", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosWredQueueDepthType.setStatus('current')
if mibBuilder.loadTexts: hwhqosWredQueueDepthType.setDescription('Description.')
hwhqosWredQueueDepthWredName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 35, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 31))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosWredQueueDepthWredName.setStatus('current')
if mibBuilder.loadTexts: hwhqosWredQueueDepthWredName.setDescription('Description.')
hwhqosWredQueueDepthValue = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 35, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 131072))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosWredQueueDepthValue.setStatus('current')
if mibBuilder.loadTexts: hwhqosWredQueueDepthValue.setDescription('Description.')
hwhqosWredQueueDepthRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 35, 1, 50), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosWredQueueDepthRowStatus.setStatus('current')
if mibBuilder.loadTexts: hwhqosWredQueueDepthRowStatus.setDescription('Description.')
hwhqosBandwidthCheckTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 36), )
if mibBuilder.loadTexts: hwhqosBandwidthCheckTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosBandwidthCheckTable.setDescription('Description.')
hwhqosBandwidthCheckEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 36, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosBandwidthCheckInterfaceIndex"), (0, "HUAWEI-HQOS-MIB", "hwhqosBandwidthCheckDirection"))
if mibBuilder.loadTexts: hwhqosBandwidthCheckEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosBandwidthCheckEntry.setDescription('Description.')
hwhqosBandwidthCheckInterfaceIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 36, 1, 1), InterfaceIndex()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosBandwidthCheckInterfaceIndex.setStatus('current')
if mibBuilder.loadTexts: hwhqosBandwidthCheckInterfaceIndex.setDescription('Description.')
hwhqosBandwidthCheckDirection = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 36, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("inbound", 1), ("outbound", 2)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosBandwidthCheckDirection.setStatus('current')
if mibBuilder.loadTexts: hwhqosBandwidthCheckDirection.setDescription('Description.')
hwhqosBandwidthCheckValue = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 36, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosBandwidthCheckValue.setStatus('current')
if mibBuilder.loadTexts: hwhqosBandwidthCheckValue.setDescription('Description.')
hwhqosBandwidthCheckRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 36, 1, 50), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosBandwidthCheckRowStatus.setStatus('current')
if mibBuilder.loadTexts: hwhqosBandwidthCheckRowStatus.setDescription('Description.')
hwhqosServiceIdentifyPolicyTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 37), )
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicyTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicyTable.setDescription('Description.')
hwhqosServiceIdentifyPolicyEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 37, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosServiceIdentifyPolicyName"))
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicyEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicyEntry.setDescription('Description.')
hwhqosServiceIdentifyPolicyName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 37, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 31))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicyName.setStatus('current')
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicyName.setDescription('Description.')
hwhqosServiceIdentifyPolicyRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 37, 1, 50), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicyRowStatus.setStatus('current')
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicyRowStatus.setDescription('Description.')
hwhqosServiceIdentifyPolicyApplyTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 38), )
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicyApplyTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicyApplyTable.setDescription('Description.')
hwhqosServiceIdentifyPolicyApplyEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 38, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosServiceIdentifyPolicyApplyInterfaceIndex"))
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicyApplyEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicyApplyEntry.setDescription('Description.')
hwhqosServiceIdentifyPolicyApplyInterfaceIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 38, 1, 1), InterfaceIndex()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicyApplyInterfaceIndex.setStatus('current')
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicyApplyInterfaceIndex.setDescription('Description.')
hwhqosServiceIdentifyPolicyApplyName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 38, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 31))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicyApplyName.setStatus('current')
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicyApplyName.setDescription('Description.')
hwhqosServiceIdentifyPolicyApplyRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 38, 1, 50), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicyApplyRowStatus.setStatus('current')
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicyApplyRowStatus.setDescription('Description.')
hwhqosServiceIdentifyPolicyTypeTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 39), )
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicyTypeTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicyTypeTable.setDescription('Description.')
hwhqosServiceIdentifyPolicyTypeEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 39, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosServiceIdentifyPolicyName"))
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicyTypeEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicyTypeEntry.setDescription('Description.')
hwhqosServiceIdentifyPolicyTypeValue = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 39, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("innervlan", 1), ("outervlan", 2), ("cosinnervlan", 3), ("cosoutervlan", 4), ("dscp", 5), ("dhcpoption60", 6)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicyTypeValue.setStatus('current')
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicyTypeValue.setDescription('Description.')
hwhqosServiceIdentifyPolicyTypeRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 39, 1, 50), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicyTypeRowStatus.setStatus('current')
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicyTypeRowStatus.setDescription('Description.')
hwhqosServiceIdentifyPolicyVlanTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 40), )
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicyVlanTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicyVlanTable.setDescription('Description.')
hwhqosServiceIdentifyPolicyVlanEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 40, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosServiceIdentifyPolicyName"), (0, "HUAWEI-HQOS-MIB", "hwhqosServiceIdentifyPolicyVlanID"))
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicyVlanEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicyVlanEntry.setDescription('Description.')
hwhqosServiceIdentifyPolicyVlanID = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 40, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 4094))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicyVlanID.setStatus('current')
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicyVlanID.setDescription('Description.')
hwhqosServiceIdentifyPolicyVlanDomainName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 40, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 31))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicyVlanDomainName.setStatus('current')
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicyVlanDomainName.setDescription('Description.')
hwhqosServiceIdentifyPolicyVlanRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 40, 1, 50), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicyVlanRowStatus.setStatus('current')
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicyVlanRowStatus.setDescription('Description.')
hwhqosServiceIdentifyPolicy8021pTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 41), )
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicy8021pTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicy8021pTable.setDescription('Description.')
hwhqosServiceIdentifyPolicy8021pEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 41, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosServiceIdentifyPolicyName"), (0, "HUAWEI-HQOS-MIB", "hwhqosServiceIdentifyPolicy8021pCosID"))
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicy8021pEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicy8021pEntry.setDescription('Description.')
hwhqosServiceIdentifyPolicy8021pCosID = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 41, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 8))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicy8021pCosID.setStatus('current')
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicy8021pCosID.setDescription('Description.')
hwhqosServiceIdentifyPolicy8021pDomainName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 41, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 31))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicy8021pDomainName.setStatus('current')
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicy8021pDomainName.setDescription('Description.')
hwhqosServiceIdentifyPolicy8021pRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 41, 1, 50), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicy8021pRowStatus.setStatus('current')
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicy8021pRowStatus.setDescription('Description.')
hwhqosServiceIdentifyPolicyDscpTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 42), )
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicyDscpTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicyDscpTable.setDescription('Description.')
hwhqosServiceIdentifyPolicyDscpEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 42, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosServiceIdentifyPolicyName"), (0, "HUAWEI-HQOS-MIB", "hwhqosServiceIdentifyPolicyDscpID"))
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicyDscpEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicyDscpEntry.setDescription('Description.')
hwhqosServiceIdentifyPolicyDscpID = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 42, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 64))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicyDscpID.setStatus('current')
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicyDscpID.setDescription('Description.')
hwhqosServiceIdentifyPolicyDscpDomainName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 42, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 31))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicyDscpDomainName.setStatus('current')
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicyDscpDomainName.setDescription('Description.')
hwhqosServiceIdentifyPolicyDscpRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 42, 1, 50), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicyDscpRowStatus.setStatus('current')
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicyDscpRowStatus.setDescription('Description.')
hwhqosServiceIdentifyPolicyOption60Table = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 43), )
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicyOption60Table.setStatus('current')
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicyOption60Table.setDescription('Description.')
hwhqosServiceIdentifyPolicyOption60Entry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 43, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosServiceIdentifyPolicyName"))
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicyOption60Entry.setStatus('current')
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicyOption60Entry.setDescription('Description.')
hwhqosServiceIdentifyPolicyOption60 = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 43, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicyOption60.setStatus('current')
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicyOption60.setDescription('Description.')
hwhqosServiceIdentifyPolicyOption60RowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 43, 1, 50), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicyOption60RowStatus.setStatus('current')
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicyOption60RowStatus.setDescription('Description.')
hwhqosDomainRateLimitModeTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 44), )
if mibBuilder.loadTexts: hwhqosDomainRateLimitModeTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosDomainRateLimitModeTable.setDescription('Description.')
hwhqosDomainRateLimitModeEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 44, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosDomainRateLimitModeDomainName"))
if mibBuilder.loadTexts: hwhqosDomainRateLimitModeEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosDomainRateLimitModeEntry.setDescription('Description.')
hwhqosDomainRateLimitModeDomainName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 44, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 31))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosDomainRateLimitModeDomainName.setStatus('current')
if mibBuilder.loadTexts: hwhqosDomainRateLimitModeDomainName.setDescription('Description.')
hwhqosDomainRateLimitModeDirection = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 44, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("inbound", 1), ("outbound", 2)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosDomainRateLimitModeDirection.setStatus('current')
if mibBuilder.loadTexts: hwhqosDomainRateLimitModeDirection.setDescription('Description.')
hwhqosDomainRateLimitModeRateLimitMode = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 44, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("sq", 1), ("car", 2)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosDomainRateLimitModeRateLimitMode.setStatus('current')
if mibBuilder.loadTexts: hwhqosDomainRateLimitModeRateLimitMode.setDescription('Description.')
hwhqosDomainRateLimitModeRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 44, 1, 50), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosDomainRateLimitModeRowStatus.setStatus('current')
if mibBuilder.loadTexts: hwhqosDomainRateLimitModeRowStatus.setDescription('Description.')
hwhqosDomainSessionGroupExcludeTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 45), )
if mibBuilder.loadTexts: hwhqosDomainSessionGroupExcludeTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosDomainSessionGroupExcludeTable.setDescription('Description.')
hwhqosDomainSessionGroupExcludeEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 45, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosDomainSessionGroupExcludeDomainName"), (0, "HUAWEI-HQOS-MIB", "hwhqosDomainSessionGroupExcludeDirection"), (0, "HUAWEI-HQOS-MIB", "hwhqosDomainSessionGroupExcludeMode"))
if mibBuilder.loadTexts: hwhqosDomainSessionGroupExcludeEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosDomainSessionGroupExcludeEntry.setDescription('Description.')
hwhqosDomainSessionGroupExcludeDomainName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 45, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 31))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosDomainSessionGroupExcludeDomainName.setStatus('current')
if mibBuilder.loadTexts: hwhqosDomainSessionGroupExcludeDomainName.setDescription('Description.')
hwhqosDomainSessionGroupExcludeDirection = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 45, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("inbound", 1), ("outbound", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosDomainSessionGroupExcludeDirection.setStatus('current')
if mibBuilder.loadTexts: hwhqosDomainSessionGroupExcludeDirection.setDescription('Description.')
hwhqosDomainSessionGroupExcludeMode = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 45, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("sq", 1), ("car", 2), ("all", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosDomainSessionGroupExcludeMode.setStatus('current')
if mibBuilder.loadTexts: hwhqosDomainSessionGroupExcludeMode.setDescription('Description.')
hwhqosDomainSessionGroupExcludeRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 45, 1, 50), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosDomainSessionGroupExcludeRowStatus.setStatus('current')
if mibBuilder.loadTexts: hwhqosDomainSessionGroupExcludeRowStatus.setDescription('Description.')
hwhqosDomainUserMaxSessionTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 46), )
if mibBuilder.loadTexts: hwhqosDomainUserMaxSessionTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosDomainUserMaxSessionTable.setDescription('Description.')
hwhqosDomainUserMaxSessionEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 46, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosDomainUserMaxSessionDomainName"))
if mibBuilder.loadTexts: hwhqosDomainUserMaxSessionEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosDomainUserMaxSessionEntry.setDescription('Description.')
hwhqosDomainUserMaxSessionDomainName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 46, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 31))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosDomainUserMaxSessionDomainName.setStatus('current')
if mibBuilder.loadTexts: hwhqosDomainUserMaxSessionDomainName.setDescription('Description.')
hwhqosDomainUserMaxSessionNum = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 46, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 147456))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosDomainUserMaxSessionNum.setStatus('current')
if mibBuilder.loadTexts: hwhqosDomainUserMaxSessionNum.setDescription('Description.')
hwhqosDomainUserMaxSessionRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 46, 1, 50), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosDomainUserMaxSessionRowStatus.setStatus('current')
if mibBuilder.loadTexts: hwhqosDomainUserMaxSessionRowStatus.setDescription('Description.')
hwhqosUpdateUseridProfileTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 47), )
if mibBuilder.loadTexts: hwhqosUpdateUseridProfileTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosUpdateUseridProfileTable.setDescription('Description.')
hwhqosUpdateUseridProfileEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 47, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosUpdateUseridProfileUserid"), (0, "HUAWEI-HQOS-MIB", "hwhqosUpdateUseridProfileDirection"))
if mibBuilder.loadTexts: hwhqosUpdateUseridProfileEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosUpdateUseridProfileEntry.setDescription('Description.')
hwhqosUpdateUseridProfileUserid = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 47, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 164864))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUpdateUseridProfileUserid.setStatus('current')
if mibBuilder.loadTexts: hwhqosUpdateUseridProfileUserid.setDescription('Description.')
hwhqosUpdateUseridProfileDirection = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 47, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("inbound", 1), ("outbound", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUpdateUseridProfileDirection.setStatus('current')
if mibBuilder.loadTexts: hwhqosUpdateUseridProfileDirection.setDescription('Description.')
hwhqosUpdateUseridProfileName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 47, 1, 3), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 63))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosUpdateUseridProfileName.setStatus('current')
if mibBuilder.loadTexts: hwhqosUpdateUseridProfileName.setDescription('Description.')
hwhqosUpdateUseridProfileRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 47, 1, 50), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosUpdateUseridProfileRowStatus.setStatus('current')
if mibBuilder.loadTexts: hwhqosUpdateUseridProfileRowStatus.setDescription('Description.')
hwhqosDomainUserPriorityTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 48), )
if mibBuilder.loadTexts: hwhqosDomainUserPriorityTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosDomainUserPriorityTable.setDescription('Description.')
hwhqosDomainUserPriorityEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 48, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosDomainUserPriorityDomainName"), (0, "HUAWEI-HQOS-MIB", "hwhqosDomainUserPriorityDirection"))
if mibBuilder.loadTexts: hwhqosDomainUserPriorityEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosDomainUserPriorityEntry.setDescription('Description.')
hwhqosDomainUserPriorityDomainName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 48, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 31))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosDomainUserPriorityDomainName.setStatus('current')
if mibBuilder.loadTexts: hwhqosDomainUserPriorityDomainName.setDescription('Description.')
hwhqosDomainUserPriorityDirection = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 48, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("inbound", 1), ("outbound", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosDomainUserPriorityDirection.setStatus('current')
if mibBuilder.loadTexts: hwhqosDomainUserPriorityDirection.setDescription('Description.')
hwhqosDomainUserPriorityValue = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 48, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15))).clone(namedValues=NamedValues(("priority0", 1), ("priority1", 2), ("priority2", 3), ("priority3", 4), ("priority4", 5), ("priority5", 6), ("priority6", 7), ("priority7", 8), ("trust8021pinner", 9), ("trust8021pouter", 10), ("trustexpinner", 11), ("trustexpouter", 12), ("trustdscpinner", 13), ("trustdscpouter", 14), ("unchangeable", 15)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosDomainUserPriorityValue.setStatus('current')
if mibBuilder.loadTexts: hwhqosDomainUserPriorityValue.setDescription('Description.')
hwhqosDomainUserPriorityRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 48, 1, 50), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosDomainUserPriorityRowStatus.setStatus('current')
if mibBuilder.loadTexts: hwhqosDomainUserPriorityRowStatus.setDescription('Description.')
hwhqosTMScheduleModeTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 50), )
if mibBuilder.loadTexts: hwhqosTMScheduleModeTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosTMScheduleModeTable.setDescription('Description.')
hwhqosTMScheduleModeEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 50, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosTMScheduleModeSlot"))
if mibBuilder.loadTexts: hwhqosTMScheduleModeEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosTMScheduleModeEntry.setDescription('Description.')
hwhqosTMScheduleModeSlot = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 50, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 16))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosTMScheduleModeSlot.setStatus('current')
if mibBuilder.loadTexts: hwhqosTMScheduleModeSlot.setDescription('Description.')
hwhqosTMScheduleModeSimpleEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 50, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1))).clone(namedValues=NamedValues(("simple", 1)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosTMScheduleModeSimpleEnable.setStatus('current')
if mibBuilder.loadTexts: hwhqosTMScheduleModeSimpleEnable.setDescription('Description.')
hwhqosTMScheduleModeRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 50, 1, 50), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosTMScheduleModeRowStatus.setStatus('current')
if mibBuilder.loadTexts: hwhqosTMScheduleModeRowStatus.setDescription('Description.')
# --- hwhqosUserBandwidthTable (OID ...132.1.51): per-user bandwidth values,
# indexed by user id.
hwhqosUserBandwidthTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 51), )
if mibBuilder.loadTexts: hwhqosUserBandwidthTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserBandwidthTable.setDescription('Description.')
hwhqosUserBandwidthEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 51, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosUserBandwidthUserid"))
if mibBuilder.loadTexts: hwhqosUserBandwidthEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserBandwidthEntry.setDescription('Description.')
# Index column: user id, 0..164864.
hwhqosUserBandwidthUserid = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 51, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 164864))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserBandwidthUserid.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserBandwidthUserid.setDescription('Description.')
# CIR (committed information rate) — read-only Integer32; units not stated here.
hwhqosUserBandwidthCir = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 51, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserBandwidthCir.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserBandwidthCir.setDescription('Description.')
# PIR (peak information rate) — read-only Integer32.
hwhqosUserBandwidthPir = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 51, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserBandwidthPir.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserBandwidthPir.setDescription('Description.')
hwhqosUserBandwidthCommittedCir = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 51, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserBandwidthCommittedCir.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserBandwidthCommittedCir.setDescription('Description.')
# RowStatus for row management.
hwhqosUserBandwidthRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 51, 1, 50), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosUserBandwidthRowStatus.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserBandwidthRowStatus.setDescription('Description.')
# --- hwhqosUseridStatTable (OID ...132.1.52): per-user, per-queue,
# per-direction traffic statistics. Indexed by (userid, queue, direction).
hwhqosUseridStatTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 52), )
if mibBuilder.loadTexts: hwhqosUseridStatTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosUseridStatTable.setDescription('Description.')
hwhqosUseridStatEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 52, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosUseridStatUserid"), (0, "HUAWEI-HQOS-MIB", "hwhqosUseridStatQueue"), (0, "HUAWEI-HQOS-MIB", "hwhqosUseridStatDirection"))
if mibBuilder.loadTexts: hwhqosUseridStatEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosUseridStatEntry.setDescription('Description.')
# Index: user id, 1..164864 (note: differs from the 0-based range used in
# hwhqosUserBandwidthUserid).
hwhqosUseridStatUserid = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 52, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 164864))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUseridStatUserid.setStatus('current')
if mibBuilder.loadTexts: hwhqosUseridStatUserid.setDescription('Description.')
# Index: queue selector — the eight DiffServ service classes plus total(9).
hwhqosUseridStatQueue = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 52, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9))).clone(namedValues=NamedValues(("be", 1), ("af1", 2), ("af2", 3), ("af3", 4), ("af4", 5), ("ef", 6), ("cs6", 7), ("cs7", 8), ("total", 9)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUseridStatQueue.setStatus('current')
if mibBuilder.loadTexts: hwhqosUseridStatQueue.setDescription('Description.')
# Index: traffic direction.
hwhqosUseridStatDirection = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 52, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("inbound", 1), ("outbound", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUseridStatDirection.setStatus('current')
if mibBuilder.loadTexts: hwhqosUseridStatDirection.setDescription('Description.')
# 64-bit counters: passed/dropped bytes and packets.
hwhqosUseridStatPassBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 52, 1, 4), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUseridStatPassBytes.setStatus('current')
if mibBuilder.loadTexts: hwhqosUseridStatPassBytes.setDescription('Description.')
hwhqosUseridStatDropBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 52, 1, 5), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUseridStatDropBytes.setStatus('current')
if mibBuilder.loadTexts: hwhqosUseridStatDropBytes.setDescription('Description.')
hwhqosUseridStatPassPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 52, 1, 6), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUseridStatPassPackets.setStatus('current')
if mibBuilder.loadTexts: hwhqosUseridStatPassPackets.setDescription('Description.')
hwhqosUseridStatDropPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 52, 1, 7), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUseridStatDropPackets.setStatus('current')
if mibBuilder.loadTexts: hwhqosUseridStatDropPackets.setDescription('Description.')
# Rate columns (pass/drop packets and bytes per unit time; unit not stated
# in this generated module).
hwhqosUseridStatPassPacketsRate = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 52, 1, 8), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUseridStatPassPacketsRate.setStatus('current')
if mibBuilder.loadTexts: hwhqosUseridStatPassPacketsRate.setDescription('Description.')
hwhqosUseridStatDropPacketsRate = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 52, 1, 9), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUseridStatDropPacketsRate.setStatus('current')
if mibBuilder.loadTexts: hwhqosUseridStatDropPacketsRate.setDescription('Description.')
# Write-only style action column: writing reset(1) presumably clears the
# row's counters — TODO confirm against the device MIB documentation.
hwhqosUseridStatReset = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 52, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1))).clone(namedValues=NamedValues(("reset", 1)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosUseridStatReset.setStatus('current')
if mibBuilder.loadTexts: hwhqosUseridStatReset.setDescription('Description.')
hwhqosUseridStatPassBytesRate = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 52, 1, 11), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUseridStatPassBytesRate.setStatus('current')
if mibBuilder.loadTexts: hwhqosUseridStatPassBytesRate.setDescription('Description.')
hwhqosUseridStatDropBytesRate = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 52, 1, 12), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUseridStatDropBytesRate.setStatus('current')
if mibBuilder.loadTexts: hwhqosUseridStatDropBytesRate.setDescription('Description.')
# --- hwhqosFatherUserGroupQueueApplyTable (OID ...132.1.53): applies a
# "father" user-group-queue to an interface; outbound only.
hwhqosFatherUserGroupQueueApplyTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 53), )
if mibBuilder.loadTexts: hwhqosFatherUserGroupQueueApplyTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosFatherUserGroupQueueApplyTable.setDescription('Description.')
hwhqosFatherUserGroupQueueApplyEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 53, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosFatherUserGroupQueueApplyInterfaceIndex"), (0, "HUAWEI-HQOS-MIB", "hwhqosFatherUserGroupQueueApplyDirection"))
if mibBuilder.loadTexts: hwhqosFatherUserGroupQueueApplyEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosFatherUserGroupQueueApplyEntry.setDescription('Description.')
# Index: ifIndex of the interface.
hwhqosFatherUserGroupQueueApplyInterfaceIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 53, 1, 1), InterfaceIndex()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosFatherUserGroupQueueApplyInterfaceIndex.setStatus('current')
if mibBuilder.loadTexts: hwhqosFatherUserGroupQueueApplyInterfaceIndex.setDescription('Description.')
# Index: direction — only outbound(2) is permitted for this table.
hwhqosFatherUserGroupQueueApplyDirection = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 53, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2))).clone(namedValues=NamedValues(("outbound", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosFatherUserGroupQueueApplyDirection.setStatus('current')
if mibBuilder.loadTexts: hwhqosFatherUserGroupQueueApplyDirection.setDescription('Description.')
# Name of the group queue to apply (1..31 chars).
hwhqosFatherUserGroupQueueApplyName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 53, 1, 3), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 31))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosFatherUserGroupQueueApplyName.setStatus('current')
if mibBuilder.loadTexts: hwhqosFatherUserGroupQueueApplyName.setDescription('Description.')
hwhqosFatherUserGroupQueueApplyRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 53, 1, 50), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosFatherUserGroupQueueApplyRowStatus.setStatus('current')
if mibBuilder.loadTexts: hwhqosFatherUserGroupQueueApplyRowStatus.setDescription('Description.')
# --- hwhqosUserGroupQueueApplyTable (OID ...132.1.54): applies a user group
# queue to an interface/VLAN pair, per direction.
hwhqosUserGroupQueueApplyTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 54), )
if mibBuilder.loadTexts: hwhqosUserGroupQueueApplyTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueApplyTable.setDescription(' ')
hwhqosUserGroupQueueApplyEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 54, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueApplyInterfaceIndex"), (0, "HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueApplyPeVid"), (0, "HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueApplyCeVid"), (0, "HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueApplyDirection"))
if mibBuilder.loadTexts: hwhqosUserGroupQueueApplyEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueApplyEntry.setDescription(' This table is used to apply a user group queue to interfaces. ')
hwhqosUserGroupQueueApplyInterfaceIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 54, 1, 1), InterfaceIndex()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserGroupQueueApplyInterfaceIndex.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueApplyInterfaceIndex.setDescription(' This object indicates the index of interface.')
# Index: outer (PE) VLAN id; 0 is allowed as a distinct "unset" value.
hwhqosUserGroupQueueApplyPeVid = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 54, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(1, 4094), ))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserGroupQueueApplyPeVid.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueApplyPeVid.setDescription('This object indicates the index of vlan or l2tp group.')
# Index: inner (CE) VLAN id, 1..4094.
hwhqosUserGroupQueueApplyCeVid = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 54, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 4094))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserGroupQueueApplyCeVid.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueApplyCeVid.setDescription('This object indicates the index of vlan or l2tp group.')
hwhqosUserGroupQueueApplyDirection = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 54, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("inbound", 1), ("outbound", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserGroupQueueApplyDirection.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueApplyDirection.setDescription(' ')
# Name of the user-group-queue; note no size constraint here, unlike the
# father-table's 1..31 limit.
hwhqosUserGroupQueueApplyName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 54, 1, 5), OctetString()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosUserGroupQueueApplyName.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueApplyName.setDescription('This object indicates the name of user-group-queue.')
# Per-VLAN application granularity selector.
hwhqosUserGroupQueueApplyEachvlan = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 54, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("eachVlan", 1), ("eachPevid", 2), ("eachCevid", 3), ("none", 4)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosUserGroupQueueApplyEachvlan.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueApplyEachvlan.setDescription(' ')
hwhqosUserGroupQueueApplyRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 54, 1, 50), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosUserGroupQueueApplyRowStatus.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueApplyRowStatus.setDescription(' ')
# --- hwhqosProfileApplyDomainTable (OID ...132.1.55): binds a QoS profile
# to a domain, per direction.
hwhqosProfileApplyDomainTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 55), )
if mibBuilder.loadTexts: hwhqosProfileApplyDomainTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileApplyDomainTable.setDescription(' ')
hwhqosProfileApplyDomainEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 55, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosProfileApplyDomainName"), (0, "HUAWEI-HQOS-MIB", "hwhqosProfileApplyDomainDirection"))
if mibBuilder.loadTexts: hwhqosProfileApplyDomainEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileApplyDomainEntry.setDescription('Description.')
# Index: domain name (unconstrained OctetString).
hwhqosProfileApplyDomainName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 55, 1, 1), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosProfileApplyDomainName.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileApplyDomainName.setDescription(' ')
# Index: direction.
hwhqosProfileApplyDomainDirection = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 55, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("inbound", 1), ("outbound", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosProfileApplyDomainDirection.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileApplyDomainDirection.setDescription(' ')
# The profile to apply in that domain/direction.
hwhqosProfileApplyDomainProfileName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 55, 1, 3), OctetString()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosProfileApplyDomainProfileName.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileApplyDomainProfileName.setDescription(' ')
hwhqosProfileApplyDomainRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 55, 1, 50), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosProfileApplyDomainRowStatus.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileApplyDomainRowStatus.setDescription(' ')
# --- hwhqosProfileApplyUserVlanTable (OID ...132.1.56): binds a QoS profile
# to an interface + PE/CE VLAN pair, per direction.
hwhqosProfileApplyUserVlanTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 56), )
if mibBuilder.loadTexts: hwhqosProfileApplyUserVlanTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileApplyUserVlanTable.setDescription(' ')
hwhqosProfileApplyUserVlanEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 56, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosProfileApplyUserVlanInterfaceIndex"), (0, "HUAWEI-HQOS-MIB", "hwhqosProfileApplyUserVlanPeVid"), (0, "HUAWEI-HQOS-MIB", "hwhqosProfileApplyUserVlanCeVid"), (0, "HUAWEI-HQOS-MIB", "hwhqosProfileApplyUserVlanDirection"))
if mibBuilder.loadTexts: hwhqosProfileApplyUserVlanEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileApplyUserVlanEntry.setDescription('Description.')
hwhqosProfileApplyUserVlanInterfaceIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 56, 1, 1), InterfaceIndex()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosProfileApplyUserVlanInterfaceIndex.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileApplyUserVlanInterfaceIndex.setDescription(' ')
# Outer (PE) VLAN id; 0 allowed as a distinct value alongside 1..4094.
hwhqosProfileApplyUserVlanPeVid = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 56, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(1, 4094), ))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosProfileApplyUserVlanPeVid.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileApplyUserVlanPeVid.setDescription('This object indicates the index of vlan or l2tp group.')
# Inner (CE) VLAN id, 1..4094.
hwhqosProfileApplyUserVlanCeVid = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 56, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 4094))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosProfileApplyUserVlanCeVid.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileApplyUserVlanCeVid.setDescription('This object indicates the index of vlan or l2tp group.')
hwhqosProfileApplyUserVlanDirection = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 56, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("inbound", 1), ("outbound", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosProfileApplyUserVlanDirection.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileApplyUserVlanDirection.setDescription(' ')
hwhqosProfileApplyUserVlanProfileName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 56, 1, 5), OctetString()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosProfileApplyUserVlanProfileName.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileApplyUserVlanProfileName.setDescription(' ')
# Enable/disable per-VLAN application of the profile.
hwhqosProfileApplyUserVlanEach = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 56, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosProfileApplyUserVlanEach.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileApplyUserVlanEach.setDescription(' ')
hwhqosProfileApplyUserVlanRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 56, 1, 50), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosProfileApplyUserVlanRowStatus.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileApplyUserVlanRowStatus.setDescription(' ')
# --- hwhqosLinkAdjRemoteEnableTable (OID ...132.1.57): enables/disables
# remote link adjustment, scoped system-wide or per domain.
hwhqosLinkAdjRemoteEnableTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 57), )
if mibBuilder.loadTexts: hwhqosLinkAdjRemoteEnableTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosLinkAdjRemoteEnableTable.setDescription(' ')
hwhqosLinkAdjRemoteEnableEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 57, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosLinkAdjRemoteEnableType"), (0, "HUAWEI-HQOS-MIB", "hwhqosLinkAdjRemoteEnableSlot"), (0, "HUAWEI-HQOS-MIB", "hwhqosLinkAdjRemoteEnableDomainName"))
if mibBuilder.loadTexts: hwhqosLinkAdjRemoteEnableEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosLinkAdjRemoteEnableEntry.setDescription('Description.')
# Index: scope — system(1) or domain(2).
hwhqosLinkAdjRemoteEnableType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 57, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("system", 1), ("domain", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosLinkAdjRemoteEnableType.setStatus('current')
if mibBuilder.loadTexts: hwhqosLinkAdjRemoteEnableType.setDescription(' ')
# Index: slot number (unconstrained Integer32 here, unlike other slot columns).
hwhqosLinkAdjRemoteEnableSlot = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 57, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosLinkAdjRemoteEnableSlot.setStatus('current')
if mibBuilder.loadTexts: hwhqosLinkAdjRemoteEnableSlot.setDescription(' ')
# Index: domain name (1..31 chars), meaningful when Type is domain(2).
hwhqosLinkAdjRemoteEnableDomainName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 57, 1, 3), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 31))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosLinkAdjRemoteEnableDomainName.setStatus('current')
if mibBuilder.loadTexts: hwhqosLinkAdjRemoteEnableDomainName.setDescription(' hwhqosLinkAdjRemoteEnableDomainName')
hwhqosLinkAdjRemoteEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 57, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosLinkAdjRemoteEnable.setStatus('current')
if mibBuilder.loadTexts: hwhqosLinkAdjRemoteEnable.setDescription(' ')
hwhqosLinkAdjRemoteEnableRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 57, 1, 50), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosLinkAdjRemoteEnableRowStatus.setStatus('current')
if mibBuilder.loadTexts: hwhqosLinkAdjRemoteEnableRowStatus.setDescription(' ')
# --- hwhqosLinkAdjShapingModeTable (OID ...132.1.58): shaping mode
# (frame/cell) configured per interface or per domain.
hwhqosLinkAdjShapingModeTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 58), )
if mibBuilder.loadTexts: hwhqosLinkAdjShapingModeTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosLinkAdjShapingModeTable.setDescription(' ')
hwhqosLinkAdjShapingModeEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 58, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosLinkAdjShapingModeType"), (0, "HUAWEI-HQOS-MIB", "hwhqosLinkAdjShapingModeInterfaceIndex"), (0, "HUAWEI-HQOS-MIB", "hwhqosLinkAdjShapingModeDomainName"))
if mibBuilder.loadTexts: hwhqosLinkAdjShapingModeEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosLinkAdjShapingModeEntry.setDescription('Description.')
# Index: scope — interface(1) or domain(2).
hwhqosLinkAdjShapingModeType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 58, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("interface", 1), ("domain", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosLinkAdjShapingModeType.setStatus('current')
if mibBuilder.loadTexts: hwhqosLinkAdjShapingModeType.setDescription('Specify whether the command is applied on a interface or in a domain ')
# Index: interface index, as plain Integer32 rather than the InterfaceIndex
# TC used elsewhere in this module.
hwhqosLinkAdjShapingModeInterfaceIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 58, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosLinkAdjShapingModeInterfaceIndex.setStatus('current')
if mibBuilder.loadTexts: hwhqosLinkAdjShapingModeInterfaceIndex.setDescription(' ')
# Index: domain name (1..31 chars), meaningful when Type is domain(2).
hwhqosLinkAdjShapingModeDomainName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 58, 1, 3), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 31))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosLinkAdjShapingModeDomainName.setStatus('current')
if mibBuilder.loadTexts: hwhqosLinkAdjShapingModeDomainName.setDescription(' hwhqosLinkAdjShapingModeDomainName')
# The configured shaping mode: frame(1) or cell(2).
hwhqosLinkAdjShapingMode = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 58, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("frame", 1), ("cell", 2)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosLinkAdjShapingMode.setStatus('current')
if mibBuilder.loadTexts: hwhqosLinkAdjShapingMode.setDescription(' ')
hwhqosLinkAdjShapingModeRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 58, 1, 50), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosLinkAdjShapingModeRowStatus.setStatus('current')
if mibBuilder.loadTexts: hwhqosLinkAdjShapingModeRowStatus.setDescription(' ')
# --- hwhqosLinkAdjRemoteTable (OID ...132.1.59): remote link-adjustment
# value configured per interface or per domain.
hwhqosLinkAdjRemoteTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 59), )
if mibBuilder.loadTexts: hwhqosLinkAdjRemoteTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosLinkAdjRemoteTable.setDescription(' ')
hwhqosLinkAdjRemoteEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 59, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosLinkAdjRemoteType"), (0, "HUAWEI-HQOS-MIB", "hwhqosLinkAdjRemoteInterfaceIndex"), (0, "HUAWEI-HQOS-MIB", "hwhqosLinkAdjRemoteDomainName"))
if mibBuilder.loadTexts: hwhqosLinkAdjRemoteEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosLinkAdjRemoteEntry.setDescription('Description.')
# Index: scope — interface(1) or domain(2).
hwhqosLinkAdjRemoteType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 59, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("interface", 1), ("domain", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosLinkAdjRemoteType.setStatus('current')
if mibBuilder.loadTexts: hwhqosLinkAdjRemoteType.setDescription('Specify whether the command is applied on a interface or in a domain')
# Index: interface index (plain Integer32).
hwhqosLinkAdjRemoteInterfaceIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 59, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosLinkAdjRemoteInterfaceIndex.setStatus('current')
if mibBuilder.loadTexts: hwhqosLinkAdjRemoteInterfaceIndex.setDescription(' ')
# Index: domain name (1..31 chars), meaningful when Type is domain(2).
hwhqosLinkAdjRemoteDomainName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 59, 1, 3), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 31))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosLinkAdjRemoteDomainName.setStatus('current')
if mibBuilder.loadTexts: hwhqosLinkAdjRemoteDomainName.setDescription(' hwhqosLinkAdjRemoteDomainName')
# The adjustment value itself; unconstrained Integer32, units/range not
# stated in this generated module.
hwhqosLinkAdjRemoteValue = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 59, 1, 4), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosLinkAdjRemoteValue.setStatus('current')
if mibBuilder.loadTexts: hwhqosLinkAdjRemoteValue.setDescription(' ')
hwhqosLinkAdjRemoteRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 59, 1, 50), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosLinkAdjRemoteRowStatus.setStatus('current')
if mibBuilder.loadTexts: hwhqosLinkAdjRemoteRowStatus.setDescription(' ')
# --- hwhqosLinkAdjLocalTable (OID ...132.1.60): local link-adjustment value
# per slot.
hwhqosLinkAdjLocalTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 60), )
if mibBuilder.loadTexts: hwhqosLinkAdjLocalTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosLinkAdjLocalTable.setDescription(' ')
hwhqosLinkAdjLocalEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 60, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosLinkAdjLocalSlotNumber"))
if mibBuilder.loadTexts: hwhqosLinkAdjLocalEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosLinkAdjLocalEntry.setDescription('Description.')
# Index: slot number, 1..128.
hwhqosLinkAdjLocalSlotNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 60, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 128))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosLinkAdjLocalSlotNumber.setStatus('current')
if mibBuilder.loadTexts: hwhqosLinkAdjLocalSlotNumber.setDescription(' ')
# The adjustment value (unconstrained Integer32).
hwhqosLinkAdjLocalValue = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 60, 1, 2), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosLinkAdjLocalValue.setStatus('current')
if mibBuilder.loadTexts: hwhqosLinkAdjLocalValue.setDescription(' ')
hwhqosLinkAdjLocalRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 60, 1, 50), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosLinkAdjLocalRowStatus.setStatus('current')
if mibBuilder.loadTexts: hwhqosLinkAdjLocalRowStatus.setDescription(' ')
# --- hwhqosLinkAdjExcludeTable (OID ...132.1.61): per-slot exclusion
# switch for link adjustment.
hwhqosLinkAdjExcludeTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 61), )
if mibBuilder.loadTexts: hwhqosLinkAdjExcludeTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosLinkAdjExcludeTable.setDescription(' ')
hwhqosLinkAdjExcludeEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 61, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosLinkAdjExcludeSlotNumber"))
if mibBuilder.loadTexts: hwhqosLinkAdjExcludeEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosLinkAdjExcludeEntry.setDescription('Description.')
# Index: slot number, 1..128.
hwhqosLinkAdjExcludeSlotNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 61, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 128))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosLinkAdjExcludeSlotNumber.setStatus('current')
if mibBuilder.loadTexts: hwhqosLinkAdjExcludeSlotNumber.setDescription(' ')
# enable(1)/disable(2) switch for excluding this slot.
hwhqosLinkAdjExcludeEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 61, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosLinkAdjExcludeEnable.setStatus('current')
if mibBuilder.loadTexts: hwhqosLinkAdjExcludeEnable.setDescription(' ')
hwhqosLinkAdjExcludeRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 61, 1, 50), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosLinkAdjExcludeRowStatus.setStatus('current')
if mibBuilder.loadTexts: hwhqosLinkAdjExcludeRowStatus.setDescription(' ')
# --- hwhqosProfileInfoTable (OID ...132.1.62): free-text description
# attached to a named QoS profile.
hwhqosProfileInfoTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 62), )
if mibBuilder.loadTexts: hwhqosProfileInfoTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileInfoTable.setDescription(' hwhqosProfileInfoTable ')
hwhqosProfileInfoEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 62, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosProfileInfoName"))
if mibBuilder.loadTexts: hwhqosProfileInfoEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileInfoEntry.setDescription(' hwhqosProfileInfoEntry ')
# Index: profile name, 1..63 chars.
hwhqosProfileInfoName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 62, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 63))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosProfileInfoName.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileInfoName.setDescription(' hwhqosProfileInfoName ')
# Free-text description, up to 1023 chars.
hwhqosProfileInfoDescription = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 62, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1023))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosProfileInfoDescription.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileInfoDescription.setDescription(' hwhqosProfileInfoDescription ')
hwhqosProfileInfoRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 62, 1, 50), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosProfileInfoRowStatus.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileInfoRowStatus.setDescription(' Row status. The value ranges from 1 to 6 but usually 4 and 6 are used. createAndGo[4] - create a row. destroy[6] -delete a row. ')
# --- hwhqosUserGroupQueueTemplateTable (OID ...132.1.63): template-mode
# setting for a named user group queue.
hwhqosUserGroupQueueTemplateTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 63), )
if mibBuilder.loadTexts: hwhqosUserGroupQueueTemplateTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueTemplateTable.setDescription(' hwhqosUserGroupQueueTemplateTable ')
hwhqosUserGroupQueueTemplateEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 63, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueTemplateName"))
if mibBuilder.loadTexts: hwhqosUserGroupQueueTemplateEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueTemplateEntry.setDescription(' hwhqosUserGroupQueueTemplateEntry ')
# Index: template name, 1..31 chars.
hwhqosUserGroupQueueTemplateName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 63, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 31))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserGroupQueueTemplateName.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueTemplateName.setDescription(' hwhqosUserGroupQueueTemplateName ')
# 1 = modetemplate, 2 = unmodetemplate (per the DESCRIPTION text below).
hwhqosUserGroupQueueTemplateModeTemplate = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 63, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosUserGroupQueueTemplateModeTemplate.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueTemplateModeTemplate.setDescription(' 1: modetemplate 2: unmodetemplate ')
hwhqosUserGroupQueueTemplateRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 63, 1, 50), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosUserGroupQueueTemplateRowStatus.setStatus('current')
# Fixed garbled DESCRIPTION text: the original read "destroy[destroy[6]"
# (duplicated token); repaired to match the equivalent RowStatus description
# used by hwhqosProfileInfoRowStatus.
if mibBuilder.loadTexts: hwhqosUserGroupQueueTemplateRowStatus.setDescription(' Row status. The value ranges from 1 to 6 but usually 4 and 6 are used. createAndGo[4] - create a row. destroy[6] -delete a row. ')
# --- hwhqosProfileWeightTable (OID ...132.1.64): user-queue weight per
# profile and direction. NOTE(review): the row's first index,
# hwhqosProfileName, is declared elsewhere in this module (not in this
# chunk); presumably the shared profile-name index column — verify.
hwhqosProfileWeightTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 64), )
if mibBuilder.loadTexts: hwhqosProfileWeightTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileWeightTable.setDescription(' hwhqosProfileWeightTable ')
hwhqosProfileWeightEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 64, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosProfileName"), (0, "HUAWEI-HQOS-MIB", "hwhqosProfileWeightDirection"))
if mibBuilder.loadTexts: hwhqosProfileWeightEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileWeightEntry.setDescription(' hwhqosProfileWeightTableEntry ')
# Index: direction, including the combined inout(3) value not used by the
# other direction columns in this module.
hwhqosProfileWeightDirection = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 64, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("inbound", 1), ("outbound", 2), ("inout", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosProfileWeightDirection.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileWeightDirection.setDescription('This object indicates the direction where weight is applied.')
# Weight value, 1..63.
hwhqosProfileWeightValue = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 64, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 63))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosProfileWeightValue.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileWeightValue.setDescription('This object indicates the weight value of user-queue.')
hwhqosProfileWeightRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 64, 1, 50), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosProfileWeightRowStatus.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileWeightRowStatus.setDescription('This object indicates the row status. Currently, three row statuses are supported: Active, CreateAndGo, and Destroy.')
# --- hwhqosUserQueueApplyAlarmTable (OID ...25.132.1.65) ---
# Single-column table carrying the interface name used as a varbind by the
# hwhqosUserQueueApplyAlarmTrap notification defined later in this module.
hwhqosUserQueueApplyAlarmTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 65), )
if mibBuilder.loadTexts: hwhqosUserQueueApplyAlarmTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueApplyAlarmTable.setDescription(' hwhqosUserQueueApplyAlarmTable ')
hwhqosUserQueueApplyAlarmEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 65, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosUserQueueApplyAlarmIfNetName"))
if mibBuilder.loadTexts: hwhqosUserQueueApplyAlarmEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueApplyAlarmEntry.setDescription(' hwhqosUserQueueApplyAlarmEntry ')
# Column .1: interface name.  NOTE(review): the size constraint here is 1..511
# while the description says "1 to 31 characters" — mismatch comes from the MIB
# source itself; both are preserved verbatim.
hwhqosUserQueueApplyAlarmIfNetName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 65, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 511))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosUserQueueApplyAlarmIfNetName.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueApplyAlarmIfNetName.setDescription('This object indicates the name of the interface which apply user-queue. The object is a string of 1 to 31 characters.')
# --- hwhqosPortQueueStatDiscardAlarmTrapTable (OID ...25.132.1.66) and
# --- hwhqosPortQueueStatDiscardAlarmCancelTable (OID ...25.132.1.67) ---
# Two structurally identical varbind tables (interface name, CoS queue,
# discard type, discard value) used by the port-queue discard alarm trap and
# its matching clear/cancel trap, respectively.
hwhqosPortQueueStatDiscardAlarmTrapTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 66), )
if mibBuilder.loadTexts: hwhqosPortQueueStatDiscardAlarmTrapTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosPortQueueStatDiscardAlarmTrapTable.setDescription('hwhqosPortQueueStatDiscardAlarmTrapTable')
hwhqosPortQueueStatDiscardAlarmTrapEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 66, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosPortQueueInterfaceTrap"), (0, "HUAWEI-HQOS-MIB", "hwhqosPortQueueCosValueTrap"), (0, "HUAWEI-HQOS-MIB", "hwhqosPortQueueDiscardTypeTrap"), (0, "HUAWEI-HQOS-MIB", "hwhqosPortQueueDiscardValueTrap"))
if mibBuilder.loadTexts: hwhqosPortQueueStatDiscardAlarmTrapEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosPortQueueStatDiscardAlarmTrapEntry.setDescription('hwhqosPortQueueStatDiscardAlarmTrapEntry')
# Trap-table column .1: interface name (octet string, 1..511).
hwhqosPortQueueInterfaceTrap = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 66, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 511))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosPortQueueInterfaceTrap.setStatus('current')
if mibBuilder.loadTexts: hwhqosPortQueueInterfaceTrap.setDescription('The value of this object identifies the name of an interface.')
# Trap-table column .2: CoS queue enum (BE/AF1..AF4/EF/CS6/CS7 -> 1..8).
hwhqosPortQueueCosValueTrap = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 66, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8))).clone(namedValues=NamedValues(("portqueueBE", 1), ("portqueueAF1", 2), ("portqueueAF2", 3), ("portqueueAF3", 4), ("portqueueAF4", 5), ("portqueueEF", 6), ("portqueueCS6", 7), ("portqueueCS7", 8)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosPortQueueCosValueTrap.setStatus('current')
if mibBuilder.loadTexts: hwhqosPortQueueCosValueTrap.setDescription(' The value of this object identifies the queue type. The value can be any of the following: PORTQUEUEBE(1), PORTQUEUEAF1(2), PORTQUEUEAF2(3), PORTQUEUEAF3(4), PORTQUEUEAF4(5), PORTQUEUEEF(6), PORTQUEUECS6(7), PORTQUEUECS7(8) ')
# Trap-table column .3: discard metric type — packet(1)/byte(2)/packet-ratio(3).
hwhqosPortQueueDiscardTypeTrap = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 66, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("discardpacket", 1), ("discardbyte", 2), ("discardpacketratio", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosPortQueueDiscardTypeTrap.setStatus('current')
if mibBuilder.loadTexts: hwhqosPortQueueDiscardTypeTrap.setDescription('The value of this object identifies the discard type. The value can be any of the following: DISCARD-PACKET(1), DISCARD-BYTE(2), DISCARD-PACKET-RATIO(3) ')
# Trap-table column .4: discard value rendered as an octet string.
hwhqosPortQueueDiscardValueTrap = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 66, 1, 4), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 511))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosPortQueueDiscardValueTrap.setStatus('current')
if mibBuilder.loadTexts: hwhqosPortQueueDiscardValueTrap.setDescription('This object indicates the discard value.')
# Cancel (clear) table: mirrors the trap table above, column for column.
hwhqosPortQueueStatDiscardAlarmCancelTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 67), )
if mibBuilder.loadTexts: hwhqosPortQueueStatDiscardAlarmCancelTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosPortQueueStatDiscardAlarmCancelTable.setDescription('hwhqosPortQueueStatDiscardAlarmCancelTable')
hwhqosPortQueueStatDiscardAlarmCancelEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 67, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosPortQueueInterfaceCancel"), (0, "HUAWEI-HQOS-MIB", "hwhqosPortQueueCosValueCancel"), (0, "HUAWEI-HQOS-MIB", "hwhqosPortQueueDiscardTypeCancel"), (0, "HUAWEI-HQOS-MIB", "hwhqosPortQueueDiscardValueCancel"))
if mibBuilder.loadTexts: hwhqosPortQueueStatDiscardAlarmCancelEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosPortQueueStatDiscardAlarmCancelEntry.setDescription('hwhqosPortQueueStatDiscardAlarmCancelEntry')
hwhqosPortQueueInterfaceCancel = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 67, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 511))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosPortQueueInterfaceCancel.setStatus('current')
if mibBuilder.loadTexts: hwhqosPortQueueInterfaceCancel.setDescription('The value of this object identifies the name of an interface.')
hwhqosPortQueueCosValueCancel = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 67, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8))).clone(namedValues=NamedValues(("portqueueBE", 1), ("portqueueAF1", 2), ("portqueueAF2", 3), ("portqueueAF3", 4), ("portqueueAF4", 5), ("portqueueEF", 6), ("portqueueCS6", 7), ("portqueueCS7", 8)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosPortQueueCosValueCancel.setStatus('current')
if mibBuilder.loadTexts: hwhqosPortQueueCosValueCancel.setDescription(' The value of this object identifies the queue type. The value can be any of the following: PORTQUEUEBE(1), PORTQUEUEAF1(2), PORTQUEUEAF2(3), PORTQUEUEAF3(4), PORTQUEUEAF4(5), PORTQUEUEEF(6), PORTQUEUECS6(7), PORTQUEUECS7(8) ')
hwhqosPortQueueDiscardTypeCancel = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 67, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("discardpacket", 1), ("discardbyte", 2), ("discardpacketratio", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosPortQueueDiscardTypeCancel.setStatus('current')
if mibBuilder.loadTexts: hwhqosPortQueueDiscardTypeCancel.setDescription('The value of this object identifies the discard type. The value can be any of the following: DISCARD-PACKET(1), DISCARD-BYTE(2), DISCARD-PACKET-RATIO(3) ')
hwhqosPortQueueDiscardValueCancel = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 67, 1, 4), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 511))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosPortQueueDiscardValueCancel.setStatus('current')
if mibBuilder.loadTexts: hwhqosPortQueueDiscardValueCancel.setDescription('This object indicates the discard value.')
# --- hwhqosIfUserQueueTable (OID ...25.132.1.68) ---
# Per-interface user-queue configuration, indexed by (ifIndex, ACL type,
# ACL ID1, ACL ID2).  Column .5 is absent from the generated output
# (columns jump from .4 to .6) — presumably omitted in the MIB source.
hwhqosIfUserQueueTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 68), )
if mibBuilder.loadTexts: hwhqosIfUserQueueTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosIfUserQueueTable.setDescription('Description.')
hwhqosIfUserQueueEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 68, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosIfUserQueueIfIndex"), (0, "HUAWEI-HQOS-MIB", "hwhqosIfUserQueueAclType"), (0, "HUAWEI-HQOS-MIB", "hwhqosIfUserQueueAclID1"), (0, "HUAWEI-HQOS-MIB", "hwhqosIfUserQueueAclID2"))
if mibBuilder.loadTexts: hwhqosIfUserQueueEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosIfUserQueueEntry.setDescription('Description.')
# Index columns .1-.4 (no max-access set — index-only objects).
hwhqosIfUserQueueIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 68, 1, 1), Integer32())
if mibBuilder.loadTexts: hwhqosIfUserQueueIfIndex.setStatus('current')
if mibBuilder.loadTexts: hwhqosIfUserQueueIfIndex.setDescription('Description.')
hwhqosIfUserQueueAclType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 68, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("ipv4", 1), ("ipv6", 2))))
if mibBuilder.loadTexts: hwhqosIfUserQueueAclType.setStatus('current')
if mibBuilder.loadTexts: hwhqosIfUserQueueAclType.setDescription('Description.')
hwhqosIfUserQueueAclID1 = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 68, 1, 3), Integer32())
if mibBuilder.loadTexts: hwhqosIfUserQueueAclID1.setStatus('current')
if mibBuilder.loadTexts: hwhqosIfUserQueueAclID1.setDescription('Description.')
hwhqosIfUserQueueAclID2 = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 68, 1, 4), Integer32())
if mibBuilder.loadTexts: hwhqosIfUserQueueAclID2.setStatus('current')
if mibBuilder.loadTexts: hwhqosIfUserQueueAclID2.setDescription('Description.')
# Column .6: PIR in the range 64..10000000 (units not stated in this chunk).
hwhqosIfUserQueuePir = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 68, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(64, 10000000))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosIfUserQueuePir.setStatus('current')
if mibBuilder.loadTexts: hwhqosIfUserQueuePir.setDescription('Description.')
# Columns .7/.8: flow-queue and flow-mapping profile names (1..31 chars).
hwhqosIfUserQueueFlowQueueProfileName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 68, 1, 7), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 31))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwhqosIfUserQueueFlowQueueProfileName.setStatus('current')
if mibBuilder.loadTexts: hwhqosIfUserQueueFlowQueueProfileName.setDescription('Description.')
hwhqosIfUserQueueFlowMappingProfileName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 68, 1, 8), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 31))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosIfUserQueueFlowMappingProfileName.setStatus('current')
if mibBuilder.loadTexts: hwhqosIfUserQueueFlowMappingProfileName.setDescription('Description.')
# Column .9: RowStatus for row lifecycle management.
hwhqosIfUserQueueRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 68, 1, 9), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosIfUserQueueRowStatus.setStatus('current')
if mibBuilder.loadTexts: hwhqosIfUserQueueRowStatus.setDescription('Description.')
# --- hwhqosIfUserQueueStatisticsTable (OID ...25.132.1.69) ---
# Per-interface user-queue statistics, indexed like the config table above
# plus a queue index (be..cs7, 255=total).  Pass/drop counters are Counter64.
hwhqosIfUserQueueStatisticsTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 69), )
if mibBuilder.loadTexts: hwhqosIfUserQueueStatisticsTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosIfUserQueueStatisticsTable.setDescription('Description.')
hwhqosIfUserQueueStatisticsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 69, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosIfUserQueueStatisticsIfIndex"), (0, "HUAWEI-HQOS-MIB", "hwhqosIfUserQueueStatisticsAclType"), (0, "HUAWEI-HQOS-MIB", "hwhqosIfUserQueueStatisticsAclID1"), (0, "HUAWEI-HQOS-MIB", "hwhqosIfUserQueueStatisticsAclID2"), (0, "HUAWEI-HQOS-MIB", "hwhqosIfUserQueueStatisticsQueueIndex"))
if mibBuilder.loadTexts: hwhqosIfUserQueueStatisticsEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosIfUserQueueStatisticsEntry.setDescription('Description.')
# Index columns .1-.5.
hwhqosIfUserQueueStatisticsIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 69, 1, 1), Integer32())
if mibBuilder.loadTexts: hwhqosIfUserQueueStatisticsIfIndex.setStatus('current')
if mibBuilder.loadTexts: hwhqosIfUserQueueStatisticsIfIndex.setDescription('Description.')
hwhqosIfUserQueueStatisticsAclType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 69, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("ipv4", 1), ("ipv6", 2))))
if mibBuilder.loadTexts: hwhqosIfUserQueueStatisticsAclType.setStatus('current')
if mibBuilder.loadTexts: hwhqosIfUserQueueStatisticsAclType.setDescription('Description.')
hwhqosIfUserQueueStatisticsAclID1 = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 69, 1, 3), Integer32())
if mibBuilder.loadTexts: hwhqosIfUserQueueStatisticsAclID1.setStatus('current')
if mibBuilder.loadTexts: hwhqosIfUserQueueStatisticsAclID1.setDescription('Description.')
hwhqosIfUserQueueStatisticsAclID2 = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 69, 1, 4), Integer32())
if mibBuilder.loadTexts: hwhqosIfUserQueueStatisticsAclID2.setStatus('current')
if mibBuilder.loadTexts: hwhqosIfUserQueueStatisticsAclID2.setDescription('Description.')
hwhqosIfUserQueueStatisticsQueueIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 69, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 255))).clone(namedValues=NamedValues(("be", 1), ("af1", 2), ("af2", 3), ("af3", 4), ("af4", 5), ("ef", 6), ("cs6", 7), ("cs7", 8), ("total", 255))))
if mibBuilder.loadTexts: hwhqosIfUserQueueStatisticsQueueIndex.setStatus('current')
if mibBuilder.loadTexts: hwhqosIfUserQueueStatisticsQueueIndex.setDescription('Description.')
# Counter columns .6-.9: pass/drop packets and bytes.
hwhqosIfUserQueueStatisticsQueuePassPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 69, 1, 6), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosIfUserQueueStatisticsQueuePassPackets.setStatus('current')
if mibBuilder.loadTexts: hwhqosIfUserQueueStatisticsQueuePassPackets.setDescription('Description.')
hwhqosIfUserQueueStatisticsQueuePassBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 69, 1, 7), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosIfUserQueueStatisticsQueuePassBytes.setStatus('current')
if mibBuilder.loadTexts: hwhqosIfUserQueueStatisticsQueuePassBytes.setDescription('Description.')
hwhqosIfUserQueueStatisticsQueueDropPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 69, 1, 8), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosIfUserQueueStatisticsQueueDropPackets.setStatus('current')
if mibBuilder.loadTexts: hwhqosIfUserQueueStatisticsQueueDropPackets.setDescription('Description.')
hwhqosIfUserQueueStatisticsQueueDropBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 69, 1, 9), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosIfUserQueueStatisticsQueueDropBytes.setStatus('current')
if mibBuilder.loadTexts: hwhqosIfUserQueueStatisticsQueueDropBytes.setDescription('Description.')
# Column .10: write reset(1) to clear the counters; other(2) is a no-op value.
hwhqosIfUserQueueStatisticsReset = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 69, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("reset", 1), ("other", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwhqosIfUserQueueStatisticsReset.setStatus('current')
if mibBuilder.loadTexts: hwhqosIfUserQueueStatisticsReset.setDescription('Description.')
# --- hwhqosUserQueueShapeAllTrafficTable (OID ...25.132.1.70) ---
# Marker table (interface index + RowStatus only): a row's existence enables
# "shape all traffic" for that interface's user queue.
hwhqosUserQueueShapeAllTrafficTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 70), )
if mibBuilder.loadTexts: hwhqosUserQueueShapeAllTrafficTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueShapeAllTrafficTable.setDescription('hwhqosUserQueueShapeAllTrafficTable')
hwhqosUserQueueShapeAllTrafficEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 70, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosUserQueueShapeAllTrafficInterfaceIndex"))
if mibBuilder.loadTexts: hwhqosUserQueueShapeAllTrafficEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueShapeAllTrafficEntry.setDescription('hwhqosUserQueueShapeAllTrafficEntry')
hwhqosUserQueueShapeAllTrafficInterfaceIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 70, 1, 1), Integer32())
if mibBuilder.loadTexts: hwhqosUserQueueShapeAllTrafficInterfaceIndex.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueShapeAllTrafficInterfaceIndex.setDescription('hwhqosUserQueueShapeAllTrafficInterfaceIndex')
# Column .2: RowStatus.  "destroy[destroy[6]" typo is preserved from the MIB
# source; description strings are runtime data and are not edited here.
hwhqosUserQueueShapeAllTrafficRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 70, 1, 2), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosUserQueueShapeAllTrafficRowStatus.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueShapeAllTrafficRowStatus.setDescription(' Row status. The value ranges from 1 to 6 but usually 4 and 6 are used. createAndGo[4] - create a row. destroy[destroy[6] -delete a row. ')
# --- hwhqosUserGroupQueueInterfaceStatisticsTable (OID ...25.132.1.71) ---
# User-group-queue statistics per interface/direction/VLAN tuple, with a
# reset knob, pass/drop packet+byte counters, and their rate counterparts.
hwhqosUserGroupQueueInterfaceStatisticsTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 71), )
if mibBuilder.loadTexts: hwhqosUserGroupQueueInterfaceStatisticsTable.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueInterfaceStatisticsTable.setDescription(' hwhqosUserGroupQueueInterfaceStatisticsTable ')
hwhqosUserGroupQueueInterfaceStatisticsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 71, 1), ).setIndexNames((0, "HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueInterfaceStatisticsIfIndex"), (0, "HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueInterfaceStatisticsDirection"), (0, "HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueInterfaceStatisticsPevid"), (0, "HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueInterfaceStatisticsCevid"), (0, "HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueInterfaceStatisticsVlanid"), (0, "HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueInterfaceStatisticsQueueIndex"))
if mibBuilder.loadTexts: hwhqosUserGroupQueueInterfaceStatisticsEntry.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueInterfaceStatisticsEntry.setDescription(' hwhqosUserGroupQueueInterfaceStatisticsEntry ')
# Index columns .1-.6 (ifIndex, direction, PE vid, CE vid, vid, queue index).
hwhqosUserGroupQueueInterfaceStatisticsIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 71, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserGroupQueueInterfaceStatisticsIfIndex.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueInterfaceStatisticsIfIndex.setDescription('The value of this object identifies the index of an interface.')
hwhqosUserGroupQueueInterfaceStatisticsDirection = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 71, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("inbound", 1), ("outbound", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserGroupQueueInterfaceStatisticsDirection.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueInterfaceStatisticsDirection.setDescription('This object indicates the direction where a user group queue template is applied.')
hwhqosUserGroupQueueInterfaceStatisticsPevid = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 71, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4094))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserGroupQueueInterfaceStatisticsPevid.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueInterfaceStatisticsPevid.setDescription('The value of this object identifies the pevid of an interface.')
hwhqosUserGroupQueueInterfaceStatisticsCevid = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 71, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4094))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserGroupQueueInterfaceStatisticsCevid.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueInterfaceStatisticsCevid.setDescription('The value of this object identifies the cevid of an interface.')
hwhqosUserGroupQueueInterfaceStatisticsVlanid = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 71, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4094))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserGroupQueueInterfaceStatisticsVlanid.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueInterfaceStatisticsVlanid.setDescription('The value of this object identifies the vlanid of an interface.')
# Queue index enum: note total=9 here, unlike table .1.69 where total=255.
hwhqosUserGroupQueueInterfaceStatisticsQueueIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 71, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9))).clone(namedValues=NamedValues(("be", 1), ("af1", 2), ("af2", 3), ("af3", 4), ("af4", 5), ("ef", 6), ("cs6", 7), ("cs7", 8), ("total", 9)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserGroupQueueInterfaceStatisticsQueueIndex.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueInterfaceStatisticsQueueIndex.setDescription('hwhqosUserGroupQueueInterfaceStatisticsQueueIndex')
# Column .7: write reset(1) to clear statistics.
hwhqosUserGroupQueueInterfaceStatisticsReset = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 71, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1))).clone(namedValues=NamedValues(("reset", 1)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwhqosUserGroupQueueInterfaceStatisticsReset.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueInterfaceStatisticsReset.setDescription('This object indicates the resetting of statistics.')
# Columns .8-.11: cumulative pass/drop counters (Counter64).
hwhqosUserGroupQueueInterfaceStatisticsPassPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 71, 1, 8), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserGroupQueueInterfaceStatisticsPassPackets.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueInterfaceStatisticsPassPackets.setDescription('This object indicates the number of passed packets.')
hwhqosUserGroupQueueInterfaceStatisticsPassBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 71, 1, 9), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserGroupQueueInterfaceStatisticsPassBytes.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueInterfaceStatisticsPassBytes.setDescription('This object indicates the number of passed bytes.')
hwhqosUserGroupQueueInterfaceStatisticsDropPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 71, 1, 10), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserGroupQueueInterfaceStatisticsDropPackets.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueInterfaceStatisticsDropPackets.setDescription('This object indicates the number of discarded packets.')
hwhqosUserGroupQueueInterfaceStatisticsDropBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 71, 1, 11), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserGroupQueueInterfaceStatisticsDropBytes.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueInterfaceStatisticsDropBytes.setDescription('This object indicates the number of discarded bytes.')
# Columns .12-.15: rate variants of the counters above (also Counter64 in
# the generated output, per the MIB source).
hwhqosUserGroupQueueInterfaceStatisticsPassPacketsRate = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 71, 1, 12), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserGroupQueueInterfaceStatisticsPassPacketsRate.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueInterfaceStatisticsPassPacketsRate.setDescription('hwhqosUserGroupQueueInterfaceStatisticsPassPacketsRate.')
hwhqosUserGroupQueueInterfaceStatisticsPassBytesRate = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 71, 1, 13), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserGroupQueueInterfaceStatisticsPassBytesRate.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueInterfaceStatisticsPassBytesRate.setDescription('hwhqosUserGroupQueueInterfaceStatisticsPassBytesRate.')
hwhqosUserGroupQueueInterfaceStatisticsDropPacketsRate = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 71, 1, 14), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserGroupQueueInterfaceStatisticsDropPacketsRate.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueInterfaceStatisticsDropPacketsRate.setDescription('hwhqosUserGroupQueueInterfaceStatisticsDropPacketsRate.')
hwhqosUserGroupQueueInterfaceStatisticsDropBytesRate = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 1, 71, 1, 15), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserGroupQueueInterfaceStatisticsDropBytesRate.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueInterfaceStatisticsDropBytesRate.setDescription('hwhqosUserGroupQueueInterfaceStatisticsDropBytesRate.')
# --- hwhqosObjects (OID ...25.132.2): scalar varbinds for notifications ---
# Frame/slot/port identifiers of the alarmed HQoS user, plus the failing
# direction; all are read-only scalars referenced by the traps below.
hwhqosObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 2))
hwhqosUserFrameId = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 2, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserFrameId.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserFrameId.setDescription('The ID of the frame which the alarm device located.')
hwhqosUserSlotId = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 2, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserSlotId.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserSlotId.setDescription('The ID of the slot on which the alarmed HQoS user applied.')
hwhqosUserPortId = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 2, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserPortId.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserPortId.setDescription('The ID of port on which the alarmed HQoS user applied.')
hwhqosUserQueueApplyFailDirection = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 2, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("inbound", 1), ("outbound", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwhqosUserQueueApplyFailDirection.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueApplyFailDirection.setDescription('The direction of alarmed HQoS which user applied.')
# --- hwhqosTraps (OID ...25.132.3): notification definitions ---
# Each NotificationType lists its varbinds by (module, symbol) name; the
# referenced objects are defined earlier in this module or in ENTITY-MIB.
hwhqosTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 3))
# .1: user-queue discard alarm (frame/slot/port + per-drop packet count).
hwhqosUserQueueStatDiscardAlarmTrap = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 3, 1)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosUserFrameId"), ("HUAWEI-HQOS-MIB", "hwhqosUserSlotId"), ("HUAWEI-HQOS-MIB", "hwhqosUserPortId"), ("HUAWEI-HQOS-MIB", "hwhqosUserQueueStatPerDropPackets"))
if mibBuilder.loadTexts: hwhqosUserQueueStatDiscardAlarmTrap.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueStatDiscardAlarmTrap.setDescription('The user queue discard alarm trap.')
# .2/.3: PIR apply failure and its recovery ("sucess" spelling comes from the
# MIB source; it is a runtime string and is left untouched).
hwhqosUserQueueApplyPirFailAlarmTrap = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 3, 2)).setObjects(("ENTITY-MIB", "entPhysicalIndex"), ("ENTITY-MIB", "entPhysicalName"), ("HUAWEI-HQOS-MIB", "hwhqosUserQueueApplyFailDirection"))
if mibBuilder.loadTexts: hwhqosUserQueueApplyPirFailAlarmTrap.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueApplyPirFailAlarmTrap.setDescription('The user apply pir fail alarm trap.')
hwhqosUserQueueApplyPirSucessAlarmTrap = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 3, 3)).setObjects(("ENTITY-MIB", "entPhysicalIndex"), ("ENTITY-MIB", "entPhysicalName"), ("HUAWEI-HQOS-MIB", "hwhqosUserQueueApplyFailDirection"))
if mibBuilder.loadTexts: hwhqosUserQueueApplyPirSucessAlarmTrap.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueApplyPirSucessAlarmTrap.setDescription('The user apply pir sucess alarm trap.')
# .4: user-queue apply alarm (interface name varbind from table .1.65).
hwhqosUserQueueApplyAlarmTrap = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 3, 4)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosUserQueueApplyAlarmIfNetName"))
if mibBuilder.loadTexts: hwhqosUserQueueApplyAlarmTrap.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueApplyAlarmTrap.setDescription('The user-queue apply alarm trap.')
# .5/.6: port-queue discard alarm and its cancel, using the varbind columns
# from tables .1.66 and .1.67 respectively.
hwhqosPortQueueStatDiscardAlarmTrap = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 3, 5)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosPortQueueInterfaceTrap"), ("HUAWEI-HQOS-MIB", "hwhqosPortQueueCosValueTrap"), ("HUAWEI-HQOS-MIB", "hwhqosPortQueueDiscardTypeTrap"), ("HUAWEI-HQOS-MIB", "hwhqosPortQueueDiscardValueTrap"))
if mibBuilder.loadTexts: hwhqosPortQueueStatDiscardAlarmTrap.setStatus('current')
if mibBuilder.loadTexts: hwhqosPortQueueStatDiscardAlarmTrap.setDescription('The PortQueue Stat Discard alarm trap.')
hwhqosPortQueueStatDiscardAlarmCancelTrap = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 3, 6)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosPortQueueInterfaceCancel"), ("HUAWEI-HQOS-MIB", "hwhqosPortQueueCosValueCancel"), ("HUAWEI-HQOS-MIB", "hwhqosPortQueueDiscardTypeCancel"), ("HUAWEI-HQOS-MIB", "hwhqosPortQueueDiscardValueCancel"))
if mibBuilder.loadTexts: hwhqosPortQueueStatDiscardAlarmCancelTrap.setStatus('current')
if mibBuilder.loadTexts: hwhqosPortQueueStatDiscardAlarmCancelTrap.setDescription('The PortQueue Stat Discard Cancel alarm trap.')
# --- hwhqosConformance (OID ...25.132.4): compliance and object groups ---
# The `getattr(mibBuilder, 'version', ...) > (4, 4, 0)` guards are emitted by
# newer pysmi: ModuleCompliance/ObjectGroup.setStatus is only called on
# pysnmp builds that support it.  The group list may continue beyond this
# chunk, so only the statements visible here are documented.
hwhqosConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4))
hwhqosCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 1))
# Compliance statement tying agents to the user-queue stat and trap groups.
hwhqosUserQueueStatCompliances = ModuleCompliance((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 1, 1)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosUserQueueStatGroup"), ("HUAWEI-HQOS-MIB", "hwhqosUserQueueStatTrapGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hwhqosUserQueueStatCompliances = hwhqosUserQueueStatCompliances.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueStatCompliances.setDescription('The compliance statment.')
hwhqosGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2))
# Group .1: per-interface queue statistics objects.
hwhqosIfStatGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 1)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosQueueForwardPackets"), ("HUAWEI-HQOS-MIB", "hwhqosQueueForwardBytes"), ("HUAWEI-HQOS-MIB", "hwhqosQueueDropPackets"), ("HUAWEI-HQOS-MIB", "hwhqosQueueDropBytes"), ("HUAWEI-HQOS-MIB", "hwhqosQueueRemarkPackets"), ("HUAWEI-HQOS-MIB", "hwhqosQueueRemarkBytes"), ("HUAWEI-HQOS-MIB", "hwhqosSetZero"), ("HUAWEI-HQOS-MIB", "hwhqosQueueForwardPacketRate"), ("HUAWEI-HQOS-MIB", "hwhqosQueueForwardByteRate"), ("HUAWEI-HQOS-MIB", "hwhqosQueueDropPacketRate"), ("HUAWEI-HQOS-MIB", "hwhqosQueueDropByteRate"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hwhqosIfStatGroup = hwhqosIfStatGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosIfStatGroup.setDescription('Description.')
# Group .2: ATM PVC queue statistics.  NOTE: "hqhqos" (not "hwhqos") prefix
# is carried over verbatim from the MIB source.
hqhqosAtmPvcStatGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 2)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosAtmPvcQueueForwardPackets"), ("HUAWEI-HQOS-MIB", "hwhqosAtmPvcQueueForwardBytes"), ("HUAWEI-HQOS-MIB", "hwhqosAtmPvcQueueDropPackets"), ("HUAWEI-HQOS-MIB", "hwhqosAtmPvcQueueDropBytes"), ("HUAWEI-HQOS-MIB", "hwhqosAtmPvcQueueRemarkPackets"), ("HUAWEI-HQOS-MIB", "hwhqosAtmPvcQueueRemarkBytes"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hqhqosAtmPvcStatGroup = hqhqosAtmPvcStatGroup.setStatus('current')
if mibBuilder.loadTexts: hqhqosAtmPvcStatGroup.setDescription('Description.')
# Group .3: port-queue configuration objects.
hwhqosPortQueueGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 3)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosPortQueueArithmetic"), ("HUAWEI-HQOS-MIB", "hwhqosPortQueueWeightValue"), ("HUAWEI-HQOS-MIB", "hwhqosPortQueueShaValue"), ("HUAWEI-HQOS-MIB", "hwhqosPortQueueShaPercent"), ("HUAWEI-HQOS-MIB", "hwhqosPortQueueWredName"), ("HUAWEI-HQOS-MIB", "hwhqosPortQueuePbsValue"), ("HUAWEI-HQOS-MIB", "hwhqosPortQueueRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hwhqosPortQueueGroup = hwhqosPortQueueGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosPortQueueGroup.setDescription('Description.')
# Group .4: WRED color thresholds and discard percentages.
hwhqosWredGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 4)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosWredGreenLowLimit"), ("HUAWEI-HQOS-MIB", "hwhqosWredGreenHighLimit"), ("HUAWEI-HQOS-MIB", "hwhqosWredGreenDiscardPercent"), ("HUAWEI-HQOS-MIB", "hwhqosWredYellowLowLimit"), ("HUAWEI-HQOS-MIB", "hwhqosWredYellowHighLimit"), ("HUAWEI-HQOS-MIB", "hwhqosWredYellowDiscardPercent"), ("HUAWEI-HQOS-MIB", "hwhqosWredRedLowLimit"), ("HUAWEI-HQOS-MIB", "hwhqosWredRedHighLimit"), ("HUAWEI-HQOS-MIB", "hwhqosWredRedDiscardPercent"), ("HUAWEI-HQOS-MIB", "hwhqosWredRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hwhqosWredGroup = hwhqosWredGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosWredGroup.setDescription('Description.')
# Group .5: interface queue stat forward/drop counters.
hwhqosIfQueueStatGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 5)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosIfQueueStatForwardPackets"), ("HUAWEI-HQOS-MIB", "hwhqosIfQueueStatForwardBytes"), ("HUAWEI-HQOS-MIB", "hwhqosIfQueueStatDropBytes"), ("HUAWEI-HQOS-MIB", "hwhqosIfQueueStatDropPackets"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hwhqosIfQueueStatGroup = hwhqosIfQueueStatGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosIfQueueStatGroup.setDescription('Description.')
# Group .6: user-queue stat objects referenced by the compliance above.
hwhqosUserQueueStatGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 6)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosUserQueueStatForwardPackets"), ("HUAWEI-HQOS-MIB", "hwhqosUserQueueStatForwardBytes"), ("HUAWEI-HQOS-MIB", "hwhqosUserQueueStatDropPackets"), ("HUAWEI-HQOS-MIB", "hwhqosUserQueueStatDropBytes"), ("HUAWEI-HQOS-MIB", "hwhqosUserQueueStatReset"), ("HUAWEI-HQOS-MIB", "hwhqosUserQueueStatLastResetTime"), ("HUAWEI-HQOS-MIB", "hwhqosUserQueueStatPerDropPackets"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hwhqosUserQueueStatGroup = hwhqosUserQueueStatGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueStatGroup.setDescription('Description.')
hwhqosUserGroupQueueStatGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 7)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueStatReset"), ("HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueDropBytes"), ("HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueDropPackets"), ("HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueForwardBytes"), ("HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueForwardPackets"), ("HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueStatLastResetTime"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
hwhqosUserGroupQueueStatGroup = hwhqosUserGroupQueueStatGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueStatGroup.setDescription('Description.')
hwhqosObjectsGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 8)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosUserFrameId"), ("HUAWEI-HQOS-MIB", "hwhqosUserSlotId"), ("HUAWEI-HQOS-MIB", "hwhqosUserPortId"), ("HUAWEI-HQOS-MIB", "hwhqosUserQueueApplyFailDirection"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
hwhqosObjectsGroup = hwhqosObjectsGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosObjectsGroup.setDescription('Description.')
hwhqosUserQueueStatTrapGroup = NotificationGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 9)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosUserQueueStatDiscardAlarmTrap"), ("HUAWEI-HQOS-MIB", "hwhqosUserQueueApplyPirFailAlarmTrap"), ("HUAWEI-HQOS-MIB", "hwhqosUserQueueApplyPirSucessAlarmTrap"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
hwhqosUserQueueStatTrapGroup = hwhqosUserQueueStatTrapGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueStatTrapGroup.setDescription('The notification group defined for discard packets of a user`s queue.')
hwhqosVpnQoSTunnelStatGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 10)).setObjects(("HUAWEI-HQOS-MIB", "hwVPNHQoSTunnelIfIndex"), ("HUAWEI-HQOS-MIB", "hwVPNHQoSVPNType"), ("HUAWEI-HQOS-MIB", "hwVPNHQoSVPNValue"), ("HUAWEI-HQOS-MIB", "hwVPNHQoSPassBytes"), ("HUAWEI-HQOS-MIB", "hwVPNHQoSPassPackets"), ("HUAWEI-HQOS-MIB", "hwVPNHQoSDropPackets"), ("HUAWEI-HQOS-MIB", "hwVPNHQoSDropBytes"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
hwhqosVpnQoSTunnelStatGroup = hwhqosVpnQoSTunnelStatGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosVpnQoSTunnelStatGroup.setDescription('Description.')
hwhqosTunnelStatGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 11)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosTunnelIfIndex"), ("HUAWEI-HQOS-MIB", "hwhqosTunnelCosType"), ("HUAWEI-HQOS-MIB", "hwhqosTunnelVPNType"), ("HUAWEI-HQOS-MIB", "hwhqosTunnelVPNName"), ("HUAWEI-HQOS-MIB", "hwhqosTunnelPassBytes"), ("HUAWEI-HQOS-MIB", "hwhqosTunnelPassPackets"), ("HUAWEI-HQOS-MIB", "hwhqosTunnelDropBytes"), ("HUAWEI-HQOS-MIB", "hwhqosTunnelDropPackets"), ("HUAWEI-HQOS-MIB", "hwhqosTunnelPassedByteRate"), ("HUAWEI-HQOS-MIB", "hwhqosTunnelPassPacketRate"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
hwhqosTunnelStatGroup = hwhqosTunnelStatGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosTunnelStatGroup.setDescription('Description.')
hwhqosProfileGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 12)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosProfileName"), ("HUAWEI-HQOS-MIB", "hwhqosProfileRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
hwhqosProfileGroup = hwhqosProfileGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileGroup.setDescription('Description.')
hwhqosProfileSuppressionGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 13)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosSuppressionDirection"), ("HUAWEI-HQOS-MIB", "hwhqosSuppressionType"), ("HUAWEI-HQOS-MIB", "hwhqosSuppressionCirValue"), ("HUAWEI-HQOS-MIB", "hwhqosSuppressionCbsValue"), ("HUAWEI-HQOS-MIB", "hwhqosSuppressionRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
hwhqosProfileSuppressionGroup = hwhqosProfileSuppressionGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileSuppressionGroup.setDescription('Description.')
hwhqosProfileCarGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 14)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosProfileCarDirection"), ("HUAWEI-HQOS-MIB", "hwhqosProfileCarCirValue"), ("HUAWEI-HQOS-MIB", "hwhqosProfileCarPirValue"), ("HUAWEI-HQOS-MIB", "hwhqosProfileCarCbsValue"), ("HUAWEI-HQOS-MIB", "hwhqosProfileCarPbsValue"), ("HUAWEI-HQOS-MIB", "hwhqosProfileCarGreenAction"), ("HUAWEI-HQOS-MIB", "hwhqosProfileCarYellowAction"), ("HUAWEI-HQOS-MIB", "hwhqosProfileCarRedAction"), ("HUAWEI-HQOS-MIB", "hwhqosProfileCarRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
hwhqosProfileCarGroup = hwhqosProfileCarGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileCarGroup.setDescription('Description.')
hwhqosProfileUserQueueGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 15)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosProfileUserQueueDirection"), ("HUAWEI-HQOS-MIB", "hwhqosProfileUserQueueCirValue"), ("HUAWEI-HQOS-MIB", "hwhqosProfileUserQueuePirValue"), ("HUAWEI-HQOS-MIB", "hwhqosProfileUserQueueFlowQueueName"), ("HUAWEI-HQOS-MIB", "hwhqosProfileUserQueueMappingName"), ("HUAWEI-HQOS-MIB", "hwhqosProfileUserQueueGroupName"), ("HUAWEI-HQOS-MIB", "hwhqosProfileUserQueueServiceTemplateName"), ("HUAWEI-HQOS-MIB", "hwhqosProfileUserQueueRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
hwhqosProfileUserQueueGroup = hwhqosProfileUserQueueGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileUserQueueGroup.setDescription('Description.')
hwhqosProfileUserApplyGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 16)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosProfileInterfaceIndex"), ("HUAWEI-HQOS-MIB", "hwhqosProfileApplyDirection"), ("HUAWEI-HQOS-MIB", "hwhqosProfileApplyPevid"), ("HUAWEI-HQOS-MIB", "hwhqosProfileApplyCevid"), ("HUAWEI-HQOS-MIB", "hwhqosProfileApplyName"), ("HUAWEI-HQOS-MIB", "hwhqosProfileApplyIdentifier"), ("HUAWEI-HQOS-MIB", "hwhqosGroupName"), ("HUAWEI-HQOS-MIB", "hwhqosProfileApplyRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
hwhqosProfileUserApplyGroup = hwhqosProfileUserApplyGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileUserApplyGroup.setDescription('Description.')
hwhqosFlowMappingGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 17)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosFlowMappingName"), ("HUAWEI-HQOS-MIB", "hwhqosFlowMappingRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
hwhqosFlowMappingGroup = hwhqosFlowMappingGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosFlowMappingGroup.setDescription('Description.')
hwhqosFlowMappingCfgGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 18)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosFolwMappingCfgQueueCosValue"), ("HUAWEI-HQOS-MIB", "hwhqosFlowMappingCfgPortQueueCosValue"), ("HUAWEI-HQOS-MIB", "hwhqosFlowMappingCfgRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
hwhqosFlowMappingCfgGroup = hwhqosFlowMappingCfgGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosFlowMappingCfgGroup.setDescription('Description.')
hwhqosFlowQueueGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 19)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosFlowQueueName"), ("HUAWEI-HQOS-MIB", "hwhqosFlowQueueRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
hwhqosFlowQueueGroup = hwhqosFlowQueueGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosFlowQueueGroup.setDescription('Description.')
hwhqosFlowQueueCfgGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 20)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosFlowQueueCfgCosValue"), ("HUAWEI-HQOS-MIB", "hwhqosFlowQueueCfgType"), ("HUAWEI-HQOS-MIB", "hwhqosFlowQueueCfgWeightValue"), ("HUAWEI-HQOS-MIB", "hwhqosFlowQueueCfgShapingValue"), ("HUAWEI-HQOS-MIB", "hwhqosFlowQueueCfgShapingPercentageValue"), ("HUAWEI-HQOS-MIB", "hwhqosFlowQueueCfgPbsValue"), ("HUAWEI-HQOS-MIB", "hwhqosFlowQueueCfgWredName"), ("HUAWEI-HQOS-MIB", "hwhqosFlowQueueCfgRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
hwhqosFlowQueueCfgGroup = hwhqosFlowQueueCfgGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosFlowQueueCfgGroup.setDescription('Description.')
hwhqosFlowWredGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 21)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosFlowWredName"), ("HUAWEI-HQOS-MIB", "hwhqosFlowWredRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
hwhqosFlowWredGroup = hwhqosFlowWredGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosFlowWredGroup.setDescription('Description.')
hwhqosFlowWredColorGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 22)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosFlowWredColor"), ("HUAWEI-HQOS-MIB", "hwhqosFlowWredColorLowlimitPercentage"), ("HUAWEI-HQOS-MIB", "hwhqosFlowWredColorHighlimitPercentage"), ("HUAWEI-HQOS-MIB", "hwhqosFlowWredColorDiscardPercentage"), ("HUAWEI-HQOS-MIB", "hwhqosFlowWredColorRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
hwhqosFlowWredColorGroup = hwhqosFlowWredColorGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosFlowWredColorGroup.setDescription('Description.')
hwhqosUserGroupQueueGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 23)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueName"), ("HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueSlotNumber"), ("HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
hwhqosUserGroupQueueGroup = hwhqosUserGroupQueueGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueGroup.setDescription('Description.')
hwhqosUserGroupQueueShapingGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 24)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueShapingDirection"), ("HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueShapingValue"), ("HUAWEI-HQOS-MIB", "hwhqosUserGroupQueuePbsValue"), ("HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueShapingRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
hwhqosUserGroupQueueShapingGroup = hwhqosUserGroupQueueShapingGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueShapingGroup.setDescription('Description.')
hwhqosUserQueueGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 25)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosUserQueueInterfaceIndex"), ("HUAWEI-HQOS-MIB", "hwhqosUserQueueDirection"), ("HUAWEI-HQOS-MIB", "hwhqosUserQueueCirValue"), ("HUAWEI-HQOS-MIB", "hwhqosUserQueuePirValue"), ("HUAWEI-HQOS-MIB", "hwhqosUserQueueFlowQueueName"), ("HUAWEI-HQOS-MIB", "hwhqosUserQueueFlowMappingName"), ("HUAWEI-HQOS-MIB", "hwhqosUserQueueGroupName"), ("HUAWEI-HQOS-MIB", "hwhqosUserQueueServiceTemplateName"), ("HUAWEI-HQOS-MIB", "hwhqosUserQueueRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
hwhqosUserQueueGroup = hwhqosUserQueueGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueGroup.setDescription('Description.')
hwhqosBehaviorUserQueueGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 26)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosBehaviorName"), ("HUAWEI-HQOS-MIB", "hwhqosBehaviorCirValue"), ("HUAWEI-HQOS-MIB", "hwhqosBehaviorPirValue"), ("HUAWEI-HQOS-MIB", "hwhqosBehaviorFlowQueueName"), ("HUAWEI-HQOS-MIB", "hwhqosBehaviorFlowMappingName"), ("HUAWEI-HQOS-MIB", "hwhqosBehaviorGroupName"), ("HUAWEI-HQOS-MIB", "hwhqosBehaviorServiceTemplateName"), ("HUAWEI-HQOS-MIB", "hwhqosBehaviorUserQueueRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
hwhqosBehaviorUserQueueGroup = hwhqosBehaviorUserQueueGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosBehaviorUserQueueGroup.setDescription('Description.')
hwhqosBandwidthGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 27)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosBandwidthInterfaceIndex"), ("HUAWEI-HQOS-MIB", "hwhqosBandwidthValue"), ("HUAWEI-HQOS-MIB", "hwhqosBandwidthRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
hwhqosBandwidthGroup = hwhqosBandwidthGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosBandwidthGroup.setDescription('Description.')
hwhqosServiceTemplateGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 28)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosServiceTemplateName"), ("HUAWEI-HQOS-MIB", "hwhqosSlotNumber"), ("HUAWEI-HQOS-MIB", "hwhqosServiceTemplateRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
hwhqosServiceTemplateGroup = hwhqosServiceTemplateGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosServiceTemplateGroup.setDescription('Description.')
hwhqosNetworkHeaderLengthGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 29)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosNetworkHeaderLengthDirection"), ("HUAWEI-HQOS-MIB", "hwhqosNetWorkHeaderLengthValue"), ("HUAWEI-HQOS-MIB", "hwhqosNetWorkHeaderLengthRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
hwhqosNetworkHeaderLengthGroup = hwhqosNetworkHeaderLengthGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosNetworkHeaderLengthGroup.setDescription('Description.')
hwhqosServiceTemplateApplyGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 30)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosServiceTemplateApplyInterfaceIndex"), ("HUAWEI-HQOS-MIB", "hwhqosApplyServiceTemplateName"), ("HUAWEI-HQOS-MIB", "hwhqosServiceTemplateApplyRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
hwhqosServiceTemplateApplyGroup = hwhqosServiceTemplateApplyGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosServiceTemplateApplyGroup.setDescription('Description.')
hwhqosProfileUserQueueStatisticsGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 31)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosProfileUserQueueStatisticsInterfaceIndex"), ("HUAWEI-HQOS-MIB", "hwhqosProfileUserQueueStatisticsDirection"), ("HUAWEI-HQOS-MIB", "hwhqosProfileUserQueueStatisticsPevid"), ("HUAWEI-HQOS-MIB", "hwhqosProfileUserQueueStatisticsCevid"), ("HUAWEI-HQOS-MIB", "hwhqosProfileUserQueueStatisticsSlotNumber"), ("HUAWEI-HQOS-MIB", "hwhqosProfileUserQueueStatisticsQueueIndex"), ("HUAWEI-HQOS-MIB", "hwhqosProfileUserQueueStatisticsReset"), ("HUAWEI-HQOS-MIB", "hwhqosProfileUserQueueStatisticsPassPackets"), ("HUAWEI-HQOS-MIB", "hwhqosProfileUserQueueStatisticsPassBytes"), ("HUAWEI-HQOS-MIB", "hwhqosProfileUserQueueStatisticsDropPackets"), ("HUAWEI-HQOS-MIB", "hwhqosProfileUserQueueStatisticsDropBytes"), ("HUAWEI-HQOS-MIB", "hwhqosProfileUserQueueStatisticsPassPacketsRate"), ("HUAWEI-HQOS-MIB", "hwhqosProfileUserQueueStatisticsPassBytesRate"), ("HUAWEI-HQOS-MIB", "hwhqosProfileUserQueueStatisticsDropPacketsRate"), ("HUAWEI-HQOS-MIB", "hwhqosProfileUserQueueStatisticsDropBytesRate"), ("HUAWEI-HQOS-MIB", "hwhqosProfileUserQueueStatisticsTrafficTowardsThisInterface"), ("HUAWEI-HQOS-MIB", "hwhqosProfileUserQueueStatisticsConfiguredCir"), ("HUAWEI-HQOS-MIB", "hwhqosProfileUserQueueStatisticsConfiguredPir"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
hwhqosProfileUserQueueStatisticsGroup = hwhqosProfileUserQueueStatisticsGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileUserQueueStatisticsGroup.setDescription('Description.')
hwhqosProfileCarStatisticsGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 32)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosProfileCarStatisticsInterfaceIndex"), ("HUAWEI-HQOS-MIB", "hwhqosProfileCarStatisticsDirection"), ("HUAWEI-HQOS-MIB", "hwhqosProfileCarStatisticsPevid"), ("HUAWEI-HQOS-MIB", "hwhqosProfileCarStatisticsCevid"), ("HUAWEI-HQOS-MIB", "hwhqosProfileCarStatisticsType"), ("HUAWEI-HQOS-MIB", "hwhqosProfileCarStatisticsSlotNumber"), ("HUAWEI-HQOS-MIB", "hwhqosProfileCarStatisticsReset"), ("HUAWEI-HQOS-MIB", "hwhqosProfileCarStatisticsPassPackets"), ("HUAWEI-HQOS-MIB", "hwhqosProfileCarStatisticsPassBytes"), ("HUAWEI-HQOS-MIB", "hwhqosProfileCarStatisticsDropPackets"), ("HUAWEI-HQOS-MIB", "hwhqosProfileCarStatisticsDropBytes"), ("HUAWEI-HQOS-MIB", "hwhqosProfileCarStatisticsPassPacketsRate"), ("HUAWEI-HQOS-MIB", "hwhqosProfileCarStatisticsPassBytesRate"), ("HUAWEI-HQOS-MIB", "hwhqosProfileCarStatisticsDropPacketsRate"), ("HUAWEI-HQOS-MIB", "hwhqosProfileCarStatisticsDropBytesRate"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
hwhqosProfileCarStatisticsGroup = hwhqosProfileCarStatisticsGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileCarStatisticsGroup.setDescription('Description.')
hwhqosUserQueueStatisticsGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 33)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosUserQueueStatisticsInterfaceIndex"), ("HUAWEI-HQOS-MIB", "hwhqosUserQueueStatisticsDirection"), ("HUAWEI-HQOS-MIB", "hwhqosUserQueueStatisticsSlotNumber"), ("HUAWEI-HQOS-MIB", "hwhqosUserQueueStatisticsQueueIndex"), ("HUAWEI-HQOS-MIB", "hwhqosUserQueueStatisticsReset"), ("HUAWEI-HQOS-MIB", "hwhqosUserQueueStatisticsPassPackets"), ("HUAWEI-HQOS-MIB", "hwhqosUserQueueStatisticsPassBytes"), ("HUAWEI-HQOS-MIB", "hwhqosUserQueueStatisticsDropPackets"), ("HUAWEI-HQOS-MIB", "hwhqosUserQueueStatisticsDropBytes"), ("HUAWEI-HQOS-MIB", "hwhqosUserQueueStatisticsPassPacketsRate"), ("HUAWEI-HQOS-MIB", "hwhqosUserQueueStatisticsPassBytesRate"), ("HUAWEI-HQOS-MIB", "hwhqosUserQueueStatisticsDropPacketsRate"), ("HUAWEI-HQOS-MIB", "hwhqosUserQueueStatisticsDropBytesRate"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
hwhqosUserQueueStatisticsGroup = hwhqosUserQueueStatisticsGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueStatisticsGroup.setDescription('Description.')
hwhqosUserQueueClassifierStatisticsGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 34)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosUserQueueClassifierStatisticsInterfaceIndex"), ("HUAWEI-HQOS-MIB", "hwhqosUserQueueClassifierStatisticsDirection"), ("HUAWEI-HQOS-MIB", "hwhqosUserQueueClassifierStatisticsClassifierName"), ("HUAWEI-HQOS-MIB", "hwhqosUserQueueClassifierStatisticsSlotNumber"), ("HUAWEI-HQOS-MIB", "hwhqosUserQueueClassifierStatisticsQueueIndex"), ("HUAWEI-HQOS-MIB", "hwhqosUserQueueClassifierStatisticsReset"), ("HUAWEI-HQOS-MIB", "hwhqosUserQueueClassifierStatisticsPassPackets"), ("HUAWEI-HQOS-MIB", "hwhqosUserQueueClassifierStatisticsPassBytes"), ("HUAWEI-HQOS-MIB", "hwhqosUserQueueClassifierStatisticsDropPackets"), ("HUAWEI-HQOS-MIB", "hwhqosUserQueueClassifierStatisticsDropBytes"), ("HUAWEI-HQOS-MIB", "hwhqosUserQueueClassifierStatisticsPassPacketsRate"), ("HUAWEI-HQOS-MIB", "hwhqosUserQueueClassifierStatisticsPassBytesRate"), ("HUAWEI-HQOS-MIB", "hwhqosUserQueueClassifierStatisticsDropPacketsRate"), ("HUAWEI-HQOS-MIB", "hwhqosUserQueueClassifierStatisticsDropBytesRate"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
hwhqosUserQueueClassifierStatisticsGroup = hwhqosUserQueueClassifierStatisticsGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueClassifierStatisticsGroup.setDescription('Description.')
hwhqosUserGroupQueueStatisticsGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 35)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueStatisticsGroupName"), ("HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueStatisticsDirection"), ("HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueStatisticsSlotNumber"), ("HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueStatisticsQueueIndex"), ("HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueStatisticsReset"), ("HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueStatisticsPassPackets"), ("HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueStatisticsPassBytes"), ("HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueStatisticsDropPackets"), ("HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueStatisticsDropBytes"), ("HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueStatisticsPassPacketsRate"), ("HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueStatisticsPassBytesRate"), ("HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueStatisticsDropPacketsRate"), ("HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueStatisticsDropBytesRate"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
hwhqosUserGroupQueueStatisticsGroup = hwhqosUserGroupQueueStatisticsGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueStatisticsGroup.setDescription('Description.')
hwhqosFlowQueueShaperGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 36)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosFlowQueueShaperBE"), ("HUAWEI-HQOS-MIB", "hwhqosFlowQueueShaperAF1"), ("HUAWEI-HQOS-MIB", "hwhqosFlowQueueShaperAF2"), ("HUAWEI-HQOS-MIB", "hwhqosFlowQueueShaperAF3"), ("HUAWEI-HQOS-MIB", "hwhqosFlowQueueShaperAF4"), ("HUAWEI-HQOS-MIB", "hwhqosFlowQueueShaperEF"), ("HUAWEI-HQOS-MIB", "hwhqosFlowQueueShaperCS6"), ("HUAWEI-HQOS-MIB", "hwhqosFlowQueueShaperCS7"), ("HUAWEI-HQOS-MIB", "hwhqosFlowQueueShaperValue"), ("HUAWEI-HQOS-MIB", "hwhqosFlowQueueShaperRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
hwhqosFlowQueueShaperGroup = hwhqosFlowQueueShaperGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosFlowQueueShaperGroup.setDescription('Description.')
hwhqosWredQueueDepthGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 37)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosWredQueueDepthType"), ("HUAWEI-HQOS-MIB", "hwhqosWredQueueDepthWredName"), ("HUAWEI-HQOS-MIB", "hwhqosWredQueueDepthValue"), ("HUAWEI-HQOS-MIB", "hwhqosWredQueueDepthRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
hwhqosWredQueueDepthGroup = hwhqosWredQueueDepthGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosWredQueueDepthGroup.setDescription('Description.')
hwhqosBandwidthCheck = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 38)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosBandwidthCheckInterfaceIndex"), ("HUAWEI-HQOS-MIB", "hwhqosBandwidthCheckValue"), ("HUAWEI-HQOS-MIB", "hwhqosBandwidthCheckRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
hwhqosBandwidthCheck = hwhqosBandwidthCheck.setStatus('current')
if mibBuilder.loadTexts: hwhqosBandwidthCheck.setDescription('Description.')
hwhqosServiceIdentifyPolicyGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 39)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosServiceIdentifyPolicyName"), ("HUAWEI-HQOS-MIB", "hwhqosServiceIdentifyPolicyRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
hwhqosServiceIdentifyPolicyGroup = hwhqosServiceIdentifyPolicyGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicyGroup.setDescription('Description.')
hwhqosServiceIdentifyPolicyApplyGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 40)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosServiceIdentifyPolicyApplyInterfaceIndex"), ("HUAWEI-HQOS-MIB", "hwhqosServiceIdentifyPolicyApplyName"), ("HUAWEI-HQOS-MIB", "hwhqosServiceIdentifyPolicyApplyRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
hwhqosServiceIdentifyPolicyApplyGroup = hwhqosServiceIdentifyPolicyApplyGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicyApplyGroup.setDescription('Description.')
hwhqosServiceIdentifyPolicyTypeGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 41)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosServiceIdentifyPolicyTypeValue"), ("HUAWEI-HQOS-MIB", "hwhqosServiceIdentifyPolicyTypeRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
hwhqosServiceIdentifyPolicyTypeGroup = hwhqosServiceIdentifyPolicyTypeGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicyTypeGroup.setDescription('Description.')
hwhqosServiceIdentifyPolicyVlanGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 42)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosServiceIdentifyPolicyVlanID"), ("HUAWEI-HQOS-MIB", "hwhqosServiceIdentifyPolicyVlanDomainName"), ("HUAWEI-HQOS-MIB", "hwhqosServiceIdentifyPolicyVlanRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
hwhqosServiceIdentifyPolicyVlanGroup = hwhqosServiceIdentifyPolicyVlanGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicyVlanGroup.setDescription('Description.')
hwhqosServiceIdentifyPolicy8021pGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 43)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosServiceIdentifyPolicy8021pCosID"), ("HUAWEI-HQOS-MIB", "hwhqosServiceIdentifyPolicy8021pDomainName"), ("HUAWEI-HQOS-MIB", "hwhqosServiceIdentifyPolicy8021pRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
hwhqosServiceIdentifyPolicy8021pGroup = hwhqosServiceIdentifyPolicy8021pGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicy8021pGroup.setDescription('Description.')
hwhqosServiceIdentifyPolicyDscpGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 44)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosServiceIdentifyPolicyDscpID"), ("HUAWEI-HQOS-MIB", "hwhqosServiceIdentifyPolicyDscpDomainName"), ("HUAWEI-HQOS-MIB", "hwhqosServiceIdentifyPolicyDscpRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
hwhqosServiceIdentifyPolicyDscpGroup = hwhqosServiceIdentifyPolicyDscpGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicyDscpGroup.setDescription('Description.')
hwhqosServiceIdentifyPolicyOption60Group = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 45)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosServiceIdentifyPolicyOption60"), ("HUAWEI-HQOS-MIB", "hwhqosServiceIdentifyPolicyOption60RowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
hwhqosServiceIdentifyPolicyOption60Group = hwhqosServiceIdentifyPolicyOption60Group.setStatus('current')
if mibBuilder.loadTexts: hwhqosServiceIdentifyPolicyOption60Group.setDescription('Description.')
hwhqosDomainRateLimitModeGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 46)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosDomainRateLimitModeDomainName"), ("HUAWEI-HQOS-MIB", "hwhqosDomainRateLimitModeRateLimitMode"), ("HUAWEI-HQOS-MIB", "hwhqosDomainRateLimitModeRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
hwhqosDomainRateLimitModeGroup = hwhqosDomainRateLimitModeGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosDomainRateLimitModeGroup.setDescription('Description.')
hwhqosDomainSessionGroupExcludeGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 47)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosDomainSessionGroupExcludeDomainName"), ("HUAWEI-HQOS-MIB", "hwhqosDomainSessionGroupExcludeRowStatus"), ("HUAWEI-HQOS-MIB", "hwhqosDomainSessionGroupExcludeMode"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
hwhqosDomainSessionGroupExcludeGroup = hwhqosDomainSessionGroupExcludeGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosDomainSessionGroupExcludeGroup.setDescription('Description.')
hwhqosDomainUserMaxSessionGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 48)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosDomainUserMaxSessionDomainName"), ("HUAWEI-HQOS-MIB", "hwhqosDomainUserMaxSessionNum"), ("HUAWEI-HQOS-MIB", "hwhqosDomainUserMaxSessionRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
hwhqosDomainUserMaxSessionGroup = hwhqosDomainUserMaxSessionGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosDomainUserMaxSessionGroup.setDescription('Description.')
hwhqosUpdateUseridProfileGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 49)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosUpdateUseridProfileUserid"), ("HUAWEI-HQOS-MIB", "hwhqosUpdateUseridProfileDirection"), ("HUAWEI-HQOS-MIB", "hwhqosUpdateUseridProfileName"), ("HUAWEI-HQOS-MIB", "hwhqosUpdateUseridProfileRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
hwhqosUpdateUseridProfileGroup = hwhqosUpdateUseridProfileGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosUpdateUseridProfileGroup.setDescription('Description.')
# --- SMIv2 conformance groups (auto-generated from HUAWEI-HQOS-MIB) ---
# Each ObjectGroup/NotificationGroup below enumerates the MIB objects (by
# module name + symbol name) that make up one conformance unit under the
# hwhqosGroups OID subtree (1.3.6.1.4.1.2011.5.25.132.4.2).
# NOTE(review): the repeated `getattr(mibBuilder, 'version', ...) > (4, 4, 0)`
# guard appears to skip the reassignment on older pysnmp releases, presumably
# because setStatus() only returns the object on newer versions — confirm
# against the pysnmp release used before editing this pattern.
hwhqosDomainUserPriorityGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 50)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosDomainUserPriorityDomainName"), ("HUAWEI-HQOS-MIB", "hwhqosDomainUserPriorityDirection"), ("HUAWEI-HQOS-MIB", "hwhqosDomainUserPriorityRowStatus"), ("HUAWEI-HQOS-MIB", "hwhqosDomainUserPriorityValue"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hwhqosDomainUserPriorityGroup = hwhqosDomainUserPriorityGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosDomainUserPriorityGroup.setDescription('Description.')
hwhqosUseridStatGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 52)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosUseridStatUserid"), ("HUAWEI-HQOS-MIB", "hwhqosUseridStatQueue"), ("HUAWEI-HQOS-MIB", "hwhqosUseridStatDirection"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hwhqosUseridStatGroup = hwhqosUseridStatGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosUseridStatGroup.setDescription('Description.')
hwhqosFatherUserGroupQueueApplyGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 55)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosFatherUserGroupQueueApplyInterfaceIndex"), ("HUAWEI-HQOS-MIB", "hwhqosFatherUserGroupQueueApplyDirection"), ("HUAWEI-HQOS-MIB", "hwhqosFatherUserGroupQueueApplyName"), ("HUAWEI-HQOS-MIB", "hwhqosFatherUserGroupQueueApplyRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hwhqosFatherUserGroupQueueApplyGroup = hwhqosFatherUserGroupQueueApplyGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosFatherUserGroupQueueApplyGroup.setDescription('Description.')
hwhqosUserGroupQueueApplyGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 56)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueApplyInterfaceIndex"), ("HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueApplyDirection"), ("HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueApplyPeVid"), ("HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueApplyCeVid"), ("HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueApplyName"), ("HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueApplyEachvlan"), ("HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueApplyRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hwhqosUserGroupQueueApplyGroup = hwhqosUserGroupQueueApplyGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueApplyGroup.setDescription(' ')
hwhqosProfileApplyDomainGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 57)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosProfileApplyDomainName"), ("HUAWEI-HQOS-MIB", "hwhqosProfileApplyDomainDirection"), ("HUAWEI-HQOS-MIB", "hwhqosProfileApplyDomainProfileName"), ("HUAWEI-HQOS-MIB", "hwhqosProfileApplyDomainRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hwhqosProfileApplyDomainGroup = hwhqosProfileApplyDomainGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileApplyDomainGroup.setDescription(' ')
hwhqosProfileApplyUserVlanGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 58)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosProfileApplyUserVlanInterfaceIndex"), ("HUAWEI-HQOS-MIB", "hwhqosProfileApplyUserVlanDirection"), ("HUAWEI-HQOS-MIB", "hwhqosProfileApplyUserVlanPeVid"), ("HUAWEI-HQOS-MIB", "hwhqosProfileApplyUserVlanCeVid"), ("HUAWEI-HQOS-MIB", "hwhqosProfileApplyUserVlanProfileName"), ("HUAWEI-HQOS-MIB", "hwhqosProfileApplyUserVlanEach"), ("HUAWEI-HQOS-MIB", "hwhqosProfileApplyUserVlanRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hwhqosProfileApplyUserVlanGroup = hwhqosProfileApplyUserVlanGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileApplyUserVlanGroup.setDescription(' ')
hwhqosLinkAdjRemoteEnableGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 59)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosLinkAdjRemoteEnableType"), ("HUAWEI-HQOS-MIB", "hwhqosLinkAdjRemoteEnable"), ("HUAWEI-HQOS-MIB", "hwhqosLinkAdjRemoteEnableSlot"), ("HUAWEI-HQOS-MIB", "hwhqosLinkAdjRemoteEnableRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hwhqosLinkAdjRemoteEnableGroup = hwhqosLinkAdjRemoteEnableGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosLinkAdjRemoteEnableGroup.setDescription(' ')
hwhqosLinkAdjShapingModeGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 60)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosLinkAdjShapingModeType"), ("HUAWEI-HQOS-MIB", "hwhqosLinkAdjShapingMode"), ("HUAWEI-HQOS-MIB", "hwhqosLinkAdjShapingModeRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hwhqosLinkAdjShapingModeGroup = hwhqosLinkAdjShapingModeGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosLinkAdjShapingModeGroup.setDescription(' ')
hwhqosLinkAdjRemoteGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 61)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosLinkAdjRemoteType"), ("HUAWEI-HQOS-MIB", "hwhqosLinkAdjRemoteValue"), ("HUAWEI-HQOS-MIB", "hwhqosLinkAdjRemoteRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hwhqosLinkAdjRemoteGroup = hwhqosLinkAdjRemoteGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosLinkAdjRemoteGroup.setDescription(' ')
hwhqosLinkAdjLocalTableGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 62)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosLinkAdjLocalSlotNumber"), ("HUAWEI-HQOS-MIB", "hwhqosLinkAdjLocalValue"), ("HUAWEI-HQOS-MIB", "hwhqosLinkAdjLocalRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hwhqosLinkAdjLocalTableGroup = hwhqosLinkAdjLocalTableGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosLinkAdjLocalTableGroup.setDescription(' ')
hwhqosLinkAdjExcludeGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 63)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosLinkAdjExcludeSlotNumber"), ("HUAWEI-HQOS-MIB", "hwhqosLinkAdjExcludeEnable"), ("HUAWEI-HQOS-MIB", "hwhqosLinkAdjExcludeRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hwhqosLinkAdjExcludeGroup = hwhqosLinkAdjExcludeGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosLinkAdjExcludeGroup.setDescription(' ')
hwhqosProfileInfoGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 64)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosProfileInfoName"), ("HUAWEI-HQOS-MIB", "hwhqosProfileInfoDescription"), ("HUAWEI-HQOS-MIB", "hwhqosProfileInfoRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hwhqosProfileInfoGroup = hwhqosProfileInfoGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileInfoGroup.setDescription('Description.')
hwhqosUserGroupQueueTemplateGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 65)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueTemplateName"), ("HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueTemplateModeTemplate"), ("HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueTemplateRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hwhqosUserGroupQueueTemplateGroup = hwhqosUserGroupQueueTemplateGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueTemplateGroup.setDescription('Description.')
hwhqosProfileWeightGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 66)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosProfileWeightDirection"), ("HUAWEI-HQOS-MIB", "hwhqosProfileWeightValue"), ("HUAWEI-HQOS-MIB", "hwhqosProfileWeightRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hwhqosProfileWeightGroup = hwhqosProfileWeightGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosProfileWeightGroup.setDescription('Description.')
# NotificationGroups: same pattern, but grouping trap (notification) symbols
# rather than scalar/columnar objects.
hwhqosUserQueueApplyAlarmGroup = NotificationGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 67)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosUserQueueApplyAlarmTrap"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hwhqosUserQueueApplyAlarmGroup = hwhqosUserQueueApplyAlarmGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserQueueApplyAlarmGroup.setDescription('Description.')
hwhqosPortQueueStatDiscardAlarmTrapGroup = NotificationGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 68)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosPortQueueStatDiscardAlarmTrap"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hwhqosPortQueueStatDiscardAlarmTrapGroup = hwhqosPortQueueStatDiscardAlarmTrapGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosPortQueueStatDiscardAlarmTrapGroup.setDescription('Description.')
hwhqosPortQueueStatDiscarAlarmCancelGroup = NotificationGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 69)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosPortQueueStatDiscardAlarmCancelTrap"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hwhqosPortQueueStatDiscarAlarmCancelGroup = hwhqosPortQueueStatDiscarAlarmCancelGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosPortQueueStatDiscarAlarmCancelGroup.setDescription('Description.')
hwhqosUserGroupQueueInterfaceStatisticsGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 132, 4, 2, 70)).setObjects(("HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueInterfaceStatisticsIfIndex"), ("HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueInterfaceStatisticsDirection"), ("HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueInterfaceStatisticsPevid"), ("HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueInterfaceStatisticsCevid"), ("HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueInterfaceStatisticsVlanid"), ("HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueInterfaceStatisticsQueueIndex"), ("HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueInterfaceStatisticsReset"), ("HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueInterfaceStatisticsPassPackets"), ("HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueInterfaceStatisticsPassBytes"), ("HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueInterfaceStatisticsDropPackets"), ("HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueInterfaceStatisticsDropBytes"), ("HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueInterfaceStatisticsPassPacketsRate"), ("HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueInterfaceStatisticsPassBytesRate"), ("HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueInterfaceStatisticsDropPacketsRate"), ("HUAWEI-HQOS-MIB", "hwhqosUserGroupQueueInterfaceStatisticsDropBytesRate"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hwhqosUserGroupQueueInterfaceStatisticsGroup = hwhqosUserGroupQueueInterfaceStatisticsGroup.setStatus('current')
if mibBuilder.loadTexts: hwhqosUserGroupQueueInterfaceStatisticsGroup.setDescription('Description.')
mibBuilder.exportSymbols("HUAWEI-HQOS-MIB", hwhqosFlowMappingCfgRowStatus=hwhqosFlowMappingCfgRowStatus, hwhqosFlowWredColorHighlimitPercentage=hwhqosFlowWredColorHighlimitPercentage, hwhqosUserQueueApplyAlarmEntry=hwhqosUserQueueApplyAlarmEntry, hwhqosFlowQueueCfgRowStatus=hwhqosFlowQueueCfgRowStatus, hwhqosBandwidthCheckInterfaceIndex=hwhqosBandwidthCheckInterfaceIndex, hwhqosWredRedLowLimit=hwhqosWredRedLowLimit, hwhqosLinkAdjLocalTable=hwhqosLinkAdjLocalTable, hwhqosLinkAdjRemoteEntry=hwhqosLinkAdjRemoteEntry, hwhqosProfileUserQueueGroupName=hwhqosProfileUserQueueGroupName, hwhqosProfileUserQueueStatisticsGroup=hwhqosProfileUserQueueStatisticsGroup, hwhqosProfileApplyDomainName=hwhqosProfileApplyDomainName, hwhqosFlowWredColorRowStatus=hwhqosFlowWredColorRowStatus, hwhqosUserQueueClassifierStatisticsEntry=hwhqosUserQueueClassifierStatisticsEntry, hwhqosUserQueueStatisticsPassPackets=hwhqosUserQueueStatisticsPassPackets, hwhqosUpdateUseridProfileEntry=hwhqosUpdateUseridProfileEntry, hwhqosIfQueueStatTable=hwhqosIfQueueStatTable, hwhqosUserGroupQueueInterfaceStatisticsEntry=hwhqosUserGroupQueueInterfaceStatisticsEntry, hwhqosProfileUserQueueStatisticsQueueIndex=hwhqosProfileUserQueueStatisticsQueueIndex, hwhqosSuppressionCbsValue=hwhqosSuppressionCbsValue, hwhqosTunnelDropBytes=hwhqosTunnelDropBytes, hwhqosObjects=hwhqosObjects, hwhqosProfileUserQueueEntry=hwhqosProfileUserQueueEntry, hwhqosIfUserQueueStatisticsAclID1=hwhqosIfUserQueueStatisticsAclID1, hwhqosUserQueueStatisticsPassPacketsRate=hwhqosUserQueueStatisticsPassPacketsRate, hwhqosTMScheduleModeTable=hwhqosTMScheduleModeTable, hwhqosUseridStatDropPackets=hwhqosUseridStatDropPackets, hwhqosCompliances=hwhqosCompliances, hwhqosUserGroupQueueShapingDirection=hwhqosUserGroupQueueShapingDirection, hwhqosFlowQueueCfgEntry=hwhqosFlowQueueCfgEntry, hwhqosProfileCarTable=hwhqosProfileCarTable, hwhqosProfileUserQueueRowStatus=hwhqosProfileUserQueueRowStatus, 
hwhqosProfileWeightRowStatus=hwhqosProfileWeightRowStatus, hwhqosProfileApplyDomainGroup=hwhqosProfileApplyDomainGroup, hwhqosLinkAdjRemoteEnableRowStatus=hwhqosLinkAdjRemoteEnableRowStatus, hqhqosAtmPvcStatGroup=hqhqosAtmPvcStatGroup, hwhqosWredTable=hwhqosWredTable, hwhqosUserQueueStatisticsPassBytesRate=hwhqosUserQueueStatisticsPassBytesRate, hwhqosUserGroupQueueDropPackets=hwhqosUserGroupQueueDropPackets, hwhqosGroups=hwhqosGroups, hwhqosUserGroupQueueStatDirection=hwhqosUserGroupQueueStatDirection, hwhqosBehaviorPirValue=hwhqosBehaviorPirValue, hwhqosUserGroupQueueStatisticsGroup=hwhqosUserGroupQueueStatisticsGroup, hwhqosTunnelStatisticsEntry=hwhqosTunnelStatisticsEntry, hwhqosServiceTemplateRowStatus=hwhqosServiceTemplateRowStatus, hwhqosProfileCarStatisticsSlotNumber=hwhqosProfileCarStatisticsSlotNumber, hwhqosUserGroupQueueInterfaceStatisticsDropPackets=hwhqosUserGroupQueueInterfaceStatisticsDropPackets, hwhqosIfUserQueueStatisticsEntry=hwhqosIfUserQueueStatisticsEntry, hwhqosBandwidthTable=hwhqosBandwidthTable, hwhqosUpdateUseridProfileTable=hwhqosUpdateUseridProfileTable, hwhqosServiceIdentifyPolicyApplyGroup=hwhqosServiceIdentifyPolicyApplyGroup, hwhqosPortQueueShaValue=hwhqosPortQueueShaValue, hwhqosProfileApplyDomainEntry=hwhqosProfileApplyDomainEntry, hwhqosProfileApplyEntry=hwhqosProfileApplyEntry, hwhqosUserQueueStatDropBytes=hwhqosUserQueueStatDropBytes, hwhqosUserQueueStatGroup=hwhqosUserQueueStatGroup, hwhqosUserGroupQueueShapingEntry=hwhqosUserGroupQueueShapingEntry, hwhqosUserGroupQueueInterfaceStatisticsDirection=hwhqosUserGroupQueueInterfaceStatisticsDirection, hwhqosProfileCarStatisticsGroup=hwhqosProfileCarStatisticsGroup, hwVPNHQoSTunnelIfIndex=hwVPNHQoSTunnelIfIndex, hwhqosServiceTemplateEntry=hwhqosServiceTemplateEntry, hwhqosServiceTemplateApplyInterfaceIndex=hwhqosServiceTemplateApplyInterfaceIndex, hwhqosTunnelPassedByteRate=hwhqosTunnelPassedByteRate, hwhqosWredGreenDiscardPercent=hwhqosWredGreenDiscardPercent, 
hwhqosUserQueueApplyAlarmTable=hwhqosUserQueueApplyAlarmTable, hwhqosBandwidthCheckEntry=hwhqosBandwidthCheckEntry, hwhqosWredQueueDepthTable=hwhqosWredQueueDepthTable, hwhqosServiceIdentifyPolicyOption60Entry=hwhqosServiceIdentifyPolicyOption60Entry, hwhqosUserQueueStatForwardBytes=hwhqosUserQueueStatForwardBytes, hwhqosDomainRateLimitModeDomainName=hwhqosDomainRateLimitModeDomainName, hwhqosBehaviorFlowQueueName=hwhqosBehaviorFlowQueueName, hwhqosUserBandwidthUserid=hwhqosUserBandwidthUserid, hwhqosUserGroupQueueTemplateName=hwhqosUserGroupQueueTemplateName, hwhqosProfileUserQueueStatisticsDirection=hwhqosProfileUserQueueStatisticsDirection, hwhqosUserGroupQueueTemplateTable=hwhqosUserGroupQueueTemplateTable, hwhqosLinkAdjRemoteEnableEntry=hwhqosLinkAdjRemoteEnableEntry, hwhqosUserBandwidthPir=hwhqosUserBandwidthPir, hwhqosIfQueueStatForwardPackets=hwhqosIfQueueStatForwardPackets, hwhqosServiceIdentifyPolicyApplyRowStatus=hwhqosServiceIdentifyPolicyApplyRowStatus, hwhqosUserQueueStatEntry=hwhqosUserQueueStatEntry, hwhqosServiceTemplateName=hwhqosServiceTemplateName, hwhqosServiceIdentifyPolicy8021pRowStatus=hwhqosServiceIdentifyPolicy8021pRowStatus, hwhqosBandwidthCheckTable=hwhqosBandwidthCheckTable, hwhqosUserQueueDirection=hwhqosUserQueueDirection, hwhqosFatherUserGroupQueueApplyGroup=hwhqosFatherUserGroupQueueApplyGroup, hwhqosLinkAdjExcludeRowStatus=hwhqosLinkAdjExcludeRowStatus, hwhqosUserQueueClassifierStatisticsDirection=hwhqosUserQueueClassifierStatisticsDirection, hwhqosAtmPvcStatEntry=hwhqosAtmPvcStatEntry, hwhqosUserGroupQueueInterfaceStatisticsPassPacketsRate=hwhqosUserGroupQueueInterfaceStatisticsPassPacketsRate, hwhqosLinkAdjRemoteEnableGroup=hwhqosLinkAdjRemoteEnableGroup, hwhqosUserGroupQueueStatisticsPassBytesRate=hwhqosUserGroupQueueStatisticsPassBytesRate, hwVPNHQoSDropPackets=hwVPNHQoSDropPackets, hwhqosIfUserQueueStatisticsQueuePassPackets=hwhqosIfUserQueueStatisticsQueuePassPackets, hwhqosQueueRemarkBytes=hwhqosQueueRemarkBytes, 
hwhqosQueueDropByteRate=hwhqosQueueDropByteRate, hwhqosServiceTemplateApplyEntry=hwhqosServiceTemplateApplyEntry, hwhqosQueueDropPacketRate=hwhqosQueueDropPacketRate, hwhqosUserQueueStatQueueIndex=hwhqosUserQueueStatQueueIndex, hwhqosProfileUserQueuePirValue=hwhqosProfileUserQueuePirValue, hwhqosUserQueueClassifierStatisticsPassPacketsRate=hwhqosUserQueueClassifierStatisticsPassPacketsRate, hwhqosUserQueueStatisticsDirection=hwhqosUserQueueStatisticsDirection, hwhqosBehaviorUserQueueGroup=hwhqosBehaviorUserQueueGroup, hwhqosQueueForwardPacketRate=hwhqosQueueForwardPacketRate, hwhqosProfileCarPirValue=hwhqosProfileCarPirValue, hwhqosIfUserQueueStatisticsQueueDropPackets=hwhqosIfUserQueueStatisticsQueueDropPackets, hwhqosServiceIdentifyPolicyTypeEntry=hwhqosServiceIdentifyPolicyTypeEntry, hwhqosProfileApplyUserVlanCeVid=hwhqosProfileApplyUserVlanCeVid, hwhqosUserBandwidthRowStatus=hwhqosUserBandwidthRowStatus, hwhqosProfileApplyDirection=hwhqosProfileApplyDirection, hwhqosFlowQueueCfgGroup=hwhqosFlowQueueCfgGroup, hwhqosProfileApplyCevid=hwhqosProfileApplyCevid, hwhqosStat=hwhqosStat, hwhqosProfileCarCbsValue=hwhqosProfileCarCbsValue, hwhqosWredGroup=hwhqosWredGroup, hwhqosUserGroupQueueInterfaceStatisticsTable=hwhqosUserGroupQueueInterfaceStatisticsTable, hwhqosServiceIdentifyPolicyOption60Table=hwhqosServiceIdentifyPolicyOption60Table, hwhqosIfUserQueueEntry=hwhqosIfUserQueueEntry, hwhqosServiceTemplateApplyRowStatus=hwhqosServiceTemplateApplyRowStatus, hwhqosAtmPvcVCI=hwhqosAtmPvcVCI, hwhqosIfUserQueueFlowQueueProfileName=hwhqosIfUserQueueFlowQueueProfileName, hwhqosIfUserQueueStatisticsIfIndex=hwhqosIfUserQueueStatisticsIfIndex, hwhqosFlowWredColorEntry=hwhqosFlowWredColorEntry, hwhqosWredQueueDepthType=hwhqosWredQueueDepthType, hwhqosDomainUserPriorityValue=hwhqosDomainUserPriorityValue, hwhqosProfileCarGreenAction=hwhqosProfileCarGreenAction, hwhqosLinkAdjExcludeEnable=hwhqosLinkAdjExcludeEnable, hwhqosUserGroupQueueEntry=hwhqosUserGroupQueueEntry, 
hwhqosProfileCarRowStatus=hwhqosProfileCarRowStatus, hwhqosPortQueueEntry=hwhqosPortQueueEntry, hwhqosUserGroupQueueStatisticsSlotNumber=hwhqosUserGroupQueueStatisticsSlotNumber, hwhqosPortQueueStatDiscardAlarmTrapEntry=hwhqosPortQueueStatDiscardAlarmTrapEntry, hwhqosProfileCarGroup=hwhqosProfileCarGroup, hwhqosWredQueueDepthGroup=hwhqosWredQueueDepthGroup, hwhqosProfileWeightTable=hwhqosProfileWeightTable, hwhqosProfileWeightDirection=hwhqosProfileWeightDirection, hwhqosUserQueueApplyAlarmTrap=hwhqosUserQueueApplyAlarmTrap, hwhqosProfileCarStatisticsPassBytesRate=hwhqosProfileCarStatisticsPassBytesRate, hwhqosUserGroupQueueRowStatus=hwhqosUserGroupQueueRowStatus, hwhqosLinkAdjRemoteRowStatus=hwhqosLinkAdjRemoteRowStatus, hwhqosProfileApplyUserVlanPeVid=hwhqosProfileApplyUserVlanPeVid, hwhqosPortQueueShaPercent=hwhqosPortQueueShaPercent, hwhqosUserQueueShapeAllTrafficTable=hwhqosUserQueueShapeAllTrafficTable, hwhqosUserQueueServiceTemplateName=hwhqosUserQueueServiceTemplateName, hwhqosUserQueueApplyAlarmGroup=hwhqosUserQueueApplyAlarmGroup, hwhqosProfileUserQueueStatisticsTrafficTowardsThisInterface=hwhqosProfileUserQueueStatisticsTrafficTowardsThisInterface, hwhqosTunnelPassPacketRate=hwhqosTunnelPassPacketRate, hwhqosIfUserQueueStatisticsQueueIndex=hwhqosIfUserQueueStatisticsQueueIndex, hwhqosProfileApplyIdentifier=hwhqosProfileApplyIdentifier, hwhqosProfileUserQueueFlowQueueName=hwhqosProfileUserQueueFlowQueueName, hwhqosUserGroupQueueTemplateModeTemplate=hwhqosUserGroupQueueTemplateModeTemplate, hwhqosProfileCarStatisticsPassBytes=hwhqosProfileCarStatisticsPassBytes, hwhqosUserGroupQueueGroup=hwhqosUserGroupQueueGroup, hwhqosFlowQueueCfgType=hwhqosFlowQueueCfgType, hwhqosLinkAdjRemoteValue=hwhqosLinkAdjRemoteValue, hwhqosTunnelCosType=hwhqosTunnelCosType, hwhqosBandwidthEntry=hwhqosBandwidthEntry, hwhqosNetworkHeaderLengthGroup=hwhqosNetworkHeaderLengthGroup, hwhqosFatherUserGroupQueueApplyRowStatus=hwhqosFatherUserGroupQueueApplyRowStatus, 
hwhqosFlowQueueShaperCS6=hwhqosFlowQueueShaperCS6, hwhqosProfileUserQueueGroup=hwhqosProfileUserQueueGroup, hwhqosLinkAdjRemoteInterfaceIndex=hwhqosLinkAdjRemoteInterfaceIndex, hwhqosFlowQueueCfgShapingPercentageValue=hwhqosFlowQueueCfgShapingPercentageValue, hwVPNHQoSTunnelStatisticsTable=hwVPNHQoSTunnelStatisticsTable, hwhqosNetworkHeaderLengthTable=hwhqosNetworkHeaderLengthTable, hwhqosFlowMappingCfgEntry=hwhqosFlowMappingCfgEntry, hwhqosLinkAdjRemoteType=hwhqosLinkAdjRemoteType, hwhqosNetworkHeaderLengthDirection=hwhqosNetworkHeaderLengthDirection, hwhqosUserQueueGroup=hwhqosUserQueueGroup, hwhqosProfileApplyUserVlanTable=hwhqosProfileApplyUserVlanTable, hwhqosWredQueueDepthEntry=hwhqosWredQueueDepthEntry, hwhqosLinkAdjShapingModeGroup=hwhqosLinkAdjShapingModeGroup, hwhqosFlowWredColorLowlimitPercentage=hwhqosFlowWredColorLowlimitPercentage, hwhqosUserGroupQueueApplyEachvlan=hwhqosUserGroupQueueApplyEachvlan, hwhqosUserQueueCirValue=hwhqosUserQueueCirValue, hwhqosProfileApplyDomainRowStatus=hwhqosProfileApplyDomainRowStatus, hwhqosUserQueueStatisticsDropBytes=hwhqosUserQueueStatisticsDropBytes, hwhqosUserGroupQueueName=hwhqosUserGroupQueueName, PYSNMP_MODULE_ID=hwHQOS, hwhqosProfileUserQueueStatisticsReset=hwhqosProfileUserQueueStatisticsReset, hwhqosUserGroupQueueStatTable=hwhqosUserGroupQueueStatTable, hwhqosProfileApplyDomainDirection=hwhqosProfileApplyDomainDirection, hwhqosAtmPvcQueueRemarkBytes=hwhqosAtmPvcQueueRemarkBytes, hwhqosDomainSessionGroupExcludeRowStatus=hwhqosDomainSessionGroupExcludeRowStatus, hwhqosFatherUserGroupQueueApplyName=hwhqosFatherUserGroupQueueApplyName, hwhqosWredEntry=hwhqosWredEntry, hwhqosUserBandwidthCir=hwhqosUserBandwidthCir, hwhqosUserGroupQueueTemplateRowStatus=hwhqosUserGroupQueueTemplateRowStatus, hwhqosPortQueueDiscardValueCancel=hwhqosPortQueueDiscardValueCancel, hwhqosServiceIdentifyPolicy8021pEntry=hwhqosServiceIdentifyPolicy8021pEntry, 
hwhqosProfileUserQueueStatisticsPassBytes=hwhqosProfileUserQueueStatisticsPassBytes, hwhqosProfileApplyUserVlanEntry=hwhqosProfileApplyUserVlanEntry, hwhqosNetworkHeaderLengthEntry=hwhqosNetworkHeaderLengthEntry, hwhqosServiceTemplateTable=hwhqosServiceTemplateTable, hwhqosServiceIdentifyPolicyVlanEntry=hwhqosServiceIdentifyPolicyVlanEntry, hwhqosServiceIdentifyPolicyVlanDomainName=hwhqosServiceIdentifyPolicyVlanDomainName, hwhqosFlowMappingCfgPortQueueCosValue=hwhqosFlowMappingCfgPortQueueCosValue, hwhqosPortQueueInterfaceCancel=hwhqosPortQueueInterfaceCancel, hwhqosFlowWredColorGroup=hwhqosFlowWredColorGroup, hwhqosIfUserQueueStatisticsTable=hwhqosIfUserQueueStatisticsTable, hwhqosPortQueueStatDiscardAlarmCancelEntry=hwhqosPortQueueStatDiscardAlarmCancelEntry, hwhqosDomainUserPriorityGroup=hwhqosDomainUserPriorityGroup, hwhqosFlowMappingName=hwhqosFlowMappingName, hwhqosBehaviorUserQueueEntry=hwhqosBehaviorUserQueueEntry, hwhqosFolwMappingCfgQueueCosValue=hwhqosFolwMappingCfgQueueCosValue, hwhqosFlowQueueName=hwhqosFlowQueueName, hwhqosServiceIdentifyPolicyDscpID=hwhqosServiceIdentifyPolicyDscpID, hwhqosUserGroupQueueForwardPackets=hwhqosUserGroupQueueForwardPackets, hwhqosAtmPvcQueueRemarkPackets=hwhqosAtmPvcQueueRemarkPackets, hwhqosPortQueueCosValueTrap=hwhqosPortQueueCosValueTrap, hwhqosSlotNumber=hwhqosSlotNumber, hwhqosUserGroupQueueStatisticsDropPacketsRate=hwhqosUserGroupQueueStatisticsDropPacketsRate, hwhqosTunnelDropPackets=hwhqosTunnelDropPackets, hwhqosFlowWredColorTable=hwhqosFlowWredColorTable, hwhqosUserGroupQueueShapingValue=hwhqosUserGroupQueueShapingValue, hwhqosFlowQueueShaperAF3=hwhqosFlowQueueShaperAF3, hwhqosProfileApplyRowStatus=hwhqosProfileApplyRowStatus, hwhqosUserQueueClassifierStatisticsReset=hwhqosUserQueueClassifierStatisticsReset, hwhqosProfileUserQueueStatisticsEntry=hwhqosProfileUserQueueStatisticsEntry, hwhqosUserPortId=hwhqosUserPortId, hwhqosUserGroupQueueTable=hwhqosUserGroupQueueTable, 
hwhqosAtmPvcUserLayer1=hwhqosAtmPvcUserLayer1, hwhqosNetWorkHeaderLengthRowStatus=hwhqosNetWorkHeaderLengthRowStatus, hwhqosQueueForwardBytes=hwhqosQueueForwardBytes, hwhqosProfileGroup=hwhqosProfileGroup, hwhqosProfileUserQueueStatisticsSlotNumber=hwhqosProfileUserQueueStatisticsSlotNumber, hwhqosBehaviorUserQueueRowStatus=hwhqosBehaviorUserQueueRowStatus, hwhqosProfileSuppressionTable=hwhqosProfileSuppressionTable, hwhqosIfUserQueueIfIndex=hwhqosIfUserQueueIfIndex, hwhqosFlowMappingRowStatus=hwhqosFlowMappingRowStatus, hwVPNHQoSVPNType=hwVPNHQoSVPNType, hwhqosBandwidthInterfaceIndex=hwhqosBandwidthInterfaceIndex, hwhqosUserQueueStatCompliances=hwhqosUserQueueStatCompliances, hwhqosDomainRateLimitModeGroup=hwhqosDomainRateLimitModeGroup, hwhqosDomainSessionGroupExcludeDirection=hwhqosDomainSessionGroupExcludeDirection, hwhqosLinkAdjRemoteTable=hwhqosLinkAdjRemoteTable, hwhqosFlowQueueShaperRowStatus=hwhqosFlowQueueShaperRowStatus, hwhqosAtmPvcDirection=hwhqosAtmPvcDirection, hwhqosServiceIdentifyPolicyTypeTable=hwhqosServiceIdentifyPolicyTypeTable, hwhqosProfileInfoEntry=hwhqosProfileInfoEntry, hwhqosFlowQueueCfgCosValue=hwhqosFlowQueueCfgCosValue, hwhqosUserQueueClassifierStatisticsDropPacketsRate=hwhqosUserQueueClassifierStatisticsDropPacketsRate, hwhqosLinkAdjLocalRowStatus=hwhqosLinkAdjLocalRowStatus, hwhqosUseridStatQueue=hwhqosUseridStatQueue, hwhqosUserQueueApplyFailDirection=hwhqosUserQueueApplyFailDirection, hwhqosProfileApplyUserVlanInterfaceIndex=hwhqosProfileApplyUserVlanInterfaceIndex, hwhqosAtmPvcUserLayer2=hwhqosAtmPvcUserLayer2, hwhqosPortQueueRowStatus=hwhqosPortQueueRowStatus, hwhqosServiceIdentifyPolicyEntry=hwhqosServiceIdentifyPolicyEntry, hwhqosProfileInfoDescription=hwhqosProfileInfoDescription, hwhqosProfileUserQueueStatisticsTable=hwhqosProfileUserQueueStatisticsTable, hwhqosProfileApplyUserVlanProfileName=hwhqosProfileApplyUserVlanProfileName, hwhqosUserQueueFlowQueueName=hwhqosUserQueueFlowQueueName, 
hwhqosNetWorkHeaderLengthValue=hwhqosNetWorkHeaderLengthValue, hwhqosAtmPvcQueueForwardPackets=hwhqosAtmPvcQueueForwardPackets, hwhqosIfUserQueueAclID2=hwhqosIfUserQueueAclID2)
mibBuilder.exportSymbols("HUAWEI-HQOS-MIB", hwhqosProfileCarRedAction=hwhqosProfileCarRedAction, hwhqosServiceIdentifyPolicyApplyInterfaceIndex=hwhqosServiceIdentifyPolicyApplyInterfaceIndex, hwhqosLinkAdjShapingModeType=hwhqosLinkAdjShapingModeType, hwhqosUserGroupQueueStatisticsDropBytesRate=hwhqosUserGroupQueueStatisticsDropBytesRate, hwVPNHQoSVPNValue=hwVPNHQoSVPNValue, hwhqosUserGroupQueuePbsValue=hwhqosUserGroupQueuePbsValue, hwhqosUserQueueRowStatus=hwhqosUserQueueRowStatus, hwhqosProfileUserQueueMappingName=hwhqosProfileUserQueueMappingName, hwhqosQueueDropPackets=hwhqosQueueDropPackets, hwhqosUserGroupQueueInterfaceStatisticsQueueIndex=hwhqosUserGroupQueueInterfaceStatisticsQueueIndex, hwhqosUserQueueStatReset=hwhqosUserQueueStatReset, hwVPNHQoSDropBytes=hwVPNHQoSDropBytes, hwhqosDomainRateLimitModeRateLimitMode=hwhqosDomainRateLimitModeRateLimitMode, hwhqosFlowQueueTable=hwhqosFlowQueueTable, hwhqosUserQueueStatDirection=hwhqosUserQueueStatDirection, hwhqosBehaviorName=hwhqosBehaviorName, hwhqosBandwidthRowStatus=hwhqosBandwidthRowStatus, hwhqosLinkAdjLocalTableGroup=hwhqosLinkAdjLocalTableGroup, hwhqosAtmPvcQueueIndex=hwhqosAtmPvcQueueIndex, hwhqosSetZero=hwhqosSetZero, hwhqosIfUserQueueAclID1=hwhqosIfUserQueueAclID1, hwhqosTMScheduleModeSimpleEnable=hwhqosTMScheduleModeSimpleEnable, hwhqosPortQueueStatDiscarAlarmCancelGroup=hwhqosPortQueueStatDiscarAlarmCancelGroup, hwhqosQueueIndex=hwhqosQueueIndex, hwhqosDomainUserMaxSessionRowStatus=hwhqosDomainUserMaxSessionRowStatus, hwhqosUserGroupQueueTemplateGroup=hwhqosUserGroupQueueTemplateGroup, hwhqosDomainUserMaxSessionTable=hwhqosDomainUserMaxSessionTable, hwhqosFlowMappingGroup=hwhqosFlowMappingGroup, hwhqosProfileCarCirValue=hwhqosProfileCarCirValue, hwhqosLinkAdjRemoteGroup=hwhqosLinkAdjRemoteGroup, hwhqosFlowQueueEntry=hwhqosFlowQueueEntry, hwhqosFatherUserGroupQueueApplyEntry=hwhqosFatherUserGroupQueueApplyEntry, hwhqosProfileCarEntry=hwhqosProfileCarEntry, 
hwhqosServiceIdentifyPolicyDscpEntry=hwhqosServiceIdentifyPolicyDscpEntry, hwhqosProfileApplyUserVlanEach=hwhqosProfileApplyUserVlanEach, hwhqosFlowWredColorDiscardPercentage=hwhqosFlowWredColorDiscardPercentage, hwhqosServiceTemplateApplyGroup=hwhqosServiceTemplateApplyGroup, hwhqosUserLayer2=hwhqosUserLayer2, hwhqosIfQueueStatDropPackets=hwhqosIfQueueStatDropPackets, hwhqosProfileCarYellowAction=hwhqosProfileCarYellowAction, hwhqosProfileUserQueueServiceTemplateName=hwhqosProfileUserQueueServiceTemplateName, hwhqosUserQueueTable=hwhqosUserQueueTable, hwhqosIfUserQueueFlowMappingProfileName=hwhqosIfUserQueueFlowMappingProfileName, hwhqosAtmPvcVPI=hwhqosAtmPvcVPI, hwhqosUpdateUseridProfileRowStatus=hwhqosUpdateUseridProfileRowStatus, hwhqosUserGroupQueueStatisticsPassPacketsRate=hwhqosUserGroupQueueStatisticsPassPacketsRate, hwhqosFlowWredColor=hwhqosFlowWredColor, hwhqosProfileInfoTable=hwhqosProfileInfoTable, hwhqosTunnelVPNName=hwhqosTunnelVPNName, hwhqosFlowQueueShaperAF4=hwhqosFlowQueueShaperAF4, hwhqosBandwidthCheckValue=hwhqosBandwidthCheckValue, hwhqosProfileCarStatisticsDropBytes=hwhqosProfileCarStatisticsDropBytes, hwhqosPortQueueDiscardTypeTrap=hwhqosPortQueueDiscardTypeTrap, hwhqosDirection=hwhqosDirection, hwhqosFlowMappingTable=hwhqosFlowMappingTable, hwhqosProfileApplyUserVlanRowStatus=hwhqosProfileApplyUserVlanRowStatus, hwhqosProfileCarDirection=hwhqosProfileCarDirection, hwhqosUserQueueStatisticsQueueIndex=hwhqosUserQueueStatisticsQueueIndex, hwhqosProfileCarStatisticsTable=hwhqosProfileCarStatisticsTable, hwhqosTMScheduleModeSlot=hwhqosTMScheduleModeSlot, hwhqosLinkAdjShapingModeInterfaceIndex=hwhqosLinkAdjShapingModeInterfaceIndex, hwhqosProfileUserQueueCirValue=hwhqosProfileUserQueueCirValue, hwhqosUpdateUseridProfileUserid=hwhqosUpdateUseridProfileUserid, hwhqosServiceIdentifyPolicyApplyName=hwhqosServiceIdentifyPolicyApplyName, hwhqosServiceIdentifyPolicyVlanTable=hwhqosServiceIdentifyPolicyVlanTable, 
hwhqosDomainSessionGroupExcludeTable=hwhqosDomainSessionGroupExcludeTable, hwhqosServiceIdentifyPolicyVlanGroup=hwhqosServiceIdentifyPolicyVlanGroup, hwhqosUserGroupQueueStatisticsGroupName=hwhqosUserGroupQueueStatisticsGroupName, hwhqosUserGroupQueueStatReset=hwhqosUserGroupQueueStatReset, hwhqosLinkAdjExcludeGroup=hwhqosLinkAdjExcludeGroup, hwhqosProfileUserQueueStatisticsPassBytesRate=hwhqosProfileUserQueueStatisticsPassBytesRate, hwhqosIfStatGroup=hwhqosIfStatGroup, hwhqosServiceIdentifyPolicyApplyTable=hwhqosServiceIdentifyPolicyApplyTable, hwhqosUseridStatPassPackets=hwhqosUseridStatPassPackets, hwhqosUserGroupQueueShapingRowStatus=hwhqosUserGroupQueueShapingRowStatus, hwhqosDomainUserMaxSessionNum=hwhqosDomainUserMaxSessionNum, hwhqosFlowQueueGroup=hwhqosFlowQueueGroup, hwhqosFlowQueueShaperValue=hwhqosFlowQueueShaperValue, hwhqosUserQueueClassifierStatisticsTable=hwhqosUserQueueClassifierStatisticsTable, hwhqosServiceIdentifyPolicyTable=hwhqosServiceIdentifyPolicyTable, hwhqosFlowQueueRowStatus=hwhqosFlowQueueRowStatus, hwhqosProfileName=hwhqosProfileName, hwhqosBandwidthCheck=hwhqosBandwidthCheck, hwhqosFlowWredGroup=hwhqosFlowWredGroup, hwhqosServiceIdentifyPolicyDscpTable=hwhqosServiceIdentifyPolicyDscpTable, hwhqosUserGroupQueueStatisticsPassBytes=hwhqosUserGroupQueueStatisticsPassBytes, hwVPNHQoSPassPackets=hwVPNHQoSPassPackets, hwhqosUserQueueClassifierStatisticsPassBytesRate=hwhqosUserQueueClassifierStatisticsPassBytesRate, hwhqosUserGroupQueueStatisticsTable=hwhqosUserGroupQueueStatisticsTable, hwhqosUserGroupQueueApplyName=hwhqosUserGroupQueueApplyName, hwhqosProfileInfoName=hwhqosProfileInfoName, hwhqosProfileApplyPevid=hwhqosProfileApplyPevid, hwhqosUserQueueStatPerDropPackets=hwhqosUserQueueStatPerDropPackets, hwhqosProfileUserApplyGroup=hwhqosProfileUserApplyGroup, hwhqosDomainUserPriorityTable=hwhqosDomainUserPriorityTable, hwhqosFlowQueueCfgTable=hwhqosFlowQueueCfgTable, hwhqosLinkAdjLocalValue=hwhqosLinkAdjLocalValue, 
hwhqosUserGroupQueueInterfaceStatisticsDropPacketsRate=hwhqosUserGroupQueueInterfaceStatisticsDropPacketsRate, hwhqosPortQueueStatDiscardAlarmTrapTable=hwhqosPortQueueStatDiscardAlarmTrapTable, hwhqosServiceIdentifyPolicyDscpRowStatus=hwhqosServiceIdentifyPolicyDscpRowStatus, hwhqosUserGroupQueueStatisticsDirection=hwhqosUserGroupQueueStatisticsDirection, hwhqosPortQueueStatDiscardAlarmTrapGroup=hwhqosPortQueueStatDiscardAlarmTrapGroup, hwhqosUserQueueClassifierStatisticsDropBytesRate=hwhqosUserQueueClassifierStatisticsDropBytesRate, hwhqosLinkAdjRemoteEnableSlot=hwhqosLinkAdjRemoteEnableSlot, hwhqosProfileDescription=hwhqosProfileDescription, hwhqosUserQueueStatisticsSlotNumber=hwhqosUserQueueStatisticsSlotNumber, hwhqosUserQueueClassifierStatisticsDropBytes=hwhqosUserQueueClassifierStatisticsDropBytes, hwhqosUseridStatDropPacketsRate=hwhqosUseridStatDropPacketsRate, hwhqosUpdateUseridProfileName=hwhqosUpdateUseridProfileName, hwhqosIfQueueStatDropBytes=hwhqosIfQueueStatDropBytes, hwhqosUserQueueStatLastResetTime=hwhqosUserQueueStatLastResetTime, hwhqosUserQueueClassifierStatisticsQueueIndex=hwhqosUserQueueClassifierStatisticsQueueIndex, hwhqosServiceIdentifyPolicyName=hwhqosServiceIdentifyPolicyName, hwhqosFlowMappingEntry=hwhqosFlowMappingEntry, hwhqosLinkAdjShapingModeRowStatus=hwhqosLinkAdjShapingModeRowStatus, hwhqosIfUserQueueStatisticsQueuePassBytes=hwhqosIfUserQueueStatisticsQueuePassBytes, hwhqosUserSlotId=hwhqosUserSlotId, hwhqosUserQueuePirValue=hwhqosUserQueuePirValue, hwhqosTMScheduleModeRowStatus=hwhqosTMScheduleModeRowStatus, hwhqosUserQueueStatisticsTable=hwhqosUserQueueStatisticsTable, hwhqosProfileUserQueueStatisticsInterfaceIndex=hwhqosProfileUserQueueStatisticsInterfaceIndex, hwhqosProfileUserQueueStatisticsDropPacketsRate=hwhqosProfileUserQueueStatisticsDropPacketsRate, hwhqosPortQueueWeightValue=hwhqosPortQueueWeightValue, hwhqosAtmPvcQueueDropBytes=hwhqosAtmPvcQueueDropBytes, hwhqosBehaviorCirValue=hwhqosBehaviorCirValue, 
hwhqosUserGroupQueueStatisticsPassPackets=hwhqosUserGroupQueueStatisticsPassPackets, hwhqosDomainUserPriorityDomainName=hwhqosDomainUserPriorityDomainName, hwhqosUserQueueStatisticsInterfaceIndex=hwhqosUserQueueStatisticsInterfaceIndex, hwhqosPortQueueCosValueCancel=hwhqosPortQueueCosValueCancel, hwhqosLinkAdjLocalEntry=hwhqosLinkAdjLocalEntry, hwhqosPortQueueWredName=hwhqosPortQueueWredName, hwhqosProfileUserQueueTable=hwhqosProfileUserQueueTable, hwhqosWredRedHighLimit=hwhqosWredRedHighLimit, hwhqosProfileApplyDomainTable=hwhqosProfileApplyDomainTable, hwhqosUserQueueInterfaceIndex=hwhqosUserQueueInterfaceIndex, hwhqosLinkAdjShapingModeDomainName=hwhqosLinkAdjShapingModeDomainName, hwhqosProfileWeightValue=hwhqosProfileWeightValue, hwhqosPortQueueCosValue=hwhqosPortQueueCosValue, hwhqosTunnelPassPackets=hwhqosTunnelPassPackets, hwhqosProfileTable=hwhqosProfileTable, hwhqosProfileInterfaceIndex=hwhqosProfileInterfaceIndex, hwhqosUseridStatTable=hwhqosUseridStatTable, hwhqosIfUserQueueStatisticsQueueDropBytes=hwhqosIfUserQueueStatisticsQueueDropBytes, hwhqosUserQueueStatDropPackets=hwhqosUserQueueStatDropPackets, hwhqosUserFrameId=hwhqosUserFrameId, hwhqosUserQueueClassifierStatisticsInterfaceIndex=hwhqosUserQueueClassifierStatisticsInterfaceIndex, hwhqosLinkAdjRemoteEnableTable=hwhqosLinkAdjRemoteEnableTable, hwhqosProfileUserQueueStatisticsPassPackets=hwhqosProfileUserQueueStatisticsPassPackets, hwhqosFlowWredName=hwhqosFlowWredName, hwhqosUserQueueApplyPirSucessAlarmTrap=hwhqosUserQueueApplyPirSucessAlarmTrap, hwhqosUserQueueGroupName=hwhqosUserQueueGroupName, hwhqosIfUserQueueAclType=hwhqosIfUserQueueAclType, hwhqosProfileUserQueueStatisticsPassPacketsRate=hwhqosProfileUserQueueStatisticsPassPacketsRate, hwhqosSuppressionType=hwhqosSuppressionType, hwhqosIfUserQueueStatisticsAclType=hwhqosIfUserQueueStatisticsAclType, hwhqosUserQueueStatTrapGroup=hwhqosUserQueueStatTrapGroup, hwhqosTMScheduleModeEntry=hwhqosTMScheduleModeEntry, 
hwhqosIfQueueStatEntry=hwhqosIfQueueStatEntry, hwhqosLinkAdjExcludeEntry=hwhqosLinkAdjExcludeEntry, hwhqosUserQueueStatisticsGroup=hwhqosUserQueueStatisticsGroup, hwhqosFlowMappingCfgGroup=hwhqosFlowMappingCfgGroup, hwhqosBehaviorUserQueueTable=hwhqosBehaviorUserQueueTable, hwhqosUserGroupQueueApplyEntry=hwhqosUserGroupQueueApplyEntry, hwhqosQueueForwardByteRate=hwhqosQueueForwardByteRate, hwhqosUserQueueShapeAllTrafficEntry=hwhqosUserQueueShapeAllTrafficEntry, hwhqosServiceIdentifyPolicyTypeGroup=hwhqosServiceIdentifyPolicyTypeGroup, hwhqosUserGroupQueueInterfaceStatisticsDropBytesRate=hwhqosUserGroupQueueInterfaceStatisticsDropBytesRate, hwhqosServiceTemplateGroup=hwhqosServiceTemplateGroup, hwhqosDomainRateLimitModeDirection=hwhqosDomainRateLimitModeDirection, hwhqosUserGroupQueueSlotNumber=hwhqosUserGroupQueueSlotNumber, hwhqosUserQueueApplyAlarmIfNetName=hwhqosUserQueueApplyAlarmIfNetName, hwhqosIfQueueStatGroup=hwhqosIfQueueStatGroup, hwhqosWredRowStatus=hwhqosWredRowStatus, hwhqosProfileApplyTable=hwhqosProfileApplyTable, hwhqosUserGroupQueueApplyTable=hwhqosUserGroupQueueApplyTable, hwhqosUserQueueClassifierStatisticsSlotNumber=hwhqosUserQueueClassifierStatisticsSlotNumber, hwhqosObjectsGroup=hwhqosObjectsGroup, hwhqosUseridStatUserid=hwhqosUseridStatUserid, hwhqosProfileUserQueueStatisticsDropBytesRate=hwhqosProfileUserQueueStatisticsDropBytesRate, hwhqosLinkAdjRemoteEnable=hwhqosLinkAdjRemoteEnable, hwhqosProfileRowStatus=hwhqosProfileRowStatus, hwhqosDomainRateLimitModeEntry=hwhqosDomainRateLimitModeEntry, hwhqosUserQueueClassifierStatisticsPassBytes=hwhqosUserQueueClassifierStatisticsPassBytes, hwhqosIfUserQueueTable=hwhqosIfUserQueueTable, hwhqosProfileCarStatisticsReset=hwhqosProfileCarStatisticsReset, hwhqosPortQueueDiscardValueTrap=hwhqosPortQueueDiscardValueTrap, hwhqosUserQueueShapeAllTrafficInterfaceIndex=hwhqosUserQueueShapeAllTrafficInterfaceIndex, hwVPNHQoSPassBytes=hwVPNHQoSPassBytes, 
hwhqosLinkAdjExcludeSlotNumber=hwhqosLinkAdjExcludeSlotNumber, hwhqosUserGroupQueueInterfaceStatisticsPassPackets=hwhqosUserGroupQueueInterfaceStatisticsPassPackets, hwhqosServiceIdentifyPolicyOption60RowStatus=hwhqosServiceIdentifyPolicyOption60RowStatus, hwhqosServiceTemplateApplyTable=hwhqosServiceTemplateApplyTable, hwhqosFlowQueueShaperCS7=hwhqosFlowQueueShaperCS7, hwhqosFlowQueueCfgPbsValue=hwhqosFlowQueueCfgPbsValue, hwhqosAtmPvcIfIndex=hwhqosAtmPvcIfIndex, hwhqosProfileUserQueueDirection=hwhqosProfileUserQueueDirection, hwhqosUserQueueFlowMappingName=hwhqosUserQueueFlowMappingName, hwhqosUserQueueStatisticsReset=hwhqosUserQueueStatisticsReset, hwhqosUserQueueClassifierStatisticsPassPackets=hwhqosUserQueueClassifierStatisticsPassPackets, hwhqosUserBandwidthTable=hwhqosUserBandwidthTable, hwhqosUserQueueStatType=hwhqosUserQueueStatType, hwhqosProfileCarStatisticsCevid=hwhqosProfileCarStatisticsCevid, hwhqosLinkAdjShapingMode=hwhqosLinkAdjShapingMode, hwhqosTunnelStatGroup=hwhqosTunnelStatGroup, hwhqosDomainSessionGroupExcludeDomainName=hwhqosDomainSessionGroupExcludeDomainName, hwhqosUserGroupQueueStatEntry=hwhqosUserGroupQueueStatEntry, hwhqosUserGroupQueueApplyDirection=hwhqosUserGroupQueueApplyDirection, hwHQOS=hwHQOS, hwhqosQueueDropBytes=hwhqosQueueDropBytes, hwhqosIfUserQueueStatisticsReset=hwhqosIfUserQueueStatisticsReset, hwhqosUserGroupQueueInterfaceStatisticsDropBytes=hwhqosUserGroupQueueInterfaceStatisticsDropBytes, hwhqosBehaviorGroupName=hwhqosBehaviorGroupName, hwhqosUserGroupQueueDropBytes=hwhqosUserGroupQueueDropBytes, hwhqosUserGroupQueueApplyCeVid=hwhqosUserGroupQueueApplyCeVid, hwhqosSuppressionCirValue=hwhqosSuppressionCirValue, hwhqosProfileCarStatisticsEntry=hwhqosProfileCarStatisticsEntry, hwhqosServiceIdentifyPolicyDscpDomainName=hwhqosServiceIdentifyPolicyDscpDomainName, hwhqosAtmPvcStatTable=hwhqosAtmPvcStatTable, hwhqosUserGroupQueueApplyPeVid=hwhqosUserGroupQueueApplyPeVid, 
hwhqosUserQueueStatisticsPassBytes=hwhqosUserQueueStatisticsPassBytes, hwhqosUseridStatReset=hwhqosUseridStatReset, hwhqosProfileCarStatisticsInterfaceIndex=hwhqosProfileCarStatisticsInterfaceIndex, hwhqosProfileSuppressionEntry=hwhqosProfileSuppressionEntry, hwhqosBandwidthGroup=hwhqosBandwidthGroup, hwhqosLinkAdjRemoteDomainName=hwhqosLinkAdjRemoteDomainName, hwhqosUseridStatEntry=hwhqosUseridStatEntry, hwhqosWredQueueDepthRowStatus=hwhqosWredQueueDepthRowStatus, hwhqosFlowQueueShaperBE=hwhqosFlowQueueShaperBE, hwhqosProfileUserQueueStatisticsConfiguredCir=hwhqosProfileUserQueueStatisticsConfiguredCir, hwhqosUpdateUseridProfileGroup=hwhqosUpdateUseridProfileGroup, hwhqosApplyServiceTemplateName=hwhqosApplyServiceTemplateName, hwhqosUserGroupQueueInterfaceStatisticsPevid=hwhqosUserGroupQueueInterfaceStatisticsPevid, hwhqosUserQueueClassifierStatisticsClassifierName=hwhqosUserQueueClassifierStatisticsClassifierName, hwhqosUserQueueStatisticsDropPacketsRate=hwhqosUserQueueStatisticsDropPacketsRate, hwhqosUserLayer1=hwhqosUserLayer1, hwhqosDomainRateLimitModeRowStatus=hwhqosDomainRateLimitModeRowStatus, hwhqosProfileUserQueueStatisticsConfiguredPir=hwhqosProfileUserQueueStatisticsConfiguredPir, hwhqosIfQueueStatIfIndex=hwhqosIfQueueStatIfIndex, hwhqosGroupName=hwhqosGroupName, hwhqosUserGroupQueueStatisticsQueueIndex=hwhqosUserGroupQueueStatisticsQueueIndex, hwhqosPortQueueStatDiscardAlarmCancelTable=hwhqosPortQueueStatDiscardAlarmCancelTable, hwhqosPortQueuePbsValue=hwhqosPortQueuePbsValue, hwhqosProfileApplyName=hwhqosProfileApplyName, hwhqosProfileCarStatisticsDirection=hwhqosProfileCarStatisticsDirection, hwhqosProfileCarStatisticsDropPacketsRate=hwhqosProfileCarStatisticsDropPacketsRate, hwhqosFatherUserGroupQueueApplyInterfaceIndex=hwhqosFatherUserGroupQueueApplyInterfaceIndex, hwhqosLinkAdjRemoteEnableDomainName=hwhqosLinkAdjRemoteEnableDomainName, hwhqosTraps=hwhqosTraps, hwhqosProfileUserQueueStatisticsCevid=hwhqosProfileUserQueueStatisticsCevid, 
hwhqosBandwidthCheckDirection=hwhqosBandwidthCheckDirection, hwhqosFlowQueueShaperTable=hwhqosFlowQueueShaperTable, hwhqosUserGroupQueueApplyRowStatus=hwhqosUserGroupQueueApplyRowStatus, hwhqosUserQueueStatForwardPackets=hwhqosUserQueueStatForwardPackets, hwhqosDomainUserPriorityEntry=hwhqosDomainUserPriorityEntry)
mibBuilder.exportSymbols("HUAWEI-HQOS-MIB", hwhqosAtmPvcQueueDropPackets=hwhqosAtmPvcQueueDropPackets, hwhqosProfileWeightEntry=hwhqosProfileWeightEntry, hwhqosServiceIdentifyPolicy8021pGroup=hwhqosServiceIdentifyPolicy8021pGroup, hwhqosUseridStatDropBytes=hwhqosUseridStatDropBytes, hwhqosBehaviorServiceTemplateName=hwhqosBehaviorServiceTemplateName, hwhqosPortQueueGroup=hwhqosPortQueueGroup, hwhqosDomainSessionGroupExcludeMode=hwhqosDomainSessionGroupExcludeMode, hwhqosUserQueueEntry=hwhqosUserQueueEntry, hwhqosWredYellowLowLimit=hwhqosWredYellowLowLimit, hwhqosServiceIdentifyPolicy8021pCosID=hwhqosServiceIdentifyPolicy8021pCosID, hwhqosFatherUserGroupQueueApplyTable=hwhqosFatherUserGroupQueueApplyTable, hwhqosBandwidthCheckRowStatus=hwhqosBandwidthCheckRowStatus, hwhqosWredGreenHighLimit=hwhqosWredGreenHighLimit, hwhqosPortQueueStatDiscardAlarmTrap=hwhqosPortQueueStatDiscardAlarmTrap, hwhqosUseridStatGroup=hwhqosUseridStatGroup, hwhqosServiceIdentifyPolicyOption60=hwhqosServiceIdentifyPolicyOption60, hwhqosUseridStatPassBytesRate=hwhqosUseridStatPassBytesRate, hwhqosUserQueueStatDiscardAlarmTrap=hwhqosUserQueueStatDiscardAlarmTrap, hwhqosWredQueueDepthWredName=hwhqosWredQueueDepthWredName, hwhqosUserGroupQueueInterfaceStatisticsReset=hwhqosUserGroupQueueInterfaceStatisticsReset, hwhqosProfileApplyDomainProfileName=hwhqosProfileApplyDomainProfileName, hwhqosFlowWredEntry=hwhqosFlowWredEntry, hwhqosWredName=hwhqosWredName, hwhqosUserGroupQueueInterfaceStatisticsVlanid=hwhqosUserGroupQueueInterfaceStatisticsVlanid, hwhqosFlowWredRowStatus=hwhqosFlowWredRowStatus, hwhqosUserGroupQueueForwardBytes=hwhqosUserGroupQueueForwardBytes, hwhqosUserQueueShapeAllTrafficRowStatus=hwhqosUserQueueShapeAllTrafficRowStatus, hwhqosUseridStatPassBytes=hwhqosUseridStatPassBytes, hwhqosIfQueueStatDirection=hwhqosIfQueueStatDirection, hwhqosServiceIdentifyPolicyGroup=hwhqosServiceIdentifyPolicyGroup, hwhqosDomainUserPriorityRowStatus=hwhqosDomainUserPriorityRowStatus, 
hwhqosTunnelPassBytes=hwhqosTunnelPassBytes, hwhqosIfUserQueuePir=hwhqosIfUserQueuePir, hwhqosVpnQoSTunnelStatGroup=hwhqosVpnQoSTunnelStatGroup, hwhqosUserQueueClassifierStatisticsGroup=hwhqosUserQueueClassifierStatisticsGroup, hwhqosQueueForwardPackets=hwhqosQueueForwardPackets, hwhqosTunnelVPNType=hwhqosTunnelVPNType, hwhqosDomainUserMaxSessionDomainName=hwhqosDomainUserMaxSessionDomainName, hwhqosPortQueueStatDiscardAlarmCancelTrap=hwhqosPortQueueStatDiscardAlarmCancelTrap, hwhqosProfileCarPbsValue=hwhqosProfileCarPbsValue, hwhqosFlowQueueShaperAF1=hwhqosFlowQueueShaperAF1, hwhqosUpdateUseridProfileDirection=hwhqosUpdateUseridProfileDirection, hwhqosDomainSessionGroupExcludeEntry=hwhqosDomainSessionGroupExcludeEntry, hwhqosLinkAdjRemoteEnableType=hwhqosLinkAdjRemoteEnableType, hwhqosProfileCarStatisticsPassPacketsRate=hwhqosProfileCarStatisticsPassPacketsRate, hwhqosUserQueueStatisticsEntry=hwhqosUserQueueStatisticsEntry, hwhqosPortQueueTable=hwhqosPortQueueTable, hwVPNHQoSTunnelStatisticsEntry=hwVPNHQoSTunnelStatisticsEntry, hwhqosServiceIdentifyPolicyOption60Group=hwhqosServiceIdentifyPolicyOption60Group, hwhqosIfIndex=hwhqosIfIndex, hwhqosFlowQueueCfgShapingValue=hwhqosFlowQueueCfgShapingValue, hwhqosUserBandwidthCommittedCir=hwhqosUserBandwidthCommittedCir, hwhqosServiceIdentifyPolicyRowStatus=hwhqosServiceIdentifyPolicyRowStatus, hwhqosProfileCarStatisticsType=hwhqosProfileCarStatisticsType, hwhqosUserQueueStatTable=hwhqosUserQueueStatTable, hwhqosProfileUserQueueStatisticsPevid=hwhqosProfileUserQueueStatisticsPevid, hwhqosLinkAdjShapingModeEntry=hwhqosLinkAdjShapingModeEntry, hwhqosPortQueueDiscardTypeCancel=hwhqosPortQueueDiscardTypeCancel, hwhqosIfStatTable=hwhqosIfStatTable, hwhqosLinkAdjExcludeTable=hwhqosLinkAdjExcludeTable, hwhqosProfileSuppressionGroup=hwhqosProfileSuppressionGroup, hwhqosDomainSessionGroupExcludeGroup=hwhqosDomainSessionGroupExcludeGroup, hwhqosUserGroupQueueStatisticsEntry=hwhqosUserGroupQueueStatisticsEntry, 
hwhqosTunnelIfIndex=hwhqosTunnelIfIndex, hwhqosFlowQueueShaperGroup=hwhqosFlowQueueShaperGroup, hwhqosFlowQueueCfgWredName=hwhqosFlowQueueCfgWredName, hwhqosFlowQueueCfgWeightValue=hwhqosFlowQueueCfgWeightValue, hwhqosDomainUserMaxSessionEntry=hwhqosDomainUserMaxSessionEntry, hwhqosLinkAdjShapingModeTable=hwhqosLinkAdjShapingModeTable, hwhqosIfUserQueueRowStatus=hwhqosIfUserQueueRowStatus, hwhqosServiceIdentifyPolicyDscpGroup=hwhqosServiceIdentifyPolicyDscpGroup, hwhqosUserGroupQueueInterfaceStatisticsCevid=hwhqosUserGroupQueueInterfaceStatisticsCevid, hwhqosConformance=hwhqosConformance, hwhqosUserGroupQueueInterfaceStatisticsGroup=hwhqosUserGroupQueueInterfaceStatisticsGroup, hwhqosWredRedDiscardPercent=hwhqosWredRedDiscardPercent, CosType=CosType, hwhqosUserQueueStatNameString=hwhqosUserQueueStatNameString, hwhqosWredQueueDepthValue=hwhqosWredQueueDepthValue, hwhqosUserGroupQueueShapingGroup=hwhqosUserGroupQueueShapingGroup, hwhqosProfileApplyUserVlanDirection=hwhqosProfileApplyUserVlanDirection, hwhqosIfStatEntry=hwhqosIfStatEntry, hwhqosWredYellowDiscardPercent=hwhqosWredYellowDiscardPercent, hwhqosBehaviorFlowMappingName=hwhqosBehaviorFlowMappingName, hwhqosServiceIdentifyPolicy8021pTable=hwhqosServiceIdentifyPolicy8021pTable, hwhqosIfQueueStatForwardBytes=hwhqosIfQueueStatForwardBytes, hwhqosWredGreenLowLimit=hwhqosWredGreenLowLimit, hwhqosDomainUserPriorityDirection=hwhqosDomainUserPriorityDirection, hwhqosUserGroupQueueStatisticsDropBytes=hwhqosUserGroupQueueStatisticsDropBytes, hwhqosTunnelStatisticsTable=hwhqosTunnelStatisticsTable, hwhqosUserGroupQueueInterfaceStatisticsIfIndex=hwhqosUserGroupQueueInterfaceStatisticsIfIndex, hwhqosProfileCarStatisticsPassPackets=hwhqosProfileCarStatisticsPassPackets, hwhqosProfileInfoRowStatus=hwhqosProfileInfoRowStatus, hwhqosQueueRemarkPackets=hwhqosQueueRemarkPackets, hwhqosUserQueueApplyPirFailAlarmTrap=hwhqosUserQueueApplyPirFailAlarmTrap, hwhqosLinkAdjLocalSlotNumber=hwhqosLinkAdjLocalSlotNumber, 
hwhqosProfileCarStatisticsDropPackets=hwhqosProfileCarStatisticsDropPackets, hwhqosUserGroupQueueApplyGroup=hwhqosUserGroupQueueApplyGroup, hwhqosPortQueueIfIndex=hwhqosPortQueueIfIndex, hwhqosUserBandwidthEntry=hwhqosUserBandwidthEntry, hwhqosAtmPvcQueueForwardBytes=hwhqosAtmPvcQueueForwardBytes, hwhqosUserGroupQueueStatLastResetTime=hwhqosUserGroupQueueStatLastResetTime, hwhqosServiceIdentifyPolicyVlanID=hwhqosServiceIdentifyPolicyVlanID, hwhqosProfileApplyUserVlanGroup=hwhqosProfileApplyUserVlanGroup, hwhqosProfileEntry=hwhqosProfileEntry, hwhqosFlowQueueShaperAF2=hwhqosFlowQueueShaperAF2, hwhqosSuppressionRowStatus=hwhqosSuppressionRowStatus, hwhqosIfQueueStatQueueIndex=hwhqosIfQueueStatQueueIndex, hwhqosUserQueueClassifierStatisticsDropPackets=hwhqosUserQueueClassifierStatisticsDropPackets, hwhqosBandwidthValue=hwhqosBandwidthValue, hwhqosUserGroupQueueApplyInterfaceIndex=hwhqosUserGroupQueueApplyInterfaceIndex, hwhqosFlowWredTable=hwhqosFlowWredTable, hwhqosUserGroupQueueInterfaceStatisticsPassBytesRate=hwhqosUserGroupQueueInterfaceStatisticsPassBytesRate, hwhqosProfileCarStatisticsPevid=hwhqosProfileCarStatisticsPevid, hwhqosProfileUserQueueStatisticsDropPackets=hwhqosProfileUserQueueStatisticsDropPackets, hwhqosDomainRateLimitModeTable=hwhqosDomainRateLimitModeTable, hwhqosUserGroupQueueTemplateEntry=hwhqosUserGroupQueueTemplateEntry, hwhqosServiceIdentifyPolicy8021pDomainName=hwhqosServiceIdentifyPolicy8021pDomainName, hwhqosServiceIdentifyPolicyVlanRowStatus=hwhqosServiceIdentifyPolicyVlanRowStatus, hwhqosUserGroupQueueStatGroup=hwhqosUserGroupQueueStatGroup, hwhqosUserGroupQueueStatisticsReset=hwhqosUserGroupQueueStatisticsReset, hwhqosFatherUserGroupQueueApplyDirection=hwhqosFatherUserGroupQueueApplyDirection, hwhqosUserGroupQueueStatGroupName=hwhqosUserGroupQueueStatGroupName, hwhqosProfileCarStatisticsDropBytesRate=hwhqosProfileCarStatisticsDropBytesRate, hwhqosProfileUserQueueStatisticsDropBytes=hwhqosProfileUserQueueStatisticsDropBytes, 
hwhqosWredYellowHighLimit=hwhqosWredYellowHighLimit, hwhqosSuppressionDirection=hwhqosSuppressionDirection, hwhqosUserGroupQueueShapingTable=hwhqosUserGroupQueueShapingTable, hwhqosFlowQueueShaperEF=hwhqosFlowQueueShaperEF, hwhqosUserGroupQueueInterfaceStatisticsPassBytes=hwhqosUserGroupQueueInterfaceStatisticsPassBytes, hwhqosFlowQueueShaperEntry=hwhqosFlowQueueShaperEntry, hwhqosServiceIdentifyPolicyTypeRowStatus=hwhqosServiceIdentifyPolicyTypeRowStatus, hwhqosUseridStatDropBytesRate=hwhqosUseridStatDropBytesRate, hwhqosProfileInfoGroup=hwhqosProfileInfoGroup, hwhqosFlowMappingCfgTable=hwhqosFlowMappingCfgTable, hwhqosServiceIdentifyPolicyTypeValue=hwhqosServiceIdentifyPolicyTypeValue, hwhqosPortQueueArithmetic=hwhqosPortQueueArithmetic, hwhqosIfUserQueueStatisticsAclID2=hwhqosIfUserQueueStatisticsAclID2, hwhqosDomainUserMaxSessionGroup=hwhqosDomainUserMaxSessionGroup, hwhqosUseridStatPassPacketsRate=hwhqosUseridStatPassPacketsRate, hwhqosPortQueueInterfaceTrap=hwhqosPortQueueInterfaceTrap, hwhqosUserGroupQueueStatisticsDropPackets=hwhqosUserGroupQueueStatisticsDropPackets, hwhqosUseridStatDirection=hwhqosUseridStatDirection, hwhqosUserQueueStatisticsDropBytesRate=hwhqosUserQueueStatisticsDropBytesRate, hwhqosProfileWeightGroup=hwhqosProfileWeightGroup, hwhqosUserQueueStatisticsDropPackets=hwhqosUserQueueStatisticsDropPackets, hwhqosServiceIdentifyPolicyApplyEntry=hwhqosServiceIdentifyPolicyApplyEntry)
| 151.482152 | 16,021 | 0.805443 |
ace364eaac963b5da4ebe443782522ff6da9ff77 | 2,834 | py | Python | model_center/dataset/cpm1dataset/down_data.py | zh-zheng/ModelCenter | 2b7fc148c48e41a2a024b32b8bd1f989d8b1f654 | [
"Apache-2.0"
] | 4 | 2022-03-24T04:26:01.000Z | 2022-03-30T13:08:08.000Z | model_center/dataset/cpm1dataset/down_data.py | zh-zheng/ModelCenter | 2b7fc148c48e41a2a024b32b8bd1f989d8b1f654 | [
"Apache-2.0"
] | null | null | null | model_center/dataset/cpm1dataset/down_data.py | zh-zheng/ModelCenter | 2b7fc148c48e41a2a024b32b8bd1f989d8b1f654 | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# Copyright 2020 The OpenBMB team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import torch
import csv
import numpy as np
class LCQMC_Dataset(torch.utils.data.Dataset):
    """LCQMC sentence-pair matching dataset rendered as CPM-1 prompts.

    Each TSV row ``(text_a, text_b, label)`` is turned into the prompt
    ``"{text_a}"与"{text_b}"的关系是:<span>。`` where a single blank span
    (token id 0) is left for the model to fill; the verbalizer token ids
    returned by :meth:`get_verbalizer` map the filled span back to the
    binary label.

    Args:
        path: Root data directory; the file ``{path}/LCQMC/{split}.tsv``
            is read.
        split: Split name, e.g. ``"train"`` / ``"dev"`` / ``"test"``.
        rank: Distributed rank (currently unused; kept for interface
            parity with the other dataset classes).
        world_size: Distributed world size (currently unused).
        tokenizer: Tokenizer exposing an ``encode(str) -> list[int]``.
        max_length: Fixed, padded sequence length of every sample.
    """

    def __init__(self, path, split, rank, world_size, tokenizer, max_length) -> None:
        self.data = []
        path = f"{path}/LCQMC/{split}.tsv"
        with open(path, encoding='utf8') as fin:
            # Drop the TSV header row before iterating.
            rows = list(csv.reader(fin, delimiter='\t'))[1:]
            for text_a, text_b, label in rows:
                # Token id 1 is prepended as the begin-of-sequence marker.
                lef_tokens = [1] + tokenizer.encode(f'"{text_a}"与"{text_b}"的关系是:')
                rig_tokens = tokenizer.encode("。")
                input_tokens, input_length, context, input_span = self.make_input(
                    lef_tokens, rig_tokens, 1, max_length)
                # One-hot marker at the position immediately before the blank
                # span, where the model's prediction for the span is gathered.
                index = torch.zeros((max_length,), dtype=torch.int32)
                index[len(lef_tokens) - 1] = 1
                target = torch.tensor(int(label), dtype=torch.long)
                # NOTE(review): every tensor is moved to the default CUDA
                # device at construction time, so this dataset requires a GPU.
                self.data.append({
                    "input_tokens": input_tokens.cuda(),
                    "input_length": input_length.cuda(),
                    "input_context": context.cuda(),
                    "input_span": input_span.cuda(),
                    "targets": target.cuda(),
                    "index": index.cuda(),
                })

    def make_input(self, lef_tokens, rig_tokens, spans, max_length):
        """Assemble one padded sample around a blank span.

        The raw sequence is ``lef_tokens + [0] * spans + rig_tokens``,
        zero-padded on the right to ``max_length``.

        Args:
            lef_tokens: Token ids placed before the blank span.
            rig_tokens: Token ids placed after the blank span.
            spans: Number of blank (id 0) positions to insert.
            max_length: Fixed output length.

        Returns:
            Tuple ``(input_tokens, input_length, context, input_span)`` where
            ``context`` is a bool mask that is False only on the span
            positions and ``input_span`` is an all-zero int32 tensor.

        Raises:
            ValueError: If the assembled sequence does not fit in
                ``max_length``.
        """
        input = lef_tokens + [0] * spans + rig_tokens
        length = len(input)
        # ``assert`` is stripped under ``python -O``; validate explicitly.
        if length >= max_length:
            raise ValueError(
                f"sequence length {length} exceeds max_length {max_length}")
        input_tokens = torch.zeros((max_length,), dtype=torch.int32)
        input_tokens[:length] = torch.tensor(input).int()
        input_length = torch.tensor(length, dtype=torch.int32)
        context = np.arange(max_length)
        context = (context < len(lef_tokens)) | (context >= len(lef_tokens) + spans)
        context = torch.from_numpy(context).bool()
        input_span = torch.zeros((max_length,), dtype=torch.int32)
        return input_tokens, input_length, context, input_span

    def __len__(self):
        """Return the number of preprocessed samples."""
        return len(self.data)

    def __getitem__(self, idx):
        """Return the preprocessed sample dict at ``idx``."""
        return self.data[idx]

    @classmethod
    def get_verbalizer(cls, tokenizer):
        """Return the label verbalizer token ids: [有关, 无关]."""
        return [15682, 16357]  # TODO: derive via tokenizer.encode(...)
ace36628465d4efb2dff7659ec060d10ddd79ea4 | 91,134 | py | Python | lib/sqlalchemy/dialects/postgresql/base.py | EvaSDK/sqlalchemy | 0a60865d527331a4df9db0fc8a15038108075bca | [
"MIT"
] | null | null | null | lib/sqlalchemy/dialects/postgresql/base.py | EvaSDK/sqlalchemy | 0a60865d527331a4df9db0fc8a15038108075bca | [
"MIT"
] | null | null | null | lib/sqlalchemy/dialects/postgresql/base.py | EvaSDK/sqlalchemy | 0a60865d527331a4df9db0fc8a15038108075bca | [
"MIT"
] | null | null | null | # postgresql/base.py
# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""
.. dialect:: postgresql
:name: PostgreSQL
Sequences/SERIAL
----------------
PostgreSQL supports sequences, and SQLAlchemy uses these as the default means
of creating new primary key values for integer-based primary key columns. When
creating tables, SQLAlchemy will issue the ``SERIAL`` datatype for
integer-based primary key columns, which generates a sequence and server side
default corresponding to the column.
To specify a specific named sequence to be used for primary key generation,
use the :func:`~sqlalchemy.schema.Sequence` construct::
Table('sometable', metadata,
Column('id', Integer, Sequence('some_id_seq'), primary_key=True)
)
When SQLAlchemy issues a single INSERT statement, to fulfill the contract of
having the "last insert identifier" available, a RETURNING clause is added to
the INSERT statement which specifies the primary key columns should be
returned after the statement completes. The RETURNING functionality only takes
place if Postgresql 8.2 or later is in use. As a fallback approach, the
sequence, whether specified explicitly or implicitly via ``SERIAL``, is
executed independently beforehand, the returned value to be used in the
subsequent insert. Note that when an
:func:`~sqlalchemy.sql.expression.insert()` construct is executed using
"executemany" semantics, the "last inserted identifier" functionality does not
apply; no RETURNING clause is emitted nor is the sequence pre-executed in this
case.
To disable the usage of RETURNING by default, specify the flag
``implicit_returning=False`` to :func:`.create_engine`.
.. _postgresql_isolation_level:
Transaction Isolation Level
---------------------------
All Postgresql dialects support setting of transaction isolation level
both via a dialect-specific parameter :paramref:`.create_engine.isolation_level`
accepted by :func:`.create_engine`,
as well as the ``isolation_level`` argument as passed to
:meth:`.Connection.execution_options`. When using a non-psycopg2 dialect,
this feature works by issuing the command
``SET SESSION CHARACTERISTICS AS TRANSACTION ISOLATION LEVEL <level>`` for
each new connection.
To set isolation level using :func:`.create_engine`::
engine = create_engine(
"postgresql+pg8000://scott:tiger@localhost/test",
isolation_level="READ UNCOMMITTED"
)
To set using per-connection execution options::
connection = engine.connect()
connection = connection.execution_options(
isolation_level="READ COMMITTED"
)
Valid values for ``isolation_level`` include:
* ``READ COMMITTED``
* ``READ UNCOMMITTED``
* ``REPEATABLE READ``
* ``SERIALIZABLE``
The :mod:`~sqlalchemy.dialects.postgresql.psycopg2` and
:mod:`~sqlalchemy.dialects.postgresql.pg8000` dialects also offer the
special level ``AUTOCOMMIT``.
.. seealso::
:ref:`psycopg2_isolation_level`
:ref:`pg8000_isolation_level`
.. _postgresql_schema_reflection:
Remote-Schema Table Introspection and Postgresql search_path
------------------------------------------------------------
The Postgresql dialect can reflect tables from any schema. The
:paramref:`.Table.schema` argument, or alternatively the
:paramref:`.MetaData.reflect.schema` argument determines which schema will
be searched for the table or tables. The reflected :class:`.Table` objects
will in all cases retain this ``.schema`` attribute as was specified.
However, with regards to tables which these :class:`.Table` objects refer to
via foreign key constraint, a decision must be made as to how the ``.schema``
is represented in those remote tables, in the case where that remote
schema name is also a member of the current
`Postgresql search path
<http://www.postgresql.org/docs/9.0/static/ddl-schemas.html#DDL-SCHEMAS-PATH>`_.
By default, the Postgresql dialect mimics the behavior encouraged by
Postgresql's own ``pg_get_constraintdef()`` builtin procedure. This function
returns a sample definition for a particular foreign key constraint,
omitting the referenced schema name from that definition when the name is
also in the Postgresql schema search path. The interaction below
illustrates this behavior::
test=> CREATE TABLE test_schema.referred(id INTEGER PRIMARY KEY);
CREATE TABLE
test=> CREATE TABLE referring(
test(> id INTEGER PRIMARY KEY,
test(> referred_id INTEGER REFERENCES test_schema.referred(id));
CREATE TABLE
test=> SET search_path TO public, test_schema;
test=> SELECT pg_catalog.pg_get_constraintdef(r.oid, true) FROM
test-> pg_catalog.pg_class c JOIN pg_catalog.pg_namespace n
test-> ON n.oid = c.relnamespace
test-> JOIN pg_catalog.pg_constraint r ON c.oid = r.conrelid
test-> WHERE c.relname='referring' AND r.contype = 'f'
test-> ;
pg_get_constraintdef
---------------------------------------------------
FOREIGN KEY (referred_id) REFERENCES referred(id)
(1 row)
Above, we created a table ``referred`` as a member of the remote schema
``test_schema``, however when we added ``test_schema`` to the
PG ``search_path`` and then asked ``pg_get_constraintdef()`` for the
``FOREIGN KEY`` syntax, ``test_schema`` was not included in the output of
the function.
On the other hand, if we set the search path back to the typical default
of ``public``::
test=> SET search_path TO public;
SET
The same query against ``pg_get_constraintdef()`` now returns the fully
schema-qualified name for us::
test=> SELECT pg_catalog.pg_get_constraintdef(r.oid, true) FROM
test-> pg_catalog.pg_class c JOIN pg_catalog.pg_namespace n
test-> ON n.oid = c.relnamespace
test-> JOIN pg_catalog.pg_constraint r ON c.oid = r.conrelid
test-> WHERE c.relname='referring' AND r.contype = 'f';
pg_get_constraintdef
---------------------------------------------------------------
FOREIGN KEY (referred_id) REFERENCES test_schema.referred(id)
(1 row)
SQLAlchemy will by default use the return value of ``pg_get_constraintdef()``
in order to determine the remote schema name. That is, if our ``search_path``
were set to include ``test_schema``, and we invoked a table
reflection process as follows::
>>> from sqlalchemy import Table, MetaData, create_engine
>>> engine = create_engine("postgresql://scott:tiger@localhost/test")
>>> with engine.connect() as conn:
... conn.execute("SET search_path TO test_schema, public")
... meta = MetaData()
... referring = Table('referring', meta,
... autoload=True, autoload_with=conn)
...
<sqlalchemy.engine.result.ResultProxy object at 0x101612ed0>
The above process would deliver to the :attr:`.MetaData.tables` collection
the ``referred`` table, named **without** the schema::
>>> meta.tables['referred'].schema is None
True
To alter the behavior of reflection such that the referred schema is
maintained regardless of the ``search_path`` setting, use the
``postgresql_ignore_search_path`` option, which can be specified as a
dialect-specific argument to both :class:`.Table` as well as
:meth:`.MetaData.reflect`::
>>> with engine.connect() as conn:
... conn.execute("SET search_path TO test_schema, public")
... meta = MetaData()
... referring = Table('referring', meta, autoload=True,
... autoload_with=conn,
... postgresql_ignore_search_path=True)
...
<sqlalchemy.engine.result.ResultProxy object at 0x1016126d0>
We will now have ``test_schema.referred`` stored as schema-qualified::
>>> meta.tables['test_schema.referred'].schema
'test_schema'
.. sidebar:: Best Practices for Postgresql Schema reflection
The description of Postgresql schema reflection behavior is complex, and
is the product of many years of dealing with widely varied use cases and
user preferences. But in fact, there's no need to understand any of it if
you just stick to the simplest use pattern: leave the ``search_path`` set
to its default of ``public`` only, never refer to the name ``public`` as
an explicit schema name otherwise, and refer to all other schema names
explicitly when building up a :class:`.Table` object. The options
described here are only for those users who can't, or prefer not to, stay
within these guidelines.
Note that **in all cases**, the "default" schema is always reflected as
``None``. The "default" schema on Postgresql is that which is returned by the
Postgresql ``current_schema()`` function. On a typical Postgresql
installation, this is the name ``public``. So a table that refers to another
which is in the ``public`` (i.e. default) schema will always have the
``.schema`` attribute set to ``None``.
.. versionadded:: 0.9.2 Added the ``postgresql_ignore_search_path``
dialect-level option accepted by :class:`.Table` and
:meth:`.MetaData.reflect`.
.. seealso::
`The Schema Search Path
<http://www.postgresql.org/docs/9.0/static/ddl-schemas.html#DDL-SCHEMAS-PATH>`_
- on the Postgresql website.
INSERT/UPDATE...RETURNING
-------------------------
The dialect supports PG 8.2's ``INSERT..RETURNING``, ``UPDATE..RETURNING`` and
``DELETE..RETURNING`` syntaxes. ``INSERT..RETURNING`` is used by default
for single-row INSERT statements in order to fetch newly generated
primary key identifiers. To specify an explicit ``RETURNING`` clause,
use the :meth:`._UpdateBase.returning` method on a per-statement basis::
# INSERT..RETURNING
result = table.insert().returning(table.c.col1, table.c.col2).\\
values(name='foo')
print result.fetchall()
# UPDATE..RETURNING
result = table.update().returning(table.c.col1, table.c.col2).\\
where(table.c.name=='foo').values(name='bar')
print result.fetchall()
# DELETE..RETURNING
result = table.delete().returning(table.c.col1, table.c.col2).\\
where(table.c.name=='foo')
print result.fetchall()
.. _postgresql_match:
Full Text Search
----------------
SQLAlchemy makes available the Postgresql ``@@`` operator via the
:meth:`.ColumnElement.match` method on any textual column expression.
On a Postgresql dialect, an expression like the following::
select([sometable.c.text.match("search string")])
will emit to the database::
SELECT text @@ to_tsquery('search string') FROM table
The Postgresql text search functions such as ``to_tsquery()``
and ``to_tsvector()`` are available
explicitly using the standard :data:`.func` construct. For example::
select([
func.to_tsvector('fat cats ate rats').match('cat & rat')
])
Emits the equivalent of::
SELECT to_tsvector('fat cats ate rats') @@ to_tsquery('cat & rat')
The :class:`.postgresql.TSVECTOR` type can provide for explicit CAST::
from sqlalchemy.dialects.postgresql import TSVECTOR
from sqlalchemy import select, cast
select([cast("some text", TSVECTOR)])
produces a statement equivalent to::
SELECT CAST('some text' AS TSVECTOR) AS anon_1
Full Text Searches in Postgresql are influenced by a combination of: the
PostgreSQL setting of ``default_text_search_config``, the ``regconfig`` used
to build the GIN/GiST indexes, and the ``regconfig`` optionally passed in
during a query.
When performing a Full Text Search against a column that has a GIN or
GiST index that is already pre-computed (which is common on full text
searches) one may need to explicitly pass in a particular PostgreSQL
``regconfig`` value to ensure the query-planner utilizes the index and does
not re-compute the column on demand.
In order to provide for this explicit query planning, or to use different
search strategies, the ``match`` method accepts a ``postgresql_regconfig``
keyword argument::
select([mytable.c.id]).where(
mytable.c.title.match('somestring', postgresql_regconfig='english')
)
Emits the equivalent of::
SELECT mytable.id FROM mytable
WHERE mytable.title @@ to_tsquery('english', 'somestring')
One can also specifically pass in a `'regconfig'` value to the
``to_tsvector()`` command as the initial argument::
select([mytable.c.id]).where(
func.to_tsvector('english', mytable.c.title )\
.match('somestring', postgresql_regconfig='english')
)
produces a statement equivalent to::
SELECT mytable.id FROM mytable
WHERE to_tsvector('english', mytable.title) @@
to_tsquery('english', 'somestring')
It is recommended that you use the ``EXPLAIN ANALYZE...`` tool from
PostgreSQL to ensure that you are generating queries with SQLAlchemy that
take full advantage of any indexes you may have created for full text search.
FROM ONLY ...
------------------------
The dialect supports PostgreSQL's ONLY keyword for targeting only a particular
table in an inheritance hierarchy. This can be used to produce the
``SELECT ... FROM ONLY``, ``UPDATE ONLY ...``, and ``DELETE FROM ONLY ...``
syntaxes. It uses SQLAlchemy's hints mechanism::
# SELECT ... FROM ONLY ...
result = table.select().with_hint(table, 'ONLY', 'postgresql')
print result.fetchall()
# UPDATE ONLY ...
table.update(values=dict(foo='bar')).with_hint('ONLY',
dialect_name='postgresql')
# DELETE FROM ONLY ...
table.delete().with_hint('ONLY', dialect_name='postgresql')
.. _postgresql_indexes:
Postgresql-Specific Index Options
---------------------------------
Several extensions to the :class:`.Index` construct are available, specific
to the PostgreSQL dialect.
Partial Indexes
^^^^^^^^^^^^^^^^
Partial indexes add criterion to the index definition so that the index is
applied to a subset of rows. These can be specified on :class:`.Index`
using the ``postgresql_where`` keyword argument::
Index('my_index', my_table.c.id, postgresql_where=tbl.c.value > 10)
Operator Classes
^^^^^^^^^^^^^^^^^
PostgreSQL allows the specification of an *operator class* for each column of
an index (see
http://www.postgresql.org/docs/8.3/interactive/indexes-opclass.html).
The :class:`.Index` construct allows these to be specified via the
``postgresql_ops`` keyword argument::
Index('my_index', my_table.c.id, my_table.c.data,
postgresql_ops={
'data': 'text_pattern_ops',
'id': 'int4_ops'
})
.. versionadded:: 0.7.2
``postgresql_ops`` keyword argument to :class:`.Index` construct.
Note that the keys in the ``postgresql_ops`` dictionary are the "key" name of
the :class:`.Column`, i.e. the name used to access it from the ``.c``
collection of :class:`.Table`, which can be configured to be different than
the actual name of the column as expressed in the database.
Index Types
^^^^^^^^^^^^
PostgreSQL provides several index types: B-Tree, Hash, GiST, and GIN, as well
as the ability for users to create their own (see
http://www.postgresql.org/docs/8.3/static/indexes-types.html). These can be
specified on :class:`.Index` using the ``postgresql_using`` keyword argument::
Index('my_index', my_table.c.data, postgresql_using='gin')
The value passed to the keyword argument will be simply passed through to the
underlying CREATE INDEX command, so it *must* be a valid index type for your
version of PostgreSQL.
.. _postgresql_index_storage:
Index Storage Parameters
^^^^^^^^^^^^^^^^^^^^^^^^
PostgreSQL allows storage parameters to be set on indexes. The storage
parameters available depend on the index method used by the index. Storage
parameters can be specified on :class:`.Index` using the ``postgresql_with``
keyword argument::
Index('my_index', my_table.c.data, postgresql_with={"fillfactor": 50})
.. versionadded:: 1.0.6
.. _postgresql_index_concurrently:
Indexes with CONCURRENTLY
^^^^^^^^^^^^^^^^^^^^^^^^^
The Postgresql index option CONCURRENTLY is supported by passing the
flag ``postgresql_concurrently`` to the :class:`.Index` construct::
tbl = Table('testtbl', m, Column('data', Integer))
idx1 = Index('test_idx1', tbl.c.data, postgresql_concurrently=True)
The above index construct will render SQL as::
CREATE INDEX CONCURRENTLY test_idx1 ON testtbl (data)
.. versionadded:: 0.9.9
.. _postgresql_index_reflection:
Postgresql Index Reflection
---------------------------
The Postgresql database creates a UNIQUE INDEX implicitly whenever the
UNIQUE CONSTRAINT construct is used. When inspecting a table using
:class:`.Inspector`, the :meth:`.Inspector.get_indexes`
and the :meth:`.Inspector.get_unique_constraints` will report on these
two constructs distinctly; in the case of the index, the key
``duplicates_constraint`` will be present in the index entry if it is
detected as mirroring a constraint. When performing reflection using
``Table(..., autoload=True)``, the UNIQUE INDEX is **not** returned
in :attr:`.Table.indexes` when it is detected as mirroring a
:class:`.UniqueConstraint` in the :attr:`.Table.constraints` collection.
.. versionchanged:: 1.0.0 - :class:`.Table` reflection now includes
:class:`.UniqueConstraint` objects present in the :attr:`.Table.constraints`
collection; the Postgresql backend will no longer include a "mirrored"
:class:`.Index` construct in :attr:`.Table.indexes` if it is detected
as corresponding to a unique constraint.
Special Reflection Options
--------------------------
The :class:`.Inspector` used for the Postgresql backend is an instance
of :class:`.PGInspector`, which offers additional methods::
from sqlalchemy import create_engine, inspect
engine = create_engine("postgresql+psycopg2://localhost/test")
insp = inspect(engine) # will be a PGInspector
print(insp.get_enums())
.. autoclass:: PGInspector
:members:
.. _postgresql_table_options:
PostgreSQL Table Options
-------------------------
Several options for CREATE TABLE are supported directly by the PostgreSQL
dialect in conjunction with the :class:`.Table` construct:
* ``TABLESPACE``::
Table("some_table", metadata, ..., postgresql_tablespace='some_tablespace')
* ``ON COMMIT``::
Table("some_table", metadata, ..., postgresql_on_commit='PRESERVE ROWS')
* ``WITH OIDS``::
Table("some_table", metadata, ..., postgresql_with_oids=True)
* ``WITHOUT OIDS``::
Table("some_table", metadata, ..., postgresql_with_oids=False)
* ``INHERITS``::
Table("some_table", metadata, ..., postgresql_inherits="some_supertable")
Table("some_table", metadata, ..., postgresql_inherits=("t1", "t2", ...))
.. versionadded:: 1.0.0
.. seealso::
`Postgresql CREATE TABLE options
<http://www.postgresql.org/docs/9.3/static/sql-createtable.html>`_
ARRAY Types
-----------
The Postgresql dialect supports arrays, both as multidimensional column types
as well as array literals:
* :class:`.postgresql.ARRAY` - ARRAY datatype
* :class:`.postgresql.array` - array literal
JSON Types
----------
The Postgresql dialect supports both JSON and JSONB datatypes, including
psycopg2's native support and support for all of Postgresql's special
operators:
* :class:`.postgresql.JSON`
* :class:`.postgresql.JSONB`
HSTORE Type
-----------
The Postgresql HSTORE type as well as hstore literals are supported:
* :class:`.postgresql.HSTORE` - HSTORE datatype
* :class:`.postgresql.hstore` - hstore literal
ENUM Types
----------
Postgresql has an independently creatable TYPE structure which is used
to implement an enumerated type. This approach introduces significant
complexity on the SQLAlchemy side in terms of when this type should be
CREATED and DROPPED. The type object is also an independently reflectable
entity. The following sections should be consulted:
* :class:`.postgresql.ENUM` - DDL and typing support for ENUM.
* :meth:`.PGInspector.get_enums` - retrieve a listing of current ENUM types
* :meth:`.postgresql.ENUM.create` , :meth:`.postgresql.ENUM.drop` - individual
CREATE and DROP commands for ENUM.
.. _postgresql_array_of_enum:
Using ENUM with ARRAY
^^^^^^^^^^^^^^^^^^^^^
The combination of ENUM and ARRAY is not directly supported by backend
DBAPIs at this time. In order to send and receive an ARRAY of ENUM,
use the following workaround type::
class ArrayOfEnum(ARRAY):
def bind_expression(self, bindvalue):
return sa.cast(bindvalue, self)
def result_processor(self, dialect, coltype):
super_rp = super(ArrayOfEnum, self).result_processor(
dialect, coltype)
def handle_raw_string(value):
inner = re.match(r"^{(.*)}$", value).group(1)
return inner.split(",")
def process(value):
if value is None:
return None
return super_rp(handle_raw_string(value))
return process
E.g.::
    Table(
        'mydata', metadata,
        Column('id', Integer, primary_key=True),
        Column('data', ArrayOfEnum(ENUM('a', 'b', 'c', name='myenum')))
    )
This type is not included as a built-in type as it would be incompatible
with a DBAPI that suddenly decides to support ARRAY of ENUM directly in
a new version.
"""
from collections import defaultdict
import re
from ... import sql, schema, exc, util
from ...engine import default, reflection
from ...sql import compiler, expression
from ... import types as sqltypes
try:
from uuid import UUID as _python_UUID
except ImportError:
_python_UUID = None
from sqlalchemy.types import INTEGER, BIGINT, SMALLINT, VARCHAR, \
CHAR, TEXT, FLOAT, NUMERIC, \
DATE, BOOLEAN, REAL
RESERVED_WORDS = set(
["all", "analyse", "analyze", "and", "any", "array", "as", "asc",
"asymmetric", "both", "case", "cast", "check", "collate", "column",
"constraint", "create", "current_catalog", "current_date",
"current_role", "current_time", "current_timestamp", "current_user",
"default", "deferrable", "desc", "distinct", "do", "else", "end",
"except", "false", "fetch", "for", "foreign", "from", "grant", "group",
"having", "in", "initially", "intersect", "into", "leading", "limit",
"localtime", "localtimestamp", "new", "not", "null", "of", "off",
"offset", "old", "on", "only", "or", "order", "placing", "primary",
"references", "returning", "select", "session_user", "some", "symmetric",
"table", "then", "to", "trailing", "true", "union", "unique", "user",
"using", "variadic", "when", "where", "window", "with", "authorization",
"between", "binary", "cross", "current_schema", "freeze", "full",
"ilike", "inner", "is", "isnull", "join", "left", "like", "natural",
"notnull", "outer", "over", "overlaps", "right", "similar", "verbose"
])
_DECIMAL_TYPES = (1231, 1700)
_FLOAT_TYPES = (700, 701, 1021, 1022)
_INT_TYPES = (20, 21, 23, 26, 1005, 1007, 1016)
class BYTEA(sqltypes.LargeBinary):
    """PostgreSQL BYTEA binary type; renders as ``BYTEA``."""

    __visit_name__ = 'BYTEA'


class DOUBLE_PRECISION(sqltypes.Float):
    """PostgreSQL 8-byte float; renders as ``DOUBLE PRECISION``."""

    __visit_name__ = 'DOUBLE_PRECISION'


class INET(sqltypes.TypeEngine):
    """PostgreSQL INET type."""

    __visit_name__ = "INET"

# Alias retained for backwards compatibility.
PGInet = INET


class CIDR(sqltypes.TypeEngine):
    """PostgreSQL CIDR type."""

    __visit_name__ = "CIDR"

# Alias retained for backwards compatibility.
PGCidr = CIDR


class MACADDR(sqltypes.TypeEngine):
    """PostgreSQL MACADDR type."""

    __visit_name__ = "MACADDR"

# Alias retained for backwards compatibility.
PGMacAddr = MACADDR


class OID(sqltypes.TypeEngine):

    """Provide the Postgresql OID type.

    .. versionadded:: 0.9.5

    """
    __visit_name__ = "OID"
class TIMESTAMP(sqltypes.TIMESTAMP):
    """PostgreSQL TIMESTAMP type, adding an optional fractional-second
    ``precision`` to the standard ``timezone`` flag."""

    def __init__(self, timezone=False, precision=None):
        self.precision = precision
        super(TIMESTAMP, self).__init__(timezone=timezone)
class TIME(sqltypes.TIME):
    """PostgreSQL TIME type, adding an optional fractional-second
    ``precision`` to the standard ``timezone`` flag."""

    def __init__(self, timezone=False, precision=None):
        self.precision = precision
        super(TIME, self).__init__(timezone=timezone)
class INTERVAL(sqltypes.TypeEngine):
    """Postgresql INTERVAL type.

    The INTERVAL type may not be supported on all DBAPIs;
    it is known to work on psycopg2 and not pg8000 or zxjdbc.
    """

    __visit_name__ = 'INTERVAL'

    def __init__(self, precision=None):
        self.precision = precision

    @classmethod
    def _adapt_from_generic_interval(cls, interval):
        # carry the generic Interval's second_precision over
        return INTERVAL(precision=interval.second_precision)

    @property
    def _type_affinity(self):
        # compare/adapt as the generic Interval type
        return sqltypes.Interval

# Alias retained for backwards compatibility.
PGInterval = INTERVAL
class BIT(sqltypes.TypeEngine):
    """PostgreSQL BIT / BIT VARYING type."""

    __visit_name__ = 'BIT'

    def __init__(self, length=None, varying=False):
        self.varying = varying
        if varying:
            # BIT VARYING can be unlimited-length, so no default
            self.length = length
        else:
            # BIT without VARYING defaults to length 1
            self.length = length or 1

# Alias retained for backwards compatibility.
PGBit = BIT
class UUID(sqltypes.TypeEngine):
    """Postgresql UUID type.

    Represents the UUID column type; values are handled either as
    strings natively returned by the DBAPI, or as Python ``uuid``
    objects when ``as_uuid`` is set.

    The UUID type may not be supported on all DBAPIs.
    It is known to work on psycopg2 and not pg8000.
    """

    __visit_name__ = 'UUID'

    def __init__(self, as_uuid=False):
        """Construct a UUID type.

        :param as_uuid=False: if True, values will be interpreted
         as Python uuid objects, converting to/from string via the
         DBAPI.

        """
        if as_uuid and _python_UUID is None:
            raise NotImplementedError(
                "This version of Python does not support "
                "the native UUID type."
            )
        self.as_uuid = as_uuid

    def bind_processor(self, dialect):
        if not self.as_uuid:
            return None

        def process(value):
            # stringify uuid objects on the way in; pass None through
            return util.text_type(value) if value is not None else None

        return process

    def result_processor(self, dialect, coltype):
        if not self.as_uuid:
            return None

        def process(value):
            # wrap incoming strings as uuid objects; pass None through
            return _python_UUID(value) if value is not None else None

        return process

# Alias retained for backwards compatibility.
PGUuid = UUID
class TSVECTOR(sqltypes.TypeEngine):

    """The :class:`.postgresql.TSVECTOR` type implements the Postgresql
    text search type TSVECTOR.

    It can be used to do full text queries on natural language
    documents.

    .. versionadded:: 0.9.0

    .. seealso::

        :ref:`postgresql_match`

    """
    __visit_name__ = 'TSVECTOR'
class ENUM(sqltypes.Enum):

    """Postgresql ENUM type.

    This is a subclass of :class:`.types.Enum` which includes
    support for PG's ``CREATE TYPE`` and ``DROP TYPE``.

    When the builtin type :class:`.types.Enum` is used and the
    :paramref:`.Enum.native_enum` flag is left at its default of
    True, the Postgresql backend will use a :class:`.postgresql.ENUM`
    type as the implementation, so the special create/drop rules
    will be used.

    The create/drop behavior of ENUM is necessarily intricate, due to the
    awkward relationship the ENUM type has in relationship to the
    parent table, in that it may be "owned" by just a single table, or
    may be shared among many tables.

    When using :class:`.types.Enum` or :class:`.postgresql.ENUM`
    in an "inline" fashion, the ``CREATE TYPE`` and ``DROP TYPE`` is emitted
    corresponding to when the :meth:`.Table.create` and :meth:`.Table.drop`
    methods are called::

        table = Table('sometable', metadata,
            Column('some_enum', ENUM('a', 'b', 'c', name='myenum'))
        )

        table.create(engine)  # will emit CREATE ENUM and CREATE TABLE
        table.drop(engine)  # will emit DROP TABLE and DROP ENUM

    To use a common enumerated type between multiple tables, the best
    practice is to declare the :class:`.types.Enum` or
    :class:`.postgresql.ENUM` independently, and associate it with the
    :class:`.MetaData` object itself::

        my_enum = ENUM('a', 'b', 'c', name='myenum', metadata=metadata)

        t1 = Table('sometable_one', metadata,
            Column('some_enum', myenum)
        )

        t2 = Table('sometable_two', metadata,
            Column('some_enum', myenum)
        )

    When this pattern is used, care must still be taken at the level
    of individual table creates.  Emitting CREATE TABLE without also
    specifying ``checkfirst=True`` will still cause issues::

        t1.create(engine) # will fail: no such type 'myenum'

    If we specify ``checkfirst=True``, the individual table-level create
    operation will check for the ``ENUM`` and create if not exists::

        # will check if enum exists, and emit CREATE TYPE if not
        t1.create(engine, checkfirst=True)

    When using a metadata-level ENUM type, the type will always be created
    and dropped if either the metadata-wide create/drop is called::

        metadata.create_all(engine)   # will emit CREATE TYPE
        metadata.drop_all(engine)  # will emit DROP TYPE

    The type can also be created and dropped directly::

        my_enum.create(engine)
        my_enum.drop(engine)

    .. versionchanged:: 1.0.0 The Postgresql :class:`.postgresql.ENUM` type
       now behaves more strictly with regards to CREATE/DROP.  A metadata-level
       ENUM type will only be created and dropped at the metadata level,
       not the table level, with the exception of
       ``table.create(checkfirst=True)``.
       The ``table.drop()`` call will now emit a DROP TYPE for a table-level
       enumerated type.

    """

    def __init__(self, *enums, **kw):
        """Construct an :class:`~.postgresql.ENUM`.

        Arguments are the same as that of
        :class:`.types.Enum`, but also including
        the following parameters.

        :param create_type: Defaults to True.
         Indicates that ``CREATE TYPE`` should be
         emitted, after optionally checking for the
         presence of the type, when the parent
         table is being created; and additionally
         that ``DROP TYPE`` is called when the table
         is dropped.  When ``False``, no check
         will be performed and no ``CREATE TYPE``
         or ``DROP TYPE`` is emitted, unless
         :meth:`~.postgresql.ENUM.create`
         or :meth:`~.postgresql.ENUM.drop`
         are called directly.
         Setting to ``False`` is helpful
         when invoking a creation scheme to a SQL file
         without access to the actual database -
         the :meth:`~.postgresql.ENUM.create` and
         :meth:`~.postgresql.ENUM.drop` methods can
         be used to emit SQL to a target bind.

         .. versionadded:: 0.7.4

        """
        self.create_type = kw.pop("create_type", True)
        super(ENUM, self).__init__(*enums, **kw)

    def create(self, bind=None, checkfirst=True):
        """Emit ``CREATE TYPE`` for this
        :class:`~.postgresql.ENUM`.

        If the underlying dialect does not support
        Postgresql CREATE TYPE, no action is taken.

        :param bind: a connectable :class:`.Engine`,
         :class:`.Connection`, or similar object to emit
         SQL.
        :param checkfirst: if ``True``, a query against
         the PG catalog will be first performed to see
         if the type does not exist already before
         creating.

        """
        if not bind.dialect.supports_native_enum:
            return

        if not checkfirst or \
                not bind.dialect.has_type(
                    bind, self.name, schema=self.schema):
            bind.execute(CreateEnumType(self))

    def drop(self, bind=None, checkfirst=True):
        """Emit ``DROP TYPE`` for this
        :class:`~.postgresql.ENUM`.

        If the underlying dialect does not support
        Postgresql DROP TYPE, no action is taken.

        :param bind: a connectable :class:`.Engine`,
         :class:`.Connection`, or similar object to emit
         SQL.
        :param checkfirst: if ``True``, a query against
         the PG catalog will be first performed to see
         if the type actually exists before dropping.

        """
        if not bind.dialect.supports_native_enum:
            return

        if not checkfirst or \
                bind.dialect.has_type(bind, self.name, schema=self.schema):
            bind.execute(DropEnumType(self))

    def _check_for_name_in_memos(self, checkfirst, kw):
        """Look in the 'ddl runner' for 'memos', then
        note our name in that collection.

        This to ensure a particular named enum is operated
        upon only once within any kind of create/drop
        sequence without relying upon "checkfirst".

        """
        if not self.create_type:
            return True
        if '_ddl_runner' in kw:
            ddl_runner = kw['_ddl_runner']
            if '_pg_enums' in ddl_runner.memo:
                pg_enums = ddl_runner.memo['_pg_enums']
            else:
                pg_enums = ddl_runner.memo['_pg_enums'] = set()
            present = self.name in pg_enums
            pg_enums.add(self.name)
            return present
        else:
            return False

    def _on_table_create(self, target, bind, checkfirst, **kw):
        # Create the type when the table is created inline (not as part
        # of a metadata-level operation), or when checkfirst is in
        # effect -- but in either case only if this create/drop
        # sequence has not already handled this name.
        #
        # NOTE: the grouping parens are essential.  Without them,
        # "checkfirst or A and B" parses as "checkfirst or (A and B)",
        # which skips the memo check entirely whenever checkfirst is
        # True and emits redundant catalog checks / CREATE attempts.
        if (
            checkfirst or (
                not self.metadata and
                not kw.get('_is_metadata_operation', False))
        ) and not self._check_for_name_in_memos(checkfirst, kw):
            self.create(bind=bind, checkfirst=checkfirst)

    def _on_table_drop(self, target, bind, checkfirst, **kw):
        # Only table-level (non-metadata) types are dropped with their
        # table; metadata-level types are handled by _on_metadata_drop.
        if not self.metadata and \
                not kw.get('_is_metadata_operation', False) and \
                not self._check_for_name_in_memos(checkfirst, kw):
            self.drop(bind=bind, checkfirst=checkfirst)

    def _on_metadata_create(self, target, bind, checkfirst, **kw):
        if not self._check_for_name_in_memos(checkfirst, kw):
            self.create(bind=bind, checkfirst=checkfirst)

    def _on_metadata_drop(self, target, bind, checkfirst, **kw):
        if not self._check_for_name_in_memos(checkfirst, kw):
            self.drop(bind=bind, checkfirst=checkfirst)
# Map generic SQLAlchemy types to their PostgreSQL-specific
# implementations, consulted when compiling generic types.
colspecs = {
    sqltypes.Interval: INTERVAL,
    sqltypes.Enum: ENUM,
}

# Map PostgreSQL type names, as reported by the catalogs during
# reflection, to the SQLAlchemy type classes used to represent them.
ischema_names = {
    'integer': INTEGER,
    'bigint': BIGINT,
    'smallint': SMALLINT,
    'character varying': VARCHAR,
    'character': CHAR,
    '"char"': sqltypes.String,
    'name': sqltypes.String,
    'text': TEXT,
    'numeric': NUMERIC,
    'float': FLOAT,
    'real': REAL,
    'inet': INET,
    'cidr': CIDR,
    'uuid': UUID,
    'bit': BIT,
    'bit varying': BIT,
    'macaddr': MACADDR,
    'oid': OID,
    'double precision': DOUBLE_PRECISION,
    'timestamp': TIMESTAMP,
    'timestamp with time zone': TIMESTAMP,
    'timestamp without time zone': TIMESTAMP,
    'time with time zone': TIME,
    'time without time zone': TIME,
    'date': DATE,
    'time': TIME,
    'bytea': BYTEA,
    'boolean': BOOLEAN,
    'interval': INTERVAL,
    'interval year to month': INTERVAL,
    'interval day to second': INTERVAL,
    'tsvector': TSVECTOR
}
class PGCompiler(compiler.SQLCompiler):
    """Statement compiler applying PostgreSQL-specific SQL syntax."""

    def visit_array(self, element, **kw):
        # render an ARRAY[...] literal
        return "ARRAY[%s]" % self.visit_clauselist(element, **kw)

    def visit_slice(self, element, **kw):
        # array slice, e.g. arr[2:5]
        return "%s:%s" % (
            self.process(element.start, **kw),
            self.process(element.stop, **kw),
        )

    def visit_any(self, element, **kw):
        # comparison against ANY (expr)
        return "%s%sANY (%s)" % (
            self.process(element.left, **kw),
            compiler.OPERATORS[element.operator],
            self.process(element.right, **kw)
        )

    def visit_all(self, element, **kw):
        # comparison against ALL (expr)
        return "%s%sALL (%s)" % (
            self.process(element.left, **kw),
            compiler.OPERATORS[element.operator],
            self.process(element.right, **kw)
        )

    def visit_getitem_binary(self, binary, operator, **kw):
        # subscript access, e.g. col[3]
        return "%s[%s]" % (
            self.process(binary.left, **kw),
            self.process(binary.right, **kw)
        )

    def visit_match_op_binary(self, binary, operator, **kw):
        # full text search: "@@ to_tsquery(...)", optionally passing a
        # regconfig argument (see "Full Text Search" in the module docs)
        if "postgresql_regconfig" in binary.modifiers:
            regconfig = self.render_literal_value(
                binary.modifiers['postgresql_regconfig'],
                sqltypes.STRINGTYPE)
            if regconfig:
                return "%s @@ to_tsquery(%s, %s)" % (
                    self.process(binary.left, **kw),
                    regconfig,
                    self.process(binary.right, **kw)
                )
        return "%s @@ to_tsquery(%s)" % (
            self.process(binary.left, **kw),
            self.process(binary.right, **kw)
        )

    def visit_ilike_op_binary(self, binary, operator, **kw):
        # case-insensitive LIKE, with optional ESCAPE clause
        escape = binary.modifiers.get("escape", None)

        return '%s ILIKE %s' % \
            (self.process(binary.left, **kw),
             self.process(binary.right, **kw)) \
            + (
                ' ESCAPE ' +
                self.render_literal_value(escape, sqltypes.STRINGTYPE)
                if escape else ''
            )

    def visit_notilike_op_binary(self, binary, operator, **kw):
        # negated case-insensitive LIKE, with optional ESCAPE clause
        escape = binary.modifiers.get("escape", None)
        return '%s NOT ILIKE %s' % \
            (self.process(binary.left, **kw),
             self.process(binary.right, **kw)) \
            + (
                ' ESCAPE ' +
                self.render_literal_value(escape, sqltypes.STRINGTYPE)
                if escape else ''
            )

    def render_literal_value(self, value, type_):
        value = super(PGCompiler, self).render_literal_value(value, type_)

        # double up backslashes when the dialect reports that the server
        # treats backslashes in string literals as escapes
        if self.dialect._backslash_escapes:
            value = value.replace('\\', '\\\\')
        return value

    def visit_sequence(self, seq):
        return "nextval('%s')" % self.preparer.format_sequence(seq)

    def limit_clause(self, select, **kw):
        text = ""
        if select._limit_clause is not None:
            text += " \n LIMIT " + self.process(select._limit_clause, **kw)
        if select._offset_clause is not None:
            if select._limit_clause is None:
                # OFFSET without LIMIT requires an explicit LIMIT ALL
                text += " \n LIMIT ALL"
            text += " OFFSET " + self.process(select._offset_clause, **kw)
        return text

    def format_from_hint_text(self, sqltext, table, hint, iscrud):
        # only the ONLY hint is supported (FROM ONLY / UPDATE ONLY / ...)
        if hint.upper() != 'ONLY':
            raise exc.CompileError("Unrecognized hint: %r" % hint)
        return "ONLY " + sqltext

    def get_select_precolumns(self, select, **kw):
        if select._distinct is not False:
            if select._distinct is True:
                return "DISTINCT "
            elif isinstance(select._distinct, (list, tuple)):
                # DISTINCT ON (col, col, ...)
                return "DISTINCT ON (" + ', '.join(
                    [self.process(col) for col in select._distinct]
                ) + ") "
            else:
                # DISTINCT ON (single expression)
                return "DISTINCT ON (" + \
                    self.process(select._distinct, **kw) + ") "
        else:
            return ""

    def for_update_clause(self, select, **kw):
        if select._for_update_arg.read:
            tmp = " FOR SHARE"
        else:
            tmp = " FOR UPDATE"

        if select._for_update_arg.of:
            # FOR UPDATE OF <tables>: map column clauses back to their
            # parent tables, de-duplicating while preserving order
            tables = util.OrderedSet(
                c.table if isinstance(c, expression.ColumnClause)
                else c for c in select._for_update_arg.of)
            tmp += " OF " + ", ".join(
                self.process(table, ashint=True, **kw)
                for table in tables
            )

        if select._for_update_arg.nowait:
            tmp += " NOWAIT"

        return tmp

    def returning_clause(self, stmt, returning_cols):
        columns = [
            self._label_select_column(None, c, True, False, {})
            for c in expression._select_iterables(returning_cols)
        ]

        return 'RETURNING ' + ', '.join(columns)

    def visit_substring_func(self, func, **kw):
        # PG uses SUBSTRING(x FROM start [FOR length]) syntax
        s = self.process(func.clauses.clauses[0], **kw)
        start = self.process(func.clauses.clauses[1], **kw)
        if len(func.clauses.clauses) > 2:
            length = self.process(func.clauses.clauses[2], **kw)
            return "SUBSTRING(%s FROM %s FOR %s)" % (s, start, length)
        else:
            return "SUBSTRING(%s FROM %s)" % (s, start)
class PGDDLCompiler(compiler.DDLCompiler):
    """DDL compiler applying PostgreSQL-specific CREATE/DROP syntax."""

    def get_column_specification(self, column, **kwargs):
        colspec = self.preparer.format_column(column)
        impl_type = column.type.dialect_impl(self.dialect)
        # Render SERIAL / BIGSERIAL / SMALLSERIAL for the table's
        # autoincrement primary key column, provided the column has no
        # default other than an optional Sequence.  SMALLSERIAL only
        # when the dialect reports support for it.
        if column.primary_key and \
            column is column.table._autoincrement_column and \
            (
                self.dialect.supports_smallserial or
                not isinstance(impl_type, sqltypes.SmallInteger)
            ) and (
                column.default is None or
                (
                    isinstance(column.default, schema.Sequence) and
                    column.default.optional
                )):
            if isinstance(impl_type, sqltypes.BigInteger):
                colspec += " BIGSERIAL"
            elif isinstance(impl_type, sqltypes.SmallInteger):
                colspec += " SMALLSERIAL"
            else:
                colspec += " SERIAL"
        else:
            colspec += " " + self.dialect.type_compiler.process(
                column.type, type_expression=column)
            default = self.get_column_default_string(column)
            if default is not None:
                colspec += " DEFAULT " + default

        if not column.nullable:
            colspec += " NOT NULL"
        return colspec

    def visit_create_enum_type(self, create):
        # CREATE TYPE name AS ENUM ('label', ...)
        type_ = create.element

        return "CREATE TYPE %s AS ENUM (%s)" % (
            self.preparer.format_type(type_),
            ", ".join(
                self.sql_compiler.process(sql.literal(e), literal_binds=True)
                for e in type_.enums)
        )

    def visit_drop_enum_type(self, drop):
        type_ = drop.element

        return "DROP TYPE %s" % (
            self.preparer.format_type(type_)
        )

    def visit_create_index(self, create):
        preparer = self.preparer
        index = create.element
        self._verify_index_table(index)
        text = "CREATE "
        if index.unique:
            text += "UNIQUE "
        text += "INDEX "

        # postgresql_concurrently flag -> CREATE INDEX CONCURRENTLY
        concurrently = index.dialect_options['postgresql']['concurrently']
        if concurrently:
            text += "CONCURRENTLY "

        text += "%s ON %s " % (
            self._prepared_index_name(index,
                                      include_schema=False),
            preparer.format_table(index.table)
        )

        # postgresql_using -> index access method (btree, gin, gist, ...)
        using = index.dialect_options['postgresql']['using']
        if using:
            text += "USING %s " % preparer.quote(using)

        # column expressions, each optionally followed by an operator
        # class from postgresql_ops keyed on the column's .key
        ops = index.dialect_options["postgresql"]["ops"]
        text += "(%s)" \
                % (
                    ', '.join([
                        self.sql_compiler.process(
                            expr.self_group()
                            if not isinstance(expr, expression.ColumnClause)
                            else expr,
                            include_table=False, literal_binds=True) +
                        (
                            (' ' + ops[expr.key])
                            if hasattr(expr, 'key')
                            and expr.key in ops else ''
                        )
                        for expr in index.expressions
                    ])
                )

        # postgresql_with -> index storage parameters
        withclause = index.dialect_options['postgresql']['with']

        if withclause:
            text += " WITH (%s)" % (', '.join(
                ['%s = %s' % storage_parameter
                 for storage_parameter in withclause.items()]))

        # postgresql_where -> partial index predicate
        whereclause = index.dialect_options["postgresql"]["where"]

        if whereclause is not None:
            where_compiled = self.sql_compiler.process(
                whereclause, include_table=False,
                literal_binds=True)
            text += " WHERE " + where_compiled
        return text

    def visit_exclude_constraint(self, constraint, **kw):
        # EXCLUDE USING <method> (expr WITH op, ...) [WHERE (...)]
        text = ""
        if constraint.name is not None:
            text += "CONSTRAINT %s " % \
                    self.preparer.format_constraint(constraint)
        elements = []
        for expr, name, op in constraint._render_exprs:
            kw['include_table'] = False
            elements.append(
                "%s WITH %s" % (self.sql_compiler.process(expr, **kw), op)
            )
        text += "EXCLUDE USING %s (%s)" % (constraint.using,
                                           ', '.join(elements))
        if constraint.where is not None:
            text += ' WHERE (%s)' % self.sql_compiler.process(
                constraint.where,
                literal_binds=True)
        text += self.define_constraint_deferrability(constraint)
        return text

    def post_create_table(self, table):
        """Render table options following the closing paren of
        CREATE TABLE: INHERITS, WITH/WITHOUT OIDS, ON COMMIT,
        TABLESPACE (see "PostgreSQL Table Options" in the module docs).
        """
        table_opts = []
        pg_opts = table.dialect_options['postgresql']

        inherits = pg_opts.get('inherits')
        if inherits is not None:
            if not isinstance(inherits, (list, tuple)):
                inherits = (inherits, )
            table_opts.append(
                '\n INHERITS ( ' +
                ', '.join(self.preparer.quote(name) for name in inherits) +
                ' )')

        if pg_opts['with_oids'] is True:
            table_opts.append('\n WITH OIDS')
        elif pg_opts['with_oids'] is False:
            table_opts.append('\n WITHOUT OIDS')

        if pg_opts['on_commit']:
            # e.g. 'preserve_rows' -> 'PRESERVE ROWS'
            on_commit_options = pg_opts['on_commit'].replace("_", " ").upper()
            table_opts.append('\n ON COMMIT %s' % on_commit_options)

        if pg_opts['tablespace']:
            tablespace_name = pg_opts['tablespace']
            table_opts.append(
                '\n TABLESPACE %s' % self.preparer.quote(tablespace_name)
            )

        return ''.join(table_opts)
class PGTypeCompiler(compiler.GenericTypeCompiler):
    """Type compiler rendering PostgreSQL-specific type names.

    All visit methods take the type object as ``type_``
    (``visit_TSVECTOR`` previously named its parameter ``type``,
    shadowing the builtin and diverging from every sibling method).
    """

    def visit_TSVECTOR(self, type_, **kw):
        return "TSVECTOR"

    def visit_INET(self, type_, **kw):
        return "INET"

    def visit_CIDR(self, type_, **kw):
        return "CIDR"

    def visit_MACADDR(self, type_, **kw):
        return "MACADDR"

    def visit_OID(self, type_, **kw):
        return "OID"

    def visit_FLOAT(self, type_, **kw):
        if not type_.precision:
            return "FLOAT"
        else:
            return "FLOAT(%(precision)s)" % {'precision': type_.precision}

    def visit_DOUBLE_PRECISION(self, type_, **kw):
        return "DOUBLE PRECISION"

    def visit_BIGINT(self, type_, **kw):
        return "BIGINT"

    def visit_HSTORE(self, type_, **kw):
        return "HSTORE"

    def visit_JSON(self, type_, **kw):
        return "JSON"

    def visit_JSONB(self, type_, **kw):
        return "JSONB"

    def visit_INT4RANGE(self, type_, **kw):
        return "INT4RANGE"

    def visit_INT8RANGE(self, type_, **kw):
        return "INT8RANGE"

    def visit_NUMRANGE(self, type_, **kw):
        return "NUMRANGE"

    def visit_DATERANGE(self, type_, **kw):
        return "DATERANGE"

    def visit_TSRANGE(self, type_, **kw):
        return "TSRANGE"

    def visit_TSTZRANGE(self, type_, **kw):
        return "TSTZRANGE"

    def visit_datetime(self, type_, **kw):
        return self.visit_TIMESTAMP(type_, **kw)

    def visit_enum(self, type_, **kw):
        if not type_.native_enum or not self.dialect.supports_native_enum:
            return super(PGTypeCompiler, self).visit_enum(type_, **kw)
        else:
            return self.visit_ENUM(type_, **kw)

    def visit_ENUM(self, type_, **kw):
        return self.dialect.identifier_preparer.format_type(type_)

    def visit_TIMESTAMP(self, type_, **kw):
        # precision may be absent on the generic type; a falsy
        # precision (None or 0) renders no parenthesized modifier.
        precision = getattr(type_, 'precision', None)
        return "TIMESTAMP%s %s" % (
            "(%d)" % precision if precision else "",
            ("WITH" if type_.timezone else "WITHOUT") + " TIME ZONE"
        )

    def visit_TIME(self, type_, **kw):
        precision = getattr(type_, 'precision', None)
        return "TIME%s %s" % (
            "(%d)" % precision if precision else "",
            ("WITH" if type_.timezone else "WITHOUT") + " TIME ZONE"
        )

    def visit_INTERVAL(self, type_, **kw):
        if type_.precision is not None:
            return "INTERVAL(%d)" % type_.precision
        else:
            return "INTERVAL"

    def visit_BIT(self, type_, **kw):
        if type_.varying:
            compiled = "BIT VARYING"
            if type_.length is not None:
                compiled += "(%d)" % type_.length
        else:
            compiled = "BIT(%d)" % type_.length
        return compiled

    def visit_UUID(self, type_, **kw):
        return "UUID"

    def visit_large_binary(self, type_, **kw):
        return self.visit_BYTEA(type_, **kw)

    def visit_BYTEA(self, type_, **kw):
        return "BYTEA"

    def visit_ARRAY(self, type_, **kw):
        # one '[]' suffix per declared dimension; default is 1
        return self.process(type_.item_type) + ('[]' * (type_.dimensions
                                                        if type_.dimensions
                                                        is not None else 1))
class PGIdentifierPreparer(compiler.IdentifierPreparer):
    """Identifier preparer using the PostgreSQL reserved-word list."""

    reserved_words = RESERVED_WORDS

    def _unquote_identifier(self, value):
        # strip a surrounding quote pair and collapse escaped quotes;
        # identifiers not starting with the quote char pass through
        if value[0] != self.initial_quote:
            return value
        return value[1:-1].replace(
            self.escape_to_quote, self.escape_quote)

    def format_type(self, type_, use_schema=True):
        """Render the (optionally schema-qualified) name of a named
        type such as ENUM."""
        if not type_.name:
            raise exc.CompileError("Postgresql ENUM type requires a name.")

        parts = []
        if not self.omit_schema and use_schema and \
                type_.schema is not None:
            parts.append(self.quote_schema(type_.schema))
        parts.append(self.quote(type_.name))
        return ".".join(parts)
class PGInspector(reflection.Inspector):
    """:class:`.Inspector` subclass exposing PostgreSQL-only
    reflection helpers."""

    def __init__(self, conn):
        reflection.Inspector.__init__(self, conn)

    def get_table_oid(self, table_name, schema=None):
        """Return the OID for the given table name."""

        return self.dialect.get_table_oid(
            self.bind, table_name, schema,
            info_cache=self.info_cache)

    def get_enums(self, schema=None):
        """Return a list of ENUM objects.

        Each member is a dictionary containing these fields:

            * name - name of the enum
            * schema - the schema name for the enum.
            * visible - boolean, whether or not this enum is visible
              in the default search path.
            * labels - a list of string labels that apply to the enum.

        :param schema: schema name.  If None, the default schema
         (typically 'public') is used.  May also be set to '*' to
         indicate load enums for all schemas.

        .. versionadded:: 1.0.0

        """
        target_schema = schema if schema else self.default_schema_name
        return self.dialect._load_enums(self.bind, target_schema)

    def get_foreign_table_names(self, schema=None):
        """Return a list of FOREIGN TABLE names.

        Behavior is similar to that of
        :meth:`.Inspector.get_table_names`, except that the list is
        limited to those tables that report a ``relkind`` value of
        ``f``.

        .. versionadded:: 1.0.0

        """
        target_schema = schema if schema else self.default_schema_name
        return self.dialect._get_foreign_table_names(
            self.bind, target_schema)
class CreateEnumType(schema._CreateDropBase):
    """DDL element whose compilation (visit_create_enum_type) emits
    ``CREATE TYPE ... AS ENUM`` for an :class:`.ENUM` type."""

    __visit_name__ = "create_enum_type"


class DropEnumType(schema._CreateDropBase):
    """DDL element whose compilation (visit_drop_enum_type) emits
    ``DROP TYPE`` for an :class:`.ENUM` type."""

    __visit_name__ = "drop_enum_type"
class PGExecutionContext(default.DefaultExecutionContext):
    """Execution context handling pre-execution of sequence and
    server-side defaults for SERIAL/sequence primary key columns."""

    def fire_sequence(self, seq, type_):
        """Execute ``nextval()`` for an explicit Sequence and return
        the new value."""
        return self._execute_scalar((
            "select nextval('%s')" %
            self.dialect.identifier_preparer.format_sequence(seq)), type_)

    def get_insert_default(self, column):
        """Return the pre-executed default for *column* on INSERT.

        For the autoincrement primary key column, either pre-executes
        the column's server default or fires the implicit SERIAL
        sequence; otherwise defers to the base implementation.
        """
        if column.primary_key and \
                column is column.table._autoincrement_column:
            if column.server_default and column.server_default.has_argument:

                # pre-execute passive defaults on primary key columns
                return self._execute_scalar(
                    "select %s" % column.server_default.arg, column.type)

            elif (column.default is None or
                  (column.default.is_sequence and
                   column.default.optional)):

                # execute the sequence associated with a SERIAL primary
                # key column. for non-primary-key SERIAL, the ID just
                # generates server side.

                try:
                    seq_name = column._postgresql_seq_name
                except AttributeError:
                    tab = column.table.name
                    col = column.name
                    # truncate the parts of the implicit
                    # "<table>_<column>_seq" name the same way the
                    # server does, so the whole identifier fits within
                    # the 63-character limit (29 + 29 + len("__seq")).
                    tab = tab[0:29 + max(0, (29 - len(col)))]
                    col = col[0:29 + max(0, (29 - len(tab)))]
                    name = "%s_%s_seq" % (tab, col)
                    column._postgresql_seq_name = seq_name = name

                # build the nextval() statement; named "stmt" rather
                # than the former "exc", which shadowed the sqlalchemy
                # exc module imported at the top of this file.
                sch = column.table.schema
                if sch is not None:
                    stmt = "select nextval('\"%s\".\"%s\"')" % \
                        (sch, seq_name)
                else:
                    stmt = "select nextval('\"%s\"')" % \
                        (seq_name, )

                return self._execute_scalar(stmt, column.type)

        return super(PGExecutionContext, self).get_insert_default(column)
class PGDialect(default.DefaultDialect):
    """SQLAlchemy dialect for PostgreSQL.

    Capability flags below describe a reasonably modern server; several
    are re-detected per-connection in :meth:`.initialize`.
    """
    name = 'postgresql'
    supports_alter = True
    max_identifier_length = 63
    supports_sane_rowcount = True

    supports_native_enum = True
    supports_native_boolean = True
    supports_smallserial = True

    supports_sequences = True
    sequences_optional = True
    preexecute_autoincrement_sequences = True
    postfetch_lastrowid = False

    supports_default_values = True
    supports_empty_insert = False
    supports_multivalues_insert = True
    default_paramstyle = 'pyformat'
    ischema_names = ischema_names
    colspecs = colspecs

    statement_compiler = PGCompiler
    ddl_compiler = PGDDLCompiler
    type_compiler = PGTypeCompiler
    preparer = PGIdentifierPreparer
    execution_ctx_cls = PGExecutionContext
    inspector = PGInspector
    isolation_level = None

    # Dialect-specific keyword arguments accepted by Index and Table
    # constructs (e.g. ``postgresql_using``, ``postgresql_tablespace``).
    construct_arguments = [
        (schema.Index, {
            "using": False,
            "where": None,
            "ops": {},
            "concurrently": False,
            "with": {}
        }),
        (schema.Table, {
            "ignore_search_path": False,
            "tablespace": None,
            "with_oids": None,
            "on_commit": None,
            "inherits": None
        })
    ]

    reflection_options = ('postgresql_ignore_search_path', )

    _backslash_escapes = True

    def __init__(self, isolation_level=None, json_serializer=None,
                 json_deserializer=None, **kwargs):
        """Construct the dialect.

        :param isolation_level: default transaction isolation level to
         apply on each new connection (see :meth:`.set_isolation_level`).
        :param json_serializer: callable used to serialize JSON values;
         stored for use by JSON-capable DBAPI drivers.
        :param json_deserializer: callable used to deserialize JSON values.
        """
        default.DefaultDialect.__init__(self, **kwargs)
        self.isolation_level = isolation_level
        self._json_deserializer = json_deserializer
        self._json_serializer = json_serializer
def initialize(self, connection):
    """Detect server-version-dependent capabilities on first connect."""
    super(PGDialect, self).initialize(connection)
    # implicit RETURNING requires PG > 8.2; honor an explicit override
    # already placed in the instance dict.
    self.implicit_returning = self.server_version_info > (8, 2) and \
        self.__dict__.get('implicit_returning', True)
    self.supports_native_enum = self.server_version_info >= (8, 3)
    if not self.supports_native_enum:
        self.colspecs = self.colspecs.copy()
        # pop base Enum type
        self.colspecs.pop(sqltypes.Enum, None)
        # psycopg2, others may have placed ENUM here as well
        self.colspecs.pop(ENUM, None)

    # http://www.postgresql.org/docs/9.3/static/release-9-2.html#AEN116689
    self.supports_smallserial = self.server_version_info >= (9, 2)

    # Backslash escaping in string literals depends on the server's
    # standard_conforming_strings setting (always on pre-8.2).
    self._backslash_escapes = self.server_version_info < (8, 2) or \
        connection.scalar(
            "show standard_conforming_strings"
        ) == 'off'
def on_connect(self):
    """Return a per-connection hook applying the configured isolation
    level, or ``None`` when no level was configured."""
    if self.isolation_level is not None:
        def connect(conn):
            self.set_isolation_level(conn, self.isolation_level)
        return connect
    else:
        return None

# Isolation level names accepted by set_isolation_level, after
# underscores have been normalized to spaces.
_isolation_lookup = set(['SERIALIZABLE', 'READ UNCOMMITTED',
                         'READ COMMITTED', 'REPEATABLE READ'])
def set_isolation_level(self, connection, level):
    """Apply *level* as the session-default transaction isolation level.

    *level* may spell spaces as underscores (e.g. ``READ_COMMITTED``).
    Raises :class:`.exc.ArgumentError` for an unrecognized level.
    """
    normalized = level.replace('_', ' ')
    if normalized not in self._isolation_lookup:
        raise exc.ArgumentError(
            "Invalid value '%s' for isolation_level. "
            "Valid isolation levels for %s are %s" %
            (normalized, self.name, ", ".join(self._isolation_lookup)))
    cursor = connection.cursor()
    cursor.execute(
        "SET SESSION CHARACTERISTICS AS TRANSACTION "
        "ISOLATION LEVEL %s" % normalized)
    # the SET takes effect for subsequent transactions; end the current one.
    cursor.execute("COMMIT")
    cursor.close()
def get_isolation_level(self, connection):
    """Return the connection's current isolation level, upper-cased."""
    cursor = connection.cursor()
    cursor.execute('show transaction isolation level')
    row = cursor.fetchone()
    cursor.close()
    return row[0].upper()
def do_begin_twophase(self, connection, xid):
    # PostgreSQL has no explicit "begin two-phase"; a plain BEGIN is
    # issued and the xid only matters at PREPARE TRANSACTION time.
    self.do_begin(connection.connection)
def do_prepare_twophase(self, connection, xid):
    """Convert the current transaction into prepared transaction *xid*."""
    connection.execute("PREPARE TRANSACTION '%s'" % xid)
def do_rollback_twophase(self, connection, xid,
                         is_prepared=True, recover=False):
    """Roll back two-phase transaction *xid*.

    In recovery mode the DBAPI's implicit transaction must first be
    ended so that ROLLBACK PREPARED can run outside a transaction.
    """
    if is_prepared:
        if recover:
            # FIXME: ugly hack to get out of transaction
            # context when committing recoverable transactions
            # Must find out a way how to make the dbapi not
            # open a transaction.
            connection.execute("ROLLBACK")
        connection.execute("ROLLBACK PREPARED '%s'" % xid)
        connection.execute("BEGIN")
        self.do_rollback(connection.connection)
    else:
        self.do_rollback(connection.connection)
def do_commit_twophase(self, connection, xid,
                       is_prepared=True, recover=False):
    """Commit two-phase transaction *xid*.

    Mirrors :meth:`.do_rollback_twophase`: in recovery mode the implicit
    DBAPI transaction is ended first so COMMIT PREPARED can execute.
    """
    if is_prepared:
        if recover:
            connection.execute("ROLLBACK")
        connection.execute("COMMIT PREPARED '%s'" % xid)
        connection.execute("BEGIN")
        self.do_rollback(connection.connection)
    else:
        self.do_commit(connection.connection)
def do_recover_twophase(self, connection):
    """Return the gids of all prepared transactions known to the server."""
    recovered = []
    for row in connection.execute(
            sql.text("SELECT gid FROM pg_prepared_xacts")):
        recovered.append(row[0])
    return recovered
def _get_default_schema_name(self, connection):
    """Return the schema at the front of the connection's search path."""
    return connection.scalar("select current_schema()")
def has_schema(self, connection, schema):
    """Return True if the named schema exists (case-insensitive match)."""
    query = ("select nspname from pg_namespace "
             "where lower(nspname)=:schema")
    cursor = connection.execute(
        sql.text(
            query,
            bindparams=[
                sql.bindparam(
                    'schema', util.text_type(schema.lower()),
                    type_=sqltypes.Unicode)]
        )
    )

    return bool(cursor.first())
def has_table(self, connection, table_name, schema=None):
    """Return True if the named relation exists.

    With no schema, visibility is resolved via the search path using
    ``pg_table_is_visible()``; otherwise an exact schema match is used.
    """
    # seems like case gets folded in pg_class...
    if schema is None:
        cursor = connection.execute(
            sql.text(
                "select relname from pg_class c join pg_namespace n on "
                "n.oid=c.relnamespace where "
                "pg_catalog.pg_table_is_visible(c.oid) "
                "and relname=:name",
                bindparams=[
                    sql.bindparam('name', util.text_type(table_name),
                                  type_=sqltypes.Unicode)]
            )
        )
    else:
        cursor = connection.execute(
            sql.text(
                "select relname from pg_class c join pg_namespace n on "
                "n.oid=c.relnamespace where n.nspname=:schema and "
                "relname=:name",
                bindparams=[
                    sql.bindparam('name',
                                  util.text_type(table_name),
                                  type_=sqltypes.Unicode),
                    sql.bindparam('schema',
                                  util.text_type(schema),
                                  type_=sqltypes.Unicode)]
            )
        )
    return bool(cursor.first())
def has_sequence(self, connection, sequence_name, schema=None):
    """Return True if the named sequence (``relkind='S'``) exists.

    Defaults to the connection's current schema when none is given.
    """
    if schema is None:
        cursor = connection.execute(
            sql.text(
                "SELECT relname FROM pg_class c join pg_namespace n on "
                "n.oid=c.relnamespace where relkind='S' and "
                "n.nspname=current_schema() "
                "and relname=:name",
                bindparams=[
                    sql.bindparam('name', util.text_type(sequence_name),
                                  type_=sqltypes.Unicode)
                ]
            )
        )
    else:
        cursor = connection.execute(
            sql.text(
                "SELECT relname FROM pg_class c join pg_namespace n on "
                "n.oid=c.relnamespace where relkind='S' and "
                "n.nspname=:schema and relname=:name",
                bindparams=[
                    sql.bindparam('name', util.text_type(sequence_name),
                                  type_=sqltypes.Unicode),
                    sql.bindparam('schema',
                                  util.text_type(schema),
                                  type_=sqltypes.Unicode)
                ]
            )
        )

    return bool(cursor.first())
def has_type(self, connection, type_name, schema=None):
    """Return True if the named type exists in ``pg_type``.

    Without a schema, only types visible on the search path
    (``pg_type_is_visible``) are considered.
    """
    if schema is not None:
        query = """
        SELECT EXISTS (
            SELECT * FROM pg_catalog.pg_type t, pg_catalog.pg_namespace n
            WHERE t.typnamespace = n.oid
            AND t.typname = :typname
            AND n.nspname = :nspname
            )
            """
        query = sql.text(query)
    else:
        query = """
        SELECT EXISTS (
            SELECT * FROM pg_catalog.pg_type t
            WHERE t.typname = :typname
            AND pg_type_is_visible(t.oid)
            )
            """
        query = sql.text(query)
    query = query.bindparams(
        sql.bindparam('typname',
                      util.text_type(type_name), type_=sqltypes.Unicode),
    )
    if schema is not None:
        query = query.bindparams(
            sql.bindparam('nspname',
                          util.text_type(schema), type_=sqltypes.Unicode),
        )
    cursor = connection.execute(query)
    return bool(cursor.scalar())
def _get_server_version_info(self, connection):
v = connection.execute("select version()").scalar()
m = re.match(
'.*(?:PostgreSQL|EnterpriseDB) '
'(\d+)\.(\d+)(?:\.(\d+))?(?:\.\d+)?(?:devel)?',
v)
if not m:
raise AssertionError(
"Could not determine version from string '%s'" % v)
return tuple([int(x) for x in m.group(1, 2, 3) if x is not None])
@reflection.cache
def get_table_oid(self, connection, table_name, schema=None, **kw):
    """Fetch the oid for schema.table_name.

    Several reflection methods require the table oid.  The idea for using
    this method is that it can be fetched one time and cached for
    subsequent calls.

    :raises exc.NoSuchTableError: if no matching relation is found.
    """
    table_oid = None
    if schema is not None:
        schema_where_clause = "n.nspname = :schema"
    else:
        schema_where_clause = "pg_catalog.pg_table_is_visible(c.oid)"
    query = """
        SELECT c.oid
        FROM pg_catalog.pg_class c
        LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
        WHERE (%s)
        AND c.relname = :table_name AND c.relkind in ('r', 'v', 'm', 'f')
    """ % schema_where_clause
    # Since we're binding to unicode, table_name and schema_name must be
    # unicode.
    table_name = util.text_type(table_name)
    if schema is not None:
        schema = util.text_type(schema)
    s = sql.text(query).bindparams(table_name=sqltypes.Unicode)
    s = s.columns(oid=sqltypes.Integer)
    if schema:
        s = s.bindparams(sql.bindparam('schema', type_=sqltypes.Unicode))
    c = connection.execute(s, table_name=table_name, schema=schema)
    table_oid = c.scalar()
    if table_oid is None:
        raise exc.NoSuchTableError(table_name)
    return table_oid
@reflection.cache
def get_schema_names(self, connection, **kw):
    """Return all schema names, excluding internal ``pg_*`` schemas."""
    s = """
    SELECT nspname
    FROM pg_namespace
    ORDER BY nspname
    """
    rp = connection.execute(s)
    # what about system tables?

    if util.py2k:
        schema_names = [row[0].decode(self.encoding) for row in rp
                        if not row[0].startswith('pg_')]
    else:
        schema_names = [row[0] for row in rp
                        if not row[0].startswith('pg_')]
    return schema_names
@reflection.cache
def get_table_names(self, connection, schema=None, **kw):
    """Return plain table names (``relkind='r'``) in the given schema."""
    if schema is not None:
        current_schema = schema
    else:
        current_schema = self.default_schema_name

    # NOTE(review): the schema name is interpolated with %, not bound;
    # assumed safe only because schema names come from trusted metadata.
    result = connection.execute(
        sql.text("SELECT relname FROM pg_class c "
                 "WHERE relkind = 'r' "
                 "AND '%s' = (select nspname from pg_namespace n "
                 "where n.oid = c.relnamespace) " %
                 current_schema,
                 typemap={'relname': sqltypes.Unicode}
                 )
    )
    return [row[0] for row in result]
@reflection.cache
def _get_foreign_table_names(self, connection, schema=None, **kw):
    """Return FOREIGN TABLE names (``relkind='f'``) in the given schema."""
    if schema is not None:
        current_schema = schema
    else:
        current_schema = self.default_schema_name

    # NOTE(review): schema interpolated with %, same caveat as
    # get_table_names.
    result = connection.execute(
        sql.text("SELECT relname FROM pg_class c "
                 "WHERE relkind = 'f' "
                 "AND '%s' = (select nspname from pg_namespace n "
                 "where n.oid = c.relnamespace) " %
                 current_schema,
                 typemap={'relname': sqltypes.Unicode}
                 )
    )
    return [row[0] for row in result]
@reflection.cache
def get_view_names(self, connection, schema=None, **kw):
    """Return view and materialized-view names (``relkind`` 'v'/'m')."""
    if schema is not None:
        current_schema = schema
    else:
        current_schema = self.default_schema_name
    s = """
    SELECT relname
    FROM pg_class c
    WHERE relkind IN ('m', 'v')
      AND '%(schema)s' = (select nspname from pg_namespace n
      where n.oid = c.relnamespace)
    """ % dict(schema=current_schema)

    if util.py2k:
        view_names = [row[0].decode(self.encoding)
                      for row in connection.execute(s)]
    else:
        view_names = [row[0] for row in connection.execute(s)]
    return view_names
@reflection.cache
def get_view_definition(self, connection, view_name, schema=None, **kw):
    """Return the SQL source of the named view from ``pg_views``."""
    if schema is not None:
        current_schema = schema
    else:
        current_schema = self.default_schema_name
    s = """
    SELECT definition FROM pg_views
    WHERE schemaname = :schema
    AND viewname = :view_name
    """
    rp = connection.execute(sql.text(s),
                            view_name=view_name, schema=current_schema)

    # NOTE(review): ``if rp`` tests the result object's truthiness, not
    # whether a row was found; ``view_def`` would be unbound if rp were
    # ever falsy -- confirm against the result-proxy contract.
    if rp:
        if util.py2k:
            view_def = rp.scalar().decode(self.encoding)
        else:
            view_def = rp.scalar()
        return view_def
@reflection.cache
def get_columns(self, connection, table_name, schema=None, **kw):
    """Return column reflection dicts for *table_name*.

    Fetches ``pg_attribute`` rows for the table's oid, pre-loads the
    server's domains and enums, then delegates per-column interpretation
    to :meth:`._get_column_info`.
    """
    table_oid = self.get_table_oid(connection, table_name, schema,
                                   info_cache=kw.get('info_cache'))
    SQL_COLS = """
        SELECT a.attname,
          pg_catalog.format_type(a.atttypid, a.atttypmod),
          (SELECT pg_catalog.pg_get_expr(d.adbin, d.adrelid)
            FROM pg_catalog.pg_attrdef d
            WHERE d.adrelid = a.attrelid AND d.adnum = a.attnum
            AND a.atthasdef)
          AS DEFAULT,
          a.attnotnull, a.attnum, a.attrelid as table_oid
        FROM pg_catalog.pg_attribute a
        WHERE a.attrelid = :table_oid
        AND a.attnum > 0 AND NOT a.attisdropped
        ORDER BY a.attnum
    """
    s = sql.text(SQL_COLS,
                 bindparams=[
                     sql.bindparam('table_oid', type_=sqltypes.Integer)],
                 typemap={
                     'attname': sqltypes.Unicode,
                     'default': sqltypes.Unicode}
                 )
    c = connection.execute(s, table_oid=table_oid)
    rows = c.fetchall()
    domains = self._load_domains(connection)
    # enums keyed by plain name when visible on the search path, else
    # "schema.name"
    enums = dict(
        (
            "%s.%s" % (rec['schema'], rec['name'])
            if not rec['visible'] else rec['name'], rec) for rec in
        self._load_enums(connection, schema='*')
    )

    # format columns
    columns = []
    for name, format_type, default, notnull, attnum, table_oid in rows:
        column_info = self._get_column_info(
            name, format_type, default, notnull, domains, enums, schema)
        columns.append(column_info)
    return columns
def _get_column_info(self, name, format_type, default,
                     notnull, domains, enums, schema):
    """Build one reflection column-info dict from a ``pg_attribute`` row.

    Resolves the textual ``format_type`` into a SQLAlchemy type,
    following domain chains and consulting reflected enums, and detects
    SERIAL-style ``nextval()`` defaults as autoincrement.
    """
    # strip (*) from character varying(5), timestamp(5)
    # with time zone, geometry(POLYGON), etc.
    attype = re.sub(r'\(.*\)', '', format_type)

    # strip '[]' from integer[], etc.
    attype = re.sub(r'\[\]', '', attype)

    nullable = not notnull
    is_array = format_type.endswith('[]')
    charlen = re.search('\(([\d,]+)\)', format_type)
    if charlen:
        charlen = charlen.group(1)
    args = re.search('\((.*)\)', format_type)
    if args and args.group(1):
        args = tuple(re.split('\s*,\s*', args.group(1)))
    else:
        args = ()
    kwargs = {}

    # normalize args/kwargs per type family (precision/scale, timezone,
    # varying, etc.)
    if attype == 'numeric':
        if charlen:
            prec, scale = charlen.split(',')
            args = (int(prec), int(scale))
        else:
            args = ()
    elif attype == 'double precision':
        args = (53, )
    elif attype == 'integer':
        args = ()
    elif attype in ('timestamp with time zone',
                    'time with time zone'):
        kwargs['timezone'] = True
        if charlen:
            kwargs['precision'] = int(charlen)
        args = ()
    elif attype in ('timestamp without time zone',
                    'time without time zone', 'time'):
        kwargs['timezone'] = False
        if charlen:
            kwargs['precision'] = int(charlen)
        args = ()
    elif attype == 'bit varying':
        kwargs['varying'] = True
        if charlen:
            args = (int(charlen),)
        else:
            args = ()
    elif attype in ('interval', 'interval year to month',
                    'interval day to second'):
        if charlen:
            kwargs['precision'] = int(charlen)
        args = ()
    elif charlen:
        args = (int(charlen),)

    # resolve the type name: known type, enum, or walk the domain chain
    while True:
        if attype in self.ischema_names:
            coltype = self.ischema_names[attype]
            break
        elif attype in enums:
            enum = enums[attype]
            coltype = ENUM
            kwargs['name'] = enum['name']
            if not enum['visible']:
                kwargs['schema'] = enum['schema']
            args = tuple(enum['labels'])
            break
        elif attype in domains:
            domain = domains[attype]
            attype = domain['attype']
            # A table can't override whether the domain is nullable.
            nullable = domain['nullable']
            if domain['default'] and not default:
                # It can, however, override the default
                # value, but can't set it to null.
                default = domain['default']
            continue
        else:
            coltype = None
            break

    if coltype:
        coltype = coltype(*args, **kwargs)
        if is_array:
            coltype = self.ischema_names['_array'](coltype)
    else:
        util.warn("Did not recognize type '%s' of column '%s'" %
                  (attype, name))
        coltype = sqltypes.NULLTYPE

    # adjust the default value
    autoincrement = False
    if default is not None:
        match = re.search(r"""(nextval\(')([^']+)('.*$)""", default)
        if match is not None:
            autoincrement = True
            # the default is related to a Sequence
            sch = schema
            if '.' not in match.group(2) and sch is not None:
                # unconditionally quote the schema name.  this could
                # later be enhanced to obey quoting rules /
                # "quote schema"
                default = match.group(1) + \
                    ('"%s"' % sch) + '.' + \
                    match.group(2) + match.group(3)

    column_info = dict(name=name, type=coltype, nullable=nullable,
                       default=default, autoincrement=autoincrement)
    return column_info
@reflection.cache
def get_pk_constraint(self, connection, table_name, schema=None, **kw):
    """Return primary key info: ordered columns and constraint name."""
    table_oid = self.get_table_oid(connection, table_name, schema,
                                   info_cache=kw.get('info_cache'))

    if self.server_version_info < (8, 4):
        # pre-8.4 fallback: membership test against indkey via
        # _pg_index_any; column order is attnum order only.
        PK_SQL = """
            SELECT a.attname
            FROM
                pg_class t
                join pg_index ix on t.oid = ix.indrelid
                join pg_attribute a
                    on t.oid=a.attrelid AND %s
             WHERE
              t.oid = :table_oid and ix.indisprimary = 't'
            ORDER BY a.attnum
        """ % self._pg_index_any("a.attnum", "ix.indkey")

    else:
        # unnest() and generate_subscripts() both introduced in
        # version 8.4
        PK_SQL = """
            SELECT a.attname
            FROM pg_attribute a JOIN (
                SELECT unnest(ix.indkey) attnum,
                       generate_subscripts(ix.indkey, 1) ord
                FROM pg_index ix
                WHERE ix.indrelid = :table_oid AND ix.indisprimary
                ) k ON a.attnum=k.attnum
            WHERE a.attrelid = :table_oid
            ORDER BY k.ord
        """
    t = sql.text(PK_SQL, typemap={'attname': sqltypes.Unicode})
    c = connection.execute(t, table_oid=table_oid)
    cols = [r[0] for r in c.fetchall()]

    PK_CONS_SQL = """
    SELECT conname
       FROM  pg_catalog.pg_constraint r
       WHERE r.conrelid = :table_oid AND r.contype = 'p'
       ORDER BY 1
    """
    t = sql.text(PK_CONS_SQL, typemap={'conname': sqltypes.Unicode})
    c = connection.execute(t, table_oid=table_oid)
    name = c.scalar()

    return {'constrained_columns': cols, 'name': name}
@reflection.cache
def get_foreign_keys(self, connection, table_name, schema=None,
                     postgresql_ignore_search_path=False, **kw):
    """Return foreign key reflection info for *table_name*.

    The constraint definition text from ``pg_get_constraintdef()`` is
    parsed with a regex; each entry carries constrained/referred
    columns, the referred schema/table, and an ``options`` dict
    (onupdate / ondelete / deferrable / initially / match).
    """
    preparer = self.identifier_preparer
    table_oid = self.get_table_oid(connection, table_name, schema,
                                   info_cache=kw.get('info_cache'))

    FK_SQL = """
      SELECT r.conname,
            pg_catalog.pg_get_constraintdef(r.oid, true) as condef,
            n.nspname as conschema
      FROM  pg_catalog.pg_constraint r,
            pg_namespace n,
            pg_class c
      WHERE r.conrelid = :table AND
            r.contype = 'f' AND
            c.oid = confrelid AND
            n.oid = c.relnamespace
      ORDER BY 1
    """
    # http://www.postgresql.org/docs/9.0/static/sql-createtable.html
    FK_REGEX = re.compile(
        r'FOREIGN KEY \((.*?)\) REFERENCES (?:(.*?)\.)?(.*?)\((.*?)\)'
        r'[\s]?(MATCH (FULL|PARTIAL|SIMPLE)+)?'
        r'[\s]?(ON UPDATE '
        r'(CASCADE|RESTRICT|NO ACTION|SET NULL|SET DEFAULT)+)?'
        r'[\s]?(ON DELETE '
        r'(CASCADE|RESTRICT|NO ACTION|SET NULL|SET DEFAULT)+)?'
        r'[\s]?(DEFERRABLE|NOT DEFERRABLE)?'
        r'[\s]?(INITIALLY (DEFERRED|IMMEDIATE)+)?'
    )

    t = sql.text(FK_SQL, typemap={
        'conname': sqltypes.Unicode,
        'condef': sqltypes.Unicode})
    c = connection.execute(t, table=table_oid)
    fkeys = []
    for conname, condef, conschema in c.fetchall():
        m = re.search(FK_REGEX, condef).groups()

        constrained_columns, referred_schema, \
            referred_table, referred_columns, \
            _, match, _, onupdate, _, ondelete, \
            deferrable, _, initially = m

        if deferrable is not None:
            deferrable = True if deferrable == 'DEFERRABLE' else False
        constrained_columns = [preparer._unquote_identifier(x)
                               for x in re.split(
                                   r'\s*,\s*', constrained_columns)]

        if postgresql_ignore_search_path:
            # when ignoring search path, we use the actual schema
            # provided it isn't the "default" schema
            if conschema != self.default_schema_name:
                referred_schema = conschema
            else:
                referred_schema = schema
        elif referred_schema:
            # referred_schema is the schema that we regexp'ed from
            # pg_get_constraintdef().  If the schema is in the search
            # path, pg_get_constraintdef() will give us None.
            referred_schema = \
                preparer._unquote_identifier(referred_schema)
        elif schema is not None and schema == conschema:
            # If the actual schema matches the schema of the table
            # we're reflecting, then we will use that.
            referred_schema = schema

        referred_table = preparer._unquote_identifier(referred_table)
        # BUGFIX: use the same separator pattern as for the constrained
        # columns above; the previous r'\s*,\s' required exactly one
        # whitespace character after each comma.
        referred_columns = [preparer._unquote_identifier(x)
                            for x in
                            re.split(r'\s*,\s*', referred_columns)]
        fkey_d = {
            'name': conname,
            'constrained_columns': constrained_columns,
            'referred_schema': referred_schema,
            'referred_table': referred_table,
            'referred_columns': referred_columns,
            'options': {
                'onupdate': onupdate,
                'ondelete': ondelete,
                'deferrable': deferrable,
                'initially': initially,
                'match': match
            }
        }
        fkeys.append(fkey_d)
    return fkeys
def _pg_index_any(self, col, compare_to):
if self.server_version_info < (8, 1):
# http://www.postgresql.org/message-id/10279.1124395722@sss.pgh.pa.us
# "In CVS tip you could replace this with "attnum = ANY (indkey)".
# Unfortunately, most array support doesn't work on int2vector in
# pre-8.1 releases, so I think you're kinda stuck with the above
# for now.
# regards, tom lane"
return "(%s)" % " OR ".join(
"%s[%d] = %s" % (compare_to, ind, col)
for ind in range(0, 10)
)
else:
return "%s = ANY(%s)" % (col, compare_to)
@reflection.cache
def get_indexes(self, connection, table_name, schema, **kw):
    """Return index reflection info for *table_name*.

    Expression-based indexes are skipped with a warning; predicates of
    partial indexes are ignored (warned once per index).  Indexes that
    mirror a constraint are flagged via ``duplicates_constraint``.
    """
    table_oid = self.get_table_oid(connection, table_name, schema,
                                   info_cache=kw.get('info_cache'))

    # cast indkey as varchar since it's an int2vector,
    # returned as a list by some drivers such as pypostgresql
    if self.server_version_info < (8, 5):
        IDX_SQL = """
          SELECT
              i.relname as relname,
              ix.indisunique, ix.indexprs, ix.indpred,
              a.attname, a.attnum, NULL, ix.indkey%s,
              %s, am.amname
          FROM
              pg_class t
                    join pg_index ix on t.oid = ix.indrelid
                    join pg_class i on i.oid = ix.indexrelid
                    left outer join
                        pg_attribute a
                        on t.oid = a.attrelid and %s
                    left outer join
                        pg_am am
                        on i.relam = am.oid
          WHERE
              t.relkind IN ('r', 'v', 'f', 'm')
              and t.oid = :table_oid
              and ix.indisprimary = 'f'
          ORDER BY
              t.relname,
              i.relname
        """ % (
            # version 8.3 here was based on observing the
            # cast does not work in PG 8.2.4, does work in 8.3.0.
            # nothing in PG changelogs regarding this.
            "::varchar" if self.server_version_info >= (8, 3) else "",
            "i.reloptions" if self.server_version_info >= (8, 2)
            else "NULL",
            self._pg_index_any("a.attnum", "ix.indkey")
        )
    else:
        IDX_SQL = """
          SELECT
              i.relname as relname,
              ix.indisunique, ix.indexprs, ix.indpred,
              a.attname, a.attnum, c.conrelid, ix.indkey::varchar,
              i.reloptions, am.amname
          FROM
              pg_class t
                    join pg_index ix on t.oid = ix.indrelid
                    join pg_class i on i.oid = ix.indexrelid
                    left outer join
                        pg_attribute a
                        on t.oid = a.attrelid and a.attnum = ANY(ix.indkey)
                    left outer join
                        pg_constraint c
                        on (ix.indrelid = c.conrelid and
                            ix.indexrelid = c.conindid and
                            c.contype in ('p', 'u', 'x'))
                    left outer join
                        pg_am am
                        on i.relam = am.oid
          WHERE
              t.relkind IN ('r', 'v', 'f', 'm')
              and t.oid = :table_oid
              and ix.indisprimary = 'f'
          ORDER BY
              t.relname,
              i.relname
        """

    t = sql.text(IDX_SQL, typemap={'attname': sqltypes.Unicode})
    c = connection.execute(t, table_oid=table_oid)

    indexes = defaultdict(lambda: defaultdict(dict))

    sv_idx_name = None
    for row in c.fetchall():
        (idx_name, unique, expr, prd, col,
         col_num, conrelid, idx_key, options, amname) = row

        if expr:
            if idx_name != sv_idx_name:
                util.warn(
                    "Skipped unsupported reflection of "
                    "expression-based index %s"
                    % idx_name)
            sv_idx_name = idx_name
            continue

        if prd and not idx_name == sv_idx_name:
            util.warn(
                "Predicate of partial index %s ignored during reflection"
                % idx_name)
            sv_idx_name = idx_name

        has_idx = idx_name in indexes
        index = indexes[idx_name]
        if col is not None:
            index['cols'][col_num] = col
        if not has_idx:
            # first row for this index: record key order, uniqueness,
            # constraint duplication, storage options and access method
            index['key'] = [int(k.strip()) for k in idx_key.split()]
            index['unique'] = unique
            if conrelid is not None:
                index['duplicates_constraint'] = idx_name
            if options:
                index['options'] = dict(
                    [option.split("=") for option in options])

            # it *might* be nice to include that this is 'btree' in the
            # reflection info.  But we don't want an Index object
            # to have a ``postgresql_using`` in it that is just the
            # default, so for the moment leaving this out.
            if amname and amname != 'btree':
                index['amname'] = amname

    result = []
    for name, idx in indexes.items():
        entry = {
            'name': name,
            'unique': idx['unique'],
            'column_names': [idx['cols'][i] for i in idx['key']]
        }
        if 'duplicates_constraint' in idx:
            entry['duplicates_constraint'] = idx['duplicates_constraint']
        if 'options' in idx:
            entry.setdefault(
                'dialect_options', {})["postgresql_with"] = idx['options']
        if 'amname' in idx:
            entry.setdefault(
                'dialect_options', {})["postgresql_using"] = idx['amname']
        result.append(entry)
    return result
@reflection.cache
def get_unique_constraints(self, connection, table_name,
                           schema=None, **kw):
    """Return unique constraint info: name plus ordered column names."""
    table_oid = self.get_table_oid(connection, table_name, schema,
                                   info_cache=kw.get('info_cache'))

    UNIQUE_SQL = """
        SELECT
            cons.conname as name,
            cons.conkey as key,
            a.attnum as col_num,
            a.attname as col_name
        FROM
            pg_catalog.pg_constraint cons
            join pg_attribute a
              on cons.conrelid = a.attrelid AND
                 a.attnum = ANY(cons.conkey)
        WHERE
            cons.conrelid = :table_oid AND
            cons.contype = 'u'
    """

    t = sql.text(UNIQUE_SQL, typemap={'col_name': sqltypes.Unicode})
    c = connection.execute(t, table_oid=table_oid)

    # accumulate one row per (constraint, column); conkey preserves the
    # constraint's declared column order.
    uniques = defaultdict(lambda: defaultdict(dict))
    for row in c.fetchall():
        uc = uniques[row.name]
        uc["key"] = row.key
        uc["cols"][row.col_num] = row.col_name

    return [
        {'name': name,
         'column_names': [uc["cols"][i] for i in uc["key"]]}
        for name, uc in uniques.items()
    ]
def _load_enums(self, connection, schema=None):
    """Return a list of enum records: name, schema, visibility, labels.

    ``schema='*'`` loads enums from all schemas; otherwise only the
    given (or default) schema is queried.  Returns ``{}`` when the
    server lacks native enum support.
    """
    schema = schema or self.default_schema_name
    if not self.supports_native_enum:
        return {}

    # Load data types for enums:
    SQL_ENUMS = """
        SELECT t.typname as "name",
           -- no enum defaults in 8.4 at least
           -- t.typdefault as "default",
           pg_catalog.pg_type_is_visible(t.oid) as "visible",
           n.nspname as "schema",
           e.enumlabel as "label"
        FROM pg_catalog.pg_type t
             LEFT JOIN pg_catalog.pg_namespace n ON n.oid = t.typnamespace
             LEFT JOIN pg_catalog.pg_enum e ON t.oid = e.enumtypid
        WHERE t.typtype = 'e'
    """

    if schema != '*':
        SQL_ENUMS += "AND n.nspname = :schema "

    # e.oid gives us label order within an enum
    SQL_ENUMS += 'ORDER BY "schema", "name", e.oid'

    s = sql.text(SQL_ENUMS, typemap={
        'attname': sqltypes.Unicode,
        'label': sqltypes.Unicode})

    if schema != '*':
        s = s.bindparams(schema=schema)

    c = connection.execute(s)

    # collapse one row per label into one record per enum, preserving
    # label order
    enums = []
    enum_by_name = {}
    for enum in c.fetchall():
        key = (enum['schema'], enum['name'])
        if key in enum_by_name:
            enum_by_name[key]['labels'].append(enum['label'])
        else:
            enum_by_name[key] = enum_rec = {
                'name': enum['name'],
                'schema': enum['schema'],
                'visible': enum['visible'],
                'labels': [enum['label']],
            }
            enums.append(enum_rec)

    return enums
def _load_domains(self, connection):
    """Return a mapping of domain name -> {attype, nullable, default}.

    Domains not visible on the search path are keyed as
    ``"schema.name"``; visible ones by bare name, matching how
    ``format_type()`` renders them in column reflection.
    """
    # Load data types for domains:
    SQL_DOMAINS = """
        SELECT t.typname as "name",
           pg_catalog.format_type(t.typbasetype, t.typtypmod) as "attype",
           not t.typnotnull as "nullable",
           t.typdefault as "default",
           pg_catalog.pg_type_is_visible(t.oid) as "visible",
           n.nspname as "schema"
        FROM pg_catalog.pg_type t
           LEFT JOIN pg_catalog.pg_namespace n ON n.oid = t.typnamespace
        WHERE t.typtype = 'd'
    """

    s = sql.text(SQL_DOMAINS, typemap={'attname': sqltypes.Unicode})
    c = connection.execute(s)

    domains = {}
    for domain in c.fetchall():
        # strip (30) from character varying(30)
        # (raw string: '\(' in a plain literal is an invalid escape
        # sequence on modern Python)
        attype = re.search(r'([^\(]+)', domain['attype']).group(1)
        if domain['visible']:
            # 'visible' just means whether or not the domain is in a
            # schema that's on the search path -- or not overridden by
            # a schema with higher precedence. If it's not visible,
            # it will be prefixed with the schema-name when it's used.
            name = domain['name']
        else:
            name = "%s.%s" % (domain['schema'], domain['name'])

        domains[name] = {
            'attype': attype,
            'nullable': domain['nullable'],
            'default': domain['default']
        }

    return domains
| 35.738824 | 87 | 0.579048 |
ace3671c8e793c35029cccf0f40a243718ec1150 | 8,645 | py | Python | luna/gateware/platform/netv2.py | macdaliot/luna | 97e725d0af001a6d4c4811eefb43b6c31a9b45e1 | [
"BSD-3-Clause"
] | 2 | 2021-12-04T22:33:23.000Z | 2022-02-03T09:52:18.000Z | luna/gateware/platform/netv2.py | macdaliot/luna | 97e725d0af001a6d4c4811eefb43b6c31a9b45e1 | [
"BSD-3-Clause"
] | 4 | 2020-11-11T17:32:33.000Z | 2020-11-30T13:08:05.000Z | luna/gateware/platform/netv2.py | macdaliot/luna | 97e725d0af001a6d4c4811eefb43b6c31a9b45e1 | [
"BSD-3-Clause"
] | 2 | 2021-06-26T06:06:52.000Z | 2022-01-19T22:36:19.000Z | #
# This file is part of LUNA.
#
# Copyright (c) 2020 Great Scott Gadgets <info@greatscottgadgets.com>
# SPDX-License-Identifier: BSD-3-Clause
""" NeTV2 Platform Definition
This is a non-core platform. To use it, you'll need to set your LUNA_PLATFORM variable:
> export LUNA_PLATFORM="luna.gateware.platform.netv2:NeTV2Platform"
The NeTV2 has a fixed pull-up resistor on D-; which prevents it from being used as a
FS device. To use the platform for full-speed USB, you'll need to move the resistor
populated as R23 over to R24.
"""
import os
import subprocess
from nmigen import *
from nmigen.build import *
from nmigen.vendor.xilinx_7series import Xilinx7SeriesPlatform
from nmigen_boards.resources import *
from ..interface.serdes_phy import SerDesPHY, LunaArtix7SerDes
from .core import LUNAPlatform
class NeTV2ClockDomainGenerator(Elaboratable):
""" Clock/Reset Controller for the NeTV2. """
def __init__(self, *, clock_frequencies=None, clock_signal_name=None):
pass
def elaborate(self, platform):
m = Module()
# Create our domains; but don't do anything else for them, for now.
m.domains.usb = ClockDomain()
m.domains.usb_io = ClockDomain()
m.domains.sync = ClockDomain()
m.domains.ss = ClockDomain()
m.domains.fast = ClockDomain()
# Grab our main clock.
clk50 = platform.request(platform.default_clk)
# USB2 PLL connections.
clk12 = Signal()
clk48 = Signal()
usb2_locked = Signal()
usb2_feedback = Signal()
m.submodules.usb2_pll = Instance("PLLE2_ADV",
p_BANDWIDTH = "OPTIMIZED",
p_COMPENSATION = "ZHOLD",
p_STARTUP_WAIT = "FALSE",
p_DIVCLK_DIVIDE = 1,
p_CLKFBOUT_MULT = 24,
p_CLKFBOUT_PHASE = 0.000,
p_CLKOUT0_DIVIDE = 100,
p_CLKOUT0_PHASE = 0.000,
p_CLKOUT0_DUTY_CYCLE = 0.500,
p_CLKOUT1_DIVIDE = 25,
p_CLKOUT1_PHASE = 0.000,
p_CLKOUT1_DUTY_CYCLE = 0.500,
p_CLKIN1_PERIOD = 20.000,
i_CLKFBIN = usb2_feedback,
o_CLKFBOUT = usb2_feedback,
i_CLKIN1 = clk50,
o_CLKOUT0 = clk12,
o_CLKOUT1 = clk48,
o_LOCKED = usb2_locked,
)
# USB3 PLL connections.
clk16 = Signal()
clk125 = Signal()
clk250 = Signal()
usb3_locked = Signal()
usb3_feedback = Signal()
m.submodules.usb3_pll = Instance("PLLE2_ADV",
p_BANDWIDTH = "OPTIMIZED",
p_COMPENSATION = "ZHOLD",
p_STARTUP_WAIT = "FALSE",
p_DIVCLK_DIVIDE = 1,
p_CLKFBOUT_MULT = 20, # VCO = 1000 MHz
p_CLKFBOUT_PHASE = 0.000,
p_CLKOUT0_DIVIDE = 4, # CLKOUT0 = 250 MHz (1000/4)
p_CLKOUT0_PHASE = 0.000,
p_CLKOUT0_DUTY_CYCLE = 0.500,
p_CLKOUT1_DIVIDE = 8, # CLKOUT1 = 125 MHz (1000/8)
p_CLKOUT1_PHASE = 0.000,
p_CLKOUT1_DUTY_CYCLE = 0.500,
p_CLKOUT2_DIVIDE = 64, # CLKOUT2 = 16 MHz (1000/64)
p_CLKOUT2_PHASE = 0.000,
p_CLKOUT2_DUTY_CYCLE = 0.500,
p_CLKIN1_PERIOD = 20.000,
i_CLKFBIN = usb3_feedback,
o_CLKFBOUT = usb3_feedback,
i_CLKIN1 = clk50,
o_CLKOUT0 = clk250,
o_CLKOUT1 = clk125,
o_CLKOUT2 = clk16,
o_LOCKED = usb3_locked,
)
# Connect up our clock domains.
m.d.comb += [
ClockSignal("usb") .eq(clk12),
ClockSignal("usb_io") .eq(clk48),
ClockSignal("sync") .eq(clk125),
ClockSignal("ss") .eq(clk125),
ClockSignal("fast") .eq(clk250),
ResetSignal("usb") .eq(~usb2_locked),
ResetSignal("usb_io") .eq(~usb2_locked),
ResetSignal("sync") .eq(~usb3_locked),
ResetSignal("ss") .eq(~usb3_locked),
ResetSignal("fast") .eq(~usb3_locked),
]
return m
class NeTV2SuperSpeedPHY(SerDesPHY):
""" Superspeed PHY configuration for the NeTV2. """
SS_FREQUENCY = 125e6
FAST_FREQUENCY = 250e6
def __init__(self, platform):
# Grab the I/O that implements our SerDes interface, ensuring our directions are '-',
# so we don't create any I/O buffering hardware.
serdes_io_directions = {'tx': "-", 'rx': "-"}
serdes_io = platform.request("serdes", dir=serdes_io_directions)
# Create our SerDes interface...
self.serdes = LunaArtix7SerDes(
ss_clock_frequency = self.SS_FREQUENCY,
refclk_pads = ClockSignal("ss"),
refclk_frequency = self.SS_FREQUENCY,
tx_pads = serdes_io.tx,
rx_pads = serdes_io.rx,
)
# ... and use it to create our core PHY interface.
super().__init__(
serdes = self.serdes,
ss_clk_frequency = self.SS_FREQUENCY,
fast_clk_frequency = self.FAST_FREQUENCY
)
def elaborate(self, platform):
m = super().elaborate(platform)
# Patch in our SerDes as a submodule.
m.submodules.serdes = self.serdes
return m
class NeTV2Platform(Xilinx7SeriesPlatform, LUNAPlatform):
    """ Board description for the NeTV2. """

    name        = "NeTV2"
    device      = "xc7a35t"
    package     = "fgg484"
    speed       = "2"
    default_clk = "clk50"
    # Provide the type that'll be used to create our clock domains.
    clock_domain_generator = NeTV2ClockDomainGenerator
    # Use our direct USB connection for USB2, and our SerDes for USB3.
    default_usb_connection = "usb"
    default_usb3_phy       = NeTV2SuperSpeedPHY
    #
    # I/O resources.
    #
    resources = [
        Resource("clk50", 0, Pins("J19"), Attrs(IOSTANDARD="LVCMOS33"), Clock(50e6)),
        # R/G leds
        *LEDResources(pins="M21 N20 L21 AA21 R19 M16", attrs=Attrs(IOSTANDARD="LVCMOS33"), invert=True),
        # Comms
        #DirectUSBResource(0, d_p="C14", d_n="C15", attrs=Attrs(IOSTANDARD="LVCMOS33")),
        # XXX
        DirectUSBResource(0, d_p="A15", d_n="A14", pullup="C17", attrs=Attrs(IOSTANDARD="LVCMOS33")),
        UARTResource(0, rx="E13", tx="E14", attrs=Attrs(IOSTANDARD="LVCMOS33")),
        # PCIe gold fingers (for USB3)
        Resource("serdes", 0,
            Subsignal("tx", DiffPairs("D5", "C5")),
            Subsignal("rx", DiffPairs("D11", "C11")),
        ),
        # User I/O (labeled "hax")
        # NOTE(review): A15/A14/C17 also appear in the USB resource above --
        # presumably the "hax" header doubles as the USB pins; confirm before use.
        Resource("user_io", 0, Pins("B15"), Attrs(IOSTANDARD="LVCMOS33")),
        Resource("user_io", 1, Pins("B16"), Attrs(IOSTANDARD="LVCMOS33")),
        Resource("user_io", 2, Pins("B13"), Attrs(IOSTANDARD="LVCMOS33")),
        Resource("user_io", 3, Pins("A15"), Attrs(IOSTANDARD="LVCMOS33")),
        Resource("user_io", 4, Pins("A16"), Attrs(IOSTANDARD="LVCMOS33")),
        Resource("user_io", 5, Pins("A13"), Attrs(IOSTANDARD="LVCMOS33")),
        Resource("user_io", 6, Pins("A14"), Attrs(IOSTANDARD="LVCMOS33")),
        Resource("user_io", 7, Pins("B17"), Attrs(IOSTANDARD="LVCMOS33")),
        Resource("user_io", 8, Pins("A18"), Attrs(IOSTANDARD="LVCMOS33")),
        Resource("user_io", 9, Pins("C17"), Attrs(IOSTANDARD="LVCMOS33")),
    ]
    connectors = []

    def toolchain_prepare(self, fragment, name, **kwargs):
        # Relax the DRC check that would otherwise reject fabric-driven SerDes pins.
        extra_constraints = [
            # Allow us to drive our SerDes from FPGA fabric.
            "set_property SEVERITY {Warning} [get_drc_checks REQP-49]"
        ]
        overrides = { "add_constraints": "\n".join(extra_constraints) }
        return super().toolchain_prepare(fragment, name, **overrides, **kwargs)

    #
    # FIXME: figure out a better tool to use for running with the NeTV attached
    # to a raspberry pi
    #
    def toolchain_program(self, products, name):
        # Program the bitstream via xc3sprog; the binary can be overridden
        # with the XC3SPROG environment variable.
        xc3sprog = os.environ.get("XC3SPROG", "xc3sprog")
        with products.extract("{}.bit".format(name)) as bitstream_file:
            subprocess.check_call([xc3sprog, "-c", "ft4232h", bitstream_file])
| 35 | 104 | 0.562522 |
ace3673c74a5d5e0c2b1820939da64b86cc24263 | 212 | py | Python | backend/database/tests/test_unit.py | JMSoler7/database | b90326a8f3929d1bad9a810fcbe91d9bb2c3d5f4 | [
"MIT"
] | null | null | null | backend/database/tests/test_unit.py | JMSoler7/database | b90326a8f3929d1bad9a810fcbe91d9bb2c3d5f4 | [
"MIT"
] | null | null | null | backend/database/tests/test_unit.py | JMSoler7/database | b90326a8f3929d1bad9a810fcbe91d9bb2c3d5f4 | [
"MIT"
] | null | null | null |
from unittest import TestCase
class UnitTestCase(TestCase):
    """Smoke tests that always pass; they only exercise the test runner itself."""

    def setUp(self):
        # Nothing to prepare for these trivial checks.
        return None

    def test_easy(self):
        # Tautology: succeeds unconditionally.
        self.assertTrue(1 == 1)

    def test_easy_2(self):
        # Another unconditional success.
        self.assertTrue(not False)
| 14.133333 | 29 | 0.646226 |
ace3688a268154de23a1001d483f01bed2e7689f | 27 | py | Python | examples/py33-0011-yield-from.py | jwilk-forks/python-grammar-changes | 5cbc14e520fadfef8539760a4ffdbe14b9d02f39 | [
"MIT"
] | 8 | 2020-11-21T22:39:41.000Z | 2022-03-13T18:45:53.000Z | examples/py33-0011-yield-from.py | jwilk-forks/python-grammar-changes | 5cbc14e520fadfef8539760a4ffdbe14b9d02f39 | [
"MIT"
] | 1 | 2021-12-10T10:45:38.000Z | 2021-12-10T10:45:38.000Z | examples/py33-0011-yield-from.py | jwilk-forks/python-grammar-changes | 5cbc14e520fadfef8539760a4ffdbe14b9d02f39 | [
"MIT"
] | 1 | 2022-02-07T11:16:38.000Z | 2022-02-07T11:16:38.000Z | def f(x):
yield from x
| 9 | 16 | 0.555556 |
ace36a142a80b1a07467b1652bd2faa4bf85d47b | 10,981 | py | Python | simulacra/cli/main.py | mdd423/WobbleSim | 21edee9b8adf8111fa50975c363fd8512fb39e89 | [
"BSD-3-Clause"
] | null | null | null | simulacra/cli/main.py | mdd423/WobbleSim | 21edee9b8adf8111fa50975c363fd8512fb39e89 | [
"BSD-3-Clause"
] | null | null | null | simulacra/cli/main.py | mdd423/WobbleSim | 21edee9b8adf8111fa50975c363fd8512fb39e89 | [
"BSD-3-Clause"
] | null | null | null | # Standard library
import argparse
import os
import pathlib
import shutil
import sys
import simulacra.star
import simulacra.tellurics
import simulacra.detector
import simulacra.gascell
# Third-party
import numpy as np
# from threadpoolctl import threadpool_limits
# Package
# from .helpers import get_parser
# from ..log import logger
import random
import astropy.coordinates as coord
import astropy.units as u
import astropy.time as at
# Fix both RNGs so repeated runs draw the same observation epochs and
# sky coordinates (see run_simulation / get_star below).
random.seed(102102102)
np.random.seed(102102102)
def run_simulation(detector,transmission_models,exp_times,epoches,window):
    """Simulate *epoches* observations with *detector* over a *window*-day span.

    Candidate timestamps are laid out on a per-night grid starting 2020-01-01,
    filtered to realistic (target-visible) times, and *epoches* of them are
    sampled without replacement using the module-seeded RNG.

    :param detector: simulacra Detector; provides ``.loc`` and ``.stellar_model.target``
    :param transmission_models: models attached to the detector before simulating
    :param exp_times: per-epoch exposure durations
    :param epoches: number of epochs to draw
    :param window: observing window length in days
    :return: the object produced by ``detector.simulate`` (written out by callers)
    """
    # parser args into these constants and filename
    tstart = at.Time('2020-01-01T08:10:00.123456789',format='isot',scale='utc')
    tend = tstart + window * u.day
    # Coarse grid of candidate timestamps: 5 samples per night across the window.
    night_grid = simulacra.star.get_night_grid(detector.loc,tstart,tend,steps_per_night=5)
    possible_times, airmass = simulacra.star.get_realistic_times(detector.stellar_model.target,detector.loc,night_grid)
    # Non-repeating random choice of observation epochs.
    obs_ints = random.sample(range(len(airmass)),epoches)
    obs_times, obs_airmass = possible_times[obs_ints], airmass[obs_ints]
    for model in transmission_models:
        detector.add_model(model)
    data = detector.simulate(obs_times,exp_times)
    return data
def get_parser():
    """Build the base argument parser shared by every simulator subcommand.

    Defines the observation setup (epochs, window, exposure time), the stellar
    parameters (PHOENIX grid point, wobble amplitude/period, coordinates) and
    the output directory.
    """
    p = argparse.ArgumentParser()
    p.add_argument(
        '-e', '--epoches', type=int, required=True,
        help='number of epoches of data to generate')
    p.add_argument(
        '-d', '--distance', type=float, required=True,
        help='distance to star in pcs')
    p.add_argument(
        '-p', '--period', type=float, default=40.3,
        help='period of the star wobble in days')
    p.add_argument(
        '-a', '--amp', type=float, default=2,
        help='amplitude of velocity wobble in km/s')
    # PHOENIX stellar-grid selectors.
    p.add_argument(
        '--alpha', type=float, default=0.4,
        help='The alpha ratio of the star being observed must be in the PHOENIX repository')
    p.add_argument(
        '-z', type=float, default=-1.0,
        help='The z ratio of the star being observed must be in the PHOENIX repository')
    p.add_argument(
        '-T', '--temp', type=float, default=4600,
        help='The temperature in Kelvin of the star being observed must be in the PHOENIX repository')
    p.add_argument(
        '--logg', type=float, default=1.0,
        help='The logg of the star being observed must be in the PHOENIX repository')
    p.add_argument(
        '--amplitude', type=float, default=2.0,
        help='The amplitude of oscillation of the star in km/s being observed must be in the PHOENIX repository')
    # Free parameters of the wavelength-solution transformation / noise model.
    p.add_argument(
        '--epsilon', type=float, default=1.0,
        help='random property of the wave transformation')
    p.add_argument(
        '-w', type=float, default=0.0,
        help='random property of the wave transformation')
    p.add_argument(
        '--gamma', type=float, default=1.0,
        help='user set parameter to control SNR')
    # Sky position; randomized later when left unset (see get_star).
    p.add_argument(
        '--ra', type=float, default=None,
        help='The right ascension of the star being observed if left empty it will be random set.')
    p.add_argument(
        '--dec', type=float, default=None,
        help='The right ascension of the star being observed if left empty it will be random set.')
    p.add_argument(
        '--window', type=float, default=180,
        help='Time in days to observe the star over')
    p.add_argument(
        '--exp_time', type=float, default=8,
        help='Time in minutes of each exposure')
    p.add_argument(
        '-o', '--output', type=str, default='../../out',
        help='Output directory, default assumes you are in the home directory of this package.')
    return p
def add_tellurics_args(parser):
    """Attach atmosphere options (pressure/temperature/humidity) to *parser* and return it."""
    atmosphere_options = (
        ('--pressure', 1.0e6,
         'The pressure at the observatory at the time of the observations in pascals'),
        ('--temperature', 300,
         'The temperature at the observatory at the time of the observations in Kelvin'),
        ('--humidity', 50.0,
         'The humidity at the observatory at the time of the observations in percentage'),
    )
    for flag, default, description in atmosphere_options:
        parser.add_argument(flag, type=float, default=default, help=description)
    return parser
def get_star(loc,args):
    """Build a PhoenixModel star from parsed CLI *args*.

    Missing coordinates are randomized: RA uniform over [0, 360) deg, Dec
    uniform within +-30 deg of the observatory latitude, and written back
    onto *args*.

    NOTE(review): randomly drawn ra/dec carry astropy units while
    user-supplied ones are plain floats from argparse -- confirm SkyCoord
    handles both forms as intended.
    """
    if args.ra is None:
        args.ra = np.random.uniform(0,360) * u.degree
    if args.dec is None:
        args.dec = np.random.uniform(loc.lat.to(u.degree).value-30,loc.lat.to(u.degree).value+30) * u.degree
    target = coord.SkyCoord(args.ra,args.dec,frame='icrs')
    stellar_model = simulacra.star.PhoenixModel(args.distance * u.pc,args.alpha,args.z,\
                                                args.temp,args.logg,target,\
                                                args.amplitude * u.km/u.s,args.period * u.day)
    return stellar_model
def get_tellurics(loc,wave_min,wave_max,args):
    """Create a TelFit tellurics model for [wave_min, wave_max] at site *loc*.

    Atmospheric conditions (pressure, temperature, humidity) come from the
    options added by :func:`add_tellurics_args`.
    """
    tellurics_model = simulacra.tellurics.TelFitModel(wave_min,wave_max,loc)
    tellurics_model.pressure = args.pressure * u.Pa
    tellurics_model.temperature = args.temperature * u.Kelvin
    tellurics_model.humidity = args.humidity
    return tellurics_model
class CLI:
    """To add a new subcommand, just add a new classmethod and a docstring!"""

    # Replaced after the class body by the auto-generated usage string
    # (see the module-level code below this class).
    _usage = None

    def __init__(self):
        # Parse only argv[1:2] (the subcommand name); each subcommand method
        # re-parses the remaining argv[2:] with its own parser.
        parser = argparse.ArgumentParser(
            description='A pipeline utility for running The Joker',
            usage=self._usage.strip())
        parser.add_argument('command', help='Subcommand to run')
        args = parser.parse_args(sys.argv[1:2])
        if not hasattr(self, args.command):
            print(f"Unsupported command '{args.command}'")
            parser.print_help()
            sys.exit(1)
        # Dispatch: the method with the same name as the subcommand runs now.
        getattr(self, args.command)()

    def apogee(self):
        """Generate APOGEE data with a simple command."""
        parser = get_parser()
        parser = add_tellurics_args(parser)
        args = parser.parse_args(sys.argv[2:])
        obs = 'APO'
        loc = coord.EarthLocation.of_site(obs)
        # print(loc.lon,loc.latlat)
        stellar_model = get_star(loc,args)
        # Simulated wavelength coverage for this instrument.
        wave_min = 1.51*u.um
        wave_max = 1.70*u.um
        # Detector physical parameters
        ################################
        det_dict = {'resolution':22_500.0,
                    'area': np.pi * (2.5*u.m/2)**2,
                    'dark_current': 100/u.s,
                    'read_noise': 100,
                    'ccd_eff':0.99,
                    'through_put':0.05,
                    'epsilon': args.epsilon,
                    'w':args.w,
                    'gamma':args.gamma}
        tellurics_model = get_tellurics(loc,wave_min,wave_max,args)
        exp_times = np.ones(args.epoches)*args.exp_time * u.minute
        # Log-uniform wavelength grid; spacing derived from 4x the resolution.
        delta_x = simulacra.detector.spacing_from_res(4*det_dict['resolution'])
        x_grid = np.arange(np.log(wave_min.to(u.Angstrom).value),np.log(wave_max.to(u.Angstrom).value),delta_x)
        wave_grid = np.exp(x_grid) * u.Angstrom
        detector = simulacra.detector.Detector(stellar_model,loc=loc,wave_grid=wave_grid,**det_dict)
        data = run_simulation(detector,[tellurics_model],exp_times,args.epoches,args.window)
        filename = os.path.join(args.output,'apogee_e{}_a{}_p{}'.format(args.epoches,stellar_model.amplitude.to(u.m/u.s).value,stellar_model.period.to(u.day).value))
        print(filename)
        data.to_h5(filename + '.h5')

    def keckhires(self):
        """Generate Keck HIRES data with a simple command."""
        # parser = argparse.ArgumentParser(sys.argv)
        parser = get_parser()
        parser = add_tellurics_args(parser)
        args = parser.parse_args(sys.argv[2:])
        obs = 'Keck Observatory'
        loc = coord.EarthLocation.of_site(obs)
        stellar_model = get_star(loc,args)
        # Simulated wavelength coverage for this instrument.
        wave_min = 500*u.nm
        wave_max = 630*u.nm
        # Detector physical parameters
        ################################
        det_dict = {'resolution':100_000.0,
                    'area': np.pi * (10*u.m/2)**2,
                    'dark_current': 100/u.s,
                    'read_noise': 100,
                    'ccd_eff':0.99,
                    'through_put':0.05,
                    'epsilon': args.epsilon,
                    'w':args.w,
                    'gamma':args.gamma}
        tellurics_model = get_tellurics(loc,wave_min,wave_max,args)
        # Keck additionally gets an iodine-style gas cell reference spectrum.
        gascell_model = simulacra.gascell.GasCellModel(filename='data/gascell/keck_fts_inUse.idl')
        exp_times = np.ones(args.epoches)*args.exp_time * u.minute
        # Log-uniform wavelength grid; spacing derived from 4x the resolution.
        delta_x = simulacra.detector.spacing_from_res(4*det_dict['resolution'])
        x_grid = np.arange(np.log(wave_min.to(u.Angstrom).value),np.log(wave_max.to(u.Angstrom).value),delta_x)
        wave_grid = np.exp(x_grid) * u.Angstrom
        detector = simulacra.detector.Detector(stellar_model,loc=loc,wave_grid=wave_grid,**det_dict)
        data = run_simulation(detector,[tellurics_model,gascell_model],exp_times,args.epoches,args.window)
        filename = os.path.join(args.output,'keck_e{}_a{}_p{}'.format(args.epoches,stellar_model.amplitude.to(u.m/u.s).value,stellar_model.period.to(u.day).value))
        print(filename)
        data.to_h5(filename + '.h5')

    def expres(self):
        """Generate EXPRES data with a simple command."""
        parser = get_parser()
        parser = add_tellurics_args(parser)
        args = parser.parse_args(sys.argv[2:])
        obs = 'Lowell Observatory'
        loc = coord.EarthLocation.of_site(obs)
        stellar_model = get_star(loc,args)
        # Simulated wavelength coverage for this instrument.
        wave_min = 700*u.nm
        wave_max = 950*u.nm
        # Detector physical parameters
        ################################
        det_dict = {'resolution':130_000.0,
                    'area': np.pi * (4.3*u.m/2)**2,
                    'dark_current': 100/u.s,
                    'read_noise': 100,
                    'ccd_eff':0.99,
                    'through_put':0.05,
                    'epsilon': args.epsilon,
                    'w':args.w,
                    'gamma':args.gamma}
        tellurics_model = get_tellurics(loc,wave_min,wave_max,args)
        exp_times = np.ones(args.epoches)*args.exp_time * u.minute
        # Log-uniform wavelength grid; spacing derived from 4x the resolution.
        delta_x = simulacra.detector.spacing_from_res(4*det_dict['resolution'])
        x_grid = np.arange(np.log(wave_min.to(u.Angstrom).value),np.log(wave_max.to(u.Angstrom).value),delta_x)
        wave_grid = np.exp(x_grid) * u.Angstrom
        detector = simulacra.detector.Detector(stellar_model,loc=loc,wave_grid=wave_grid,**det_dict)
        data = run_simulation(detector,[tellurics_model],exp_times,args.epoches,args.window)
        filename = os.path.join(args.output,'expres_e{}_a{}_p{}'.format(args.epoches,stellar_model.amplitude.to(u.m/u.s).value,stellar_model.period.to(u.day).value))
        print(filename)
        data.to_h5(filename + '.h5')
# Auto-generate the usage block:
# every public CLI method becomes one "<name>  <docstring>" line, with names
# padded so the help text columns align.
cmds = []
maxlen = max([len(name) for name in CLI.__dict__.keys()])
for name, attr in CLI.__dict__.items():
    if not name.startswith('_'):
        cmds.append(f'  {name.ljust(maxlen)}  {attr.__doc__}\n')
CLI._usage = f"""
simulacra <command> [<args>]
Available commands:
{''.join(cmds)}
See more usage information about a given command by running:
simulacra <command> --help
"""
# keck hires, gaia, apogee, expres
def main():
    """Console entry point: CLI.__init__ dispatches on the subcommand in sys.argv."""
    CLI()
| 41.127341 | 166 | 0.656315 |
ace36ab19d84fb31f6ac9113f63a561c9dee7d4b | 9,720 | py | Python | tests/test_fields.py | kpchand/firepack | 082b2477024c928e1999691b17eb3c07f015d79f | [
"MIT"
] | 1 | 2021-06-14T10:18:57.000Z | 2021-06-14T10:18:57.000Z | tests/test_fields.py | kpchand/firepack | 082b2477024c928e1999691b17eb3c07f015d79f | [
"MIT"
] | null | null | null | tests/test_fields.py | kpchand/firepack | 082b2477024c928e1999691b17eb3c07f015d79f | [
"MIT"
] | null | null | null | import pytest
from firepack.validators import *
from firepack.fields import *
from firepack.errors import ValidationError
from firepack.data import FireData
FIELD_NAME = 'a'
def init_field_holder(field_instance):
    """Build a throwaway instance whose class attribute ``a`` is *field_instance*.

    Assigning the field at class level lets descriptor-based fields run their
    __get__/__set__ hooks when ``holder.a`` is accessed in the tests.
    """
    holder_cls = type('FieldHolder', (), {'a': field_instance})
    return holder_cls()
class Data(FireData):
    """Minimal FireData schema used by the tests: two required integer fields."""
    a = IntField(required=True)
    b = IntField(required=True)
@pytest.mark.parametrize('field, value', [
    (BoolField(), 1),
    (CharField(), 1),
    (StrField(), 1),
    (NumericField(), 'a'),
    (IntField(), 1.5),
    (FloatField(), 1),
    (DateField(), datetime.now()),
    (DateTimeField(), datetime.now().date()),
    (DictField(), []),
    (EmailField(), 'aaa.com'),
    (FireDataField(Data), 'a'),
    (ListField(StrField()), {})
])
def test_default_validation_raises_error(field, value):
    """Every field type must reject a value of the wrong Python type."""
    # Given: required is True
    fh = init_field_holder(field)
    # When: init with invalid value type
    setattr(fh, FIELD_NAME, value)
    # Then: raise error
    with pytest.raises(ValidationError):
        field._run_validation(value)


@pytest.mark.parametrize('field', [
    BoolField(required=True),
    CharField(required=True),
    StrField(required=True),
    NumericField(required=True),
    IntField(required=True),
    FloatField(required=True),
    DateField(required=True),
    DateTimeField(required=True),
    DictField(required=True),
    EmailField(required=True),
    FireDataField(Data, required=True),
    ListField(StrField(required=True), required=True)
])
def test_required_field_empty_raises_error(field):
    """A required field must reject None."""
    # Given: required is True
    fh = init_field_holder(field)
    # When: value is None
    setattr(fh, FIELD_NAME, None)
    # Then: raise error
    with pytest.raises(ValidationError):
        field._run_validation(None)
@pytest.mark.parametrize('field, value', [
    (BoolField(), True),
    (CharField(), 'a'),
    (StrField(), 'aaa'),
    (NumericField(), 1),
    (NumericField(), 1.0),
    (IntField(), 1),
    (FloatField(), 1.5),
    (DateField(), datetime.now().date()),
    (DateTimeField(), datetime.now()),
    (DictField(), {}),
    (EmailField(), 'aaa@aaa.com'),
    (ListField(StrField()), ['aaa'])
])
def test_returns_valid_value(field, value):
    """A type-correct value is stored and read back unchanged."""
    # Given: required is True
    fh = init_field_holder(field)
    # When: init with valid value
    setattr(fh, FIELD_NAME, value)
    # Then: return value
    assert getattr(fh, FIELD_NAME) == value


@pytest.mark.parametrize('field', [
    BoolField(default=True),
    CharField(default='a'),
    StrField(default='aaa'),
    NumericField(default=1.0),
    IntField(default=1),
    FloatField(default=1.5),
    DateField(default=datetime.now().date()),
    DateTimeField(default=datetime.now()),
    DictField(default={}),
    EmailField(default='aaa@aaa.com'),
    ListField(StrField(), default=['aaa'])
])
def test_returns_default_value(field):
    """When set to None, a field with a default falls back to that default."""
    # Given: required is True and default is provided
    fh = init_field_holder(field)
    # When: value is None
    setattr(fh, FIELD_NAME, None)
    # Then: return default value
    assert getattr(fh, FIELD_NAME) == field.options['default']


@pytest.mark.parametrize('field', [
    BoolField(required=False),
    CharField(required=False),
    StrField(required=False),
    NumericField(required=False),
    IntField(required=False),
    FloatField(required=False),
    DateField(required=False),
    DateTimeField(required=False),
    DictField(required=False),
    EmailField(required=False),
    ListField(StrField(required=False), required=False)
])
def test_returns_none_when_value_not_required(field):
    """An optional field without a default reads back as None."""
    # Given: required is False
    fh = init_field_holder(field)
    # When: value is None
    setattr(fh, FIELD_NAME, None)
    # Then: return None
    assert getattr(fh, FIELD_NAME) is None
def always_error_validator(name, value):
    """Validator stub that unconditionally fails; exercises the custom-validator path."""
    raise ValidationError(name, '')


@pytest.mark.parametrize('field, value', [
    (BoolField(validators=[always_error_validator]), True),
    (CharField(validators=[always_error_validator]), 'a'),
    (StrField(validators=[always_error_validator]), 'aa'),
    (NumericField(validators=[always_error_validator]), 1),
    (IntField(validators=[always_error_validator]), 1),
    (FloatField(validators=[always_error_validator]), 1.0),
    (DateField(validators=[always_error_validator]), datetime.now().date()),
    (DateTimeField(validators=[always_error_validator]), datetime.now()),
    (DictField(validators=[always_error_validator]), {}),
    (EmailField(validators=[always_error_validator]), 'test@example.com'),
    (ListField(StrField(), validators=[always_error_validator]), ['a', 'b', 'c'])
])
def test_custom_validation_raises_error(field, value):
    """A failing custom validator rejects even a type-correct value."""
    # Given: field with custom validator
    fh = init_field_holder(field)
    # When: using a valid value
    setattr(fh, FIELD_NAME, value)
    # Then: raise error because of custom validator
    with pytest.raises(ValidationError):
        field._run_validation(value)
@pytest.mark.parametrize('field, value', [
    (NumericField(validators=[interval(min_value=0)]), -1),
    (IntField(validators=[interval(min_value=0)]), -1),
    (FloatField(validators=[interval(min_value=2.0)]), 1.0)
])
def test_min_value_violation_raises_error(field, value):
    """interval(min_value=...) rejects values below the bound."""
    # Given: min option
    fh = init_field_holder(field)
    # When: value less than min
    setattr(fh, FIELD_NAME, value)
    # Then: raise error
    with pytest.raises(ValidationError):
        field._run_validation(value)


@pytest.mark.parametrize('field, value', [
    (NumericField(validators=[interval(max_value=1)]), 2),
    (IntField(validators=[interval(max_value=1)]), 2),
    (FloatField(validators=[interval(max_value=1.0)]), 2.0)
])
def test_max_value_violation_raises_error(field, value):
    """interval(max_value=...) rejects values above the bound."""
    # Given: max option
    fh = init_field_holder(field)
    # When: value greater than max
    setattr(fh, FIELD_NAME, value)
    # Then: raise error
    with pytest.raises(ValidationError):
        field._run_validation(value)


@pytest.mark.parametrize('field, value', [
    (StrField(validators=[length(min_length=2)]), 'a'),
    (ListField(IntField(), validators=[length(min_length=2)]), [1])
])
def test_min_length_violation_raises_error(field, value):
    """length(min_length=...) rejects too-short strings and lists."""
    # Given: min_length option
    fh = init_field_holder(field)
    # When: value length less than min_length
    setattr(fh, FIELD_NAME, value)
    # Then: raise error
    with pytest.raises(ValidationError):
        field._run_validation(value)


@pytest.mark.parametrize('field, value', [
    (StrField(validators=[length(max_length=2)]), 'aaa'),
    (ListField(IntField(), validators=[length(max_length=2)]), [1, 2, 3])
])
def test_max_length_violation_raises_error(field, value):
    """length(max_length=...) rejects too-long strings and lists."""
    # Given: max_length option
    fh = init_field_holder(field)
    # When: value length greater than max_length
    setattr(fh, FIELD_NAME, value)
    # Then: raise error
    with pytest.raises(ValidationError):
        field._run_validation(value)
@pytest.mark.parametrize('field, value', [
    (ListField(ListField(IntField())), [[1, 2], [3, 4]]),
    (ListField(ListField(ListField(IntField()))), [[[1, 2], [3, 4]], [[[5, 6], [7, 8]]]])
])
def test_nested_list(field, value):
    """Nested ListFields accept matching nested list values unchanged."""
    # Given: field with nested list field
    fh = init_field_holder(field)
    # When: initialize with nested list value
    setattr(fh, FIELD_NAME, value)
    # Then: return the value
    assert getattr(fh, FIELD_NAME) == value


@pytest.mark.parametrize('field, value', [
    (ListField(ListField(IntField(max_value=0))), [[1, 2], [3, 4]]),
    # extra nested list value
    (ListField(ListField(IntField())), [[[1, 2], [3, 4]], [[[5, 6], [7, 8]]]]),
    (ListField(ListField(ListField(IntField(), min_length=3))), [[[1, 2], [3, 4]], [[[5, 6], [7, 8]]]]),
    (ListField(ListField(StrField(min_length=3))), [['aaaa', 'bbb'], ['c', 'd']])
])
def test_nested_list_violation_raises_error(field, value):
    """Validation errors propagate up from inner fields of nested ListFields."""
    # Given: field with nested list field
    fh = init_field_holder(field)
    # When: initialize with nested list value violating validation
    setattr(fh, FIELD_NAME, value)
    # Then: raise error
    with pytest.raises(ValidationError):
        field._run_validation(value)
def test_firedatafield_returns_valid_value():
    """A FireDataField stores and returns a FireData instance unchanged."""
    # Given: a firedatafield
    field = FireDataField(Data)
    fh = init_field_holder(field)
    # When: init with value
    d = Data()
    d.a = 100
    setattr(fh, FIELD_NAME, d)
    # Then: return that firedatafield instance
    assert getattr(fh, FIELD_NAME) == d


def test_firedatafield_returns_default_value():
    """A FireDataField falls back to its default FireData instance when unset."""
    # Given: a firedatafield with default value
    d = Data()
    d.a = 100
    field = FireDataField(Data, default=d)
    fh = init_field_holder(field)
    # When: init with no value
    setattr(fh, FIELD_NAME, None)
    # Then: return default firedata instance
    assert getattr(fh, FIELD_NAME) == d


def test_firedatafield_inside_listfield_returns_valid_value():
    """A list of FireData instances round-trips through ListField(FireDataField)."""
    # Given: firedatafield inside listfield
    field = ListField(FireDataField(Data))
    fh = init_field_holder(field)
    # When: init
    d = Data()
    d.a = 100
    value = [d]
    setattr(fh, FIELD_NAME, value)
    # Then: return list containing firedatafield instance
    ret = getattr(fh, FIELD_NAME)
    assert ret == value
    assert ret[0] == d


def test_firedatafield_inside_listfield_returns_default_value():
    """A ListField default containing FireData instances is returned when unset."""
    # Given: listfield with default value
    d = Data()
    d.a = 100
    field = ListField(FireDataField(Data, required=True), default=[d])
    fh = init_field_holder(field)
    # When: init
    setattr(fh, FIELD_NAME, None)
    # Then: return list containing firedatafield
    ret = getattr(fh, FIELD_NAME)
    assert getattr(fh, FIELD_NAME) == [d]
    assert ret[0] == d
| 28.928571 | 104 | 0.67644 |
ace36b31ac690cdc61c761b02fb6937591ccfc97 | 4,882 | py | Python | pyannote/audio/applications/base_labeling.py | lgalmant/pyannote-audio | d58a9d2e18fb2fddaab99dbc6f93fdbdcfc5f290 | [
"MIT"
] | null | null | null | pyannote/audio/applications/base_labeling.py | lgalmant/pyannote-audio | d58a9d2e18fb2fddaab99dbc6f93fdbdcfc5f290 | [
"MIT"
] | null | null | null | pyannote/audio/applications/base_labeling.py | lgalmant/pyannote-audio | d58a9d2e18fb2fddaab99dbc6f93fdbdcfc5f290 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# encoding: utf-8
# The MIT License (MIT)
# Copyright (c) 2019 CNRS
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# AUTHORS
# Hervé BREDIN - http://herve.niderb.fr
from tqdm import tqdm
from .base import Application
from pyannote.database import FileFinder
from pyannote.database import get_protocol
from pyannote.audio.features import Precomputed
from pyannote.audio.features import RawAudio
from pyannote.audio.labeling.extraction import SequenceLabeling
from pyannote.core.utils.helper import get_class_by_name
from functools import partial
import multiprocessing as mp
class BaseLabeling(Application):
    """Base application for sequence-labeling experiments.

    Reads the task and architecture sections of the experiment configuration,
    and provides validation-data preparation and batch application of a
    trained model to a protocol's files.
    """

    def __init__(self, experiment_dir, db_yml=None, training=False):
        """Instantiate the task and model factory from ``config_``."""
        super(BaseLabeling, self).__init__(
            experiment_dir, db_yml=db_yml, training=training)
        # task
        Task = get_class_by_name(
            self.config_['task']['name'],
            default_module_name='pyannote.audio.labeling.tasks')
        self.task_ = Task(
            **self.config_['task'].get('params', {}))
        # architecture
        Architecture = get_class_by_name(
            self.config_['architecture']['name'],
            default_module_name='pyannote.audio.labeling.models')
        params = self.config_['architecture'].get('params', {})
        # Factory: calling self.get_model_() builds a fresh model instance.
        self.get_model_ = partial(Architecture, **params)
        # Optional architecture hooks; None when the class doesn't define them.
        if hasattr(Architecture, 'get_frame_info'):
            self.frame_info_ = Architecture.get_frame_info(**params)
        else:
            self.frame_info_ = None
        if hasattr(Architecture, 'frame_crop'):
            self.frame_crop_ = Architecture.frame_crop
        else:
            self.frame_crop_ = None

    def validate_init(self, protocol_name, subset='development'):
        """Prepare the files of *subset*, pre-extracting features when needed.

        Precomputed/RawAudio extractors are cheap to evaluate later, so their
        files are returned as-is; otherwise features are computed up front.
        Also spins up the worker pool used during validation.
        """
        protocol = get_protocol(protocol_name, progress=False,
                                preprocessors=self.preprocessors_)
        files = getattr(protocol, subset)()
        n_jobs = getattr(self, 'n_jobs', 1)
        self.pool_ = mp.Pool(n_jobs)
        if isinstance(self.feature_extraction_, (Precomputed, RawAudio)):
            return list(files)
        validation_data = []
        for current_file in tqdm(files, desc='Feature extraction'):
            current_file['features'] = self.feature_extraction_(current_file)
            validation_data.append(current_file)
        return validation_data

    def apply(self, protocol_name, output_dir, step=None, subset=None):
        """Run the trained model over a protocol and dump scores to *output_dir*.

        :param step: sliding-window hop; defaults to a quarter of the task duration.
        :param subset: restrict to one subset; None iterates every file.
        """
        model = self.model_.to(self.device)
        model.eval()
        duration = self.task_.duration
        if step is None:
            step = 0.25 * duration
        # do not use memmap as this would lead to too many open files
        if isinstance(self.feature_extraction_, Precomputed):
            self.feature_extraction_.use_memmap = False
        # initialize embedding extraction
        sequence_labeling = SequenceLabeling(
            model=model, feature_extraction=self.feature_extraction_,
            duration=duration, step=step, batch_size=self.batch_size,
            device=self.device)
        sliding_window = sequence_labeling.sliding_window
        # create metadata file at root that contains
        # sliding window and dimension information
        precomputed = Precomputed(
            root_dir=output_dir,
            sliding_window=sliding_window,
            labels=model.classes)
        # file generator
        protocol = get_protocol(protocol_name, progress=True,
                                preprocessors=self.preprocessors_)
        if subset is None:
            files = FileFinder.protocol_file_iter(protocol,
                                                  extra_keys=['audio'])
        else:
            files = getattr(protocol, subset)()
        for current_file in files:
            fX = sequence_labeling(current_file)
            precomputed.dump(current_file, fX)
| 36.706767 | 79 | 0.676567 |
ace36f00e00b03752106b3d015022c5c2216675a | 16,984 | py | Python | flexget/utils/tools.py | ksurl/flexget | a54dc25780b62f80d2e638278277a945428ffd05 | [
"MIT"
] | null | null | null | flexget/utils/tools.py | ksurl/flexget | a54dc25780b62f80d2e638278277a945428ffd05 | [
"MIT"
] | 13 | 2022-03-28T03:25:30.000Z | 2022-03-28T10:25:44.000Z | flexget/utils/tools.py | aidan-/Flexget | 5622436a412918ef204c51e9f984cd9fe784ea7c | [
"MIT"
] | null | null | null | """Contains miscellaneous helpers"""
import ast
import copy
import hashlib
import locale
import operator
import os
import queue
import re
import sys
import weakref
from collections import OrderedDict, defaultdict
from collections.abc import MutableMapping
from datetime import datetime, timedelta
from html.entities import name2codepoint
from pprint import pformat
from typing import (
TYPE_CHECKING,
Any,
Dict,
Iterable,
Iterator,
List,
NamedTuple,
Optional,
Pattern,
Sequence,
Tuple,
Union,
)
import psutil
import requests
from loguru import logger
import flexget
if TYPE_CHECKING:
from flexget.entry import Entry
from flexget.task import Task
logger = logger.bind(name='utils')
def str_to_boolean(string: str) -> bool:
    """Interpret common truthy strings ('true', '1', 't', 'y', 'yes'), case-insensitively."""
    truthy = {'true', '1', 't', 'y', 'yes'}
    return string.lower() in truthy
def str_to_int(string: str) -> Optional[int]:
    """Parse *string* as an int, ignoring thousands separators; None when unparsable."""
    digits = string.replace(',', '')
    try:
        parsed = int(digits)
    except ValueError:
        return None
    return parsed
def convert_bytes(bytes_num: Union[int, float]) -> str:
    """Return *bytes_num* as a human-readable string, e.g. ``2.00K`` or ``512.00b``."""
    amount = float(bytes_num)
    # Largest prefix first so the first strictly-exceeded threshold wins.
    for prefix, threshold in (
        ('T', 1024 ** 4),
        ('G', 1024 ** 3),
        ('M', 1024 ** 2),
        ('K', 1024),
    ):
        if amount > threshold:
            return f'{amount / threshold:.2f}{prefix}'
    # Anything at or below 1 KiB is reported in plain bytes.
    return f'{amount:.2f}b'
class MergeException(Exception):
    """Raised by :func:`merge_dict_from_to` when two dicts hold irreconcilable types for a key."""

    def __init__(self, value: str):
        # Human-readable description of the conflicting merge.
        self.value = value

    def __str__(self) -> str:
        return repr(self.value)
def strip_html(text: str) -> str:
    """Tries to strip all HTML tags from *text*. If unsuccessful returns original text."""
    # Imported lazily so the module loads even without bs4 installed.
    from bs4 import BeautifulSoup
    try:
        # Keep only text nodes, then collapse all whitespace runs to single spaces.
        text = ' '.join(BeautifulSoup(text).find_all(text=True))
        return ' '.join(text.split())
    except Exception:
        # Best-effort: any parsing failure falls back to the raw input.
        return text
# This pattern matches a character entity reference (a decimal numeric
# references, a hexadecimal numeric reference, or a named reference).
charrefpat = re.compile(r'&(#(\d+|x[\da-fA-F]+)|[\w.:-]+);?')
def _htmldecode(text: str) -> str:
"""Decode HTML entities in the given text."""
# From screpe.py - licensed under apache 2.0 .. should not be a problem for a MIT afaik
if isinstance(text, str):
uchr = chr
else:
def uchr(value):
value > 127 and chr(value) or chr(value)
def entitydecode(match, uchr=uchr):
entity = match.group(1)
if entity.startswith('#x'):
return uchr(int(entity[2:], 16))
elif entity.startswith('#'):
return uchr(int(entity[1:]))
elif entity in name2codepoint:
return uchr(name2codepoint[entity])
else:
return match.group(0)
return charrefpat.sub(entitydecode, text)
def decode_html(value: str) -> str:
    """
    :param string value: String to be html-decoded
    :returns: Html decoded string
    """
    # Thin public wrapper around the module-private entity decoder.
    return _htmldecode(value)
def encode_html(unicode_data: str, encoding: str = 'ascii') -> bytes:
    """Encode *unicode_data* for use as XML or HTML.

    Characters outside the target *encoding* are replaced with XML numeric
    character references (e.g. ``é`` -> ``&#233;`` under ascii).
    """
    return unicode_data.encode(encoding, errors='xmlcharrefreplace')
def merge_dict_from_to(d1: dict, d2: dict) -> None:
    """Merges dictionary d1 into dictionary d2. d1 will remain in original form."""
    for k, v in d1.items():
        if k in d2:
            if isinstance(v, type(d2[k])):
                # Same type on both sides: containers merge, scalars keep d2's value.
                if isinstance(v, dict):
                    merge_dict_from_to(d1[k], d2[k])
                elif isinstance(v, list):
                    # Extend with deep copies so later mutation of d2 can't touch d1.
                    d2[k].extend(copy.deepcopy(v))
                elif isinstance(v, (str, bool, int, float, type(None))):
                    # Scalar conflict: the existing value in d2 wins.
                    pass
                else:
                    raise Exception(f'Unknown type: {type(v)} value: {repr(v)} in dictionary')
            elif isinstance(v, (str, bool, int, float, list, type(None))) and isinstance(
                d2[k], (str, bool, int, float, list, type(None))
            ):
                # Allow overriding of non-container types with other non-container types
                pass
            else:
                # Container vs. non-container (or other) mismatch cannot be reconciled.
                raise MergeException(
                    'Merging key %s failed, conflicting datatypes %r vs. %r.'
                    % (k, type(v).__name__, type(d2[k]).__name__)
                )
        else:
            # Key only present in d1: deep-copy it across so d1 stays untouched.
            d2[k] = copy.deepcopy(v)
class ReList(list):
    """
    A list that stores regexps.
    You can add compiled or uncompiled regexps to the list.
    It will always return the compiled version.
    It will compile the text regexps on demand when first accessed.
    """

    # Set the default flags
    flags = re.IGNORECASE

    def __init__(self, *args, **kwargs) -> None:
        """Optional :flags: keyword argument with regexp flags to compile with"""
        if 'flags' in kwargs:
            # Instance attribute shadows the class-level default.
            self.flags = kwargs.pop('flags')
        list.__init__(self, *args, **kwargs)

    def __getitem__(self, k) -> Pattern:  # type: ignore
        # Doesn't support slices. Do we care?
        item = list.__getitem__(self, k)
        if isinstance(item, str):
            # Lazily compile and cache the compiled pattern back into the slot.
            item = re.compile(item, self.flags)
            self[k] = item
        return item

    def __iter__(self) -> Iterator[Pattern]:
        # Index-based iteration so each element goes through __getitem__
        # (and therefore gets compiled on first access).
        for i in range(len(self)):
            yield self[i]
# Determine the encoding for io
io_encoding = ''
if hasattr(sys.stdout, 'encoding'):
    io_encoding = sys.stdout.encoding
if not io_encoding:
    # stdout carried no usable encoding; fall back to the locale's preference.
    try:
        io_encoding = locale.getpreferredencoding()
    except Exception:
        pass
if not io_encoding:
    # Default to utf8 if nothing can be determined
    io_encoding = 'utf8'
else:
    # Normalize the encoding
    io_encoding = io_encoding.lower()
    if io_encoding == 'cp65001':
        # Windows code page 65001 is UTF-8.
        io_encoding = 'utf8'
    elif io_encoding in ['us-ascii', '646', 'ansi_x3.4-1968']:
        # Common aliases for plain ASCII.
        io_encoding = 'ascii'
def parse_timedelta(value: Union[timedelta, str, None]) -> timedelta:
    """Parse a string like '5 days' into a timedelta object.

    timedelta instances pass through unchanged, and falsy values (None, '')
    yield a zero-length timedelta. The unit may be singular or plural
    ('day' or 'days').

    :raises ValueError: if *value* is not of the form ``'<amount> <unit>'``,
        the amount is not numeric, or the unit is not a valid ``timedelta``
        keyword (days, hours, minutes, ...).
    """
    if isinstance(value, timedelta):
        # Allow timedelta objects to pass through
        return value
    if not value:
        # If no time is given, default to 0
        return timedelta()
    try:
        # split() (rather than split(' ')) tolerates repeated whitespace, and
        # a malformed string now raises our message instead of a raw unpack error.
        amount, unit = value.lower().split()
    except ValueError:
        raise ValueError(f"Invalid time format '{value}'")
    # Make sure unit name is plural, as timedelta expects ('day' -> 'days').
    if not unit.endswith('s'):
        unit += 's'
    try:
        return timedelta(**{unit: float(amount)})
    except (TypeError, ValueError):
        # TypeError: unknown unit keyword; ValueError: non-numeric amount.
        raise ValueError(f"Invalid time format '{value}'")
def multiply_timedelta(interval: timedelta, number: Union[int, float]) -> timedelta:
    """`timedelta`s can not normally be multiplied by floating points. This does that.

    Scales *interval* by *number* via its total seconds.
    """
    scaled_seconds = interval.total_seconds() * number
    return timedelta(seconds=scaled_seconds)
def pid_exists(pid: int):
    """Return True if *pid* refers to a live (non-stopped) process."""
    try:
        status = psutil.Process(pid).status()
    except psutil.NoSuchProcess:
        return False
    # A stopped process is treated as not running.
    return status != psutil.STATUS_STOPPED
# Maps ast binary-operator node types to the callables implementing them.
# NOTE(review): presumably consumed by an ast-based arithmetic evaluator
# elsewhere in this module — confirm before extending.
_binOps = {
    ast.Add: operator.add,
    ast.Sub: operator.sub,
    ast.Mult: operator.mul,
    ast.Div: operator.truediv,
    ast.Mod: operator.mod,
}
class TimedDict(MutableMapping):
    """Acts like a normal dict, but keys will only remain in the dictionary for a specified time span."""
    # Registry of all live instances, used by clear_all(). A WeakValueDictionary
    # so that the registry itself never keeps an instance alive.
    _instances: Dict[int, 'TimedDict'] = weakref.WeakValueDictionary()
    def __init__(self, cache_time: Union[timedelta, str] = '5 minutes'):
        """:param cache_time: Expiry span; a timedelta or a string like '5 minutes'."""
        self.cache_time = parse_timedelta(cache_time)
        # Maps key -> (insertion time, value)
        self._store: dict = {}
        self._last_prune = datetime.now()
        self._instances[id(self)] = self
    def _prune(self):
        """Prune all expired keys."""
        # Iterate over a snapshot since entries are deleted while looping.
        for key, (add_time, _) in list(self._store.items()):
            if add_time < datetime.now() - self.cache_time:
                del self._store[key]
        self._last_prune = datetime.now()
    def __getitem__(self, key):
        add_time, value = self._store[key]
        # Prune data and raise KeyError if expired
        if add_time < datetime.now() - self.cache_time:
            del self._store[key]
            raise KeyError(key, 'cache time expired')
        return value
    def __setitem__(self, key, value):
        # Make sure we clear periodically, even if old keys aren't accessed again
        if self._last_prune < datetime.now() - (2 * self.cache_time):
            self._prune()
        self._store[key] = (datetime.now(), value)
    def __delitem__(self, key):
        del self._store[key]
    def __iter__(self):
        # Uses our getitem to skip expired items
        return (key for key in list(self._store.keys()) if key in self)
    def __len__(self):
        # Counts only non-expired keys, by way of __iter__.
        return len(list(self.__iter__()))
    def __repr__(self):
        # Render like a plain key -> value dict, dropping the timestamps.
        return '%s(%r)' % (
            self.__class__.__name__,
            dict(list(zip(self._store, (v[1] for v in list(self._store.values()))))),
        )
    @classmethod
    def clear_all(cls):
        """
        Clears all instantiated TimedDicts.
        Used by tests to make sure artifacts don't leak between tests.
        """
        for store in cls._instances.values():
            store.clear()
class BufferQueue(queue.Queue):
    """Used in place of a file-like object to capture text and access it safely from another thread."""
    # Re-exported so callers can catch BufferQueue.Empty without importing queue.
    Empty = queue.Empty
    def write(self, line):
        """File-like write: enqueue *line* for a reader on another thread."""
        self.put(line)
class TitleYear(NamedTuple):
    """A title string paired with an optional release year."""
    # title: the name with any trailing year removed
    # year: four digit year in 1000-2999, or None when not found
    title: str
    year: Optional[int]
def split_title_year(title: str) -> TitleYear:
    """Splits title containing a year into a title, year pair."""
    if not title:
        return TitleYear('', None)
    if not re.search(r'\d{4}', title):
        # Fast path: no four-digit run anywhere means there is no year.
        return TitleYear(title, None)
    # We only recognize years from the 2nd and 3rd millennium, FlexGetters from the year 3000 be damned!
    match = re.search(r'(.*?)\(?([12]\d{3})?\)?$', title)
    if not match:
        return TitleYear(title, None)
    base_title = match.group(1).strip()
    year_text = match.group(2)
    if year_text and not base_title:
        # title looks like a year, '2020' for example
        return TitleYear(year_text, None)
    if not year_text:
        return TitleYear(base_title, None)
    return TitleYear(base_title, int(year_text))
def get_latest_flexget_version_number() -> Optional[str]:
    """
    Return latest Flexget version from https://pypi.python.org/pypi/FlexGet/json

    :return: The version string, or None on network failure, timeout, or an
        unexpected/non-JSON response body.
    """
    try:
        # An explicit timeout prevents a hung PyPI connection from blocking
        # the caller indefinitely (requests has no default timeout).
        response = requests.get('https://pypi.python.org/pypi/FlexGet/json', timeout=30)
        data = response.json()
        return data.get('info', {}).get('version')
    except (requests.RequestException, ValueError):
        # ValueError covers a response body that is not valid JSON on older
        # requests versions; newer versions raise a RequestException subclass.
        return None
def get_current_flexget_version() -> str:
    """Return the version string of the running FlexGet package."""
    return flexget.__version__
def parse_filesize(text_size: str, si: bool = True) -> float:
    """
    Parses a data size and returns its value in mebibytes

    :param string text_size: string containing the data size to parse i.e. "5 GB"
    :param bool si: If True, possibly ambiguous units like KB, MB, GB will be assumed to be base 10 units,
    rather than the default base 2. i.e. if si then 50 GB = 47684 else 50GB = 51200
    :returns: an float with the data size in mebibytes
    """
    # Exponent applied to the base for each unit prefix.
    exponents = {'': 0, 'k': 1, 'm': 2, 'g': 3, 't': 4, 'p': 5}
    parsed = re.match(
        r'(\d+(?:[.,\s]\d+)*)(?:\s*)((?:[ptgmk]i?)?b)', text_size.strip().lower(), flags=re.UNICODE
    )
    if parsed is None:
        raise ValueError('%s does not look like a file size' % text_size)
    amount_text = parsed.group(1)
    unit = parsed.group(2)
    if not unit.endswith('b'):
        raise ValueError('%s does not look like a file size' % text_size)
    unit = unit.rstrip('b')
    if unit.endswith('i'):
        # An explicit binary unit (KiB, MiB, ...) always means base 2.
        si = False
        unit = unit.rstrip('i')
    if unit not in exponents:
        raise ValueError('%s does not look like a file size' % text_size)
    # Strip thousands separators and stray spaces before converting.
    amount = float(amount_text.replace(',', '').replace(' ', ''))
    base = 1000 if si else 1024
    size_in_bytes = amount * (base ** exponents[unit])
    return size_in_bytes / 1024 ** 2
def get_config_hash(config: Any) -> str:
    """
    :param dict config: Configuration
    :return: MD5 hash for *config*
    """
    if isinstance(config, (dict, list)):
        # pformat gives a stable representation; nested dicts come out sorted.
        serialized = pformat(config)
    else:
        serialized = str(config)
    return hashlib.md5(serialized.encode('utf-8')).hexdigest()
def get_config_as_array(config: dict, key: str) -> list:
    """
    Return configuration key as array, even if given as a single string

    :param dict config: Configuration
    :param string key: Configuration
    :return: Array
    """
    value = config.get(key, [])
    # Wrap scalars (e.g. a bare string) so callers always get a list.
    return value if isinstance(value, list) else [value]
def parse_episode_identifier(
    ep_id: Union[str, int], identify_season: bool = False
) -> Tuple[str, str]:
    """
    Parses series episode identifier, raises ValueError if it fails

    :param ep_id: Value to parse
    :param identify_season: If True, a bare 'SXX' string is accepted as a season identifier
    :return: Tuple of (identifier type: `sequence`, `ep` or `date`; entity type: `episode` or `season`)
    :raises ValueError: If ep_id does not match any valid types
    """
    entity_type = 'episode'
    if isinstance(ep_id, int):
        if ep_id <= 0:
            raise ValueError('sequence type episode must be higher than 0')
        return 'sequence', entity_type
    if re.match(r'(?i)^S\d{1,4}E\d{1,3}$', ep_id):
        return 'ep', entity_type
    if identify_season and re.match(r'(?i)^S\d{1,4}$', ep_id):
        return 'ep', 'season'
    if re.match(r'\d{4}-\d{2}-\d{2}', ep_id):
        return 'date', entity_type
    # Check if a sequence identifier was passed as a string
    try:
        number = int(ep_id)
    except ValueError:
        raise ValueError(f'`{ep_id}` is not a valid episode identifier.')
    if number <= 0:
        raise ValueError('sequence type episode must be higher than 0')
    return 'sequence', entity_type
def group_entries(entries: Iterable['Entry'], identifier: str) -> Dict[str, List['Entry']]:
    """Group *entries* by the rendered value of *identifier*.

    Entries whose identifier fails to render, or renders empty, are skipped.
    Keys are lower-cased and stripped for normalization.
    """
    from flexget.utils.template import RenderError
    grouped = defaultdict(list)
    for entry in entries:
        try:
            rendered = entry.render(identifier)
        except RenderError:
            # Entries missing the needed fields are simply left ungrouped.
            continue
        if rendered:
            grouped[rendered.lower().strip()].append(entry)
    return grouped
def aggregate_inputs(task: 'Task', inputs: List[dict]) -> List['Entry']:
    """Run a list of input plugin configurations and merge their results.

    Entries are de-duplicated across all inputs by URL, then by title, then
    by location; the first occurrence wins.

    :param task: Task the input plugins are executed against.
    :param inputs: List of dicts mapping an input plugin name to its config.
    :return: Combined list of unique entries produced by all inputs.
    """
    from flexget import plugin
    entries = []
    entry_titles = set()
    entry_urls = set()
    entry_locations = set()
    for item in inputs:
        for input_name, input_config in item.items():
            input = plugin.get_plugin_by_name(input_name)
            method = input.phase_handlers['input']
            try:
                result = method(task, input_config)
            except plugin.PluginError as e:
                # One failing input must not abort the whole aggregation.
                logger.warning('Error during input plugin {}: {}', input_name, e)
                continue
            if not result:
                logger.warning('Input {} did not return anything', input_name)
                continue
            for entry in result:
                # Collect the primary url plus any alternative urls for dedup.
                urls = ([entry['url']] if entry.get('url') else []) + entry.get('urls', [])
                if any(url in entry_urls for url in urls):
                    logger.debug('URL for `{}` already in entry list, skipping.', entry['title'])
                    continue
                if entry['title'] in entry_titles:
                    logger.debug(
                        'Ignored duplicate title `{}`', entry['title']
                    ) # TODO: should combine?
                    continue
                if entry.get('location') and entry['location'] in entry_locations:
                    logger.debug(
                        'Ignored duplicate location `{}`', entry['location']
                    ) # TODO: should combine?
                    continue
                entries.append(entry)
                entry_titles.add(entry['title'])
                entry_urls.update(urls)
                if entry.get('location'):
                    entry_locations.add(entry['location'])
    return entries
# Mainly used due to Too Many Variables error if we use too many variables at a time in the in_ clause.
# SQLite supports up to 999 by default. Ubuntu, Arch and macOS set this limit to 250,000 though, so it's a rare issue.
def chunked(seq: Sequence, limit: int = 900) -> Iterator[Sequence]:
    """Helper to divide our expired lists into sizes sqlite can handle in a query. (<1000)"""
    start = 0
    while start < len(seq):
        yield seq[start:start + limit]
        start += limit
| 31.510204 | 118 | 0.608337 |
ace36f4d7af7c3578312ffcb92ed0ac227513975 | 2,847 | py | Python | ecommerce/urls.py | niketanbothe01/Devops | a9524aa89e64da9eed0cd21fec44582b9d7163d5 | [
"MIT"
] | null | null | null | ecommerce/urls.py | niketanbothe01/Devops | a9524aa89e64da9eed0cd21fec44582b9d7163d5 | [
"MIT"
] | null | null | null | ecommerce/urls.py | niketanbothe01/Devops | a9524aa89e64da9eed0cd21fec44582b9d7163d5 | [
"MIT"
] | null | null | null | """
Developed By : sumit kumar
facebook : fb.com/sumit.luv
Youtube :youtube.com/lazycoders
"""
from django.contrib import admin
from django.urls import path
from ecom import views
from django.contrib.auth.views import LoginView,LogoutView
urlpatterns = [
path('admin/', admin.site.urls),
path('',views.home_view,name=''),
path('afterlogin', views.afterlogin_view,name='afterlogin'),
path('logout', LogoutView.as_view(template_name='ecom/logout.html'),name='logout'),
path('aboutus', views.aboutus_view),
path('contactus', views.contactus_view,name='contactus'),
path('search', views.search_view,name='search'),
path('send-feedback', views.send_feedback_view,name='send-feedback'),
path('view-feedback', views.view_feedback_view,name='view-feedback'),
path('adminclick', views.adminclick_view),
path('adminlogin', LoginView.as_view(template_name='ecom/adminlogin.html'),name='adminlogin'),
path('admin-dashboard', views.admin_dashboard_view,name='admin-dashboard'),
path('view-customer', views.view_customer_view,name='view-customer'),
path('delete-customer/<int:pk>', views.delete_customer_view,name='delete-customer'),
path('update-customer/<int:pk>', views.update_customer_view,name='update-customer'),
path('admin-products', views.admin_products_view,name='admin-products'),
path('admin-add-product', views.admin_add_product_view,name='admin-add-product'),
path('delete-product/<int:pk>', views.delete_product_view,name='delete-product'),
path('update-product/<int:pk>', views.update_product_view,name='update-product'),
path('admin-view-booking', views.admin_view_booking_view,name='admin-view-booking'),
path('delete-order/<int:pk>', views.delete_order_view,name='delete-order'),
path('update-order/<int:pk>', views.update_order_view,name='update-order'),
path('customersignup', views.customer_signup_view),
path('customerlogin', LoginView.as_view(template_name='ecom/customerlogin.html'),name='customerlogin'),
path('customer-home', views.customer_home_view,name='customer-home'),
path('my-order', views.my_order_view,name='my-order'),
path('my-profile', views.my_profile_view,name='my-profile'),
path('edit-profile', views.edit_profile_view,name='edit-profile'),
path('download-invoice/<int:orderID>/<int:productID>', views.download_invoice_view,name='download-invoice'),
path('add-to-cart/<int:pk>', views.add_to_cart_view,name='add-to-cart'),
path('cart', views.cart_view,name='cart'),
path('remove-from-cart/<int:pk>', views.remove_from_cart_view,name='remove-from-cart'),
path('customer-address', views.customer_address_view,name='customer-address'),
path('payment-success', views.payment_success_view,name='payment-success'),
]
| 48.254237 | 113 | 0.7183 |
ace3700bb210ecf51613e5fbfc6e5f6188a9dbb6 | 2,530 | py | Python | lib/systems/18-crown-6.py | pulsar-chem/BPModule | f8e64e04fdb01947708f098e833600c459c2ff0e | [
"BSD-3-Clause"
] | null | null | null | lib/systems/18-crown-6.py | pulsar-chem/BPModule | f8e64e04fdb01947708f098e833600c459c2ff0e | [
"BSD-3-Clause"
] | null | null | null | lib/systems/18-crown-6.py | pulsar-chem/BPModule | f8e64e04fdb01947708f098e833600c459c2ff0e | [
"BSD-3-Clause"
] | null | null | null | import pulsar as psr
def load_ref_system():
""" Returns 18-crown-6 as found in the IQMol fragment library.
All credit to https://github.com/nutjunkie/IQmol
"""
return psr.make_system("""
C 0.52133 2.98003 -0.69606
C 2.02024 2.69966 -0.72281
O 2.27141 1.31380 -0.56707
C 2.84145 1.03853 0.69606
C 3.34810 -0.39974 0.72281
O 2.27348 -1.31020 0.56707
C 2.32012 -1.94150 -0.69606
C 1.32786 -3.09941 -0.72281
O 0.00208 -2.62400 -0.56707
C -0.52133 -2.98003 0.69606
C -2.02024 -2.69967 0.72281
O -2.27141 -1.31379 0.56707
C -2.84145 -1.03853 -0.69606
C -3.34810 0.39974 -0.72281
O -2.27349 1.31020 -0.56707
C -2.32011 1.94150 0.69606
C -1.32786 3.09941 0.72281
O -0.00207 2.62399 0.56707
H 0.40333 4.07155 -0.89000
H 0.02825 2.38963 -1.49976
H 2.52185 3.34807 0.03529
H 2.42796 3.01062 -1.70808
H 2.08360 1.17035 1.49976
H 3.72773 1.68648 0.89000
H 3.82125 -0.59737 1.70808
H 4.16044 -0.50995 -0.03529
H 3.32440 -2.38506 -0.89000
H 2.05535 -1.21928 -1.49976
H 1.63859 -3.85802 0.03529
H 1.39329 -3.60798 -1.70808
H -0.02825 -2.38963 1.49976
H -0.40333 -4.07155 0.89000
H -2.42796 -3.01062 1.70808
H -2.52185 -3.34807 -0.03529
H -3.72773 -1.68648 -0.89000
H -2.08360 -1.17035 -1.49976
H -4.16044 0.50995 0.03529
H -3.82125 0.59737 -1.70808
H -2.05535 1.21928 1.49976
H -3.32440 2.38506 0.89000
H -1.39329 3.60798 1.70808
H -1.63859 3.85802 -0.03529
""")
| 50.6 | 66 | 0.371146 |
ace3701d19591d85de03651b08b2991ea5ef8615 | 811 | py | Python | monai/bundle/__main__.py | function2-llx/MONAI | 4cddaa830b61b88ec78e089bb5f21e05bb1a78f4 | [
"Apache-2.0"
] | null | null | null | monai/bundle/__main__.py | function2-llx/MONAI | 4cddaa830b61b88ec78e089bb5f21e05bb1a78f4 | [
"Apache-2.0"
] | null | null | null | monai/bundle/__main__.py | function2-llx/MONAI | 4cddaa830b61b88ec78e089bb5f21e05bb1a78f4 | [
"Apache-2.0"
] | null | null | null | # Copyright (c) MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from monai.bundle.scripts import ckpt_export, download, init_bundle, run, verify_metadata, verify_net_in_out
if __name__ == "__main__":
from monai.utils import optional_import
fire, _ = optional_import("fire")
fire.Fire()
| 40.55 | 108 | 0.763255 |
ace37308e4768f4d4b1b0147dfa58d608409c69b | 596 | py | Python | ros/build/waypoint_follower/catkin_generated/pkg.develspace.context.pc.py | YuchaoYin/Cap_P13 | 9156948c628038d0248082863103812a12afba30 | [
"MIT"
] | null | null | null | ros/build/waypoint_follower/catkin_generated/pkg.develspace.context.pc.py | YuchaoYin/Cap_P13 | 9156948c628038d0248082863103812a12afba30 | [
"MIT"
] | null | null | null | ros/build/waypoint_follower/catkin_generated/pkg.develspace.context.pc.py | YuchaoYin/Cap_P13 | 9156948c628038d0248082863103812a12afba30 | [
"MIT"
] | null | null | null | # generated from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "/home/student/Documents/P13/ros/src/waypoint_follower/include".split(';') if "/home/student/Documents/P13/ros/src/waypoint_follower/include" != "" else []
PROJECT_CATKIN_DEPENDS = "roscpp;std_msgs;tf;geometry_msgs;styx_msgs".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "-llibwaypoint_follower".split(';') if "-llibwaypoint_follower" != "" else []
PROJECT_NAME = "waypoint_follower"
PROJECT_SPACE_DIR = "/home/student/Documents/P13/ros/devel"
PROJECT_VERSION = "0.0.0"
| 66.222222 | 189 | 0.775168 |
ace37375ad55075b596e448be75e1906069fb347 | 3,861 | py | Python | melodic/lib/python2.7/dist-packages/qt_gui_py_common/simple_settings_dialog.py | Dieptranivsr/Ros_Diep | d790e75e6f5da916701b11a2fdf3e03b6a47086b | [
"MIT"
] | 2 | 2021-07-14T12:33:55.000Z | 2021-11-21T07:14:13.000Z | melodic/src/qt_gui_core/qt_gui_py_common/src/qt_gui_py_common/simple_settings_dialog.py | disorn-inc/ROS-melodic-python3-Opencv-4.1.1-CUDA | 3d265bb64712e3cd7dfa0ad56d78fcdebafdb4b0 | [
"BSD-3-Clause"
] | 1 | 2021-07-08T10:26:06.000Z | 2021-07-08T10:31:11.000Z | melodic/src/qt_gui_core/qt_gui_py_common/src/qt_gui_py_common/simple_settings_dialog.py | disorn-inc/ROS-melodic-python3-Opencv-4.1.1-CUDA | 3d265bb64712e3cd7dfa0ad56d78fcdebafdb4b0 | [
"BSD-3-Clause"
] | null | null | null | # Software License Agreement (BSD License)
#
# Copyright (c) 2012, Dorian Scholz
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import os
from python_qt_binding import loadUi
from python_qt_binding.QtCore import qWarning
from python_qt_binding.QtWidgets import QDialog, QLabel
from rospkg.rospack import RosPack
from .checkbox_group import CheckBoxGroup
from .exclusive_options_group import ExclusiveOptionGroup
class SimpleSettingsDialog(QDialog):
"""Simple dialog that can show multiple settings groups and returns their combined results."""
def __init__(self, title='Options', description=None):
super(SimpleSettingsDialog, self).__init__()
self.setObjectName('SimpleSettingsDialog')
rp = RosPack()
ui_file = os.path.join(
rp.get_path('qt_gui_py_common'), 'resource', 'simple_settings_dialog.ui')
loadUi(ui_file, self)
self.setWindowTitle(title)
self._settings_groups = []
if description is not None:
self.add_label(description)
def add_label(self, text):
self.group_area.layout().addWidget(QLabel(text))
def add_exclusive_option_group(self, *args, **kwargs):
"""Add an ExclusiveOptionGroup."""
self.add_settings_group(ExclusiveOptionGroup(*args, **kwargs))
def add_checkbox_group(self, *args, **kwargs):
"""Add a CheckBoxGroup."""
self.add_settings_group(CheckBoxGroup(*args, **kwargs))
def add_settings_group(self, settings_group):
"""Add a settings group, which is any widget with a get_settings method."""
if not hasattr(settings_group, 'get_settings'):
qWarning(
'add_settings_group(): this settings group has no get_settings method to collect the settings!')
self._settings_groups.append(settings_group)
self.group_area.layout().addWidget(settings_group)
def get_settings(self):
"""Return the combined settings from all settings groups as a list."""
if self.exec_() == QDialog.Accepted:
results = []
for settings_group in self._settings_groups:
if hasattr(settings_group, 'get_settings'):
results.append(settings_group.get_settings())
else:
results.append(None)
return results
return [None] * len(self._settings_groups)
| 41.967391 | 112 | 0.716654 |
ace374cefe56d9fa3ecb38f41ff71ba36ddbef25 | 5,542 | py | Python | sharpy/presharpy/presharpy.py | ACea15/sharpy | c89ecb74be3cb9e37b23ac8a282c73b9b55dd792 | [
"BSD-3-Clause"
] | 80 | 2018-08-30T13:01:52.000Z | 2022-03-24T15:02:48.000Z | sharpy/presharpy/presharpy.py | ACea15/sharpy | c89ecb74be3cb9e37b23ac8a282c73b9b55dd792 | [
"BSD-3-Clause"
] | 88 | 2018-05-17T16:18:58.000Z | 2022-03-11T21:05:48.000Z | sharpy/presharpy/presharpy.py | ACea15/sharpy | c89ecb74be3cb9e37b23ac8a282c73b9b55dd792 | [
"BSD-3-Clause"
] | 44 | 2018-01-02T14:27:28.000Z | 2022-03-12T13:49:36.000Z | import configparser
import configobj
import sharpy.utils.cout_utils as cout
from sharpy.utils.solver_interface import solver, dict_of_solvers
import sharpy.utils.settings as settings
import sharpy.utils.exceptions as exceptions
@solver
class PreSharpy(object):
"""
The PreSharpy solver is the main loader solver of SHARPy. It takes the admin-like settings for the simulation,
including the case name, case route and the list of solvers to run and in which order to run them. This order
of solvers is referred to, throughout SHARPy, as the ``flow`` setting.
This is a mandatory solver for all simulations at the start so it is never included in the ``flow`` setting.
The settings for this solver are parsed through in the configuration file under the header ``SHARPy``. I.e, when
you are defining the config file for a simulation, the settings for PreSharpy are included as:
.. code-block:: python
import configobj
filename = '<case_route>/<case_name>.sharpy'
config = configobj.ConfigObj()
config.filename = filename
config['SHARPy'] = {'case': '<your SHARPy case name>', # an example setting
# Rest of your settings for the PreSHARPy class
}
"""
solver_id = 'PreSharpy'
solver_classification = 'loader'
settings_types = dict()
settings_default = dict()
settings_description = dict()
settings_types['flow'] = 'list(str)'
settings_default['flow'] = None
settings_description['flow'] = "List of the desired solvers' ``solver_id`` to run in sequential order."
settings_types['case'] = 'str'
settings_default['case'] = 'default_case_name'
settings_description['case'] = 'Case name'
settings_types['route'] = 'str'
settings_default['route'] = None
settings_description['route'] = 'Route to case files'
settings_types['write_screen'] = 'bool'
settings_default['write_screen'] = True
settings_description['write_screen'] = 'Display output on terminal screen.'
settings_types['write_log'] = 'bool'
settings_default['write_log'] = False
settings_description['write_log'] = 'Write log file'
settings_types['log_folder'] = 'str'
settings_default['log_folder'] = ''
settings_description['log_folder'] = 'Log folder destination directory'
settings_types['save_settings'] = 'bool'
settings_default['save_settings'] = False
settings_description['save_settings'] = 'Save a copy of the settings to a ``.sharpy`` file in the output ' \
'directory specified in ``log_folder``.'
settings_table = settings.SettingsTable()
__doc__ += settings_table.generate(settings_types, settings_default, settings_description,
header_line='The following are the settings that the PreSharpy class takes:')
def __init__(self, in_settings=None):
self._settings = True
if in_settings is None:
# call for documentation only
self._settings = False
self.ts = 0
self.settings_types['log_file'] = 'str'
self.settings_default['log_file'] = 'log'
if self._settings:
self.settings = in_settings
self.settings['SHARPy']['flow'] = self.settings['SHARPy']['flow']
settings.to_custom_types(self.settings['SHARPy'], self.settings_types, self.settings_default)
cout.cout_wrap.initialise(self.settings['SHARPy']['write_screen'],
self.settings['SHARPy']['write_log'],
self.settings['SHARPy']['log_folder'],
self.settings['SHARPy']['log_file'])
self.case_route = in_settings['SHARPy']['route'] + '/'
self.case_name = in_settings['SHARPy']['case']
for solver_name in in_settings['SHARPy']['flow']:
try:
dict_of_solvers[solver_name]
except KeyError:
exceptions.NotImplementedSolver(solver_name)
if self.settings['SHARPy']['save_settings']:
self.save_settings()
def initialise(self):
pass
def update_settings(self, new_settings):
self.settings = new_settings
self.settings['SHARPy']['flow'] = self.settings['SHARPy']['flow']
settings.to_custom_types(self.settings['SHARPy'], self.settings_types, self.settings_default)
cout.cout_wrap.initialise(self.settings['SHARPy']['write_screen'],
self.settings['SHARPy']['write_log'],
self.settings['SHARPy']['log_folder'],
self.settings['SHARPy']['log_file'])
self.case_route = self.settings['SHARPy']['route'] + '/'
self.case_name = self.settings['SHARPy']['case']
def save_settings(self):
"""
Saves the settings to a ``.sharpy`` config obj file in the output directory.
"""
out_settings = configobj.ConfigObj()
for k, v in self.settings.items():
out_settings[k] = v
out_settings.filename = self.settings['SHARPy']['log_folder'] + '/' + self.settings['SHARPy']['case'] \
+ '.sharpy'
out_settings.write()
@staticmethod
def load_config_file(file_name):
config = configparser.ConfigParser()
config.read(file_name)
return config
| 40.15942 | 116 | 0.623241 |
ace37586c9e2c69517ba944e0a0ed1c2fc1e388f | 1,087 | py | Python | trakt/interfaces/sync/__init__.py | omaralvarez/trakt.py | 93a6beb73cdd37ffb354d2e9c1892dc39d9c4baf | [
"MIT"
] | 11 | 2015-02-01T22:22:48.000Z | 2019-01-24T12:18:07.000Z | trakt/interfaces/sync/__init__.py | omaralvarez/trakt.py | 93a6beb73cdd37ffb354d2e9c1892dc39d9c4baf | [
"MIT"
] | 3 | 2015-03-26T12:18:02.000Z | 2019-02-21T08:12:04.000Z | trakt/interfaces/sync/__init__.py | omaralvarez/trakt.py | 93a6beb73cdd37ffb354d2e9c1892dc39d9c4baf | [
"MIT"
] | 2 | 2016-07-19T22:55:16.000Z | 2019-01-24T12:19:08.000Z | from trakt.core.helpers import deprecated
from trakt.interfaces.base import Interface
# Import child interfaces
from trakt.interfaces.sync.collection import SyncCollectionInterface
from trakt.interfaces.sync.history import SyncHistoryInterface
from trakt.interfaces.sync.playback import SyncPlaybackInterface
from trakt.interfaces.sync.ratings import SyncRatingsInterface
from trakt.interfaces.sync.watched import SyncWatchedInterface
from trakt.interfaces.sync.watchlist import SyncWatchlistInterface
__all__ = [
'SyncInterface',
'SyncCollectionInterface',
'SyncHistoryInterface',
'SyncPlaybackInterface',
'SyncRatingsInterface',
'SyncWatchedInterface',
'SyncWatchlistInterface'
]
class SyncInterface(Interface):
    # Base path for all sync endpoints on the Trakt API.
    path = 'sync'
    def last_activities(self, **kwargs):
        """Fetch the user's last-activity timestamps from the 'last_activities' endpoint."""
        return self.get_data(
            self.http.get('last_activities'),
            **kwargs
        )
    @deprecated("Trakt['sync'].playback() has been moved to Trakt['sync/playback'].get()")
    def playback(self, store=None, **kwargs):
        # Deprecated stub kept only so old callers get a clear error.
        raise NotImplementedError()
| 31.057143 | 90 | 0.75161 |
ace375ec965fc086c3606f9bcfd6c31096385248 | 1,028 | py | Python | google-cloud-sdk/lib/surface/sql/instances/__init__.py | bopopescu/searchparty | afdc2805cb1b77bd5ac9fdd1a76217f4841f0ea6 | [
"Apache-2.0"
] | null | null | null | google-cloud-sdk/lib/surface/sql/instances/__init__.py | bopopescu/searchparty | afdc2805cb1b77bd5ac9fdd1a76217f4841f0ea6 | [
"Apache-2.0"
] | null | null | null | google-cloud-sdk/lib/surface/sql/instances/__init__.py | bopopescu/searchparty | afdc2805cb1b77bd5ac9fdd1a76217f4841f0ea6 | [
"Apache-2.0"
] | 3 | 2017-07-27T18:44:13.000Z | 2020-07-25T17:48:53.000Z | # Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Provide commands for managing Cloud SQL instances."""
from googlecloudsdk.calliope import base
@base.ReleaseTracks(base.ReleaseTrack.GA, base.ReleaseTrack.BETA)
@base.CommandSuggestion('connect', 'sql connect')
class Instances(base.Group):
"""Provide commands for managing Cloud SQL instances.
Provide commands for managing Cloud SQL instances including creating,
configuring, restarting, and deleting instances.
"""
| 36.714286 | 74 | 0.772374 |
ace37621423c1c01c6b37c8cd02ef259e5cfe924 | 21,314 | py | Python | spyder/plugins/explorer/widgets/fileassociations.py | Earthman100/spyder | 949ce0f9100a69504c70a5678e8589a05aee7d38 | [
"MIT"
] | 7,956 | 2015-02-17T01:19:09.000Z | 2022-03-31T21:52:15.000Z | spyder/plugins/explorer/widgets/fileassociations.py | Earthman100/spyder | 949ce0f9100a69504c70a5678e8589a05aee7d38 | [
"MIT"
] | 16,326 | 2015-02-16T23:15:21.000Z | 2022-03-31T23:34:34.000Z | spyder/plugins/explorer/widgets/fileassociations.py | Earthman100/spyder | 949ce0f9100a69504c70a5678e8589a05aee7d38 | [
"MIT"
] | 1,918 | 2015-02-20T19:26:26.000Z | 2022-03-31T19:03:25.000Z | # -*- coding: utf-8 -*-
#
# Copyright © Spyder Project Contributors
# Licensed under the terms of the MIT License
# (see spyder/__init__.py for details)
"""
File associations widget for use in global and project preferences.
"""
from __future__ import print_function
# Standard library imports
import os
import re
import sys
# Third party imports
from qtpy.compat import getopenfilename
from qtpy.QtCore import QRegExp, QSize, Qt, Signal
from qtpy.QtGui import QCursor, QRegExpValidator
from qtpy.QtWidgets import (QApplication, QDialog, QDialogButtonBox,
QHBoxLayout, QLabel, QLineEdit,
QListWidget, QListWidgetItem, QPushButton,
QVBoxLayout, QWidget)
# Local imports
from spyder.config.base import _
from spyder.utils.encoding import is_text_file
from spyder.utils.programs import (get_application_icon,
get_installed_applications,
parse_linux_desktop_entry)
class InputTextDialog(QDialog):
"""Input text dialog with regex validation."""
def __init__(self, parent=None, title='', label=''):
"""Input text dialog with regex validation."""
super(InputTextDialog, self).__init__(parent=parent)
self._reg = None
self._regex = None
# Widgets
self.label = QLabel()
self.lineedit = QLineEdit()
self.button_box = QDialogButtonBox(QDialogButtonBox.Ok
| QDialogButtonBox.Cancel)
self.button_ok = self.button_box.button(QDialogButtonBox.Ok)
self.button_cancel = self.button_box.button(QDialogButtonBox.Cancel)
# Widget setup
self.setWindowTitle(title)
self.setMinimumWidth(500) # FIXME: use metrics
self.label.setText(label)
# Layout
layout = QVBoxLayout()
layout.addWidget(self.label)
layout.addWidget(self.lineedit)
layout.addSpacing(24) # FIXME: use metrics
layout.addWidget(self.button_box)
self.setLayout(layout)
# Signals
self.button_ok.clicked.connect(self.accept)
self.button_cancel.clicked.connect(self.reject)
self.lineedit.textChanged.connect(self.validate)
self.validate()
def validate(self):
"""Validate content."""
text = self.text().strip()
is_valid = bool(text)
if self._reg:
res = self._reg.match(text)
if res:
text_matched = res.group(0)
is_valid = is_valid and text_matched == text
else:
is_valid = False
self.button_ok.setEnabled(is_valid)
def set_regex_validation(self, regex):
"""Set the regular expression to validate content."""
self._regex = regex
self._reg = re.compile(regex, re.IGNORECASE)
validator = QRegExpValidator(QRegExp(regex))
self.lineedit.setValidator(validator)
def text(self):
"""Return the text of the lineedit."""
return self.lineedit.text()
def set_text(self, text):
"""Set the text of the lineedit."""
self.lineedit.setText(text)
self.validate()
class ApplicationsDialog(QDialog):
    """Dialog for selection of installed system/user applications."""
    def __init__(self, parent=None):
        """Dialog for selection of installed system/user applications."""
        super(ApplicationsDialog, self).__init__(parent=parent)
        # Widgets
        self.label = QLabel()
        self.label_browse = QLabel()
        self.edit_filter = QLineEdit()
        self.list = QListWidget()
        self.button_browse = QPushButton(_('Browse...'))
        self.button_box = QDialogButtonBox(QDialogButtonBox.Ok
                                           | QDialogButtonBox.Cancel)
        self.button_ok = self.button_box.button(QDialogButtonBox.Ok)
        self.button_cancel = self.button_box.button(QDialogButtonBox.Cancel)
        # Widget setup
        self.setWindowTitle(_('Applications'))
        self.edit_filter.setPlaceholderText(_('Type to filter by name'))
        self.list.setIconSize(QSize(16, 16))  # FIXME: Use metrics
        # Layout
        layout = QVBoxLayout()
        layout.addWidget(self.label)
        layout.addWidget(self.edit_filter)
        layout.addWidget(self.list)
        layout_browse = QHBoxLayout()
        layout_browse.addWidget(self.button_browse)
        layout_browse.addWidget(self.label_browse)
        layout.addLayout(layout_browse)
        layout.addSpacing(12)  # FIXME: Use metrics
        layout.addWidget(self.button_box)
        self.setLayout(layout)
        # Signals
        self.edit_filter.textChanged.connect(self.filter)
        self.button_browse.clicked.connect(lambda x: self.browse())
        self.button_ok.clicked.connect(self.accept)
        self.button_cancel.clicked.connect(self.reject)
        self.list.currentItemChanged.connect(self._refresh)
        self._refresh()
        # Populate the list with the applications installed on this system.
        self.setup()
    def setup(self, applications=None):
        """Load installed applications.

        If `applications` is None the system is scanned via
        `get_installed_applications`; otherwise the given mapping of
        name -> path is used directly (useful for tests).
        """
        QApplication.setOverrideCursor(QCursor(Qt.WaitCursor))
        self.list.clear()
        if applications is None:
            apps = get_installed_applications()
        else:
            apps = applications
        for app in sorted(apps, key=lambda x: x.lower()):
            fpath = apps[app]
            icon = get_application_icon(fpath)
            item = QListWidgetItem(icon, app)
            item.setToolTip(fpath)
            # Store the executable path on the item for later retrieval.
            item.fpath = fpath
            self.list.addItem(item)
        # FIXME: Use metrics
        self.list.setMinimumWidth(self.list.sizeHintForColumn(0) + 24)
        QApplication.restoreOverrideCursor()
        self._refresh()
    def _refresh(self):
        """Refresh the status of buttons on widget."""
        # Ok is only meaningful when an application is selected.
        self.button_ok.setEnabled(self.list.currentRow() != -1)
    def browse(self, fpath=None):
        """Prompt user to select an application not found on the list.

        `fpath` may be passed programmatically (e.g. from tests); otherwise
        a platform-specific file dialog is shown. Valid selections are
        added to, or selected in, the list.
        """
        app = None
        item = None
        if sys.platform == 'darwin':
            if fpath is None:
                basedir = '/Applications/'
                filters = _('Applications (*.app)')
                title = _('Select application')
                fpath, __ = getopenfilename(self, title, basedir, filters)
            # On macOS an application is a `.app` directory bundle.
            if fpath and fpath.endswith('.app') and os.path.isdir(fpath):
                app = os.path.basename(fpath).split('.app')[0]
                for row in range(self.list.count()):
                    item = self.list.item(row)
                    if app == item.text() and fpath == item.fpath:
                        break
                else:
                    item = None
        elif os.name == 'nt':
            if fpath is None:
                basedir = 'C:\\'
                filters = _('Applications (*.exe *.bat *.com)')
                title = _('Select application')
                fpath, __ = getopenfilename(self, title, basedir, filters)
            if fpath:
                # A valid `.bat` must be text; `.exe`/`.com` must be binary.
                check_1 = fpath.endswith('.bat') and is_text_file(fpath)
                check_2 = (fpath.endswith(('.exe', '.com'))
                           and not is_text_file(fpath))
                if check_1 or check_2:
                    app = os.path.basename(fpath).capitalize().rsplit('.')[0]
                    for row in range(self.list.count()):
                        item = self.list.item(row)
                        if app == item.text() and fpath == item.fpath:
                            break
                    else:
                        item = None
        else:
            # Linux and other POSIX: applications are xdg `.desktop` entries.
            if fpath is None:
                basedir = '/'
                filters = _('Applications (*.desktop)')
                title = _('Select application')
                fpath, __ = getopenfilename(self, title, basedir, filters)
            if fpath and fpath.endswith(('.desktop')) and is_text_file(fpath):
                entry_data = parse_linux_desktop_entry(fpath)
                app = entry_data['name']
                for row in range(self.list.count()):
                    item = self.list.item(row)
                    if app == item.text() and fpath == item.fpath:
                        break
                else:
                    item = None
        if fpath:
            if item:
                # Application already listed: just select it.
                self.list.setCurrentItem(item)
            elif app:
                # New valid application: add it to the list and select it.
                icon = get_application_icon(fpath)
                item = QListWidgetItem(icon, app)
                item.fpath = fpath
                self.list.addItem(item)
                self.list.setCurrentItem(item)
        self.list.setFocus()
        self._refresh()
    def filter(self, text):
        """Filter the list of applications based on text."""
        # NOTE(review): the `text` argument delivered by the textChanged
        # signal is ignored; the filter is re-read from the line edit.
        text = self.edit_filter.text().lower().strip()
        for row in range(self.list.count()):
            item = self.list.item(row)
            item.setHidden(text not in item.text().lower())
        self._refresh()
    def set_extension(self, extension):
        """Set the extension on the label of the dialog."""
        self.label.setText(_('Choose the application for files of type ')
                           + extension)
    @property
    def application_path(self):
        """Return the selected application path to executable."""
        item = self.list.currentItem()
        path = item.fpath if item else ''
        return path
    @property
    def application_name(self):
        """Return the selected application name."""
        item = self.list.currentItem()
        text = item.text() if item else ''
        return text
class FileAssociationsWidget(QWidget):
    """Widget to add applications association to file extensions."""
    # This allows validating a single extension entry or a list of comma
    # separated values (eg `*.json` or `*.json,*.txt,MANIFEST.in`)
    _EXTENSIONS_LIST_REGEX = (r'(?:(?:\*{1,1}|\w+)\.\w+)'
                              r'(?:,(?:\*{1,1}|\w+)\.\w+){0,20}')
    sig_data_changed = Signal(dict)
    def __init__(self, parent=None):
        """Widget to add applications association to file extensions."""
        super(FileAssociationsWidget, self).__init__(parent=parent)
        # Variables
        # `_data` maps extension string -> list of (app name, app path)
        # pairs; the first entry of each list is the default application.
        self._data = {}
        self._dlg_applications = None
        self._dlg_input = None
        self._regex = re.compile(self._EXTENSIONS_LIST_REGEX)
        # Widgets
        self.label = QLabel(
            _("Here you can associate different external applications "
              "to open specific file extensions (e.g. .txt "
              "files with Notepad++ or .csv files with Excel).")
        )
        self.label.setWordWrap(True)
        self.label_extensions = QLabel(_('File types:'))
        self.list_extensions = QListWidget()
        self.button_add = QPushButton(_('Add'))
        self.button_remove = QPushButton(_('Remove'))
        self.button_edit = QPushButton(_('Edit'))
        self.label_applications = QLabel(_('Associated applications:'))
        self.list_applications = QListWidget()
        self.button_add_application = QPushButton(_('Add'))
        self.button_remove_application = QPushButton(_('Remove'))
        self.button_default = QPushButton(_('Set default'))
        # Layout
        layout_extensions = QHBoxLayout()
        layout_extensions.addWidget(self.list_extensions, 4)
        layout_buttons_extensions = QVBoxLayout()
        layout_buttons_extensions.addWidget(self.button_add)
        layout_buttons_extensions.addWidget(self.button_remove)
        layout_buttons_extensions.addWidget(self.button_edit)
        layout_buttons_extensions.addStretch()
        layout_applications = QHBoxLayout()
        layout_applications.addWidget(self.list_applications, 4)
        layout_buttons_applications = QVBoxLayout()
        layout_buttons_applications.addWidget(self.button_add_application)
        layout_buttons_applications.addWidget(self.button_remove_application)
        layout_buttons_applications.addWidget(self.button_default)
        layout_buttons_applications.addStretch()
        layout_extensions.addLayout(layout_buttons_extensions, 2)
        layout_applications.addLayout(layout_buttons_applications, 2)
        layout = QVBoxLayout()
        layout.addWidget(self.label)
        layout.addWidget(self.label_extensions)
        layout.addLayout(layout_extensions)
        layout.addWidget(self.label_applications)
        layout.addLayout(layout_applications)
        self.setLayout(layout)
        # Signals
        self.button_add.clicked.connect(lambda: self.add_association())
        self.button_remove.clicked.connect(self.remove_association)
        self.button_edit.clicked.connect(self.edit_association)
        self.button_add_application.clicked.connect(self.add_application)
        self.button_remove_application.clicked.connect(
            self.remove_application)
        self.button_default.clicked.connect(self.set_default_application)
        self.list_extensions.currentRowChanged.connect(self.update_extensions)
        self.list_extensions.itemDoubleClicked.connect(self.edit_association)
        self.list_applications.currentRowChanged.connect(
            self.update_applications)
        self._refresh()
        self._create_association_dialog()
    def _refresh(self):
        """Refresh the status of buttons on widget."""
        self.setUpdatesEnabled(False)
        for widget in [self.button_remove, self.button_add_application,
                       self.button_edit,
                       self.button_remove_application, self.button_default]:
            widget.setDisabled(True)
        item = self.list_extensions.currentItem()
        if item:
            # An extension is selected: re-enable the per-extension actions.
            for widget in [self.button_remove, self.button_add_application,
                           self.button_remove_application, self.button_edit]:
                widget.setDisabled(False)
        self.update_applications()
        self.setUpdatesEnabled(True)
    def _add_association(self, value):
        """Add association helper."""
        # Check value is not present
        for row in range(self.list_extensions.count()):
            item = self.list_extensions.item(row)
            if item.text().strip() == value.strip():
                break
        else:
            item = QListWidgetItem(value)
            self.list_extensions.addItem(item)
            self.list_extensions.setCurrentItem(item)
        self._refresh()
    def _add_application(self, app_name, fpath):
        """Add application helper."""
        app_not_found_text = _(' (Application not found!)')
        for row in range(self.list_applications.count()):
            item = self.list_applications.item(row)
            # Ensure the actual name is checked without the `app not found`
            # additional text, in case app was not found
            item_text = item.text().replace(app_not_found_text, '').strip()
            if item and item_text == app_name:
                break
        else:
            icon = get_application_icon(fpath)
            # Flag applications whose executable no longer exists on disk.
            if not (os.path.isfile(fpath) or os.path.isdir(fpath)):
                app_name += app_not_found_text
            item = QListWidgetItem(icon, app_name)
            self.list_applications.addItem(item)
            self.list_applications.setCurrentItem(item)
        if not (os.path.isfile(fpath) or os.path.isdir(fpath)):
            item.setToolTip(_('Application not found!'))
    def _update_extensions(self):
        """Update extensions list."""
        self.list_extensions.clear()
        # NOTE(review): the loop variable `_` shadows the gettext `_` import
        # within this method; harmless here since `_` is unused, but fragile.
        for extension, _ in sorted(self._data.items()):
            self._add_association(extension)
        # Select first item
        self.list_extensions.setCurrentRow(0)
        self.update_extensions()
        self.update_applications()
    def _create_association_dialog(self):
        """Create input extension dialog and save it to for reuse."""
        self._dlg_input = InputTextDialog(
            self,
            title=_('File association'),
            label=(
                _('Enter new file extension. You can add several values '
                  'separated by commas.<br>Examples include:')
                + '<ul><li><code>*.txt</code></li>'
                + '<li><code>*.json,*.csv</code></li>'
                + '<li><code>*.json,README.md</code></li></ul>'
            ),
        )
        self._dlg_input.set_regex_validation(self._EXTENSIONS_LIST_REGEX)
    def load_values(self, data=None):
        """
        Load file associations data.
        Format {'*.ext': [['Application Name', '/path/to/app/executable']]}
        `/path/to/app/executable` is an executable app on mac and windows and
        a .desktop xdg file on linux.
        """
        self._data = {} if data is None else data
        self._update_extensions()
    def add_association(self, value=None):
        """Add extension file association."""
        if value is None:
            # Interactive path: prompt the user with the input dialog.
            text, ok_pressed = '', False
            self._dlg_input.set_text('')
            if self._dlg_input.exec_():
                text = self._dlg_input.text()
                ok_pressed = True
        else:
            # Programmatic path: validate `value` against the regex.
            match = self._regex.match(value)
            text, ok_pressed = value, bool(match)
        if ok_pressed:
            if text not in self._data:
                self._data[text] = []
            self._add_association(text)
            self.check_data_changed()
    def remove_association(self):
        """Remove extension file association."""
        if self._data:
            if self.current_extension:
                self._data.pop(self.current_extension)
                self._update_extensions()
                self._refresh()
                self.check_data_changed()
    def edit_association(self):
        """Edit text of current selected association."""
        old_text = self.current_extension
        self._dlg_input.set_text(old_text)
        if self._dlg_input.exec_():
            new_text = self._dlg_input.text()
            if old_text != new_text:
                # Move the application list over to the renamed extension.
                values = self._data.pop(self.current_extension)
                self._data[new_text] = values
                self._update_extensions()
                self._refresh()
                for row in range(self.list_extensions.count()):
                    item = self.list_extensions.item(row)
                    if item.text() == new_text:
                        self.list_extensions.setCurrentItem(item)
                        break
                self.check_data_changed()
    def add_application(self):
        """Add an application to the selected extension."""
        if self.current_extension:
            if self._dlg_applications is None:
                # Lazily create the (expensive) applications dialog.
                self._dlg_applications = ApplicationsDialog(self)
            self._dlg_applications.set_extension(self.current_extension)
            if self._dlg_applications.exec_():
                app_name = self._dlg_applications.application_name
                fpath = self._dlg_applications.application_path
                self._data[self.current_extension].append((app_name, fpath))
                self._add_application(app_name, fpath)
                self.check_data_changed()
    def remove_application(self):
        """Remove application from selected extension."""
        current_row = self.list_applications.currentRow()
        values = self._data.get(self.current_extension)
        if values and current_row != -1:
            values.pop(current_row)
            self.update_extensions()
            self.update_applications()
            self.check_data_changed()
    def set_default_application(self):
        """
        Set the selected item on the application list as default application.
        """
        current_row = self.list_applications.currentRow()
        if current_row != -1:
            values = self._data[self.current_extension]
            # The default application is the first entry of the list.
            value = values.pop(current_row)
            values.insert(0, value)
            self._data[self.current_extension] = values
            self.update_extensions()
            self.check_data_changed()
    def update_extensions(self, row=None):
        """Update extensions list after additions or deletions."""
        self.list_applications.clear()
        for extension, values in self._data.items():
            if extension.strip() == self.current_extension:
                for (app_name, fpath) in values:
                    self._add_application(app_name, fpath)
                break
        self.list_applications.setCurrentRow(0)
        self._refresh()
    def update_applications(self, row=None):
        """Update application list after additions or deletions."""
        current_row = self.list_applications.currentRow()
        # Row 0 is already the default; disable "Set default" for it.
        self.button_default.setEnabled(current_row != 0)
    def check_data_changed(self):
        """Check if data has changed and emit signal as needed."""
        self.sig_data_changed.emit(self._data)
    @property
    def current_extension(self):
        """Return the current selected extension text."""
        item = self.list_extensions.currentItem()
        if item:
            return item.text()
    @property
    def data(self):
        """Return the current file associations data."""
        return self._data.copy()
| 37.99287 | 78 | 0.603172 |
ace376262d71ba57e279112099be824bffd1a0be | 550 | py | Python | play.py | sohyongsheng/gol | d5d37621c2b3ed6d0718b6ccd8d433c9b44d97ec | [
"MIT"
] | 1 | 2020-05-12T14:35:31.000Z | 2020-05-12T14:35:31.000Z | play.py | sohyongsheng/gol | d5d37621c2b3ed6d0718b6ccd8d433c9b44d97ec | [
"MIT"
] | null | null | null | play.py | sohyongsheng/gol | d5d37621c2b3ed6d0718b6ccd8d433c9b44d97ec | [
"MIT"
] | null | null | null | from gol.board import Board
from gol.controller import Controller
from gol.errors import Error
from gol.view import View
from gol.parse import Parser
if __name__ == '__main__':
    # Command-line entry point: parse arguments, build the board from the
    # seed file, then hand control to the game-of-life controller loop.
    try:
        parser = Parser()
        args = parser.parse()
        view = View()
        board = Board(
            size = args.size,
            config_path = args.seed_path,
            wrap_around = args.wrap_around,
        )
        controller = Controller(view, board, args.time_delay)
        controller.play()
    except Error as error:
        # Application-level errors are reported to the user, not re-raised.
        print(error)
| 26.190476 | 61 | 0.614545 |
ace376728acd4f5626deb277351eea4b705849a7 | 13,840 | py | Python | ibis/backends/dask/execution/util.py | GrapeBaBa/ibis | 507bb14efdcfd719a0487ee23fe1c85c177517f6 | [
"Apache-2.0"
] | null | null | null | ibis/backends/dask/execution/util.py | GrapeBaBa/ibis | 507bb14efdcfd719a0487ee23fe1c85c177517f6 | [
"Apache-2.0"
] | null | null | null | ibis/backends/dask/execution/util.py | GrapeBaBa/ibis | 507bb14efdcfd719a0487ee23fe1c85c177517f6 | [
"Apache-2.0"
] | null | null | null | from typing import Any, Callable, Dict, List, Optional, Tuple, Type, Union
import dask.dataframe as dd
import dask.delayed
import pandas as pd
from dask.dataframe.groupby import SeriesGroupBy
import ibis.backends.pandas.execution.util as pd_util
import ibis.common.exceptions as com
import ibis.expr.operations as ops
import ibis.util
from ibis.backends.pandas.client import ibis_dtype_to_pandas
from ibis.backends.pandas.trace import TraceTwoLevelDispatcher
from ibis.expr import datatypes as dt
from ibis.expr import types as ir
from ibis.expr.scope import Scope
from ibis.expr.typing import TimeContext
from ..core import execute
# A single dispatch rule: the tuple of argument types to register together
# with the execution function to run for them.
DispatchRule = Tuple[Tuple[Union[Type, Tuple], ...], Callable]
# Maps an ibis operation (or tuple of operations) to the list of dispatch
# rules to register for it; consumed by `register_types_to_dispatcher`.
TypeRegistrationDict = Dict[
    Union[Type[ops.Node], Tuple[Type[ops.Node], ...]], List[DispatchRule]
]
def register_types_to_dispatcher(
    dispatcher: TraceTwoLevelDispatcher, types: TypeRegistrationDict
):
    """Register many (operation, signature, function) rules at once.

    Many dask operations reuse the pandas backend implementations
    unchanged; this helper performs those registrations in bulk.
    """
    for op_node, rules in types.items():
        for signature, handler in rules:
            dispatcher.register(op_node, *signature)(handler)
def make_meta_series(dtype, name=None, index_name=None):
    """Build an empty, typed ``pd.Series`` suitable as Dask ``meta``.

    The series carries the requested ``dtype``, series ``name`` and index
    ``index_name`` but contains no data.
    """
    empty_index = pd.Index([], name=index_name)
    return pd.Series([], index=empty_index, dtype=dtype, name=name)
def make_selected_obj(gs: SeriesGroupBy) -> Union[dd.DataFrame, dd.Series]:
    """
    When you select a column from a `pandas.DataFrameGroupBy` the underlying
    `.obj` reflects that selection. This function emulates that behavior.
    """
    # TODO profile this for data shuffling
    # We specify drop=False in the case that we are grouping on the column
    # we are selecting
    if isinstance(gs.obj, dd.Series):
        # The grouped object is already a single column; nothing to select.
        return gs.obj
    else:
        # NOTE(review): relies on the private ``_meta._selected_obj`` dask
        # attribute to recover the selected column name — confirm on upgrade.
        return gs.obj.set_index(gs.index, drop=False)[
            gs._meta._selected_obj.name
        ]
def coerce_to_output(
    result: Any, expr: ir.Expr, index: Optional[pd.Index] = None
) -> Union[dd.Series, dd.DataFrame]:
    """Cast the result to either a Series of DataFrame, renaming as needed.
    Reimplementation of `coerce_to_output` in the pandas backend, but
    creates dask objects and adds special handling for dd.Scalars.
    Parameters
    ----------
    result: Any
        The result to cast
    expr: ibis.expr.types.Expr
        The expression associated with the result
    index: pd.Index
        Optional. If passed, scalar results will be broadcasted according
        to the index.
    Returns
    -------
    result: A `dd.Series` or `dd.DataFrame`
    Raises
    ------
    ValueError
        If unable to coerce result
    Examples
    --------
    For dataframe outputs, see ``_coerce_to_dataframe``. Examples below use
    pandas objects for legibility, but functionality is the same on dask
    objects.
    >>> coerce_to_output(pd.Series(1), expr)
    0    1
    Name: result, dtype: int64
    >>> coerce_to_output(1, expr)
    0    1
    Name: result, dtype: int64
    >>> coerce_to_output(1, expr, [1,2,3])
    1    1
    2    1
    3    1
    Name: result, dtype: int64
    >>> coerce_to_output([1,2,3], expr)
    0    [1, 2, 3]
    Name: result, dtype: object
    """
    result_name = expr.get_name()
    dataframe_exprs = (
        ir.DestructColumn,
        ir.StructColumn,
        ir.DestructScalar,
        ir.StructScalar,
    )
    if isinstance(expr, dataframe_exprs):
        # Struct/destruct expressions expand into multiple output columns.
        return _coerce_to_dataframe(
            result, expr.type().names, expr.type().types
        )
    elif isinstance(result, (pd.Series, dd.Series)):
        # Series from https://github.com/ibis-project/ibis/issues/2711
        return result.rename(result_name)
    elif isinstance(expr.op(), ops.Reduction):
        if isinstance(result, dd.core.Scalar):
            # wrap the scalar in a series
            out_dtype = _pandas_dtype_from_dd_scalar(result)
            out_len = 1 if index is None else len(index)
            meta = make_meta_series(dtype=out_dtype, name=result_name)
            # Specify `divisions` so that the created Dask object has
            # known divisions (to be concatenatable with Dask objects
            # created using `dd.from_pandas`)
            series = dd.from_delayed(
                _wrap_dd_scalar(result, result_name, out_len),
                meta=meta,
                divisions=(0, out_len - 1),
            )
            return series
        else:
            # The reduction already produced a concrete (pandas) value;
            # delegate wrapping/broadcasting to the pandas backend helper.
            return dd.from_pandas(
                pd_util.coerce_to_output(result, expr, index), npartitions=1
            )
    else:
        raise ValueError(f"Cannot coerce_to_output. Result: {result}")
@dask.delayed
def _wrap_dd_scalar(x, name=None, series_len=1):
    """Delayed helper: broadcast scalar ``x`` into a pandas Series of
    length ``series_len`` named ``name``."""
    return pd.Series([x] * series_len, name=name)
def _pandas_dtype_from_dd_scalar(x: dd.core.Scalar):
    """Infer the pandas dtype of a Dask scalar."""
    try:
        return x.dtype
    except AttributeError:
        # Not every dd scalar exposes `.dtype`; fall back to wrapping the
        # scalar's meta in a Series and reading the inferred dtype.
        return pd.Series([x._meta]).dtype
def _coerce_to_dataframe(
    data: Any,
    column_names: List[str],
    types: List[dt.DataType],
) -> dd.DataFrame:
    """
    Clone of ibis.util.coerce_to_dataframe that deals well with dask types
    Coerce the following shapes to a DataFrame.
    The following shapes are allowed:
    (1) A list/tuple of Series -> each series is a column
    (2) A list/tuple of scalars -> each scalar is a column
    (3) A Dask Series of list/tuple -> each element inside becomes a column
    (4) dd.DataFrame -> the data is unchanged
    Examples
    --------
    Note: these examples demonstrate functionality with pandas objects in order
    to make them more legible, but this works the same with dask.
    >>> coerce_to_dataframe(pd.DataFrame({'a': [1, 2, 3]}), ['b'])
       b
    0  1
    1  2
    2  3
    >>> coerce_to_dataframe(pd.Series([[1, 2, 3]]), ['a', 'b', 'c'])
       a  b  c
    0  1  2  3
    >>> coerce_to_dataframe(pd.Series([range(3), range(3)]), ['a', 'b', 'c'])
       a  b  c
    0  0  1  2
    1  0  1  2
    >>> coerce_to_dataframe([pd.Series(x) for x in [1, 2, 3]], ['a', 'b', 'c'])
       a  b  c
    0  1  2  3
    >>> coerce_to_dataframe([1, 2, 3], ['a', 'b', 'c'])
       a  b  c
    0  1  2  3
    """
    if isinstance(data, dd.DataFrame):
        # Shape (4): already a dataframe; only the columns get renamed below.
        result = data
    elif isinstance(data, dd.Series):
        # This takes a series where the values are iterables and converts each
        # value into its own row in a new dataframe.
        # NOTE - We add a detailed meta here so we do not drop the key index
        # downstream. This seems to be fixed in versions of dask > 2020.12.0
        dtypes = map(ibis_dtype_to_pandas, types)
        series = [
            data.apply(
                _select_item_in_iter,
                selection=i,
                meta=make_meta_series(dtype, index_name=data.index.name),
            )
            for i, dtype in enumerate(dtypes)
        ]
        result = dd.concat(series, axis=1)
    elif isinstance(data, (tuple, list)):
        if len(data) == 0:
            # Empty input: build an empty frame with the requested columns.
            result = dd.from_pandas(
                pd.DataFrame(columns=column_names), npartitions=1
            )
        elif isinstance(data[0], dd.Series):
            # Shape (1): each series becomes a column.
            result = dd.concat(data, axis=1)
        else:
            # Shape (2): each scalar becomes a one-row column.
            result = dd.from_pandas(
                pd.concat([pd.Series([v]) for v in data], axis=1),
                npartitions=1,
            )
    else:
        raise ValueError(f"Cannot coerce to DataFrame: {data}")
    result.columns = column_names
    return result
def _select_item_in_iter(t, selection):
return t[selection]
def safe_concat(dfs: List[Union[dd.Series, dd.DataFrame]]) -> dd.DataFrame:
    """
    Concat a list of `dd.Series` or `dd.DataFrame` objects into one DataFrame
    This will use `DataFrame.concat` if all pieces are the same length.
    Otherwise we will iterratively join.
    When axis=1 and divisions are unknown, Dask `DataFrame.concat` can only
    operate on objects with equal lengths, otherwise it will raise a
    ValueError in `concat_and_check`.
    See https://github.com/dask/dask/blob/2c2e837674895cafdb0612be81250ef2657d947e/dask/dataframe/multi.py#L907 # noqa
    Note - Repeatedly joining dataframes is likely to be quite slow, but this
    should be hit rarely in real usage. A situtation that triggeres this slow
    path is aggregations where aggregations return different numbers of rows
    (see `test_aggregation_group_by` for a specific example).
    TODO - performance.
    """
    if len(dfs) == 1:
        # Single piece: just make sure a DataFrame (not Series) is returned.
        maybe_df = dfs[0]
        if isinstance(maybe_df, dd.Series):
            return maybe_df.to_frame()
        else:
            return maybe_df
    lengths = list(map(len, dfs))
    if len(set(lengths)) != 1:
        # Lengths differ: fall back to pairwise outer joins (slow path).
        result = dfs[0].to_frame()
        for other in dfs[1:]:
            result = result.join(other.to_frame(), how="outer")
    else:
        # All pieces share the same length: a single axis-1 concat is safe.
        result = dd.concat(dfs, axis=1)
    return result
def compute_sort_key(
    key: ops.SortKey,
    data: dd.DataFrame,
    timecontext: Optional[TimeContext] = None,
    scope: Scope = None,
    **kwargs,
):
    """
    Note - we use this function instead of the pandas.execution.util so that we
    use the dask `execute` method
    This function borrows the logic in the pandas backend. ``by`` can be a
    string or an expression. If ``by.get_name()`` raises an exception, we must
    ``execute`` the expression and sort by the new derived column.
    """
    by = key.to_expr()
    # Unique name for the (possibly derived) sort column.
    name = ibis.util.guid()
    try:
        if isinstance(by, str):
            return name, data[by]
        return name, data[by.get_name()]
    except com.ExpressionError:
        # `by` is a computed expression: execute it against `data` to
        # materialize the column that will be sorted on.
        if scope is None:
            scope = Scope()
        scope = scope.merge_scopes(
            Scope({t: data}, timecontext) for t in by.op().root_tables()
        )
        new_column = execute(by, scope=scope, **kwargs)
        new_column.name = name
        return name, new_column
def compute_sorted_frame(
    df: dd.DataFrame,
    order_by: ir.SortExpr,
    timecontext: Optional[TimeContext] = None,
    **kwargs,
) -> dd.DataFrame:
    """Return ``df`` sorted according to ``order_by``.

    The sort key is materialized as a temporary column, used as the index
    to sort, then dropped again so the result keeps its original columns.
    """
    sort_col_name, temporary_column = compute_sort_key(
        order_by.op(), df, timecontext, **kwargs
    )
    result = df.assign(**{sort_col_name: temporary_column})
    result = result.set_index(sort_col_name).reset_index(drop=True)
    return result
def assert_identical_grouping_keys(*args):
    """Raise ``AssertionError`` unless all arguments share one grouping key.

    Each argument is expected to expose dask's groupby ``index`` attribute.
    """
    keys = [arg.index for arg in args]
    # Depending on whether groupby was called like groupby("col") or
    # groupby(["col"]) the index is a string or a list; lists are
    # unhashable, so normalize them to tuples before building the set.
    if isinstance(keys[0], list):
        keys = [tuple(key) for key in keys]
    distinct = set(keys)
    if len(distinct) != 1:
        raise AssertionError(
            f"Differing grouping keys passed: {distinct}"
        )
def add_partitioned_sorted_column(
    df: Union[dd.DataFrame, dd.Series],
) -> dd.DataFrame:
    """Add a column that is already partitioned and sorted
    This columns acts as if we had a global index across the distributed data.
    Important properties:
    - Each row has a unique id (i.e. a value in this column)
    - IDs within each partition are already sorted
    - Any id in partition N_{t} is less than any id in partition N_{t+1}
    We do this by designating a sufficiently large space of integers per
    partition via a base and adding the existing index to that base. See
    `helper` below.
    Though the space per partition is bounded, real world usage should not
    hit these bounds. We also do not explicity deal with overflow in the
    bounds.
    Parameters
    ----------
    df : dd.DataFrame
        Dataframe to add the column to
    Returns
    -------
    dd.DataFrame
        New dask dataframe with sorted partitioned index
    Examples
    --------
    >>> ddf = dd.from_pandas(pd.DataFrame({'a': [1, 2,3, 4]}), npartitions=2)
    >>> ddf
    Dask DataFrame Structure:
                  a
    npartitions=2
    0          int64
    2            ...
    3            ...
    Dask Name: from_pandas, 2 task
    >>> ddf.compute()
       a
    0  1
    1  2
    2  3
    3  4
    >>> ddf = add_partitioned_sorted_column(ddf)
    >>> ddf
    Dask DataFrame Structure:
                  a
    npartitions=2
    0          int64
    4294967296   ...
    8589934592   ...
    Dask Name: set_index, 8 tasks
    Name: result, dtype: int64
    >>> ddf.compute()
                a
    _ibis_index
    0           1
    1           2
    4294967296  3
    4294967297  4
    """
    if isinstance(df, dd.Series):
        df = df.to_frame()
    col_name = "_ibis_index"
    if col_name in df.columns:
        raise ValueError(f"Column {col_name} is already present in DataFrame")
    def helper(
        df: Union[pd.Series, pd.DataFrame],
        partition_info: Dict[str, Any],  # automatically injected by dask
        col_name: str,
    ):
        """Assigns a column with a unique id for each row"""
        # Each partition gets 2**32 ids (partition number shifted left by
        # 32 bits), so a partition may hold at most ~2**31 rows safely.
        if len(df) > (2**31):
            raise ValueError(
                f"Too many items in partition {partition_info} to add"
                "partitioned sorted column without overflowing."
            )
        base = partition_info["number"] << 32
        return df.assign(**{col_name: [base + idx for idx in df.index]})
    original_meta = df._meta.dtypes.to_dict()
    new_meta = {**original_meta, **{col_name: "int64"}}
    df = df.reset_index(drop=True)
    df = df.map_partitions(helper, col_name=col_name, meta=new_meta)
    # Divisions include the minimum value of every partition's index and the
    # maximum value of the last partition's index
    divisions = tuple(x << 32 for x in range(df.npartitions + 1))
    df = df.set_index(col_name, sorted=True, divisions=divisions)
    return df
| 31.526196 | 118 | 0.628107 |
ace3771eb4586bab498b6648124aa00ee1331a4d | 2,866 | py | Python | awx/main/tests/functional/api/test_job.py | tota45/awx | 64ac1ee238b33bb45f5d4878c7e6dcc166871b0c | [
"Apache-2.0"
] | null | null | null | awx/main/tests/functional/api/test_job.py | tota45/awx | 64ac1ee238b33bb45f5d4878c7e6dcc166871b0c | [
"Apache-2.0"
] | null | null | null | awx/main/tests/functional/api/test_job.py | tota45/awx | 64ac1ee238b33bb45f5d4878c7e6dcc166871b0c | [
"Apache-2.0"
] | null | null | null | import pytest
from awx.api.versioning import reverse
from awx.main.models import JobTemplate, User
@pytest.mark.django_db
def test_extra_credentials(get, organization_factory, job_template_factory, credential):
    # A credential attached to the job template should be reported by the
    # job's extra-credentials endpoint after the job is created.
    objs = organization_factory("org", superusers=['admin'])
    jt = job_template_factory("jt", organization=objs.organization,
                              inventory='test_inv', project='test_proj').job_template
    jt.credentials.add(credential)
    jt.save()
    job = jt.create_unified_job()
    url = reverse('api:job_extra_credentials_list', kwargs={'version': 'v2', 'pk': job.pk})
    response = get(url, user=objs.superusers.admin)
    # Exactly the one credential added above should be listed.
    assert response.data.get('count') == 1
@pytest.mark.django_db
def test_job_relaunch_permission_denied_response(
        post, get, inventory, project, credential, net_credential, machine_credential):
    # A user with execute permission can normally relaunch, but not when the
    # job was launched with prompted fields they cannot themselves provide.
    jt = JobTemplate.objects.create(name='testjt', inventory=inventory, project=project)
    jt.credentials.add(machine_credential)
    jt_user = User.objects.create(username='jobtemplateuser')
    jt.execute_role.members.add(jt_user)
    job = jt.create_unified_job()
    # User capability is shown for this
    r = get(job.get_absolute_url(), jt_user, expect=200)
    assert r.data['summary_fields']['user_capabilities']['start']
    # Job has prompted extra_credential, launch denied w/ message
    job.launch_config.credentials.add(net_credential)
    r = post(reverse('api:job_relaunch', kwargs={'pk':job.pk}), {}, jt_user, expect=403)
    assert 'launched with prompted fields' in r.data['detail']
    assert 'do not have permission' in r.data['detail']
@pytest.mark.django_db
@pytest.mark.parametrize("status,hosts", [
    ('all', 'host1,host2,host3'),
    ('failed', 'host3'),
])
def test_job_relaunch_on_failed_hosts(post, inventory, project, machine_credential, admin_user, status, hosts):
    # Relaunching with hosts='failed' should restrict the new job's limit to
    # only the hosts that failed in the prior run; 'all' keeps every host.
    h1 = inventory.hosts.create(name='host1') # no-op
    h2 = inventory.hosts.create(name='host2') # changed host
    h3 = inventory.hosts.create(name='host3') # failed host
    jt = JobTemplate.objects.create(
        name='testjt', inventory=inventory,
        project=project
    )
    jt.credentials.add(machine_credential)
    job = jt.create_unified_job(_eager_fields={'status': 'failed', 'limit': 'host1,host2,host3'})
    # A playbook_on_stats event plus per-host summaries simulate a finished
    # run where only host3 recorded failures.
    job.job_events.create(event='playbook_on_stats')
    job.job_host_summaries.create(host=h1, failed=False, ok=1, changed=0, failures=0, host_name=h1.name)
    job.job_host_summaries.create(host=h2, failed=False, ok=0, changed=1, failures=0, host_name=h2.name)
    job.job_host_summaries.create(host=h3, failed=False, ok=0, changed=0, failures=1, host_name=h3.name)
    r = post(
        url=reverse('api:job_relaunch', kwargs={'pk':job.pk}),
        data={'hosts': status},
        user=admin_user,
        expect=201
    )
    assert r.data.get('limit') == hosts
| 41.536232 | 111 | 0.706909 |
ace37862d9b252bff093481379b546d8db1cf247 | 835 | py | Python | proj05/proj05_01.py | pixlcoder/proj1 | 381e33b216996c6905ec8b6cee9b2f85e3a6eec8 | [
"MIT"
] | 4 | 2018-06-11T16:13:15.000Z | 2018-06-11T16:13:18.000Z | proj05/proj05_01.py | pixlcoder/proj1 | 381e33b216996c6905ec8b6cee9b2f85e3a6eec8 | [
"MIT"
] | null | null | null | proj05/proj05_01.py | pixlcoder/proj1 | 381e33b216996c6905ec8b6cee9b2f85e3a6eec8 | [
"MIT"
] | null | null | null | # Name:
# Date:
# proj05: functions and lists
# Part I
def divisors(num):
    """
    Takes a number and returns all divisors of the number, ordered least to greatest
    :param num: int
    :return: list (int)
    """
    # Every divisor d of num satisfies 1 <= d <= |num|, so testing each
    # candidate in increasing order yields a sorted result. For num == 0
    # the range is empty and [] is returned.
    num = abs(num)
    return [d for d in range(1, num + 1) if num % d == 0]
def prime(num):
    """
    Takes a number and returns True if the number is prime, otherwise False
    :param num: int
    :return: bool
    """
    if num < 2:
        # 0, 1 and negative numbers are not prime by definition.
        return False
    # Trial-divide only up to sqrt(num): any factor pair of num has one
    # member at or below the square root.
    for candidate in range(2, int(num ** 0.5) + 1):
        if num % candidate == 0:
            return False
    return True
# Part II
def intersection(lst1, lst2):
    """
    Takes two lists and returns a list of the elements in common between the lists
    :param lst1: list, any type
    :param lst2: list, any type
    :return: list, any type
    """
    # Preserve lst1's order and drop duplicates. Membership is tested with
    # `in` (not a set) because elements may be unhashable ("any type").
    common = []
    for element in lst1:
        if element in lst2 and element not in common:
            common.append(element)
    return common
| 19.418605 | 84 | 0.643114 |
ace3797c77732150541cae5558dcdbed8201ef5b | 930 | py | Python | test/python/xarray7.py | dtip/magics | 3247535760ca962f859c203295b508d442aca4ed | [
"ECL-2.0",
"Apache-2.0"
] | 7 | 2019-03-19T09:32:41.000Z | 2022-02-07T13:20:33.000Z | test/python/xarray7.py | dtip/magics | 3247535760ca962f859c203295b508d442aca4ed | [
"ECL-2.0",
"Apache-2.0"
] | 2 | 2021-03-30T05:37:20.000Z | 2021-08-17T13:58:04.000Z | test/python/xarray7.py | dtip/magics | 3247535760ca962f859c203295b508d442aca4ed | [
"ECL-2.0",
"Apache-2.0"
] | 5 | 2019-03-19T10:43:46.000Z | 2021-09-09T14:28:39.000Z | # (C) Copyright 1996-2019 ECMWF.
#
# This software is licensed under the terms of the Apache Licence Version 2.0
# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0.
# In applying this licence, ECMWF does not waive the privileges and immunities
# granted to it by virtue of its status as an intergovernmental organisation nor
# does it submit to any jurisdiction.
import cftime
import xarray as xr
import numpy as np
from Magics import macro as magics
# Basename of the generated plot; also the reference name for this test case.
ref = "xarray7"
# Open a NetCDF file whose coordinates are 2-D latitude/longitude arrays.
# assumes '2dlatlon.nc' is present in the working directory — TODO confirm.
ds = xr.open_dataset('2dlatlon.nc')
# Build the timestamp to select; the dataset uses a 365-day (no-leap) model
# calendar, so cftime.DatetimeNoLeap is required rather than datetime.
time = cftime.DatetimeNoLeap(2081, 2, 15, 0, 0, 0, 0, 5, 46)
# PNG output driver: no page number suffix, file named after `ref`.
png = magics.output(output_name_first_page_number = "off", output_name = ref)
# Wrap the xarray dataset for Magics, plotting the "sic" variable and fixing
# the non-spatial dimensions ("bnds" and "time") to single slices.
data = magics.mxarray(
    xarray_dataset = ds,
    xarray_variable_name = "sic",
    xarray_dimension_settings = {"bnds": 1.0, "time": time})
# Let ECMWF's automatic styling pick the contour settings.
contour = magics.mcont(contour_automatic_setting = "ecmwf")
# Render the data with contours and coastlines into the PNG.
magics.plot(png, data, contour, magics.mcoast())
| 34.444444 | 80 | 0.734409 |
ace37a3bb859b479d4455acd07bca5199cc22a30 | 467 | py | Python | setup.py | TheBB/WordFreq | b5de4a106e0c83d963b895d3242adc0f53256da0 | [
"MIT"
] | null | null | null | setup.py | TheBB/WordFreq | b5de4a106e0c83d963b895d3242adc0f53256da0 | [
"MIT"
] | null | null | null | setup.py | TheBB/WordFreq | b5de4a106e0c83d963b895d3242adc0f53256da0 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
from distutils.core import setup
setup(
name='WordFreq',
version='0.1',
description='Simple natural language word counter',
author='Eivind Fonn',
author_email='evfonn@gmail.com',
license='MIT',
url='https://github.com/TheBB/wordfreq',
py_modules=['wordfreq'],
entry_points={
'console_scripts': [
'wordfreq=wordfreq:wordfreq',
],
},
install_requires=['click', 'nltk'],
)
| 22.238095 | 55 | 0.618844 |
ace37bb026390d9f523665888bb4ffb3255c510a | 933 | py | Python | migrations/0012_operatingsystems.py | ercasta/CoderDojo-Mobile-Toolbox | 94d9fb845923a5c698f4bdbe5c8ecedd68f3c0a1 | [
"Apache-2.0"
] | 4 | 2018-09-21T19:50:49.000Z | 2018-09-22T10:57:23.000Z | migrations/0012_operatingsystems.py | ercasta/CoderDojo-Mobile-Toolbox | 94d9fb845923a5c698f4bdbe5c8ecedd68f3c0a1 | [
"Apache-2.0"
] | 5 | 2018-07-17T17:04:58.000Z | 2018-12-07T14:31:09.000Z | migrations/0012_operatingsystems.py | ercasta/CoderDojo-Mobile-Toolbox | 94d9fb845923a5c698f4bdbe5c8ecedd68f3c0a1 | [
"Apache-2.0"
] | 1 | 2018-07-06T23:49:01.000Z | 2018-07-06T23:49:01.000Z | # Generated by Django 2.0.6 on 2018-08-18 19:02
from django.db import migrations
def create_os_names(apps, schema_editor):
    """Seed the OperatingSystem table with an initial set of OS entries."""
    # Use the historical model via `apps` so the migration keeps working even
    # if the current OperatingSystem model class changes later.
    operating_system = apps.get_model('coderdojomobile', 'OperatingSystem')
    seed_rows = [
        ('Windows 10', 'Windows 10 system'),
        ('Windows 7', 'Windows 7 system'),
        ('Windows XP', 'Windows XP system'),
        ('Mac OS X', 'Mac OS X'),
        ('Mac OS 10.5', 'Mac OS 10.5'),
    ]
    for title, description in seed_rows:
        operating_system.objects.create(title=title, description=description)
class Migration(migrations.Migration):
    # Depends on the schema migration that created the OperatingSystem
    # (and SoftwareTool) models this data migration populates.
    dependencies = [
        ('coderdojomobile', '0011_operatingsystem_softwaretool'),
    ]
    operations = [
        # Forward-only data load; no reverse function is supplied, so this
        # migration cannot be unapplied cleanly.
        migrations.RunPython(create_os_names),
    ]
| 32.172414 | 77 | 0.595927 |
ace37c1994ceff0b86a0c74f03f25e5db74b0eac | 20,634 | py | Python | reinforcement_learning/rl_deepracer_robomaker_coach_gazebo/src/markov/evaluation_worker.py | danabens/amazon-sagemaker-examples | 5133eb81be55564ab34fa6f5302cc84cd9b3988f | [
"Apache-2.0"
] | 2 | 2020-05-19T09:05:15.000Z | 2021-05-30T14:00:12.000Z | reinforcement_learning/rl_deepracer_robomaker_coach_gazebo/src/markov/evaluation_worker.py | danabens/amazon-sagemaker-examples | 5133eb81be55564ab34fa6f5302cc84cd9b3988f | [
"Apache-2.0"
] | 4 | 2020-09-26T00:53:42.000Z | 2022-02-10T01:41:50.000Z | reinforcement_learning/rl_deepracer_robomaker_coach_gazebo/src/markov/evaluation_worker.py | danabens/amazon-sagemaker-examples | 5133eb81be55564ab34fa6f5302cc84cd9b3988f | [
"Apache-2.0"
] | 1 | 2020-04-12T17:19:16.000Z | 2020-04-12T17:19:16.000Z | '''This module is responsible for launching evaluation jobs'''
import argparse
import json
import logging
import os
import time
import rospy
from rl_coach.base_parameters import TaskParameters
from rl_coach.core_types import EnvironmentSteps
from rl_coach.data_stores.data_store import SyncFiles
from markov import utils
from markov.agent_ctrl.constants import ConfigParams
from markov.agents.rollout_agent_factory import create_rollout_agent, create_obstacles_agent, create_bot_cars_agent
from markov.agents.utils import RunPhaseSubject
from markov.defaults import reward_function
from markov.deepracer_exceptions import GenericRolloutError, GenericRolloutException
from markov.environments.constants import VELOCITY_TOPICS, STEERING_TOPICS, LINK_NAMES
from markov.metrics.s3_metrics import EvalMetrics
from markov.metrics.s3_writer import S3Writer
from markov.metrics.iteration_data import IterationData
from markov.metrics.constants import MetricsS3Keys, IterationDataLocalFileNames, ITERATION_DATA_LOCAL_FILE_PATH
from markov.s3_boto_data_store import S3BotoDataStore, S3BotoDataStoreParameters
from markov.s3_client import SageS3Client
from markov.sagemaker_graph_manager import get_graph_manager
from markov.rollout_utils import PhaseObserver, signal_robomaker_markov_package_ready
from markov.rospy_wrappers import ServiceProxyWrapper
from markov.camera_utils import configure_camera
from markov.utils_parse_model_metadata import parse_model_metadata
from markov.checkpoint_utils import TEMP_RENAME_FOLDER, wait_for_checkpoints, modify_checkpoint_variables
from std_srvs.srv import Empty, EmptyRequest
logger = utils.Logger(__name__, logging.INFO).get_logger()
# S3 object key (under the model prefix) where evaluation sim-trace CSVs go.
EVALUATION_SIMTRACE_DATA_S3_OBJECT_KEY = "sim_inference_logs/EvaluationSimTraceData.csv"
MIN_RESET_COUNT = 10000 #TODO: change when console passes float("inf")
# Local scratch directories, created up-front so downstream code can assume
# they exist when writing per-agent files.
CUSTOM_FILES_PATH = "./custom_files"
if not os.path.exists(CUSTOM_FILES_PATH):
    os.makedirs(CUSTOM_FILES_PATH)
if not os.path.exists(TEMP_RENAME_FOLDER):
    os.makedirs(TEMP_RENAME_FOLDER)
def evaluation_worker(graph_manager, number_of_trials, task_parameters, s3_writers, is_continuous):
    """ Evaluation worker function
    Arguments:
        graph_manager {[MultiAgentGraphManager]} -- [Graph manager of multiagent graph manager]
        number_of_trials {[int]} -- [Number of trails you want to run the evaluation]
        task_parameters {[TaskParameters]} -- [Information of the checkpoint, gpu/cpu, framework etc of rlcoach]
        s3_writers {[S3Writer]} -- [Information to upload to the S3 bucket all the simtrace and mp4]
        is_continuous {bool} -- [The termination condition for the car]
    """
    checkpoint_dirs = list()
    agent_names = list()
    subscribe_to_save_mp4_topic, unsubscribe_from_save_mp4_topic = list(), list()
    subscribe_to_save_mp4, unsubscribe_from_save_mp4 = list(), list()
    # Per-agent setup: derive the checkpoint directory (single-agent runs use
    # the restore path directly; multi-agent runs get one sub-dir per agent)
    # and the ROS service names that start/stop MP4 capture for that racecar.
    for agent_param in graph_manager.agents_params:
        _checkpoint_dir = task_parameters.checkpoint_restore_path if len(graph_manager.agents_params) == 1\
            else os.path.join(task_parameters.checkpoint_restore_path, agent_param.name)
        agent_names.append(agent_param.name)
        checkpoint_dirs.append(_checkpoint_dir)
        racecar_name = 'racecar' if len(agent_param.name.split("_")) == 1\
            else "racecar_{}".format(agent_param.name.split("_")[1])
        subscribe_to_save_mp4_topic.append("/{}/save_mp4/subscribe_to_save_mp4".format(racecar_name))
        unsubscribe_from_save_mp4_topic.append("/{}/save_mp4/unsubscribe_from_save_mp4".format(racecar_name))
    # Block until the model checkpoints are downloaded, then rewrite checkpoint
    # variable names so each agent's graph restores under its own scope.
    wait_for_checkpoints(checkpoint_dirs, graph_manager.data_store)
    modify_checkpoint_variables(checkpoint_dirs, agent_names)
    # Make the clients that will allow us to pause and unpause the physics
    rospy.wait_for_service('/gazebo/pause_physics')
    rospy.wait_for_service('/gazebo/unpause_physics')
    pause_physics = ServiceProxyWrapper('/gazebo/pause_physics', Empty)
    unpause_physics = ServiceProxyWrapper('/gazebo/unpause_physics', Empty)
    # Wait for every MP4 capture service to come up before creating proxies.
    for mp4_sub, mp4_unsub in zip(subscribe_to_save_mp4_topic, unsubscribe_from_save_mp4_topic):
        rospy.wait_for_service(mp4_sub)
        rospy.wait_for_service(mp4_unsub)
    for mp4_sub, mp4_unsub in zip(subscribe_to_save_mp4_topic, unsubscribe_from_save_mp4_topic):
        subscribe_to_save_mp4.append(ServiceProxyWrapper(mp4_sub, Empty))
        unsubscribe_from_save_mp4.append(ServiceProxyWrapper(mp4_unsub, Empty))
    graph_manager.create_graph(task_parameters=task_parameters, stop_physics=pause_physics,
                               start_physics=unpause_physics, empty_service_call=EmptyRequest)
    logger.info("Graph manager successfully created the graph: Unpausing physics")
    unpause_physics(EmptyRequest())
    graph_manager.reset_internal_state(True)
    # MP4 capture is optional: enabled only when an MP4 S3 bucket is set.
    is_save_mp4_enabled = rospy.get_param('MP4_S3_BUCKET', None)
    if is_save_mp4_enabled:
        for subscribe_mp4 in subscribe_to_save_mp4:
            subscribe_mp4(EmptyRequest())
    # Continuous mode runs a single evaluate() call (the environment decides
    # when to stop); otherwise run one evaluate() call per requested trial.
    if is_continuous:
        graph_manager.evaluate(EnvironmentSteps(1))
    else:
        for _ in range(number_of_trials):
            graph_manager.evaluate(EnvironmentSteps(1))
    if is_save_mp4_enabled:
        for unsubscribe_mp4 in unsubscribe_from_save_mp4:
            unsubscribe_mp4(EmptyRequest())
    # Flush sim-trace/video artifacts to S3 before tearing down.
    for s3_writer in s3_writers:
        s3_writer.upload_to_s3()
    time.sleep(1)
    pause_physics(EmptyRequest())
    # Close the down the job
    utils.cancel_simulation_job(os.environ.get('AWS_ROBOMAKER_SIMULATION_JOB_ARN'),
                                rospy.get_param('AWS_REGION'))
def main():
    """ Main function for evaluation worker

    Parses CLI/ROS-parameter configuration, downloads per-agent model
    metadata and hyperparameters from S3, builds the multi-agent graph
    manager and data store, then hands off to evaluation_worker().
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-p', '--preset',
                        help="(string) Name of a preset to run \
                             (class name from the 'presets' directory.)",
                        type=str,
                        required=False)
    parser.add_argument('--s3_bucket',
                        help='list(string) S3 bucket',
                        type=str,
                        nargs='+',
                        default=rospy.get_param("MODEL_S3_BUCKET", ["gsaur-test"]))
    parser.add_argument('--s3_prefix',
                        help='list(string) S3 prefix',
                        type=str,
                        nargs='+',
                        default=rospy.get_param("MODEL_S3_PREFIX", ["sagemaker"]))
    parser.add_argument('--aws_region',
                        help='(string) AWS region',
                        type=str,
                        default=rospy.get_param("AWS_REGION", "us-east-1"))
    parser.add_argument('--number_of_trials',
                        help='(integer) Number of trials',
                        type=int,
                        default=int(rospy.get_param("NUMBER_OF_TRIALS", 10)))
    parser.add_argument('-c', '--local_model_directory',
                        help='(string) Path to a folder containing a checkpoint \
                             to restore the model from.',
                        type=str,
                        default='./checkpoint')
    parser.add_argument('--number_of_resets',
                        help='(integer) Number of resets',
                        type=int,
                        default=int(rospy.get_param("NUMBER_OF_RESETS", 0)))
    parser.add_argument('--penalty_seconds',
                        help='(float) penalty second',
                        type=float,
                        default=float(rospy.get_param("PENALTY_SECONDS", 2.0)))
    parser.add_argument('--job_type',
                        help='(string) job type',
                        type=str,
                        default=rospy.get_param("JOB_TYPE", "EVALUATION"))
    parser.add_argument('--is_continuous',
                        help='(boolean) is continous after lap completion',
                        type=bool,
                        default=utils.str2bool(rospy.get_param("IS_CONTINUOUS", False)))
    parser.add_argument('--race_type',
                        help='(string) Race type',
                        type=str,
                        default=rospy.get_param("RACE_TYPE", "TIME_TRIAL"))
    parser.add_argument('--off_track_penalty',
                        help='(float) off track penalty second',
                        type=float,
                        default=float(rospy.get_param("OFF_TRACK_PENALTY", 2.0)))
    parser.add_argument('--collision_penalty',
                        help='(float) collision penalty second',
                        type=float,
                        default=float(rospy.get_param("COLLISION_PENALTY", 5.0)))

    args = parser.parse_args()
    arg_s3_bucket = args.s3_bucket
    arg_s3_prefix = args.s3_prefix
    logger.info("S3 bucket: %s \n S3 prefix: %s", arg_s3_bucket, arg_s3_prefix)
    metrics_s3_buckets = rospy.get_param('METRICS_S3_BUCKET')
    metrics_s3_object_keys = rospy.get_param('METRICS_S3_OBJECT_KEY')
    # All per-agent configuration lists must be the same length (one entry per
    # agent); collect them so the consistency check below can validate them.
    arg_s3_bucket, arg_s3_prefix = utils.force_list(arg_s3_bucket), utils.force_list(arg_s3_prefix)
    metrics_s3_buckets = utils.force_list(metrics_s3_buckets)
    metrics_s3_object_keys = utils.force_list(metrics_s3_object_keys)
    validate_list = [arg_s3_bucket, arg_s3_prefix, metrics_s3_buckets, metrics_s3_object_keys]
    simtrace_s3_bucket = rospy.get_param('SIMTRACE_S3_BUCKET', None)
    mp4_s3_bucket = rospy.get_param('MP4_S3_BUCKET', None)
    if simtrace_s3_bucket:
        simtrace_s3_object_prefix = rospy.get_param('SIMTRACE_S3_PREFIX')
        simtrace_s3_bucket = utils.force_list(simtrace_s3_bucket)
        simtrace_s3_object_prefix = utils.force_list(simtrace_s3_object_prefix)
        validate_list.extend([simtrace_s3_bucket, simtrace_s3_object_prefix])
    if mp4_s3_bucket:
        mp4_s3_object_prefix = rospy.get_param('MP4_S3_OBJECT_PREFIX')
        mp4_s3_bucket = utils.force_list(mp4_s3_bucket)
        mp4_s3_object_prefix = utils.force_list(mp4_s3_object_prefix)
        validate_list.extend([mp4_s3_bucket, mp4_s3_object_prefix])
    # BUGFIX: the original `all([lambda x: ..., validate_list])` evaluated the
    # truthiness of a lambda object and a list (both always truthy), so the
    # length-consistency check never fired. Compare each list's length to the
    # first list's length instead.
    if not all(len(x) == len(validate_list[0]) for x in validate_list):
        utils.log_and_exit("Eval worker error: Incorrect arguments passed: {}".format(validate_list),
                           utils.SIMAPP_SIMULATION_WORKER_EXCEPTION,
                           utils.SIMAPP_EVENT_ERROR_CODE_500)
    if args.number_of_resets != 0 and args.number_of_resets < MIN_RESET_COUNT:
        raise GenericRolloutException("number of resets is less than {}".format(MIN_RESET_COUNT))

    # Instantiate Cameras (one namespace per agent for multi-agent races).
    if len(arg_s3_bucket) == 1:
        configure_camera(namespaces=['racecar'])
    else:
        configure_camera(namespaces=[
            'racecar_{}'.format(str(agent_index)) for agent_index in range(len(arg_s3_bucket))])

    agent_list = list()
    s3_bucket_dict = dict()
    s3_prefix_dict = dict()
    s3_writers = list()
    # Per-agent setup: download model metadata/hyperparameters, build the
    # agent configuration, metrics config and S3 writers.
    for agent_index, s3_bucket_val in enumerate(arg_s3_bucket):
        agent_name = 'agent' if len(arg_s3_bucket) == 1 else 'agent_{}'.format(str(agent_index))
        racecar_name = 'racecar' if len(arg_s3_bucket) == 1 else 'racecar_{}'.format(str(agent_index))
        s3_bucket_dict[agent_name] = arg_s3_bucket[agent_index]
        s3_prefix_dict[agent_name] = arg_s3_prefix[agent_index]
        s3_client = SageS3Client(bucket=arg_s3_bucket[agent_index],
                                 s3_prefix=arg_s3_prefix[agent_index],
                                 aws_region=args.aws_region)

        # Load the model metadata
        if not os.path.exists(os.path.join(CUSTOM_FILES_PATH, agent_name)):
            os.makedirs(os.path.join(CUSTOM_FILES_PATH, agent_name))
        model_metadata_local_path = os.path.join(os.path.join(CUSTOM_FILES_PATH, agent_name), 'model_metadata.json')
        utils.load_model_metadata(s3_client,
                                  os.path.normpath("%s/model/model_metadata.json" % arg_s3_prefix[agent_index]),
                                  model_metadata_local_path)
        # Handle backward compatibility: older SIMAPP versions need their
        # checkpoints converted before they can be restored.
        _, _, version = parse_model_metadata(model_metadata_local_path)
        if float(version) < float(utils.SIMAPP_VERSION) and \
        not utils.has_current_ckpnt_name(arg_s3_bucket[agent_index], arg_s3_prefix[agent_index], args.aws_region):
            utils.make_compatible(arg_s3_bucket[agent_index], arg_s3_prefix[agent_index], args.aws_region,
                                  SyncFiles.TRAINER_READY.value)
        #Select the optimal model
        utils.do_model_selection(s3_bucket=arg_s3_bucket[agent_index],
                                 s3_prefix=arg_s3_prefix[agent_index],
                                 region=args.aws_region)

        # Download hyperparameters from SageMaker
        if not os.path.exists(agent_name):
            os.makedirs(agent_name)
        hyperparameters_file_success = False
        hyperparams_s3_key = os.path.normpath(arg_s3_prefix[agent_index] + "/ip/hyperparameters.json")
        hyperparameters_file_success = s3_client.download_file(s3_key=hyperparams_s3_key,
                                                              local_path=os.path.join(agent_name,
                                                                                      "hyperparameters.json"))
        # NOTE(review): sm_hyperparams_dict is re-assigned on every loop
        # iteration, so get_graph_manager() below only sees the LAST agent's
        # hyperparameters — confirm this is the intended multi-agent behavior.
        sm_hyperparams_dict = {}
        if hyperparameters_file_success:
            logger.info("Received Sagemaker hyperparameters successfully!")
            with open(os.path.join(agent_name, "hyperparameters.json")) as file:
                sm_hyperparams_dict = json.load(file)
        else:
            logger.info("SageMaker hyperparameters not found.")

        agent_config = {
            'model_metadata': model_metadata_local_path,
            ConfigParams.CAR_CTRL_CONFIG.value: {
                ConfigParams.LINK_NAME_LIST.value: [
                    link_name.replace('racecar', racecar_name) for link_name in LINK_NAMES],
                ConfigParams.VELOCITY_LIST.value : [
                    velocity_topic.replace('racecar', racecar_name) for velocity_topic in VELOCITY_TOPICS],
                ConfigParams.STEERING_LIST.value : [
                    steering_topic.replace('racecar', racecar_name) for steering_topic in STEERING_TOPICS],
                ConfigParams.CHANGE_START.value : utils.str2bool(rospy.get_param('CHANGE_START_POSITION', False)),
                ConfigParams.ALT_DIR.value : utils.str2bool(rospy.get_param('ALTERNATE_DRIVING_DIRECTION', False)),
                ConfigParams.ACTION_SPACE_PATH.value : 'custom_files/'+agent_name+'/model_metadata.json',
                ConfigParams.REWARD.value : reward_function,
                ConfigParams.AGENT_NAME.value : racecar_name,
                ConfigParams.VERSION.value : version,
                ConfigParams.NUMBER_OF_RESETS.value: args.number_of_resets,
                ConfigParams.PENALTY_SECONDS.value: args.penalty_seconds,
                ConfigParams.NUMBER_OF_TRIALS.value: args.number_of_trials,
                ConfigParams.IS_CONTINUOUS.value: args.is_continuous,
                ConfigParams.RACE_TYPE.value: args.race_type,
                ConfigParams.COLLISION_PENALTY.value: args.collision_penalty,
                ConfigParams.OFF_TRACK_PENALTY.value: args.off_track_penalty}}

        metrics_s3_config = {MetricsS3Keys.METRICS_BUCKET.value: metrics_s3_buckets[agent_index],
                             MetricsS3Keys.METRICS_KEY.value: metrics_s3_object_keys[agent_index],
                             # Replaced rospy.get_param('AWS_REGION') to be equal to the argument being passed
                             # or default argument set
                             MetricsS3Keys.REGION.value: args.aws_region,
                             # Replaced rospy.get_param('MODEL_S3_BUCKET') to be equal to the argument being passed
                             # or default argument set
                             MetricsS3Keys.STEP_BUCKET.value: arg_s3_bucket[agent_index],
                             # Replaced rospy.get_param('MODEL_S3_PREFIX') to be equal to the argument being passed
                             # or default argument set
                             MetricsS3Keys.STEP_KEY.value: os.path.join(arg_s3_prefix[agent_index],
                                                                        EVALUATION_SIMTRACE_DATA_S3_OBJECT_KEY)}
        aws_region = rospy.get_param('AWS_REGION', args.aws_region)
        s3_writer_job_info = []
        if simtrace_s3_bucket:
            s3_writer_job_info.append(
                IterationData('simtrace', simtrace_s3_bucket[agent_index], simtrace_s3_object_prefix[agent_index],
                              aws_region,
                              os.path.join(ITERATION_DATA_LOCAL_FILE_PATH, agent_name,
                                           IterationDataLocalFileNames.SIM_TRACE_EVALUATION_LOCAL_FILE.value)))
        if mp4_s3_bucket:
            s3_writer_job_info.extend([
                IterationData('pip', mp4_s3_bucket[agent_index], mp4_s3_object_prefix[agent_index], aws_region,
                              os.path.join(
                                  ITERATION_DATA_LOCAL_FILE_PATH, agent_name,
                                  IterationDataLocalFileNames.CAMERA_PIP_MP4_VALIDATION_LOCAL_PATH.value)),
                IterationData('45degree', mp4_s3_bucket[agent_index], mp4_s3_object_prefix[agent_index], aws_region,
                              os.path.join(
                                  ITERATION_DATA_LOCAL_FILE_PATH, agent_name,
                                  IterationDataLocalFileNames.CAMERA_45DEGREE_MP4_VALIDATION_LOCAL_PATH.value)),
                IterationData('topview', mp4_s3_bucket[agent_index], mp4_s3_object_prefix[agent_index], aws_region,
                              os.path.join(
                                  ITERATION_DATA_LOCAL_FILE_PATH, agent_name,
                                  IterationDataLocalFileNames.CAMERA_TOPVIEW_MP4_VALIDATION_LOCAL_PATH.value))])

        s3_writers.append(S3Writer(job_info=s3_writer_job_info))
        run_phase_subject = RunPhaseSubject()
        agent_list.append(create_rollout_agent(agent_config, EvalMetrics(agent_name, metrics_s3_config),
                                               run_phase_subject))
    agent_list.append(create_obstacles_agent())
    agent_list.append(create_bot_cars_agent())

    # ROS service to indicate all the robomaker markov packages are ready for consumption
    signal_robomaker_markov_package_ready()

    PhaseObserver('/agent/training_phase', run_phase_subject)

    graph_manager, _ = get_graph_manager(hp_dict=sm_hyperparams_dict, agent_list=agent_list,
                                         run_phase_subject=run_phase_subject)

    ds_params_instance = S3BotoDataStoreParameters(aws_region=args.aws_region,
                                                   bucket_names=s3_bucket_dict,
                                                   base_checkpoint_dir=args.local_model_directory,
                                                   s3_folders=s3_prefix_dict)

    graph_manager.data_store = S3BotoDataStore(params=ds_params_instance, graph_manager=graph_manager,
                                               ignore_lock=True)
    graph_manager.env_params.seed = 0

    task_parameters = TaskParameters()
    task_parameters.checkpoint_restore_path = args.local_model_directory

    evaluation_worker(
        graph_manager=graph_manager,
        number_of_trials=args.number_of_trials,
        task_parameters=task_parameters,
        s3_writers=s3_writers,
        is_continuous=args.is_continuous
    )
if __name__ == '__main__':
    try:
        rospy.init_node('rl_coach', anonymous=True)
        main()
    except ValueError as err:
        # A ValueError whose signature matches a bad checkpoint means the
        # stored model files were modified/corrupted: report as user error
        # (4xx); anything else is an internal error (5xx).
        if utils.is_error_bad_ckpnt(err):
            utils.log_and_exit("User modified model: {}".format(err),
                               utils.SIMAPP_SIMULATION_WORKER_EXCEPTION,
                               utils.SIMAPP_EVENT_ERROR_CODE_400)
        else:
            utils.log_and_exit("Eval worker value error: {}".format(err),
                               utils.SIMAPP_SIMULATION_WORKER_EXCEPTION,
                               utils.SIMAPP_EVENT_ERROR_CODE_500)
    except GenericRolloutError as ex:
        ex.log_except_and_exit()
    except GenericRolloutException as ex:
        ex.log_except_and_exit()
    except Exception as ex:
        # Top-level boundary: log any unexpected failure and terminate the
        # simulation job rather than letting the node crash silently.
        utils.log_and_exit("Eval worker error: {}".format(ex),
                           utils.SIMAPP_SIMULATION_WORKER_EXCEPTION,
                           utils.SIMAPP_EVENT_ERROR_CODE_500)
| 55.024 | 116 | 0.656053 |
ace37c4bc7d3ef9390761be94399582a3018234f | 2,478 | py | Python | waveform-modeling/SCRIPTS/sub_06_norm_acousticfeature.py | nii-yamagishilab/project-CURRENNT-scripts | 3de6d32e5e556a71fac1b4010d00b7c000fa5912 | [
"BSD-3-Clause"
] | 65 | 2018-12-27T05:36:33.000Z | 2021-11-08T12:10:14.000Z | waveform-modeling/SCRIPTS/sub_06_norm_acousticfeature.py | nii-yamagishilab/project-CURRENNT-scripts | 3de6d32e5e556a71fac1b4010d00b7c000fa5912 | [
"BSD-3-Clause"
] | 3 | 2019-12-30T23:11:52.000Z | 2021-08-11T12:20:58.000Z | waveform-modeling/SCRIPTS/sub_06_norm_acousticfeature.py | nii-yamagishilab/project-CURRENNT-scripts | 3de6d32e5e556a71fac1b4010d00b7c000fa5912 | [
"BSD-3-Clause"
] | 17 | 2019-04-30T02:29:57.000Z | 2020-12-06T12:32:05.000Z | #!/usr/bin/python
from __future__ import absolute_import
from __future__ import print_function
import os
import sys
import importlib
from ioTools import readwrite as py_rw
try:
    # The meanStd utility lives under <project-dir>/dataProcess, where the
    # project directory is supplied as the 9th command-line argument.
    meanStdToolPath = sys.argv[9] + os.path.sep + 'dataProcess'
    sys.path.append(meanStdToolPath)
    meanStdTool = importlib.import_module('meanStd')
except ImportError:
    # NOTE(review): execution continues after this message, so the later use
    # of meanStdTool would raise NameError; consider exiting here instead.
    print("Cannot found %s/dataProcess" % (sys.argv[9]))
if __name__ == "__main__":
    # Command-line arguments (positional):
    #   1: list file of utterance names
    #   2: comma-separated acoustic feature directories
    #   3: comma-separated feature file extensions
    #   4: '_'-separated per-feature dimensions
    #   5: '_'-separated normalization mask (or non-numeric to normalize all)
    #   6: extension that identifies the F0 feature
    #   7: output directory for the generated .scp file lists
    #   8: output path for the mean/std vector
    dataLst = sys.argv[1]
    acousDirs = sys.argv[2]
    acousExts = sys.argv[3]
    acousDims = sys.argv[4]
    normMask = sys.argv[5]
    f0Ext = sys.argv[6]
    dataLstDir = sys.argv[7]
    mvoutputPath = sys.argv[8]
    acousDirList = acousDirs.split(',')
    acousExtList = acousExts.split(',')
    acousDimList = [int(x) for x in acousDims.split('_')]
    try:
        normMaskList = [[int(x)] for x in normMask.split('_')]
    except ValueError:
        # by default, normalize every feature dimension
        normMaskList = [[] for dimCnt in acousDimList]
    assert len(acousDirList) == len(acousExtList), "Error: unequal length of acousDirs, acousExts"
    assert len(acousExtList) == len(acousDimList), "Error: unequal length of acousDims, acousExts"
    assert len(acousExtList) == len(normMaskList), "Error: unequal length of acousDims, normmask"
    fileListsBuff = []
    dimCnt = 0
    f0Dim = -1
    for acousDir, acousExt, acousDim in zip(acousDirList, acousExtList, acousDimList):
        # remember the starting dimension index of the F0 feature
        if acousExt == f0Ext:
            f0Dim = dimCnt
        # clean the extension (drop a leading dot)
        if acousExt.startswith('.'):
            acousExt = acousExt[1:]
        # write the file script (one feature path per utterance)
        fileOutput = dataLstDir + os.path.sep + acousExt + '.scp'
        fileListsBuff.append(fileOutput)
        writePtr = open(fileOutput, 'w')
        with open(dataLst, 'r') as readfilePtr:
            for line in readfilePtr:
                filename = line.rstrip('\n')
                writePtr.write('%s/%s.%s\n' % (acousDir, filename, acousExt))
        writePtr.close()
        dimCnt = dimCnt + acousDim
    # Compute the masked mean/std over all features and save to mvoutputPath.
    meanStdTool.meanStdNormMask(fileListsBuff, acousDimList, normMaskList, mvoutputPath,
                                f0Dim = f0Dim)
    # The saved vector is [means..., stds...], so the F0 std lives at an
    # offset of dimCnt (total feature dimension) past the F0 mean.
    meanstd_data = py_rw.read_raw_mat(mvoutputPath, 1)
    if f0Dim >= 0:
        print("Please note:")
        print("F0 mean: %f" % (meanstd_data[f0Dim]))
        print("F0 std: %f" % (meanstd_data[dimCnt+f0Dim]))
ace37ce7b95f423a9432cc1f0c4bd970087e1f33 | 11,099 | py | Python | examples/imagenet/dataset.py | ethanluoyc/dm-haiku | 1b8e14d5e2b239523b73f3fa5403666ac45ae435 | [
"Apache-2.0"
] | null | null | null | examples/imagenet/dataset.py | ethanluoyc/dm-haiku | 1b8e14d5e2b239523b73f3fa5403666ac45ae435 | [
"Apache-2.0"
] | null | null | null | examples/imagenet/dataset.py | ethanluoyc/dm-haiku | 1b8e14d5e2b239523b73f3fa5403666ac45ae435 | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 DeepMind Technologies Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""ImageNet dataset with typical pre-processing."""
import enum
import itertools as it
import types
from typing import Generator, Iterable, Mapping, Optional, Sequence, Tuple
import jax
import jax.numpy as jnp
import numpy as np
from packaging import version
import tensorflow.compat.v2 as tf
import tensorflow_datasets as tfds
# A batch maps feature name (e.g. 'images', 'labels') to a numpy array.
Batch = Mapping[str, np.ndarray]
# Per-channel RGB normalization constants expressed in [0, 255] pixel units.
MEAN_RGB = (0.485 * 255, 0.456 * 255, 0.406 * 255)
STDDEV_RGB = (0.229 * 255, 0.224 * 255, 0.225 * 255)
class Split(enum.Enum):
  """Imagenet dataset split."""
  TRAIN = 1
  TRAIN_AND_VALID = 2
  VALID = 3
  TEST = 4

  @classmethod
  def from_string(cls, name: str) -> 'Split':
    """Parses a split name; 'VALIDATION' is accepted as an alias of VALID."""
    key = name.upper()
    if key == 'VALIDATION':
      key = 'VALID'
    # Enum name lookup raises KeyError for unknown names, matching the
    # previous dict-based implementation.
    return cls[key]

  @property
  def num_examples(self):
    """Number of examples available in this split."""
    counts = {
        Split.TRAIN_AND_VALID: 1281167,
        Split.TRAIN: 1271167,
        Split.VALID: 10000,
        Split.TEST: 50000,
    }
    return counts[self]
def _check_min_version(mod: types.ModuleType, min_ver: str):
  """Raises ValueError unless `mod.__version__` is at least `min_ver`."""
  installed = getattr(mod, '__version__')
  if version.parse(installed) >= version.parse(min_ver):
    return
  message = f'{mod.__name__} >= {min_ver} is required, you have {installed}'
  raise ValueError(message)
def check_versions():
  """Fails fast (ValueError) if tensorflow or tfds is older than required."""
  _check_min_version(tf, '2.5.0')
  _check_min_version(tfds, '4.2.0')
def load(
    split: Split,
    *,
    is_training: bool,
    batch_dims: Sequence[int],
    dtype: jnp.dtype = jnp.float32,
    transpose: bool = False,
    zeros: bool = False,
) -> Generator[Batch, None, None]:
  """Loads the given split of the dataset.

  Args:
    split: Which dataset split to load.
    is_training: Whether to shuffle/repeat (training) or run a single,
      deterministic pass (eval).
    batch_dims: Outer batch dimensions, innermost last (e.g. [devices, bs]).
    dtype: Image dtype to emit.
    transpose: If True, emit images as HWCN ("double transpose trick").
    zeros: If True, yield synthetic all-zero batches without touching disk.

  Yields:
    Batches with 'images' and 'labels' keys.
  """
  if zeros:
    h, w, c = 224, 224, 3
    if transpose:
      image_dims = (*batch_dims[:-1], h, w, c, batch_dims[0])
    else:
      image_dims = (*batch_dims, h, w, c)
    batch = {'images': np.zeros(image_dims, dtype=dtype),
             'labels': np.zeros(batch_dims, dtype=np.uint32)}
    if is_training:
      yield from it.repeat(batch)
    else:
      num_batches = split.num_examples // np.prod(batch_dims)
      yield from it.repeat(batch, num_batches)
    # BUGFIX: without this return, the generator fell through into the real
    # tfds pipeline after the finite fake-data batches were exhausted in
    # eval mode (training never reached here since it.repeat is infinite).
    return

  # Each host reads a disjoint shard during training; eval reads everything.
  if is_training:
    start, end = _shard(split, jax.host_id(), jax.host_count())
  else:
    start, end = _shard(split, 0, 1)
  tfds_split = tfds.core.ReadInstruction(_to_tfds_split(split),
                                         from_=start, to=end, unit='abs')
  ds = tfds.load('imagenet2012:5.*.*', split=tfds_split,
                 decoders={'image': tfds.decode.SkipDecoding()})

  total_batch_size = np.prod(batch_dims)

  options = tf.data.Options()
  options.experimental_threading.private_threadpool_size = 48
  options.experimental_threading.max_intra_op_parallelism = 1
  options.experimental_optimization.map_parallelization = True
  if is_training:
    options.experimental_deterministic = False
  ds = ds.with_options(options)

  if is_training:
    if jax.host_count() > 1:
      # Only cache if we are reading a subset of the dataset.
      ds = ds.cache()
    ds = ds.repeat()
    ds = ds.shuffle(buffer_size=10 * total_batch_size, seed=0)

  else:
    if split.num_examples % total_batch_size != 0:
      raise ValueError(f'Test/valid must be divisible by {total_batch_size}')

  def preprocess(example):
    image = _preprocess_image(example['image'], is_training)
    label = tf.cast(example['label'], tf.int32)
    return {'images': image, 'labels': label}

  ds = ds.map(preprocess, num_parallel_calls=tf.data.experimental.AUTOTUNE)

  def transpose_fn(batch):
    # We use the "double transpose trick" to improve performance for TPUs. Note
    # that this (typically) requires a matching HWCN->NHWC transpose in your
    # model code. The compiler cannot make this optimization for us since our
    # data pipeline and model are compiled separately.
    batch = dict(**batch)
    batch['images'] = tf.transpose(batch['images'], (1, 2, 3, 0))
    return batch

  def cast_fn(batch):
    batch = dict(**batch)
    batch['images'] = tf.cast(batch['images'], tf.dtypes.as_dtype(dtype))
    return batch

  for i, batch_size in enumerate(reversed(batch_dims)):
    ds = ds.batch(batch_size)
    if i == 0:
      if transpose:
        ds = ds.map(transpose_fn)  # NHWC -> HWCN
      # NOTE: You may be tempted to move the casting earlier on in the pipeline,
      # but for bf16 some operations will end up silently placed on the TPU and
      # this causes stalls while TF and JAX battle for the accelerator.
      if dtype != jnp.float32:
        ds = ds.map(cast_fn)

  ds = ds.prefetch(tf.data.experimental.AUTOTUNE)

  yield from tfds.as_numpy(ds)
def _device_put_sharded(sharded_tree, devices):
  """Transfers a pytree whose leaves have a leading device axis to `devices`.

  The leading dimension of every leaf indexes the per-device shard.
  """
  leaves, treedef = jax.tree_flatten(sharded_tree)
  n = leaves[0].shape[0]
  # Rebuild one sub-pytree per shard and place shard i on devices[i].
  # NOTE(review): `jax.api` was removed in newer JAX releases; the modern
  # spelling is `jax.device_put_sharded` — confirm against the pinned version.
  return jax.api.device_put_sharded(
      [jax.tree_unflatten(treedef, [l[i] for l in leaves]) for i in range(n)],
      devices)
def double_buffer(ds: Iterable[Batch]) -> Generator[Batch, None, None]:
  """Keeps at least two batches on the accelerator.

  The GPU allocator reuses prior allocations, so each batch typically lands
  in the memory region freed by the batch before it. A host->device copy for
  batch N therefore cannot start until step N-1 completes and frees its
  buffer. By holding one extra batch on the device we only wait on step N-2,
  letting the copy for the next batch overlap with the current step.

  Args:
    ds: Iterable of batches of numpy arrays.

  Yields:
    Batches of sharded device arrays.
  """
  devices = jax.local_devices()
  pending = None
  for host_batch in ds:
    assert host_batch is not None
    device_batch = _device_put_sharded(host_batch, devices)
    if pending is not None:
      yield pending
    pending = device_batch
  # Flush the final in-flight batch once the input is exhausted.
  if pending is not None:
    yield pending
def _to_tfds_split(split: Split) -> tfds.Split:
  """Maps our logical split onto the underlying TFDS split."""
  # NOTE: Imagenet did not release labels for the competition test split, so
  # it has been typical at DeepMind to treat the TFDS VALIDATION split as
  # TEST, and to carve 10k images out of TFDS TRAIN to serve as VALID.
  if split == Split.TEST:
    return tfds.Split.VALIDATION
  assert split in (Split.TRAIN, Split.TRAIN_AND_VALID, Split.VALID)
  return tfds.Split.TRAIN
def _shard(split: Split, shard_index: int, num_shards: int) -> Tuple[int, int]:
  """Returns the [start, end) example range for the given shard."""
  assert shard_index < num_shards
  # np.array_split distributes any remainder over the leading shards.
  all_indices = np.arange(split.num_examples)
  my_indices = np.array_split(all_indices, num_shards)[shard_index]
  first = my_indices[0]
  last_exclusive = my_indices[-1] + 1
  if split == Split.TRAIN:
    # Our TRAIN is TFDS_TRAIN[10000:] (VALID occupies the first 10k examples
    # of the TFDS train split), so shift into TFDS coordinates.
    shift = Split.VALID.num_examples
    first = first + shift
    last_exclusive = last_exclusive + shift
  return first, last_exclusive
def _preprocess_image(
    image_bytes: tf.Tensor,
    is_training: bool,
) -> tf.Tensor:
  """Returns processed and resized images.

  Training: random crop + random horizontal flip. Eval: center crop.
  The result is a normalized float 224x224x3 image.
  """
  if is_training:
    image = _decode_and_random_crop(image_bytes)
    image = tf.image.random_flip_left_right(image)
  else:
    image = _decode_and_center_crop(image_bytes)
  assert image.dtype == tf.uint8
  # NOTE: Bicubic resize (1) casts uint8 to float32 and (2) resizes without
  # clamping overshoots. This means values returned will be outside the range
  # [0.0, 255.0] (e.g. we have observed outputs in the range [-51.1, 336.6]).
  image = tf.image.resize(image, [224, 224], tf.image.ResizeMethod.BICUBIC)
  image = _normalize_image(image)
  return image
def _normalize_image(image: tf.Tensor) -> tf.Tensor:
  """Normalize the image to zero mean and unit variance.

  Uses the per-channel ImageNet statistics (MEAN_RGB / STDDEV_RGB above).
  """
  image -= tf.constant(MEAN_RGB, shape=[1, 1, 3], dtype=image.dtype)
  image /= tf.constant(STDDEV_RGB, shape=[1, 1, 3], dtype=image.dtype)
  return image
def _distorted_bounding_box_crop(
    image_bytes: tf.Tensor,
    *,
    jpeg_shape: tf.Tensor,
    bbox: tf.Tensor,
    min_object_covered: float,
    aspect_ratio_range: Tuple[float, float],
    area_range: Tuple[float, float],
    max_attempts: int,
) -> tf.Tensor:
  """Generates cropped_image using one of the bboxes randomly distorted.

  If no valid crop is found within `max_attempts`, TF falls back to the
  whole image (use_image_if_no_bounding_boxes=True).
  """
  bbox_begin, bbox_size, _ = tf.image.sample_distorted_bounding_box(
      jpeg_shape,
      bounding_boxes=bbox,
      min_object_covered=min_object_covered,
      aspect_ratio_range=aspect_ratio_range,
      area_range=area_range,
      max_attempts=max_attempts,
      use_image_if_no_bounding_boxes=True)

  # Crop the image to the specified bounding box.
  offset_y, offset_x, _ = tf.unstack(bbox_begin)
  target_height, target_width, _ = tf.unstack(bbox_size)
  crop_window = tf.stack([offset_y, offset_x, target_height, target_width])
  # decode_and_crop_jpeg decodes only the crop window (faster than
  # decoding the full image and slicing afterwards).
  image = tf.image.decode_and_crop_jpeg(image_bytes, crop_window, channels=3)
  return image
def _decode_and_random_crop(image_bytes: tf.Tensor) -> tf.Tensor:
  """Make a random crop of 224."""
  jpeg_shape = tf.image.extract_jpeg_shape(image_bytes)
  # Whole-image bbox: the sampler is free to crop anywhere.
  bbox = tf.constant([0.0, 0.0, 1.0, 1.0], dtype=tf.float32, shape=[1, 1, 4])
  image = _distorted_bounding_box_crop(
      image_bytes,
      jpeg_shape=jpeg_shape,
      bbox=bbox,
      min_object_covered=0.1,
      aspect_ratio_range=(3 / 4, 4 / 3),
      area_range=(0.08, 1.0),
      max_attempts=10)
  # A crop identical in shape to the full image means the sampler gave up
  # (returned the whole image) — fall back to a deterministic center crop.
  if tf.reduce_all(tf.equal(jpeg_shape, tf.shape(image))):
    # If the random crop failed fall back to center crop.
    image = _decode_and_center_crop(image_bytes, jpeg_shape)
  return image
def _decode_and_center_crop(
    image_bytes: tf.Tensor,
    jpeg_shape: Optional[tf.Tensor] = None,
) -> tf.Tensor:
  """Crops to center of image with padding then scales.

  The crop side is 224/(224+32) of the shorter image side, matching the
  standard "resize-then-center-crop-224" evaluation pipeline.
  """
  if jpeg_shape is None:
    jpeg_shape = tf.image.extract_jpeg_shape(image_bytes)

  image_height = jpeg_shape[0]
  image_width = jpeg_shape[1]

  padded_center_crop_size = tf.cast(
      ((224 / (224 + 32)) *
       tf.cast(tf.minimum(image_height, image_width), tf.float32)), tf.int32)

  # +1 before the floor-division centers the window on odd-sized remainders.
  offset_height = ((image_height - padded_center_crop_size) + 1) // 2
  offset_width = ((image_width - padded_center_crop_size) + 1) // 2
  crop_window = tf.stack([offset_height, offset_width,
                          padded_center_crop_size, padded_center_crop_size])
  image = tf.image.decode_and_crop_jpeg(image_bytes, crop_window, channels=3)
  return image
| 35.460064 | 80 | 0.69727 |
ace37d8bb68abdba810ac25f59dd63e1209e8957 | 501 | py | Python | test/filesystemtest.py | jfveronelli/sqink | 5e9e6bc6c5c6c00abbc07099bc1fa1ab6cf79577 | [
"Unlicense"
] | 32 | 2015-11-06T02:59:41.000Z | 2021-02-12T02:44:42.000Z | test/filesystemtest.py | jfveronelli/sqink | 5e9e6bc6c5c6c00abbc07099bc1fa1ab6cf79577 | [
"Unlicense"
] | 6 | 2017-04-26T02:30:16.000Z | 2017-10-13T16:53:08.000Z | test/filesystemtest.py | jfveronelli/sqink | 5e9e6bc6c5c6c00abbc07099bc1fa1ab6cf79577 | [
"Unlicense"
] | 4 | 2016-02-01T09:15:05.000Z | 2020-04-30T03:41:04.000Z | # coding:utf-8
from crossknight.sqink.provider.filesystem import FilesystemNoteProvider
from os.path import dirname
from os.path import normpath
from unittest import TestCase
class FilesystemNoteProviderTest(TestCase):
    """Smoke test for listing notes from the bundled resources folder."""

    def testListShouldSucceed(self):
        resources_path = normpath(dirname(__file__) + "/resources")
        notes = FilesystemNoteProvider(resources_path).list()
        self.assertEqual(1, len(notes))
        self.assertEqual("Probando 1,2,3...", notes[0].title)
| 27.833333 | 72 | 0.726547 |
ace37de63471ad7ad0a8b9c8cd46e72f1fe6e8e7 | 5,561 | py | Python | cifra/tests/test_launcher.py | dante-signal31/cifra | 3914822ec49cd238fe31fd58424fa2b058116b9a | [
"BSD-3-Clause"
] | null | null | null | cifra/tests/test_launcher.py | dante-signal31/cifra | 3914822ec49cd238fe31fd58424fa2b058116b9a | [
"BSD-3-Clause"
] | null | null | null | cifra/tests/test_launcher.py | dante-signal31/cifra | 3914822ec49cd238fe31fd58424fa2b058116b9a | [
"BSD-3-Clause"
] | null | null | null | """
Tests for some launcher commands.
"""
import pytest
import os.path
import tempfile
from test_common.fs.temp import temp_dir
import cifra.cifra_launcher as cifra_launcher
import cifra.cipher.substitution as substitution
from cifra.tests.test_dictionaries import loaded_dictionaries, LoadedDictionaries
from cifra.tests.test_caesar import ORIGINAL_MESSAGE as caesar_ORIGINAL_MESSAGE
from cifra.tests.test_caesar import CIPHERED_MESSAGE_KEY_13 as caesar_CIPHERED_MESSAGE_KEY_13
from cifra.tests.test_caesar import TEST_KEY as caesar_TEST_KEY
from cifra.tests.test_substitution import ORIGINAL_MESSAGE as substitution_ORIGINAL_MESSAGE
from cifra.tests.test_substitution import CIPHERED_MESSAGE as substitution_CIPHERED_MESSAGE
from cifra.tests.test_substitution import TEST_KEY as substitution_TEST_KEY
from cifra.tests.test_substitution import TEST_CHARSET as substitution_TEST_CHARSET
@pytest.mark.quick_slow
def test_cipher_caesar(temp_dir, loaded_dictionaries: LoadedDictionaries):
    """`cipher caesar` CLI writes the expected ciphertext to --ciphered_file."""
    with tempfile.NamedTemporaryFile(mode="w") as message_file:
        message_file.write(caesar_ORIGINAL_MESSAGE)
        message_file.flush()
        output_file_pathname = os.path.join(temp_dir, "ciphered_message.txt")
        provided_args = f"cipher caesar {caesar_TEST_KEY} {message_file.name} --ciphered_file {output_file_pathname}".split()
        cifra_launcher.main(provided_args, loaded_dictionaries.temp_dir)
        with open(output_file_pathname, mode="r") as output_file:
            recovered_content = output_file.read()
        assert caesar_CIPHERED_MESSAGE_KEY_13 == recovered_content
@pytest.mark.quick_slow
def test_decipher_caesar(temp_dir, loaded_dictionaries: LoadedDictionaries):
    """`decipher caesar` CLI recovers the original plaintext."""
    with tempfile.NamedTemporaryFile(mode="w") as message_file:
        message_file.write(caesar_CIPHERED_MESSAGE_KEY_13)
        message_file.flush()
        output_file_pathname = os.path.join(temp_dir, "deciphered_message.txt")
        provided_args = f"decipher caesar {caesar_TEST_KEY} {message_file.name} --deciphered_file {output_file_pathname}".split()
        cifra_launcher.main(provided_args, loaded_dictionaries.temp_dir)
        with open(output_file_pathname, mode="r") as output_file:
            recovered_content = output_file.read()
        assert caesar_ORIGINAL_MESSAGE == recovered_content
@pytest.mark.quick_slow
def test_cipher_substitution(temp_dir, loaded_dictionaries: LoadedDictionaries):
    """`cipher substitution` CLI (with explicit charset) writes the expected ciphertext."""
    with tempfile.NamedTemporaryFile(mode="w") as message_file:
        message_file.write(substitution_ORIGINAL_MESSAGE)
        message_file.flush()
        output_file_pathname = os.path.join(temp_dir, "ciphered_message.txt")
        provided_args = f"cipher substitution {substitution_TEST_KEY} {message_file.name} --ciphered_file {output_file_pathname} --charset {substitution_TEST_CHARSET}".split()
        cifra_launcher.main(provided_args, loaded_dictionaries.temp_dir)
        with open(output_file_pathname, mode="r") as output_file:
            recovered_content = output_file.read()
        assert substitution_CIPHERED_MESSAGE == recovered_content
@pytest.mark.quick_slow
def test_decipher_substitution(temp_dir, loaded_dictionaries: LoadedDictionaries):
    """`decipher substitution` CLI (with explicit charset) recovers the plaintext."""
    with tempfile.NamedTemporaryFile(mode="w") as message_file:
        message_file.write(substitution_CIPHERED_MESSAGE)
        message_file.flush()
        output_file_pathname = os.path.join(temp_dir, "deciphered_message.txt")
        provided_args = f"decipher substitution {substitution_TEST_KEY} {message_file.name} --deciphered_file {output_file_pathname} --charset {substitution_TEST_CHARSET}".split()
        cifra_launcher.main(provided_args, loaded_dictionaries.temp_dir)
        with open(output_file_pathname, mode="r") as output_file:
            recovered_content = output_file.read()
        assert substitution_ORIGINAL_MESSAGE == recovered_content
@pytest.mark.quick_slow
def test_attack_caesar(temp_dir, loaded_dictionaries: LoadedDictionaries):
    """`attack caesar` CLI brute-forces the key and recovers the plaintext."""
    with tempfile.NamedTemporaryFile(mode="w") as message_file:
        message_file.write(caesar_CIPHERED_MESSAGE_KEY_13)
        message_file.flush()
        output_file_pathname = os.path.join(temp_dir, "recovered_message.txt")
        provided_args = f"attack caesar {message_file.name} --deciphered_file {output_file_pathname}".split()
        cifra_launcher.main(provided_args, loaded_dictionaries.temp_dir)
        with open(output_file_pathname, mode="r") as output_file:
            recovered_content = output_file.read()
        assert caesar_ORIGINAL_MESSAGE == recovered_content
@pytest.mark.quick_slow
def test_attack_substitution(temp_dir, loaded_dictionaries: LoadedDictionaries):
with tempfile.NamedTemporaryFile(mode="w") as message_file, \
open(os.path.join(os.getcwd(), "cifra", "tests", "resources/english_book_c1.txt")) as english_book:
original_message = english_book.read()
ciphered_text = substitution.cipher(original_message, substitution_TEST_KEY, substitution_TEST_CHARSET)
message_file.write(ciphered_text)
message_file.flush()
output_file_pathname = os.path.join(temp_dir, "recovered_message.txt")
provided_args = f"attack substitution {message_file.name} --deciphered_file {output_file_pathname} --charset {substitution_TEST_CHARSET}".split()
cifra_launcher.main(provided_args, loaded_dictionaries.temp_dir)
with open(output_file_pathname, mode="r") as output_file:
recovered_content = output_file.read()
assert original_message == recovered_content
| 55.61 | 179 | 0.773782 |
ace37e89e079b36050410d77e5cb44e0115db890 | 224 | py | Python | tests/urls.py | exolever/django-earlyparrot | 031b7f2d67e1e78d525f85ae10e82d4cc0c198f3 | [
"MIT"
] | null | null | null | tests/urls.py | exolever/django-earlyparrot | 031b7f2d67e1e78d525f85ae10e82d4cc0c198f3 | [
"MIT"
] | 7 | 2019-09-04T10:59:39.000Z | 2019-12-12T22:16:25.000Z | tests/urls.py | exolever/django-earlyparrot | 031b7f2d67e1e78d525f85ae10e82d4cc0c198f3 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import
from django.conf.urls import url, include
urlpatterns = [
url(r'^api/referral/', include('referral.api.urls', namespace='referral')),
]
| 22.4 | 79 | 0.714286 |
ace37fbed6d60b7165772a91d5e045c52828a215 | 46 | py | Python | vnpy/gateway/onetoken/__init__.py | funrunskypalace/vnpy | 2d87aede685fa46278d8d3392432cc127b797926 | [
"MIT"
] | 19,529 | 2015-03-02T12:17:35.000Z | 2022-03-31T17:18:27.000Z | vnpy/gateway/onetoken/__init__.py | funrunskypalace/vnpy | 2d87aede685fa46278d8d3392432cc127b797926 | [
"MIT"
] | 2,186 | 2015-03-04T23:16:33.000Z | 2022-03-31T03:44:01.000Z | vnpy/gateway/onetoken/__init__.py | funrunskypalace/vnpy | 2d87aede685fa46278d8d3392432cc127b797926 | [
"MIT"
] | 8,276 | 2015-03-02T05:21:04.000Z | 2022-03-31T13:13:13.000Z | from .onetoken_gateway import OnetokenGateway
| 23 | 45 | 0.891304 |
ace37fc68631874363834ea06c6f30bd995aff04 | 7,259 | py | Python | astropy/modeling/optimizers.py | emirkmo/astropy | d96cd45b25ae55117d1bcc9c40e83a82037fc815 | [
"BSD-3-Clause"
] | 1 | 2019-03-11T12:26:49.000Z | 2019-03-11T12:26:49.000Z | astropy/modeling/optimizers.py | emirkmo/astropy | d96cd45b25ae55117d1bcc9c40e83a82037fc815 | [
"BSD-3-Clause"
] | 1 | 2019-10-09T18:54:27.000Z | 2019-10-09T18:54:27.000Z | astropy/modeling/optimizers.py | emirkmo/astropy | d96cd45b25ae55117d1bcc9c40e83a82037fc815 | [
"BSD-3-Clause"
] | null | null | null | # Licensed under a 3-clause BSD style license - see LICENSE.rst
# pylint: disable=invalid-name
"""
Optimization algorithms used in `~astropy.modeling.fitting`.
"""
import abc
import warnings
import numpy as np
from astropy.utils.exceptions import AstropyUserWarning
__all__ = ["Optimization", "SLSQP", "Simplex"]
# Maximum number of iterations
DEFAULT_MAXITER = 100
# Step for the forward difference approximation of the Jacobian
DEFAULT_EPS = np.sqrt(np.finfo(float).eps)
# Default requested accuracy
DEFAULT_ACC = 1e-07
DEFAULT_BOUNDS = (-10 ** 12, 10 ** 12)
class Optimization(metaclass=abc.ABCMeta):
    """
    Base class for optimizers.

    Parameters
    ----------
    opt_method : callable
        Implements optimization method

    Notes
    -----
    The base Optimizer does not support any constraints by default; individual
    optimizers should explicitly set this list to the specific constraints
    it supports.
    """

    # Names of fitting constraints the optimizer understands
    # (e.g. 'bounds', 'fixed'); subclasses override this.
    supported_constraints = []

    def __init__(self, opt_method):
        self._opt_method = opt_method
        # Start from the module-level defaults; callers can override
        # through the properties below.
        self._maxiter = DEFAULT_MAXITER
        self._eps = DEFAULT_EPS
        self._acc = DEFAULT_ACC

    @property
    def maxiter(self):
        """Maximum number of iterations"""
        return self._maxiter

    @maxiter.setter
    def maxiter(self, val):
        """Set maxiter"""
        self._maxiter = val

    @property
    def eps(self):
        """Step for the forward difference approximation of the Jacobian"""
        return self._eps

    @eps.setter
    def eps(self, val):
        """Set eps value"""
        self._eps = val

    @property
    def acc(self):
        """Requested accuracy"""
        return self._acc

    @acc.setter
    def acc(self, val):
        """Set accuracy"""
        self._acc = val

    def __repr__(self):
        fmt = f"{self.__class__.__name__}()"
        return fmt

    @property
    def opt_method(self):
        """ Return the optimization method."""
        return self._opt_method

    @abc.abstractmethod
    def __call__(self):
        """Run the optimization; must be implemented by subclasses."""
        raise NotImplementedError("Subclasses should implement this method")
class SLSQP(Optimization):
    """
    Sequential Least Squares Programming optimization algorithm.

    The algorithm is described in [1]_. It supports tied and fixed
    parameters, as well as bounded constraints. Uses
    `scipy.optimize.fmin_slsqp`.

    References
    ----------
    .. [1] http://www.netlib.org/toms/733
    """

    supported_constraints = ['bounds', 'eqcons', 'ineqcons', 'fixed', 'tied']

    def __init__(self):
        from scipy.optimize import fmin_slsqp
        super().__init__(fmin_slsqp)
        # Populated by __call__ with fmin_slsqp's full_output values.
        self.fit_info = {
            'final_func_val': None,
            'numiter': None,
            'exit_mode': None,
            'message': None
        }

    def __call__(self, objfunc, initval, fargs, **kwargs):
        """
        Run the solver.

        Parameters
        ----------
        objfunc : callable
            objective function
        initval : iterable
            initial guess for the parameter values
        fargs : tuple
            other arguments to be passed to the statistic function;
            ``fargs[0]`` must be the model being fitted
        kwargs : dict
            other keyword arguments to be passed to the solver
        """
        # fmin_slsqp calls its iteration limit 'iter', not 'maxiter'.
        kwargs['iter'] = kwargs.pop('maxiter', self._maxiter)
        if 'epsilon' not in kwargs:
            kwargs['epsilon'] = self._eps
        if 'acc' not in kwargs:
            kwargs['acc'] = self._acc
        # Get the verbosity level
        disp = kwargs.pop('verblevel', None)

        # set the values of constraints to match the requirements of fmin_slsqp
        model = fargs[0]
        pars = [getattr(model, name) for name in model.param_names]
        bounds = [par.bounds for par in pars if not (par.fixed or par.tied)]
        bounds = np.asarray(bounds)
        # fmin_slsqp cannot handle open-ended (None) bounds; substitute
        # very large finite defaults instead.
        for i in bounds:
            if i[0] is None:
                i[0] = DEFAULT_BOUNDS[0]
            if i[1] is None:
                i[1] = DEFAULT_BOUNDS[1]
        # older versions of scipy require this array to be float
        bounds = np.asarray(bounds, dtype=float)
        eqcons = np.array(model.eqcons)
        ineqcons = np.array(model.ineqcons)
        fitparams, final_func_val, numiter, exit_mode, mess = self.opt_method(
            objfunc, initval, args=fargs, full_output=True, disp=disp,
            bounds=bounds, eqcons=eqcons, ieqcons=ineqcons,
            **kwargs)
        self.fit_info['final_func_val'] = final_func_val
        self.fit_info['numiter'] = numiter
        self.fit_info['exit_mode'] = exit_mode
        self.fit_info['message'] = mess

        # Exit mode 0 is the only success code for fmin_slsqp.
        if exit_mode != 0:
            warnings.warn("The fit may be unsuccessful; check "
                          "fit_info['message'] for more information.",
                          AstropyUserWarning)
        return fitparams, self.fit_info
class Simplex(Optimization):
    """
    Nelder-Mead (downhill simplex) algorithm.

    This algorithm [1]_ only uses function values, not derivatives.
    Uses `scipy.optimize.fmin`.

    References
    ----------
    .. [1] Nelder, J.A. and Mead, R. (1965), "A simplex method for function
       minimization", The Computer Journal, 7, pp. 308-313
    """

    supported_constraints = ['bounds', 'fixed', 'tied']

    def __init__(self):
        from scipy.optimize import fmin as simplex
        super().__init__(simplex)
        # Populated by __call__ with fmin's full_output values.
        self.fit_info = {
            'final_func_val': None,
            'numiter': None,
            'exit_mode': None,
            'num_function_calls': None
        }

    def __call__(self, objfunc, initval, fargs, **kwargs):
        """
        Run the solver.

        Parameters
        ----------
        objfunc : callable
            objective function
        initval : iterable
            initial guess for the parameter values
        fargs : tuple
            other arguments to be passed to the statistic function
        kwargs : dict
            other keyword arguments to be passed to the solver
        """
        if 'maxiter' not in kwargs:
            kwargs['maxiter'] = self._maxiter
        # Both 'acc' and 'xtol' are accepted as aliases for the requested
        # accuracy; they are consumed here and passed to fmin as xtol.
        if 'acc' in kwargs:
            self._acc = kwargs['acc']
            kwargs.pop('acc')
        if 'xtol' in kwargs:
            self._acc = kwargs['xtol']
            kwargs.pop('xtol')
        # Get the verbosity level
        disp = kwargs.pop('verblevel', None)

        fitparams, final_func_val, numiter, funcalls, exit_mode = self.opt_method(
            objfunc, initval, args=fargs, xtol=self._acc, disp=disp,
            full_output=True, **kwargs)
        self.fit_info['final_func_val'] = final_func_val
        self.fit_info['numiter'] = numiter
        self.fit_info['exit_mode'] = exit_mode
        self.fit_info['num_function_calls'] = funcalls

        # fmin exit modes: 1 = max function evaluations, 2 = max iterations.
        if self.fit_info['exit_mode'] == 1:
            warnings.warn("The fit may be unsuccessful; "
                          "Maximum number of function evaluations reached.",
                          AstropyUserWarning)
        if self.fit_info['exit_mode'] == 2:
            warnings.warn("The fit may be unsuccessful; "
                          "Maximum number of iterations reached.",
                          AstropyUserWarning)
        return fitparams, self.fit_info
| 29.388664 | 82 | 0.594434 |
ace37fe4e9b52ddf41e0e03fc4a7a9d01f255ee0 | 828 | py | Python | lintcode/589.connecting-graph.py | geemaple/algorithm | 68bc5032e1ee52c22ef2f2e608053484c487af54 | [
"MIT"
] | 177 | 2017-08-21T08:57:43.000Z | 2020-06-22T03:44:22.000Z | lintcode/589.connecting-graph.py | geemaple/algorithm | 68bc5032e1ee52c22ef2f2e608053484c487af54 | [
"MIT"
] | 2 | 2018-09-06T13:39:12.000Z | 2019-06-03T02:54:45.000Z | lintcode/589.connecting-graph.py | geemaple/algorithm | 68bc5032e1ee52c22ef2f2e608053484c487af54 | [
"MIT"
] | 23 | 2017-08-23T06:01:28.000Z | 2020-04-20T03:17:36.000Z | class ConnectingGraph:
"""
@param: n: An integer
"""
    def __init__(self, n):
        # Union-find parent map: every node 1..n starts as its own root.
        self.dict = {}
        for i in range(1, n + 1):
            self.dict[i] = i
    def find(self, node):
        # Return the root of node's component, compressing the path so
        # subsequent lookups are near O(1).
        if self.dict[node] == node:
            return node
        self.dict[node] = self.find(self.dict[node])
        return self.dict[node]
"""
@param: a: An integer
@param: b: An integer
@return: nothing
"""
    def connect(self, a, b):
        # Union: attach a's root under b's root, merging the two components.
        a_root = self.find(a)
        b_root = self.find(b)
        self.dict[a_root] = b_root
"""
@param: a: An integer
@param: b: An integer
@return: A boolean
"""
def query(self, a, b):
# write your code here
return self.find(a) == self.find(b) | 23 | 52 | 0.5157 |
ace3802ba3be9110ca7639cc8bc980c09bb0bdfc | 116,939 | py | Python | airflow/www/views.py | jxiao0/airflow | cdf924dbba51b16042b543e730db897ff7dc1cdb | [
"Apache-2.0"
] | null | null | null | airflow/www/views.py | jxiao0/airflow | cdf924dbba51b16042b543e730db897ff7dc1cdb | [
"Apache-2.0"
] | null | null | null | airflow/www/views.py | jxiao0/airflow | cdf924dbba51b16042b543e730db897ff7dc1cdb | [
"Apache-2.0"
] | 1 | 2020-07-15T09:37:34.000Z | 2020-07-15T09:37:34.000Z | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import ast
import codecs
import copy
import datetime as dt
import itertools
import json
import logging
import math
import os
import pickle
import traceback
from collections import defaultdict
from datetime import timedelta
from functools import wraps
from textwrap import dedent
from six.moves.urllib.parse import quote
import markdown
import pendulum
import sqlalchemy as sqla
from flask import (
abort, jsonify, redirect, url_for, request, Markup, Response,
current_app, render_template, make_response)
from flask import flash
from flask_admin import BaseView, expose, AdminIndexView
from flask_admin.actions import action
from flask_admin.babel import lazy_gettext
from flask_admin.contrib.sqla import ModelView
from flask_admin.form.fields import DateTimeField
from flask_admin.tools import iterdecode
import lazy_object_proxy
from jinja2 import escape
from jinja2.sandbox import ImmutableSandboxedEnvironment
from past.builtins import basestring
from pygments import highlight, lexers
from pygments.formatters import HtmlFormatter
import six
from sqlalchemy import or_, desc, and_, union_all
from wtforms import (
Form, SelectField, TextAreaField, PasswordField,
StringField, IntegerField, validators)
import airflow
from airflow import configuration as conf, LoggingMixin, configuration
from airflow import models
from airflow import settings
from airflow import jobs
from airflow.api.common.experimental.mark_tasks import (set_dag_run_state_to_running,
set_dag_run_state_to_success,
set_dag_run_state_to_failed)
from airflow.exceptions import AirflowException
from airflow.models import BaseOperator, Connection, DagRun, errors, XCom
from airflow.operators.subdag_operator import SubDagOperator
from airflow.ti_deps.dep_context import DepContext, QUEUE_DEPS, SCHEDULER_DEPS
from airflow.utils import timezone
from airflow.utils.dates import infer_time_unit, scale_time_units, parse_execution_date
from airflow.utils.db import create_session, provide_session
from airflow.utils.helpers import alchemy_to_dict, render_log_filename
from airflow.utils.net import get_hostname
from airflow.utils.state import State
from airflow.utils.timezone import datetime
from airflow._vendor import nvd3
from airflow.www import utils as wwwutils
from airflow.www.forms import (DateTimeForm, DateTimeWithNumRunsForm,
DateTimeWithNumRunsWithDagRunsForm)
from airflow.www.validators import GreaterEqualThan
# Hard caps on the number of rows fetched by ad-hoc profiling queries/charts.
QUERY_LIMIT = 100000
CHART_LIMIT = 200000
UTF8_READER = codecs.getreader('utf-8')
# Module-level DagBag shared by all view handlers.
dagbag = models.DagBag(settings.DAGS_FOLDER)
# Re-export auth helpers from the configured login backend.
login_required = airflow.login.login_required
current_user = airflow.login.current_user
logout_user = airflow.login.logout_user
FILTER_BY_OWNER = False
PAGE_SIZE = conf.getint('webserver', 'page_size')
if conf.getboolean('webserver', 'FILTER_BY_OWNER'):
    # filter_by_owner if authentication is enabled and filter_by_owner is true
    FILTER_BY_OWNER = not current_app.config['LOGIN_DISABLED']
def dag_link(v, c, m, p):
    """Flask-Admin column formatter: render ``m.dag_id`` as a graph-view link.

    ``v``/``c``/``p`` are the standard formatter args (view, context, column).
    """
    if m.dag_id is None:
        return Markup()

    kwargs = {'dag_id': m.dag_id}
    # This is called with various objects, TIs, (ORM) DAG - some have this,
    # some don't
    if hasattr(m, 'execution_date'):
        kwargs['execution_date'] = m.execution_date
    url = url_for('airflow.graph', **kwargs)
    return Markup(
        '<a href="{}">{}</a>').format(url, m.dag_id)
def log_url_formatter(v, c, m, p):
    """Column formatter: render a book-icon link to the task instance's log."""
    url = url_for(
        'airflow.log',
        dag_id=m.dag_id,
        task_id=m.task_id,
        execution_date=m.execution_date.isoformat())
    return Markup(
        '<a href="{log_url}">'
        ' <span class="glyphicon glyphicon-book" aria-hidden="true">'
        '</span></a>').format(log_url=url)
def dag_run_link(v, c, m, p):
    """Column formatter: render a DagRun row as a link to its graph view."""
    graph_url = url_for(
        'airflow.graph',
        dag_id=m.dag_id,
        run_id=m.run_id,
        execution_date=m.execution_date)
    return Markup('<a href="{url}">{title}</a>').format(
        url=graph_url, title=m.run_id)
def task_instance_link(v, c, m, p):
    """Column formatter: link to the task instance plus a filter-on-task icon."""
    url = url_for(
        'airflow.task',
        dag_id=m.dag_id,
        task_id=m.task_id,
        execution_date=m.execution_date.isoformat())
    # Second link pre-filters the graph view on this task and its upstream.
    url_root = url_for(
        'airflow.graph',
        dag_id=m.dag_id,
        root=m.task_id,
        execution_date=m.execution_date.isoformat())
    return Markup(
        """
        <span style="white-space: nowrap;">
        <a href="{url}">{m.task_id}</a>
        <a href="{url_root}" title="Filter on this task and upstream">
        <span class="glyphicon glyphicon-filter" style="margin-left: 0px;"
            aria-hidden="true"></span>
        </a>
        </span>
        """).format(**locals())
def state_token(state):
    """Return an HTML label colored according to the given task/dag state."""
    template = Markup(
        '<span class="label" style="background-color:{color};">'
        '{state}</span>')
    return template.format(color=State.color(state), state=state)
def parse_datetime_f(value):
    """Make datetime values timezone-aware; pass everything else through."""
    if isinstance(value, dt.datetime):
        return timezone.make_aware(value)
    return value
def state_f(v, c, m, p):
    """Column formatter: render the row's ``state`` as a colored label."""
    return state_token(m.state)
def duration_f(v, c, m, p):
    """Column formatter: show duration as a timedelta (None while running)."""
    if m.end_date and m.duration:
        return timedelta(seconds=m.duration)
def datetime_f(v, c, m, p):
    """Column formatter: compact ISO timestamp wrapped in ``<nobr>``."""
    attr = getattr(m, p)
    dttm = attr.isoformat() if attr else ''
    # Drop the "YYYY-" prefix when the timestamp is from the current year.
    if timezone.utcnow().isoformat()[:4] == dttm[:4]:
        dttm = dttm[5:]
    return Markup("<nobr>{}</nobr>").format(dttm)
def nobr_f(v, c, m, p):
    """Column formatter: wrap the attribute value in ``<nobr>`` (no wrapping)."""
    return Markup("<nobr>{}</nobr>").format(getattr(m, p))
def label_link(v, c, m, p):
    """Column formatter: link a chart's label to its chart page.

    The chart's ``default_params`` string is parsed with ``literal_eval`` and
    forwarded as query parameters; unparseable params are silently dropped.
    """
    try:
        default_params = ast.literal_eval(m.default_params)
    except Exception:
        default_params = {}
    url = url_for(
        'airflow.chart', chart_id=m.id, iteration_no=m.iteration_no,
        **default_params)
    title = m.label
    return Markup("<a href='{url}'>{title}</a>").format(**locals())
def pool_link(v, c, m, p):
    """Column formatter: link a pool name to its filtered task instance list."""
    ti_url = url_for('taskinstance.index_view', flt1_pool_equals=m.pool)
    return Markup("<a href='{url}'>{title}</a>").format(url=ti_url, title=m.pool)
def pygment_html_render(s, lexer=lexers.TextLexer):
    """Highlight string ``s`` as HTML (with line numbers) using ``lexer``."""
    return highlight(
        s,
        lexer(),
        HtmlFormatter(linenos=True),
    )
def render(obj, lexer):
    """Highlight ``obj`` (a string, or a list/dict of strings) as HTML.

    Lists and dicts are rendered item by item with a small header per entry.
    """
    out = ""
    if isinstance(obj, basestring):
        out += pygment_html_render(obj, lexer)
    elif isinstance(obj, (tuple, list)):
        for i, s in enumerate(obj):
            out += "<div>List item #{}</div>".format(i)
            out += "<div>" + pygment_html_render(s, lexer) + "</div>"
    elif isinstance(obj, dict):
        for k, v in obj.items():
            out += '<div>Dict item "{}"</div>'.format(k)
            out += "<div>" + pygment_html_render(v, lexer) + "</div>"
    return out
def wrapped_markdown(s):
    """Render markdown ``s`` to HTML inside a ``rich_doc`` wrapper div."""
    return '<div class="rich_doc">' + markdown.markdown(s) + "</div>"
# Maps task attribute names to HTML renderers used on the task details page.
attr_renderer = {
    'bash_command': lambda x: render(x, lexers.BashLexer),
    'hql': lambda x: render(x, lexers.SqlLexer),
    'sql': lambda x: render(x, lexers.SqlLexer),
    'doc': lambda x: render(x, lexers.TextLexer),
    'doc_json': lambda x: render(x, lexers.JsonLexer),
    'doc_rst': lambda x: render(x, lexers.RstLexer),
    'doc_yaml': lambda x: render(x, lexers.YamlLexer),
    'doc_md': wrapped_markdown,
    'python_callable': lambda x: render(
        wwwutils.get_python_source(x),
        lexers.PythonLexer,
    ),
}
def data_profiling_required(f):
    """Decorator for views requiring data profiling access"""
    @wraps(f)
    def decorated_function(*args, **kwargs):
        # Allow access when auth is disabled entirely, or when the logged-in
        # user has the data-profiling privilege.
        if (
            current_app.config['LOGIN_DISABLED'] or
            (not current_user.is_anonymous and current_user.data_profiling())
        ):
            return f(*args, **kwargs)
        else:
            flash("This page requires data profiling privileges", "error")
            return redirect(url_for('admin.index'))
    return decorated_function
def fused_slots(v, c, m, p):
    """Column formatter: pool's used (running) slot count, linked to the list."""
    url = url_for(
        'taskinstance.index_view',
        flt1_pool_equals=m.pool,
        flt2_state_equals='running',
    )
    return Markup("<a href='{0}'>{1}</a>").format(url, m.used_slots())
def fqueued_slots(v, c, m, p):
    """Column formatter: pool's queued slot count, linked to the sorted list."""
    url = url_for(
        'taskinstance.index_view',
        flt1_pool_equals=m.pool,
        flt2_state_equals='queued',
        sort='1',
        desc='1'
    )
    return Markup("<a href='{0}'>{1}</a>").format(url, m.queued_slots())
def recurse_tasks(tasks, task_ids, dag_ids, task_id_to_dag):
    """Recursively walk tasks (descending into SubDagOperators).

    Mutates the accumulator arguments in place: appends task ids to
    ``task_ids``, subdag ids to ``dag_ids``, and records each task's owning
    DAG in ``task_id_to_dag``.
    """
    if isinstance(tasks, list):
        for task in tasks:
            recurse_tasks(task, task_ids, dag_ids, task_id_to_dag)
        return
    if isinstance(tasks, SubDagOperator):
        subtasks = tasks.subdag.tasks
        dag_ids.append(tasks.subdag.dag_id)
        for subtask in subtasks:
            if subtask.task_id not in task_ids:
                task_ids.append(subtask.task_id)
                task_id_to_dag[subtask.task_id] = tasks.subdag
        recurse_tasks(subtasks, task_ids, dag_ids, task_id_to_dag)
    if isinstance(tasks, BaseOperator):
        task_id_to_dag[tasks.task_id] = tasks.dag
def get_chart_height(dag):
    """
    TODO(aoen): See [AIRFLOW-1263] We use the number of tasks in the DAG as a heuristic to
    approximate the size of generated chart (otherwise the charts are tiny and unreadable
    when DAGs have a large number of tasks). Ideally nvd3 should allow for dynamic-height
    charts, that is charts that take up space based on the size of the components within.
    """
    base_height = 600
    per_task_height = 10
    return base_height + per_task_height * len(dag.tasks)
def get_date_time_num_runs_dag_runs_form_data(request, session, dag):
    """Build the form data shared by the duration/tries/landing-times views.

    Reads ``execution_date``, ``base_date`` and ``num_runs`` from the request
    args, queries the most recent dag runs up to ``base_date``, and returns a
    dict with the resolved date, run choices and selected run state.
    """
    dttm = request.args.get('execution_date')
    if dttm:
        dttm = pendulum.parse(dttm)
    else:
        dttm = dag.latest_execution_date or timezone.utcnow()

    base_date = request.args.get('base_date')
    if base_date:
        base_date = timezone.parse(base_date)
    else:
        # The DateTimeField widget truncates milliseconds and would loose
        # the first dag run. Round to next second.
        base_date = (dttm + timedelta(seconds=1)).replace(microsecond=0)

    default_dag_run = conf.getint('webserver', 'default_dag_run_display_number')
    num_runs = request.args.get('num_runs')
    num_runs = int(num_runs) if num_runs else default_dag_run

    DR = models.DagRun
    drs = (
        session.query(DR)
        .filter(
            DR.dag_id == dag.dag_id,
            DR.execution_date <= base_date)
        .order_by(desc(DR.execution_date))
        .limit(num_runs)
        .all()
    )
    dr_choices = []
    dr_state = None
    for dr in drs:
        dr_choices.append((dr.execution_date.isoformat(), dr.run_id))
        if dttm == dr.execution_date:
            dr_state = dr.state

    # Happens if base_date was changed and the selected dag run is not in result
    if not dr_state and drs:
        dr = drs[0]
        dttm = dr.execution_date
        dr_state = dr.state

    return {
        'dttm': dttm,
        'base_date': base_date,
        'num_runs': num_runs,
        'execution_date': dttm.isoformat(),
        'dr_choices': dr_choices,
        'dr_state': dr_state,
    }
class AirflowViewMixin(object):
    """Mixin injecting common template context into every rendered view."""

    def render(self, template, **kwargs):
        # Lazy proxy so the scheduler-job query only runs if the template
        # actually uses it.
        kwargs['scheduler_job'] = lazy_object_proxy.Proxy(jobs.SchedulerJob.most_recent_job)
        kwargs['macros'] = airflow.macros
        return super(AirflowViewMixin, self).render(template, **kwargs)
class Airflow(AirflowViewMixin, BaseView):
    def is_visible(self):
        """Hide this view from the Flask-Admin menu."""
        return False
    @expose('/')
    @login_required
    def index(self):
        """Render the DAGs home page."""
        return self.render('airflow/dags.html')
    @expose('/chart_data')
    @data_profiling_required
    @wwwutils.gzipped
    def chart_data(self):
        """Return the data behind a chart as JSON (or CSV when ``?csv=true``).

        Renders the chart's templated SQL in a sandboxed Jinja environment,
        runs it against the chart's connection and builds an nvd3 payload.
        Errors are accumulated in ``payload['error']`` rather than raised.
        """
        from airflow import macros
        import pandas as pd
        if conf.getboolean('core', 'secure_mode'):
            abort(404)
        with create_session() as session:
            chart_id = request.args.get('chart_id')
            csv = request.args.get('csv') == "true"
            chart = session.query(models.Chart).filter_by(id=chart_id).first()
            db = session.query(
                Connection).filter_by(conn_id=chart.conn_id).first()

            payload = {
                "state": "ERROR",
                "error": ""
            }

            # Processing templated fields
            try:
                args = ast.literal_eval(chart.default_params)
                if not isinstance(args, dict):
                    raise AirflowException('Not a dict')
            except Exception:
                args = {}
                payload['error'] += (
                    "Default params is not valid, string has to evaluate as "
                    "a Python dictionary. ")

            request_dict = {k: request.args.get(k) for k in request.args}
            args.update(request_dict)
            args['macros'] = macros
            # Sandboxed environment so user-supplied templates cannot escape.
            sandbox = ImmutableSandboxedEnvironment()
            sql = sandbox.from_string(chart.sql).render(**args)
            label = sandbox.from_string(chart.label).render(**args)
            payload['sql_html'] = Markup(highlight(
                sql,
                lexers.SqlLexer(),  # Lexer call
                HtmlFormatter(noclasses=True))
            )
            payload['label'] = label

            pd.set_option('display.max_colwidth', 100)
            try:
                hook = db.get_hook()
                df = hook.get_pandas_df(
                    wwwutils.limit_sql(sql, CHART_LIMIT, conn_type=db.conn_type))
                df = df.fillna(0)
            except Exception as e:
                payload['error'] += "SQL execution failed. Details: " + str(e)

            if csv:
                return Response(
                    response=df.to_csv(index=False),
                    status=200,
                    mimetype="application/text")

            if not payload['error'] and len(df) == CHART_LIMIT:
                payload['warning'] = (
                    "Data has been truncated to {0}"
                    " rows. Expect incomplete results.").format(CHART_LIMIT)

            if not payload['error'] and len(df) == 0:
                payload['error'] += "Empty result set. "
            elif (
                    not payload['error'] and
                    chart.sql_layout == 'series' and
                    chart.chart_type != "datatable" and
                    len(df.columns) < 3):
                payload['error'] += "SQL needs to return at least 3 columns. "
            elif (
                    not payload['error'] and
                    chart.sql_layout == 'columns' and
                    len(df.columns) < 2):
                payload['error'] += "SQL needs to return at least 2 columns. "
            elif not payload['error']:
                import numpy as np
                chart_type = chart.chart_type

                data = None
                if chart.show_datatable or chart_type == "datatable":
                    data = df.to_dict(orient="split")
                    data['columns'] = [{'title': c} for c in data['columns']]
                    payload['data'] = data

                # Trying to convert time to something Highcharts likes
                x_col = 1 if chart.sql_layout == 'series' else 0
                if chart.x_is_date:
                    try:
                        # From string to datetime
                        df[df.columns[x_col]] = pd.to_datetime(
                            df[df.columns[x_col]])
                        df[df.columns[x_col]] = df[df.columns[x_col]].apply(
                            lambda x: int(x.strftime("%s")) * 1000)
                    except Exception:
                        payload['error'] = "Time conversion failed"

                if chart_type == 'datatable':
                    payload['state'] = 'SUCCESS'
                    return wwwutils.json_response(payload)
                else:
                    if chart.sql_layout == 'series':
                        # User provides columns (series, x, y)
                        df[df.columns[2]] = df[df.columns[2]].astype(np.float)
                        df = df.pivot_table(
                            index=df.columns[1],
                            columns=df.columns[0],
                            values=df.columns[2], aggfunc=np.sum)
                    else:
                        # User provides columns (x, y, metric1, metric2, ...)
                        df.index = df[df.columns[0]]
                        df = df.sort_values(by=df.columns[0])
                        del df[df.columns[0]]
                        for col in df.columns:
                            df[col] = df[col].astype(np.float)
                    df = df.fillna(0)
                    NVd3ChartClass = chart_mapping.get(chart.chart_type)
                    NVd3ChartClass = getattr(nvd3, NVd3ChartClass)
                    nvd3_chart = NVd3ChartClass(x_is_date=chart.x_is_date)

                    for col in df.columns:
                        nvd3_chart.add_serie(name=col, y=df[col].tolist(), x=df[col].index.tolist())
                    try:
                        nvd3_chart.buildcontent()
                        payload['chart_type'] = nvd3_chart.__class__.__name__
                        payload['htmlcontent'] = nvd3_chart.htmlcontent
                    except Exception as e:
                        payload['error'] = str(e)

                payload['state'] = 'SUCCESS'
                payload['request_dict'] = request_dict
        return wwwutils.json_response(payload)
@expose('/chart')
@data_profiling_required
def chart(self):
if conf.getboolean('core', 'secure_mode'):
abort(404)
with create_session() as session:
chart_id = request.args.get('chart_id')
embed = request.args.get('embed')
chart = session.query(models.Chart).filter_by(id=chart_id).first()
NVd3ChartClass = chart_mapping.get(chart.chart_type)
if not NVd3ChartClass:
flash(
"Not supported anymore as the license was incompatible, "
"sorry",
"danger")
redirect('/admin/chart/')
sql = ""
if chart.show_sql:
sql = Markup(highlight(
chart.sql,
lexers.SqlLexer(), # Lexer call
HtmlFormatter(noclasses=True))
)
return self.render(
'airflow/nvd3.html',
chart=chart,
title="Airflow - Chart",
sql=sql,
label=chart.label,
embed=embed)
@expose('/dag_stats')
@login_required
@provide_session
def dag_stats(self, session=None):
dr = models.DagRun
dm = models.DagModel
dag_ids = session.query(dm.dag_id)
dag_state_stats = (
session.query(dr.dag_id, dr.state, sqla.func.count(dr.state)).group_by(dr.dag_id, dr.state)
)
data = {}
for (dag_id, ) in dag_ids:
data[dag_id] = {}
for dag_id, state, count in dag_state_stats:
if dag_id not in data:
data[dag_id] = {}
data[dag_id][state] = count
payload = {}
for dag_id, d in data.items():
payload[dag_id] = []
for state in State.dag_states:
count = d.get(state, 0)
payload[dag_id].append({
'state': state,
'count': count,
'dag_id': dag_id,
'color': State.color(state)
})
return wwwutils.json_response(payload)
    @expose('/task_stats')
    @login_required
    @provide_session
    def task_stats(self, session=None):
        """Return per-DAG task-instance state counts as JSON.

        For each DAG the counts come from the task instances of all of its
        running dag runs; if a DAG has no running run, the task instances of
        its most recent completed run are used instead.
        """
        TI = models.TaskInstance
        DagRun = models.DagRun
        Dag = models.DagModel
        dag_ids = session.query(Dag.dag_id)
        # Latest non-running dag run per active, non-subdag DAG.
        LastDagRun = (
            session.query(DagRun.dag_id, sqla.func.max(DagRun.execution_date).label('execution_date'))
            .join(Dag, Dag.dag_id == DagRun.dag_id)
            .filter(DagRun.state != State.RUNNING)
            .filter(Dag.is_active == True)  # noqa: E712
            .filter(Dag.is_subdag == False)  # noqa: E712
            .group_by(DagRun.dag_id)
            .subquery('last_dag_run')
        )
        # Every currently running dag run per active, non-subdag DAG.
        RunningDagRun = (
            session.query(DagRun.dag_id, DagRun.execution_date)
            .join(Dag, Dag.dag_id == DagRun.dag_id)
            .filter(DagRun.state == State.RUNNING)
            .filter(Dag.is_active == True)  # noqa: E712
            .filter(Dag.is_subdag == False)  # noqa: E712
            .subquery('running_dag_run')
        )
        # Select all task_instances from active dag_runs.
        # If no dag_run is active, return task instances from most recent dag_run.
        LastTI = (
            session.query(TI.dag_id.label('dag_id'), TI.state.label('state'))
            .join(LastDagRun, and_(
                LastDagRun.c.dag_id == TI.dag_id,
                LastDagRun.c.execution_date == TI.execution_date))
        )
        RunningTI = (
            session.query(TI.dag_id.label('dag_id'), TI.state.label('state'))
            .join(RunningDagRun, and_(
                RunningDagRun.c.dag_id == TI.dag_id,
                RunningDagRun.c.execution_date == TI.execution_date))
        )
        # Combine both TI sets and count occurrences per (dag_id, state).
        UnionTI = union_all(LastTI, RunningTI).alias('union_ti')
        qry = (
            session.query(UnionTI.c.dag_id, UnionTI.c.state, sqla.func.count())
            .group_by(UnionTI.c.dag_id, UnionTI.c.state)
        )
        data = {}
        for dag_id, state, count in qry:
            if dag_id not in data:
                data[dag_id] = {}
            data[dag_id][state] = count
        session.commit()
        # Emit a full row (count 0 included) for every known dag_id and state.
        payload = {}
        for (dag_id, ) in dag_ids:
            payload[dag_id] = []
            for state in State.task_states:
                count = data.get(dag_id, {}).get(state, 0)
                payload[dag_id].append({
                    'state': state,
                    'count': count,
                    'dag_id': dag_id,
                    'color': State.color(state)
                })
        return wwwutils.json_response(payload)
    @expose('/code')
    @login_required
    @provide_session
    def code(self, session=None):
        """Render the syntax-highlighted source file of a DAG."""
        dag_id = request.args.get('dag_id')
        dm = models.DagModel
        dag = session.query(dm).filter(dm.dag_id == dag_id).first()
        try:
            # fileloc may point inside a zipped DAG package.
            with wwwutils.open_maybe_zipped(dag.fileloc, 'r') as f:
                code = f.read()
            html_code = highlight(
                code, lexers.PythonLexer(), HtmlFormatter(linenos=True))
        except IOError as e:
            # Show the read error in place of the source.
            html_code = str(e)
        return self.render(
            'airflow/dag_code.html', html_code=html_code, dag=dag, title=dag_id,
            root=request.args.get('root'),
            demo_mode=conf.getboolean('webserver', 'demo_mode'),
            wrapped=conf.getboolean('webserver', 'default_wrap'))
@expose('/dag_details')
@login_required
@provide_session
def dag_details(self, session=None):
dag_id = request.args.get('dag_id')
dag = dagbag.get_dag(dag_id)
title = "DAG details"
root = request.args.get('root', '')
TI = models.TaskInstance
states = session\
.query(TI.state, sqla.func.count(TI.dag_id))\
.filter(TI.dag_id == dag_id)\
.group_by(TI.state)\
.all()
active_runs = models.DagRun.find(
dag_id=dag.dag_id,
state=State.RUNNING,
external_trigger=False,
session=session
)
return self.render(
'airflow/dag_details.html',
dag=dag, title=title, root=root, states=states, State=State,
active_runs=active_runs)
@current_app.errorhandler(404)
def circles(self):
return render_template(
'airflow/circles.html', hostname=get_hostname()), 404
@current_app.errorhandler(500)
def show_traceback(self):
from airflow.utils import asciiart as ascii_
return render_template(
'airflow/traceback.html',
hostname=get_hostname(),
nukular=ascii_.nukular,
info=traceback.format_exc()), 500
@expose('/noaccess')
def noaccess(self):
return self.render('airflow/noaccess.html')
@expose('/pickle_info')
@login_required
def pickle_info(self):
d = {}
dag_id = request.args.get('dag_id')
dags = [dagbag.dags.get(dag_id)] if dag_id else dagbag.dags.values()
for dag in dags:
if not dag.is_subdag:
d[dag.dag_id] = dag.pickle_info()
return wwwutils.json_response(d)
    @expose('/login', methods=['GET', 'POST'])
    def login(self):
        """Delegate GET/POST login handling to the configured auth backend."""
        return airflow.login.login(self, request)
@expose('/logout')
def logout(self):
logout_user()
flash('You have been logged out.')
return redirect(url_for('admin.index'))
@expose('/rendered')
@login_required
@wwwutils.action_logging
def rendered(self):
dag_id = request.args.get('dag_id')
task_id = request.args.get('task_id')
execution_date = request.args.get('execution_date')
dttm = pendulum.parse(execution_date)
form = DateTimeForm(data={'execution_date': dttm})
root = request.args.get('root', '')
dag = dagbag.get_dag(dag_id)
task = copy.copy(dag.get_task(task_id))
ti = models.TaskInstance(task=task, execution_date=dttm)
try:
ti.render_templates()
except Exception as e:
flash("Error rendering template: " + str(e), "error")
title = "Rendered Template"
html_dict = {}
for template_field in task.__class__.template_fields:
content = getattr(task, template_field)
if template_field in attr_renderer:
html_dict[template_field] = attr_renderer[template_field](content)
else:
html_dict[template_field] = (
"<pre><code>" + str(content) + "</pre></code>")
return self.render(
'airflow/ti_code.html',
html_dict=html_dict,
dag=dag,
task_id=task_id,
execution_date=execution_date,
form=form,
root=root,
title=title)
    @expose('/get_logs_with_metadata')
    @login_required
    @wwwutils.action_logging
    @provide_session
    def get_logs_with_metadata(self, session=None):
        """Fetch task logs through the configured task log reader.

        With ``format=json`` one chunk of logs plus reader metadata is
        returned (the UI polls incrementally using that metadata); any other
        format streams all requested attempts as a plain-text attachment.
        """
        dag_id = request.args.get('dag_id')
        task_id = request.args.get('task_id')
        execution_date = request.args.get('execution_date')
        dttm = pendulum.parse(execution_date)
        # try_number is optional; None means "all attempts".
        if request.args.get('try_number') is not None:
            try_number = int(request.args.get('try_number'))
        else:
            try_number = None
        response_format = request.args.get('format', 'json')
        metadata = request.args.get('metadata')
        metadata = json.loads(metadata)
        # metadata may be null
        if not metadata:
            metadata = {}
        # Convert string datetime into actual datetime
        try:
            execution_date = timezone.parse(execution_date)
        except ValueError:
            error_message = (
                'Given execution date, {}, could not be identified '
                'as a date. Example date format: 2015-11-16T14:34:15+00:00'.format(
                    execution_date))
            response = jsonify({'error': error_message})
            response.status_code = 400
            return response
        logger = logging.getLogger('airflow.task')
        task_log_reader = conf.get('core', 'task_log_reader')
        # Locate the log handler named by the task_log_reader setting.
        handler = next((handler for handler in logger.handlers
                        if handler.name == task_log_reader), None)
        ti = session.query(models.TaskInstance).filter(
            models.TaskInstance.dag_id == dag_id,
            models.TaskInstance.task_id == task_id,
            models.TaskInstance.execution_date == dttm).first()

        def _get_logs_with_metadata(try_number, metadata):
            # Read one chunk of logs for the given attempt; the handler
            # returns updated metadata (offsets, end_of_log flag).
            if ti is None:
                logs = ["*** Task instance did not exist in the DB\n"]
                metadata['end_of_log'] = True
            else:
                logs, metadatas = handler.read(ti, try_number, metadata=metadata)
                metadata = metadatas[0]
            return logs, metadata
        try:
            if ti is not None:
                dag = dagbag.get_dag(dag_id)
                ti.task = dag.get_task(ti.task_id)
            if response_format == 'json':
                logs, metadata = _get_logs_with_metadata(try_number, metadata)
                message = logs[0] if try_number is not None else logs
                return jsonify(message=message, metadata=metadata)
            filename_template = conf.get('core', 'LOG_FILENAME_TEMPLATE')
            attachment_filename = render_log_filename(
                ti=ti,
                try_number="all" if try_number is None else try_number,
                filename_template=filename_template)
            metadata['download_logs'] = True

            def _generate_log_stream(try_number, metadata):
                # Stream each requested attempt chunk-by-chunk until the
                # handler reports end_of_log for it.
                if try_number is None and ti is not None:
                    next_try = ti.next_try_number
                    try_numbers = list(range(1, next_try))
                else:
                    try_numbers = [try_number]
                for try_number in try_numbers:
                    # Reset read state between attempts.
                    metadata.pop('end_of_log', None)
                    metadata.pop('max_offset', None)
                    metadata.pop('offset', None)
                    while 'end_of_log' not in metadata or not metadata['end_of_log']:
                        logs, metadata = _get_logs_with_metadata(try_number, metadata)
                        yield "\n".join(logs) + "\n"
            return Response(_generate_log_stream(try_number, metadata),
                            mimetype="text/plain",
                            headers={"Content-Disposition": "attachment; filename={}".format(
                                attachment_filename)})
        except AttributeError as e:
            # Raised when the configured handler does not implement read().
            error_message = ["Task log handler {} does not support read logs.\n{}\n"
                             .format(task_log_reader, str(e))]
            metadata['end_of_log'] = True
            return jsonify(message=error_message, error=True, metadata=metadata)
    @expose('/log')
    @login_required
    @wwwutils.action_logging
    @provide_session
    def log(self, session=None):
        """Render the per-attempt log tabs for one task instance.

        Log content itself is fetched asynchronously by the template (via
        /get_logs_with_metadata); this view only computes how many attempt
        tabs to show.
        """
        dag_id = request.args.get('dag_id')
        task_id = request.args.get('task_id')
        execution_date = request.args.get('execution_date')
        dttm = pendulum.parse(execution_date)
        form = DateTimeForm(data={'execution_date': dttm})
        dag = dagbag.get_dag(dag_id)
        ti = session.query(models.TaskInstance).filter(
            models.TaskInstance.dag_id == dag_id,
            models.TaskInstance.task_id == task_id,
            models.TaskInstance.execution_date == dttm).first()
        num_logs = 0
        if ti is not None:
            num_logs = ti.next_try_number - 1
            if ti.state == State.UP_FOR_RESCHEDULE:
                # Tasks in reschedule state decremented the try number
                num_logs += 1
        # Placeholders only; the page loads actual content per attempt.
        logs = [''] * num_logs
        root = request.args.get('root', '')
        return self.render(
            'airflow/ti_log.html',
            logs=logs, dag=dag, title="Log by attempts",
            dag_id=dag.dag_id, task_id=task_id,
            execution_date=execution_date, form=form,
            root=root, wrapped=conf.getboolean('webserver', 'default_wrap'))
@expose('/elasticsearch')
@login_required
@wwwutils.action_logging
@provide_session
def elasticsearch(self, session=None):
dag_id = request.args.get('dag_id')
task_id = request.args.get('task_id')
execution_date = request.args.get('execution_date')
try_number = request.args.get('try_number', 1)
elasticsearch_frontend = conf.get('elasticsearch', 'frontend')
log_id_template = conf.get('elasticsearch', 'log_id_template')
log_id = log_id_template.format(
dag_id=dag_id, task_id=task_id,
execution_date=execution_date, try_number=try_number)
url = 'https://' + elasticsearch_frontend.format(log_id=quote(log_id))
return redirect(url)
    @expose('/task')
    @login_required
    @wwwutils.action_logging
    def task(self):
        """Render the Task Instance Details page.

        Shows TI and task attributes, rendered "special" code attributes,
        and the scheduler-dependency reasons why the instance is not running.
        """
        TI = models.TaskInstance
        dag_id = request.args.get('dag_id')
        task_id = request.args.get('task_id')
        # Carrying execution_date through, even though it's irrelevant for
        # this context
        execution_date = request.args.get('execution_date')
        dttm = pendulum.parse(execution_date)
        form = DateTimeForm(data={'execution_date': dttm})
        root = request.args.get('root', '')
        dag = dagbag.get_dag(dag_id)
        if not dag or task_id not in dag.task_ids:
            flash(
                "Task [{}.{}] doesn't seem to exist"
                " at the moment".format(dag_id, task_id),
                "error")
            return redirect('/admin/')
        # Work on a copy so resolving template files doesn't mutate the DAG.
        task = copy.copy(dag.get_task(task_id))
        task.resolve_template_files()
        ti = TI(task=task, execution_date=dttm)
        ti.refresh_from_db()
        # Collect displayable (non-private) attributes.
        # NOTE(review): comparing against type(self.task) presumably filters
        # out attributes that are themselves task objects - confirm that
        # self.task is actually set on this view class.
        ti_attrs = []
        for attr_name in dir(ti):
            if not attr_name.startswith('_'):
                attr = getattr(ti, attr_name)
                if type(attr) != type(self.task):  # noqa: E721
                    ti_attrs.append((attr_name, str(attr)))
        task_attrs = []
        for attr_name in dir(task):
            if not attr_name.startswith('_'):
                attr = getattr(task, attr_name)
                if type(attr) != type(self.task) and \
                        attr_name not in attr_renderer:  # noqa: E721
                    task_attrs.append((attr_name, str(attr)))
        # Color coding the special attributes that are code
        special_attrs_rendered = {}
        for attr_name in attr_renderer:
            if hasattr(task, attr_name):
                source = getattr(task, attr_name)
                special_attrs_rendered[attr_name] = attr_renderer[attr_name](source)
        # Fallback message shown when every dependency is satisfied.
        no_failed_deps_result = [(
            "Unknown",
            dedent("""\
            All dependencies are met but the task instance is not running.
            In most cases this just means that the task will probably
            be scheduled soon unless:<br/>
            - The scheduler is down or under heavy load<br/>
            - The following configuration values may be limiting the number
            of queueable processes:
            <code>parallelism</code>,
            <code>dag_concurrency</code>,
            <code>max_active_dag_runs_per_dag</code>,
            <code>non_pooled_task_slot_count</code><br/>
            {}
            <br/>
            If this task instance does not start soon please contact your Airflow """
                   """administrator for assistance."""
                   .format(
                       "- This task instance already ran and had its state changed "
                       "manually (e.g. cleared in the UI)<br/>"
                       if ti.state == State.NONE else "")))]
        # Use the scheduler's context to figure out which dependencies are not met
        dep_context = DepContext(SCHEDULER_DEPS)
        failed_dep_reasons = [(dep.dep_name, dep.reason) for dep in
                              ti.get_failed_dep_statuses(
                                  dep_context=dep_context)]
        title = "Task Instance Details"
        return self.render(
            'airflow/task.html',
            task_attrs=task_attrs,
            ti_attrs=ti_attrs,
            failed_dep_reasons=failed_dep_reasons or no_failed_deps_result,
            task_id=task_id,
            execution_date=execution_date,
            special_attrs_rendered=special_attrs_rendered,
            form=form,
            root=root,
            dag=dag, title=title)
@expose('/xcom')
@login_required
@wwwutils.action_logging
@provide_session
def xcom(self, session=None):
dag_id = request.args.get('dag_id')
task_id = request.args.get('task_id')
# Carrying execution_date through, even though it's irrelevant for
# this context
execution_date = request.args.get('execution_date')
dttm = pendulum.parse(execution_date)
form = DateTimeForm(data={'execution_date': dttm})
root = request.args.get('root', '')
dm_db = models.DagModel
ti_db = models.TaskInstance
dag = session.query(dm_db).filter(dm_db.dag_id == dag_id).first()
ti = session.query(ti_db).filter(ti_db.dag_id == dag_id and ti_db.task_id == task_id).first()
if not ti:
flash(
"Task [{}.{}] doesn't seem to exist"
" at the moment".format(dag_id, task_id),
"error")
return redirect('/admin/')
xcomlist = session.query(XCom).filter(
XCom.dag_id == dag_id, XCom.task_id == task_id,
XCom.execution_date == dttm).all()
attributes = []
for xcom in xcomlist:
if not xcom.key.startswith('_'):
attributes.append((xcom.key, xcom.value))
title = "XCom"
return self.render(
'airflow/xcom.html',
attributes=attributes,
task_id=task_id,
execution_date=execution_date,
form=form,
root=root,
dag=dag, title=title)
    @expose('/run', methods=['POST'])
    @login_required
    @wwwutils.action_logging
    @wwwutils.notify_owner
    def run(self):
        """Queue a single task instance for immediate execution.

        Only supported with the Celery or Kubernetes executor; dependency
        checks can be selectively skipped via the form's ignore_* flags.
        Always redirects back to the submitting page.
        """
        dag_id = request.form.get('dag_id')
        task_id = request.form.get('task_id')
        origin = request.form.get('origin')
        dag = dagbag.get_dag(dag_id)
        task = dag.get_task(task_id)
        execution_date = request.form.get('execution_date')
        execution_date = pendulum.parse(execution_date)
        ignore_all_deps = request.form.get('ignore_all_deps') == "true"
        ignore_task_deps = request.form.get('ignore_task_deps') == "true"
        ignore_ti_state = request.form.get('ignore_ti_state') == "true"
        from airflow.executors import get_default_executor
        executor = get_default_executor()
        valid_celery_config = False
        valid_kubernetes_config = False
        # Either executor may be absent from this install; probe both.
        try:
            from airflow.executors.celery_executor import CeleryExecutor
            valid_celery_config = isinstance(executor, CeleryExecutor)
        except ImportError:
            pass
        try:
            from airflow.contrib.executors.kubernetes_executor import KubernetesExecutor
            valid_kubernetes_config = isinstance(executor, KubernetesExecutor)
        except ImportError:
            pass
        if not valid_celery_config and not valid_kubernetes_config:
            flash("Only works with the Celery or Kubernetes executors, sorry", "error")
            return redirect(origin)
        ti = models.TaskInstance(task=task, execution_date=execution_date)
        ti.refresh_from_db()
        # Make sure the task instance can be queued
        dep_context = DepContext(
            deps=QUEUE_DEPS,
            ignore_all_deps=ignore_all_deps,
            ignore_task_deps=ignore_task_deps,
            ignore_ti_state=ignore_ti_state)
        failed_deps = list(ti.get_failed_dep_statuses(dep_context=dep_context))
        if failed_deps:
            failed_deps_str = ", ".join(
                ["{}: {}".format(dep.dep_name, dep.reason) for dep in failed_deps])
            flash("Could not queue task instance for execution, dependencies not met: "
                  "{}".format(failed_deps_str),
                  "error")
            return redirect(origin)
        executor.start()
        executor.queue_task_instance(
            ti,
            ignore_all_deps=ignore_all_deps,
            ignore_task_deps=ignore_task_deps,
            ignore_ti_state=ignore_ti_state)
        executor.heartbeat()
        flash(
            "Sent {} to the message queue, "
            "it should start any moment now.".format(ti))
        return redirect(origin)
@expose('/delete', methods=['POST'])
@login_required
@wwwutils.action_logging
@wwwutils.notify_owner
def delete(self):
from airflow.api.common.experimental import delete_dag
from airflow.exceptions import DagNotFound, DagFileExists
dag_id = request.values.get('dag_id')
origin = request.values.get('origin') or "/admin/"
try:
delete_dag.delete_dag(dag_id)
except DagNotFound:
flash("DAG with id {} not found. Cannot delete".format(dag_id))
return redirect(request.referrer)
except DagFileExists:
flash("Dag id {} is still in DagBag. "
"Remove the DAG file first.".format(dag_id))
return redirect(request.referrer)
flash("Deleting DAG with id {}. May take a couple minutes to fully"
" disappear.".format(dag_id))
# Upon successful delete return to origin
return redirect(origin)
@expose('/trigger', methods=['POST'])
@login_required
@wwwutils.action_logging
@wwwutils.notify_owner
@provide_session
def trigger(self, session=None):
dag_id = request.values.get('dag_id')
origin = request.values.get('origin') or "/admin/"
dag = session.query(models.DagModel).filter(models.DagModel.dag_id == dag_id).first()
if not dag:
flash("Cannot find dag {}".format(dag_id))
return redirect(origin)
execution_date = timezone.utcnow()
run_id = "manual__{0}".format(execution_date.isoformat())
dr = DagRun.find(dag_id=dag_id, run_id=run_id)
if dr:
flash("This run_id {} already exists".format(run_id))
return redirect(origin)
run_conf = {}
dag.create_dagrun(
run_id=run_id,
execution_date=execution_date,
state=State.RUNNING,
conf=run_conf,
external_trigger=True
)
flash(
"Triggered {}, "
"it should start any moment now.".format(dag_id))
return redirect(origin)
    def _clear_dag_tis(self, dag, start_date, end_date, origin,
                       recursive=False, confirmed=False, only_failed=False):
        """Clear the task instances of *dag* within [start_date, end_date].

        When *confirmed*, performs the clear and redirects back to *origin*;
        otherwise does a dry run and renders a confirmation page listing the
        task instances that would be cleared. *recursive* also clears
        subdags and the parent dag.
        """
        if confirmed:
            count = dag.clear(
                start_date=start_date,
                end_date=end_date,
                include_subdags=recursive,
                include_parentdag=recursive,
                only_failed=only_failed,
            )
            flash("{0} task instances have been cleared".format(count))
            return redirect(origin)
        # Dry run first so the user can confirm what will be cleared.
        tis = dag.clear(
            start_date=start_date,
            end_date=end_date,
            include_subdags=recursive,
            dry_run=True,
            include_parentdag=recursive,
            only_failed=only_failed,
        )
        if not tis:
            flash("No task instances to clear", 'error')
            response = redirect(origin)
        else:
            details = "\n".join([str(t) for t in tis])
            response = self.render(
                'airflow/confirm.html',
                message=("Here's the list of task instances you are about "
                         "to clear:"),
                details=details)
        return response
@expose('/clear', methods=['POST'])
@login_required
@wwwutils.action_logging
@wwwutils.notify_owner
def clear(self):
dag_id = request.form.get('dag_id')
task_id = request.form.get('task_id')
origin = request.form.get('origin')
dag = dagbag.get_dag(dag_id)
execution_date = request.form.get('execution_date')
execution_date = pendulum.parse(execution_date)
confirmed = request.form.get('confirmed') == "true"
upstream = request.form.get('upstream') == "true"
downstream = request.form.get('downstream') == "true"
future = request.form.get('future') == "true"
past = request.form.get('past') == "true"
recursive = request.form.get('recursive') == "true"
only_failed = request.form.get('only_failed') == "true"
dag = dag.sub_dag(
task_regex=r"^{0}$".format(task_id),
include_downstream=downstream,
include_upstream=upstream)
end_date = execution_date if not future else None
start_date = execution_date if not past else None
return self._clear_dag_tis(dag, start_date, end_date, origin,
recursive=recursive, confirmed=confirmed, only_failed=only_failed)
@expose('/dagrun_clear', methods=['POST'])
@login_required
@wwwutils.action_logging
@wwwutils.notify_owner
def dagrun_clear(self):
dag_id = request.form.get('dag_id')
origin = request.form.get('origin')
execution_date = request.form.get('execution_date')
confirmed = request.form.get('confirmed') == "true"
dag = dagbag.get_dag(dag_id)
execution_date = pendulum.parse(execution_date)
start_date = execution_date
end_date = execution_date
return self._clear_dag_tis(dag, start_date, end_date, origin,
recursive=True, confirmed=confirmed)
@expose('/blocked')
@login_required
@provide_session
def blocked(self, session=None):
DR = models.DagRun
dags = session\
.query(DR.dag_id, sqla.func.count(DR.id))\
.filter(DR.state == State.RUNNING)\
.group_by(DR.dag_id)\
.all()
payload = []
for dag_id, active_dag_runs in dags:
max_active_runs = 0
if dag_id in dagbag.dags:
max_active_runs = dagbag.dags[dag_id].max_active_runs
payload.append({
'dag_id': dag_id,
'active_dag_run': active_dag_runs,
'max_active_runs': max_active_runs,
})
return wwwutils.json_response(payload)
    def _mark_dagrun_state_as_failed(self, dag_id, execution_date, confirmed, origin):
        """Mark a dag run as failed.

        With *confirmed* the change is committed and the user is redirected
        to *origin*; otherwise a confirmation page listing the affected task
        instances is rendered instead.
        """
        if not execution_date:
            flash('Invalid execution date', 'error')
            return redirect(origin)
        execution_date = pendulum.parse(execution_date)
        dag = dagbag.get_dag(dag_id)
        if not dag:
            flash('Cannot find DAG: {}'.format(dag_id), 'error')
            return redirect(origin)
        # commit=False turns this into a dry run returning the would-be changes.
        new_dag_state = set_dag_run_state_to_failed(dag, execution_date, commit=confirmed)
        if confirmed:
            flash('Marked failed on {} task instances'.format(len(new_dag_state)))
            return redirect(origin)
        else:
            details = '\n'.join([str(t) for t in new_dag_state])
            response = self.render('airflow/confirm.html',
                                   message=("Here's the list of task instances you are "
                                            "about to mark as failed"),
                                   details=details)
            return response
    def _mark_dagrun_state_as_success(self, dag_id, execution_date, confirmed, origin):
        """Mark a dag run as success.

        With *confirmed* the change is committed and the user is redirected
        to *origin*; otherwise a confirmation page listing the affected task
        instances is rendered instead.
        """
        if not execution_date:
            flash('Invalid execution date', 'error')
            return redirect(origin)
        execution_date = pendulum.parse(execution_date)
        dag = dagbag.get_dag(dag_id)
        if not dag:
            flash('Cannot find DAG: {}'.format(dag_id), 'error')
            return redirect(origin)
        # commit=False turns this into a dry run returning the would-be changes.
        new_dag_state = set_dag_run_state_to_success(dag, execution_date,
                                                     commit=confirmed)
        if confirmed:
            flash('Marked success on {} task instances'.format(len(new_dag_state)))
            return redirect(origin)
        else:
            details = '\n'.join([str(t) for t in new_dag_state])
            response = self.render('airflow/confirm.html',
                                   message=("Here's the list of task instances you are "
                                            "about to mark as success"),
                                   details=details)
            return response
@expose('/dagrun_failed', methods=['POST'])
@login_required
@wwwutils.action_logging
@wwwutils.notify_owner
def dagrun_failed(self):
dag_id = request.form.get('dag_id')
execution_date = request.form.get('execution_date')
confirmed = request.form.get('confirmed') == 'true'
origin = request.form.get('origin')
return self._mark_dagrun_state_as_failed(dag_id, execution_date,
confirmed, origin)
@expose('/dagrun_success', methods=['POST'])
@login_required
@wwwutils.action_logging
@wwwutils.notify_owner
def dagrun_success(self):
dag_id = request.form.get('dag_id')
execution_date = request.form.get('execution_date')
confirmed = request.form.get('confirmed') == 'true'
origin = request.form.get('origin')
return self._mark_dagrun_state_as_success(dag_id, execution_date,
confirmed, origin)
def _mark_task_instance_state(self, dag_id, task_id, origin, execution_date,
confirmed, upstream, downstream,
future, past, state):
dag = dagbag.get_dag(dag_id)
task = dag.get_task(task_id)
task.dag = dag
execution_date = pendulum.parse(execution_date)
if not dag:
flash("Cannot find DAG: {}".format(dag_id))
return redirect(origin)
if not task:
flash("Cannot find task {} in DAG {}".format(task_id, dag.dag_id))
return redirect(origin)
from airflow.api.common.experimental.mark_tasks import set_state
if confirmed:
altered = set_state(tasks=[task], execution_date=execution_date,
upstream=upstream, downstream=downstream,
future=future, past=past, state=state,
commit=True)
flash("Marked {} on {} task instances".format(state, len(altered)))
return redirect(origin)
to_be_altered = set_state(tasks=[task], execution_date=execution_date,
upstream=upstream, downstream=downstream,
future=future, past=past, state=state,
commit=False)
details = "\n".join([str(t) for t in to_be_altered])
response = self.render("airflow/confirm.html",
message=("Here's the list of task instances you are "
"about to mark as {}:".format(state)),
details=details)
return response
@expose('/failed', methods=['POST'])
@login_required
@wwwutils.action_logging
@wwwutils.notify_owner
def failed(self):
dag_id = request.form.get('dag_id')
task_id = request.form.get('task_id')
origin = request.form.get('origin')
execution_date = request.form.get('execution_date')
confirmed = request.form.get('confirmed') == "true"
upstream = request.form.get('failed_upstream') == "true"
downstream = request.form.get('failed_downstream') == "true"
future = request.form.get('failed_future') == "true"
past = request.form.get('failed_past') == "true"
return self._mark_task_instance_state(dag_id, task_id, origin, execution_date,
confirmed, upstream, downstream,
future, past, State.FAILED)
@expose('/success', methods=['POST'])
@login_required
@wwwutils.action_logging
@wwwutils.notify_owner
def success(self):
dag_id = request.form.get('dag_id')
task_id = request.form.get('task_id')
origin = request.form.get('origin')
execution_date = request.form.get('execution_date')
confirmed = request.form.get('confirmed') == "true"
upstream = request.form.get('success_upstream') == "true"
downstream = request.form.get('success_downstream') == "true"
future = request.form.get('success_future') == "true"
past = request.form.get('success_past') == "true"
return self._mark_task_instance_state(dag_id, task_id, origin, execution_date,
confirmed, upstream, downstream,
future, past, State.SUCCESS)
    @expose('/tree')
    @login_required
    @wwwutils.gzipped
    @wwwutils.action_logging
    @provide_session
    def tree(self, session=None):
        """Render the tree view: the task hierarchy crossed with recent dag runs."""
        default_dag_run = conf.getint('webserver', 'default_dag_run_display_number')
        dag_id = request.args.get('dag_id')
        blur = conf.getboolean('webserver', 'demo_mode')
        dag = dagbag.get_dag(dag_id)
        if dag_id not in dagbag.dags:
            flash('DAG "{0}" seems to be missing.'.format(dag_id), "error")
            return redirect('/admin/')
        root = request.args.get('root')
        if root:
            # Restrict the view to `root` and its upstream tasks.
            dag = dag.sub_dag(
                task_regex=root,
                include_downstream=False,
                include_upstream=True)
        base_date = request.args.get('base_date')
        num_runs = request.args.get('num_runs')
        num_runs = int(num_runs) if num_runs else default_dag_run
        if base_date:
            base_date = timezone.parse(base_date)
        else:
            base_date = dag.latest_execution_date or timezone.utcnow()
        DR = models.DagRun
        # The most recent `num_runs` dag runs at or before base_date.
        dag_runs = (
            session.query(DR)
            .filter(
                DR.dag_id == dag.dag_id,
                DR.execution_date <= base_date)
            .order_by(DR.execution_date.desc())
            .limit(num_runs)
            .all()
        )
        dag_runs = {
            dr.execution_date: alchemy_to_dict(dr) for dr in dag_runs}
        dates = sorted(list(dag_runs.keys()))
        max_date = max(dates) if dates else None
        min_date = min(dates) if dates else None
        tis = dag.get_task_instances(
            start_date=min_date, end_date=base_date, session=session)
        # Index TIs by (task_id, execution_date) for the recursion below.
        task_instances = {}
        for ti in tis:
            tid = alchemy_to_dict(ti)
            dr = dag_runs.get(ti.execution_date)
            tid['external_trigger'] = dr['external_trigger'] if dr else False
            task_instances[(ti.task_id, ti.execution_date)] = tid
        expanded = []
        # The default recursion traces every path so that tree view has full
        # expand/collapse functionality. After 5,000 nodes we stop and fall
        # back on a quick DFS search for performance. See PR #320.
        node_count = [0]
        node_limit = 5000 / max(1, len(dag.roots))

        def recurse_nodes(task, visited):
            # Build the D3 node dict for `task`, recursing upstream.
            visited.add(task)
            node_count[0] += 1
            children = [
                recurse_nodes(t, visited) for t in task.upstream_list
                if node_count[0] < node_limit or t not in visited]
            # D3 tree uses children vs _children to define what is
            # expanded or not. The following block makes it such that
            # repeated nodes are collapsed by default.
            children_key = 'children'
            if task.task_id not in expanded:
                expanded.append(task.task_id)
            elif children:
                children_key = "_children"

            def set_duration(tid):
                # Running TIs have no stored duration yet; compute it live.
                if isinstance(tid, dict) and tid.get("state") == State.RUNNING \
                        and tid["start_date"] is not None:
                    d = timezone.utcnow() - pendulum.parse(tid["start_date"])
                    tid["duration"] = d.total_seconds()
                return tid

            return {
                'name': task.task_id,
                'instances': [
                    set_duration(task_instances.get((task.task_id, d))) or {
                        'execution_date': d.isoformat(),
                        'task_id': task.task_id
                    }
                    for d in dates],
                children_key: children,
                'num_dep': len(task.upstream_list),
                'operator': task.task_type,
                'retries': task.retries,
                'owner': task.owner,
                'start_date': task.start_date,
                'end_date': task.end_date,
                'depends_on_past': task.depends_on_past,
                'ui_color': task.ui_color,
            }
        data = {
            'name': '[DAG]',
            'children': [recurse_nodes(t, set()) for t in dag.roots],
            'instances': [dag_runs.get(d) or {'execution_date': d.isoformat()} for d in dates],
        }
        session.commit()
        form = DateTimeWithNumRunsForm(data={'base_date': max_date,
                                             'num_runs': num_runs})
        external_logs = conf.get('elasticsearch', 'frontend')
        return self.render(
            'airflow/tree.html',
            operators=sorted({op.__class__ for op in dag.tasks}, key=lambda x: x.__name__),
            root=root,
            form=form,
            dag=dag, data=data, blur=blur, num_runs=num_runs,
            show_external_logs=bool(external_logs))
    @expose('/graph')
    @login_required
    @wwwutils.gzipped
    @wwwutils.action_logging
    @provide_session
    def graph(self, session=None):
        """Render the graph view: tasks as nodes, dependencies as edges."""
        dag_id = request.args.get('dag_id')
        blur = conf.getboolean('webserver', 'demo_mode')
        dag = dagbag.get_dag(dag_id)
        if dag_id not in dagbag.dags:
            flash('DAG "{0}" seems to be missing.'.format(dag_id), "error")
            return redirect('/admin/')
        root = request.args.get('root')
        if root:
            # Restrict the graph to `root` and its upstream tasks.
            dag = dag.sub_dag(
                task_regex=root,
                include_upstream=True,
                include_downstream=False)
        arrange = request.args.get('arrange', dag.orientation)
        nodes = []
        edges = []
        for task in dag.tasks:
            nodes.append({
                'id': task.task_id,
                'value': {
                    'label': task.task_id,
                    'labelStyle': "fill:{0};".format(task.ui_fgcolor),
                    'style': "fill:{0};".format(task.ui_color),
                }
            })

        def get_upstream(task):
            # Walk upstream from `task`, collecting each edge exactly once.
            for t in task.upstream_list:
                edge = {
                    'u': t.task_id,
                    'v': task.task_id,
                }
                if edge not in edges:
                    edges.append(edge)
                    get_upstream(t)
        for t in dag.roots:
            get_upstream(t)
        dt_nr_dr_data = get_date_time_num_runs_dag_runs_form_data(request, session, dag)
        dt_nr_dr_data['arrange'] = arrange
        dttm = dt_nr_dr_data['dttm']

        class GraphForm(DateTimeWithNumRunsWithDagRunsForm):
            # Adds a layout selector on top of the date/run picker form.
            arrange = SelectField("Layout", choices=(
                ('LR', "Left->Right"),
                ('RL', "Right->Left"),
                ('TB', "Top->Bottom"),
                ('BT', "Bottom->Top"),
            ))
        form = GraphForm(data=dt_nr_dr_data)
        form.execution_date.choices = dt_nr_dr_data['dr_choices']
        task_instances = {
            ti.task_id: alchemy_to_dict(ti)
            for ti in dag.get_task_instances(dttm, dttm, session=session)}
        tasks = {
            t.task_id: {
                'dag_id': t.dag_id,
                'task_type': t.task_type,
            }
            for t in dag.tasks}
        if not tasks:
            flash("No tasks found", "error")
        session.commit()
        doc_md = markdown.markdown(dag.doc_md) if hasattr(dag, 'doc_md') and dag.doc_md else ''
        external_logs = conf.get('elasticsearch', 'frontend')
        return self.render(
            'airflow/graph.html',
            dag=dag,
            form=form,
            width=request.args.get('width', "100%"),
            height=request.args.get('height', "800"),
            execution_date=dttm.isoformat(),
            state_token=state_token(dt_nr_dr_data['dr_state']),
            doc_md=doc_md,
            arrange=arrange,
            operators=sorted({op.__class__ for op in dag.tasks}, key=lambda x: x.__name__),
            blur=blur,
            root=root or '',
            task_instances=task_instances,
            tasks=tasks,
            nodes=nodes,
            edges=edges,
            show_external_logs=bool(external_logs))
    @expose('/duration')
    @login_required
    @wwwutils.action_logging
    @provide_session
    def duration(self, session=None):
        """Render per-task duration charts for the last ``num_runs`` runs of
        a DAG: one raw-duration chart and one cumulative chart that also
        includes time spent in failed attempts (TaskFail rows).

        Query args: ``dag_id``, optional ``base_date``, ``num_runs``, ``root``.
        """
        default_dag_run = conf.getint('webserver', 'default_dag_run_display_number')
        dag_id = request.args.get('dag_id')
        dag = dagbag.get_dag(dag_id)
        base_date = request.args.get('base_date')
        num_runs = request.args.get('num_runs')
        num_runs = int(num_runs) if num_runs else default_dag_run
        if dag is None:
            flash('DAG "{0}" seems to be missing.'.format(dag_id), "error")
            return redirect('/admin/')
        if base_date:
            base_date = pendulum.parse(base_date)
        else:
            base_date = dag.latest_execution_date or timezone.utcnow()
        dates = dag.date_range(base_date, num=-abs(num_runs))
        min_date = dates[0] if dates else datetime(2000, 1, 1)
        root = request.args.get('root')
        if root:
            # Restrict the view to the sub-DAG upstream of ``root``.
            dag = dag.sub_dag(
                task_regex=root,
                include_upstream=True,
                include_downstream=False)
        chart_height = get_chart_height(dag)
        chart = nvd3.lineChart(
            name="lineChart", x_is_date=True, height=chart_height, width="1200")
        cum_chart = nvd3.lineChart(
            name="cumLineChart", x_is_date=True, height=chart_height, width="1200")
        y = defaultdict(list)
        x = defaultdict(list)
        cum_y = defaultdict(list)
        tis = dag.get_task_instances(
            start_date=min_date, end_date=base_date, session=session)
        TF = models.TaskFail
        ti_fails = (
            session
            .query(TF)
            .filter(
                TF.dag_id == dag.dag_id,
                TF.execution_date >= min_date,
                TF.execution_date <= base_date,
                TF.task_id.in_([t.task_id for t in dag.tasks]))
            .all()
        )
        # Sum durations of failed attempts keyed by (dag, task, execution
        # date) so the cumulative chart can add time lost to retries.
        fails_totals = defaultdict(int)
        for tf in ti_fails:
            dict_key = (tf.dag_id, tf.task_id, tf.execution_date)
            if tf.duration:
                fails_totals[dict_key] += tf.duration
        for ti in tis:
            if ti.duration:
                dttm = wwwutils.epoch(ti.execution_date)
                x[ti.task_id].append(dttm)
                y[ti.task_id].append(float(ti.duration))
                fails_dict_key = (ti.dag_id, ti.task_id, ti.execution_date)
                fails_total = fails_totals[fails_dict_key]
                cum_y[ti.task_id].append(float(ti.duration + fails_total))
        # determine the most relevant time unit for the set of task instance
        # durations for the DAG
        y_unit = infer_time_unit([d for t in y.values() for d in t])
        cum_y_unit = infer_time_unit([d for t in cum_y.values() for d in t])
        # update the y Axis on both charts to have the correct time units
        chart.create_y_axis('yAxis', format='.02f', custom_format=False,
                            label='Duration ({})'.format(y_unit))
        chart.axislist['yAxis']['axisLabelDistance'] = '40'
        cum_chart.create_y_axis('yAxis', format='.02f', custom_format=False,
                                label='Duration ({})'.format(cum_y_unit))
        cum_chart.axislist['yAxis']['axisLabelDistance'] = '40'
        for task in dag.tasks:
            if x[task.task_id]:
                chart.add_serie(name=task.task_id, x=x[task.task_id],
                                y=scale_time_units(y[task.task_id], y_unit))
                cum_chart.add_serie(name=task.task_id, x=x[task.task_id],
                                    y=scale_time_units(cum_y[task.task_id],
                                                       cum_y_unit))
        dates = sorted(list({ti.execution_date for ti in tis}))
        max_date = max([ti.execution_date for ti in tis]) if dates else None
        session.commit()
        form = DateTimeWithNumRunsForm(data={'base_date': max_date,
                                             'num_runs': num_runs})
        chart.buildcontent()
        cum_chart.buildcontent()
        # Splice a 'chartload' trigger just before the final '});' of the
        # generated nvd3 script so the frontend knows the chart is ready.
        s_index = cum_chart.htmlcontent.rfind('});')
        cum_chart.htmlcontent = (cum_chart.htmlcontent[:s_index] +
                                 "$(function() {$( document ).trigger('chartload') })" +
                                 cum_chart.htmlcontent[s_index:])
        return self.render(
            'airflow/duration_chart.html',
            dag=dag,
            demo_mode=conf.getboolean('webserver', 'demo_mode'),
            root=root,
            form=form,
            chart=chart.htmlcontent,
            cum_chart=cum_chart.htmlcontent
        )
@expose('/tries')
@login_required
@wwwutils.action_logging
@provide_session
def tries(self, session=None):
default_dag_run = conf.getint('webserver', 'default_dag_run_display_number')
dag_id = request.args.get('dag_id')
dag = dagbag.get_dag(dag_id)
base_date = request.args.get('base_date')
num_runs = request.args.get('num_runs')
num_runs = int(num_runs) if num_runs else default_dag_run
if base_date:
base_date = pendulum.parse(base_date)
else:
base_date = dag.latest_execution_date or timezone.utcnow()
dates = dag.date_range(base_date, num=-abs(num_runs))
min_date = dates[0] if dates else datetime(2000, 1, 1)
root = request.args.get('root')
if root:
dag = dag.sub_dag(
task_regex=root,
include_upstream=True,
include_downstream=False)
chart_height = get_chart_height(dag)
chart = nvd3.lineChart(
name="lineChart", x_is_date=True, y_axis_format='d', height=chart_height,
width="1200")
for task in dag.tasks:
y = []
x = []
for ti in task.get_task_instances(start_date=min_date,
end_date=base_date,
session=session):
dttm = wwwutils.epoch(ti.execution_date)
x.append(dttm)
y.append(ti.try_number)
if x:
chart.add_serie(name=task.task_id, x=x, y=y)
tis = dag.get_task_instances(
start_date=min_date, end_date=base_date, session=session)
tries = sorted(list({ti.try_number for ti in tis}))
max_date = max([ti.execution_date for ti in tis]) if tries else None
session.commit()
form = DateTimeWithNumRunsForm(data={'base_date': max_date,
'num_runs': num_runs})
chart.buildcontent()
return self.render(
'airflow/chart.html',
dag=dag,
demo_mode=conf.getboolean('webserver', 'demo_mode'),
root=root,
form=form,
chart=chart.htmlcontent
)
@expose('/landing_times')
@login_required
@wwwutils.action_logging
@provide_session
def landing_times(self, session=None):
default_dag_run = conf.getint('webserver', 'default_dag_run_display_number')
dag_id = request.args.get('dag_id')
dag = dagbag.get_dag(dag_id)
base_date = request.args.get('base_date')
num_runs = request.args.get('num_runs')
num_runs = int(num_runs) if num_runs else default_dag_run
if base_date:
base_date = pendulum.parse(base_date)
else:
base_date = dag.latest_execution_date or timezone.utcnow()
dates = dag.date_range(base_date, num=-abs(num_runs))
min_date = dates[0] if dates else datetime(2000, 1, 1)
root = request.args.get('root')
if root:
dag = dag.sub_dag(
task_regex=root,
include_upstream=True,
include_downstream=False)
chart_height = get_chart_height(dag)
chart = nvd3.lineChart(
name="lineChart", x_is_date=True, height=chart_height, width="1200")
y = {}
x = {}
for task in dag.tasks:
y[task.task_id] = []
x[task.task_id] = []
for ti in task.get_task_instances(start_date=min_date,
end_date=base_date,
session=session):
if ti.end_date:
ts = ti.execution_date
following_schedule = dag.following_schedule(ts)
if dag.schedule_interval and following_schedule:
ts = following_schedule
dttm = wwwutils.epoch(ti.execution_date)
secs = (ti.end_date - ts).total_seconds()
x[ti.task_id].append(dttm)
y[ti.task_id].append(secs)
# determine the most relevant time unit for the set of landing times
# for the DAG
y_unit = infer_time_unit([d for t in y.values() for d in t])
# update the y Axis to have the correct time units
chart.create_y_axis('yAxis', format='.02f', custom_format=False,
label='Landing Time ({})'.format(y_unit))
chart.axislist['yAxis']['axisLabelDistance'] = '40'
for task in dag.tasks:
if x[task.task_id]:
chart.add_serie(name=task.task_id, x=x[task.task_id],
y=scale_time_units(y[task.task_id], y_unit))
tis = dag.get_task_instances(
start_date=min_date, end_date=base_date, session=session)
dates = sorted(list({ti.execution_date for ti in tis}))
max_date = max([ti.execution_date for ti in tis]) if dates else None
form = DateTimeWithNumRunsForm(data={'base_date': max_date,
'num_runs': num_runs})
chart.buildcontent()
return self.render(
'airflow/chart.html',
dag=dag,
chart=chart.htmlcontent,
height=str(chart_height + 100) + "px",
demo_mode=conf.getboolean('webserver', 'demo_mode'),
root=root,
form=form,
)
@expose('/paused', methods=['POST'])
@login_required
@wwwutils.action_logging
@provide_session
def paused(self, session=None):
dag_id = request.values.get('dag_id')
is_paused = True if request.args.get('is_paused') == 'false' else False
models.DagModel.get_dagmodel(dag_id).set_is_paused(is_paused=is_paused)
return "OK"
@expose('/refresh', methods=['POST'])
@login_required
@wwwutils.action_logging
@provide_session
def refresh(self, session=None):
# TODO: Is this method still needed after AIRFLOW-3561?
dm = models.DagModel
dag_id = request.values.get('dag_id')
orm_dag = session.query(dm).filter(dm.dag_id == dag_id).first()
if orm_dag:
orm_dag.last_expired = timezone.utcnow()
session.merge(orm_dag)
session.commit()
flash("DAG [{}] is now fresh as a daisy".format(dag_id))
return redirect(request.referrer)
@expose('/refresh_all', methods=['POST'])
@login_required
@wwwutils.action_logging
def refresh_all(self):
# TODO: Is this method still needed after AIRFLOW-3561?
flash("All DAGs are now up to date")
return redirect('/')
    @expose('/gantt')
    @login_required
    @wwwutils.action_logging
    @provide_session
    def gantt(self, session=None):
        """Render a Gantt chart of task instance start/end times for a single
        DAG run, including failed attempts recorded in TaskFail."""
        dag_id = request.args.get('dag_id')
        dag = dagbag.get_dag(dag_id)
        demo_mode = conf.getboolean('webserver', 'demo_mode')
        root = request.args.get('root')
        if root:
            # Restrict the view to the sub-DAG upstream of ``root``.
            dag = dag.sub_dag(
                task_regex=root,
                include_upstream=True,
                include_downstream=False)
        dt_nr_dr_data = get_date_time_num_runs_dag_runs_form_data(request, session, dag)
        dttm = dt_nr_dr_data['dttm']
        form = DateTimeWithNumRunsWithDagRunsForm(data=dt_nr_dr_data)
        form.execution_date.choices = dt_nr_dr_data['dr_choices']
        # Only task instances that actually started and have a state can be
        # drawn as bars.
        tis = [
            ti for ti in dag.get_task_instances(dttm, dttm, session=session)
            if ti.start_date and ti.state]
        tis = sorted(tis, key=lambda ti: ti.start_date)
        TF = models.TaskFail
        ti_fails = list(itertools.chain(*[(
            session
            .query(TF)
            .filter(TF.dag_id == ti.dag_id,
                    TF.task_id == ti.task_id,
                    TF.execution_date == ti.execution_date)
            .all()
        ) for ti in tis]))
        # determine bars to show in the gantt chart
        gantt_bar_items = []
        for ti in tis:
            end_date = ti.end_date or timezone.utcnow()
            # try_number counts the attempt in progress, so subtract one
            # for instances that are no longer running.
            try_count = ti.try_number
            if ti.state != State.RUNNING:
                try_count = ti.try_number - 1
            gantt_bar_items.append((ti.task_id, ti.start_date, end_date, ti.state, try_count))
        tf_count = 0
        try_count = 1
        prev_task_id = ""
        # ti_fails arrives grouped per task instance (it was queried per ti
        # above), so a change of task_id resets the per-task try counter.
        for tf in ti_fails:
            end_date = tf.end_date or timezone.utcnow()
            if tf_count != 0 and tf.task_id == prev_task_id:
                try_count = try_count + 1
            else:
                try_count = 1
            prev_task_id = tf.task_id
            gantt_bar_items.append((tf.task_id, tf.start_date, end_date, State.FAILED, try_count))
            tf_count = tf_count + 1
        tasks = []
        for gantt_bar_item in gantt_bar_items:
            task_id = gantt_bar_item[0]
            start_date = gantt_bar_item[1]
            end_date = gantt_bar_item[2]
            state = gantt_bar_item[3]
            try_count = gantt_bar_item[4]
            tasks.append({
                'startDate': wwwutils.epoch(start_date),
                'endDate': wwwutils.epoch(end_date),
                # [:-4] trims the trailing sub-millisecond digits of the ISO
                # timestamp for display.
                'isoStart': start_date.isoformat()[:-4],
                'isoEnd': end_date.isoformat()[:-4],
                'taskName': task_id,
                'duration': (end_date - start_date).total_seconds(),
                'status': state,
                'executionDate': dttm.isoformat(),
                'try_number': try_count,
            })
        states = {task['status']: task['status'] for task in tasks}
        data = {
            'taskNames': [ti.task_id for ti in tis],
            'tasks': tasks,
            'taskStatus': states,
            'height': len(tis) * 25 + 25,
        }
        session.commit()
        return self.render(
            'airflow/gantt.html',
            dag=dag,
            execution_date=dttm.isoformat(),
            form=form,
            data=data,
            base_date='',
            demo_mode=demo_mode,
            root=root,
        )
@expose('/object/task_instances')
@login_required
@wwwutils.action_logging
@provide_session
def task_instances(self, session=None):
dag_id = request.args.get('dag_id')
dag = dagbag.get_dag(dag_id)
dttm = request.args.get('execution_date')
if dttm:
dttm = pendulum.parse(dttm)
else:
return "Error: Invalid execution_date"
task_instances = {
ti.task_id: alchemy_to_dict(ti)
for ti in dag.get_task_instances(dttm, dttm, session=session)}
return json.dumps(task_instances)
    @expose('/variables/<form>', methods=["GET", "POST"])
    @login_required
    @wwwutils.action_logging
    def variables(self, form):
        """GET renders ``airflow/variables/<form>.html``; POST stores the
        posted JSON body as a Variable keyed by the form name."""
        try:
            if request.method == 'POST':
                data = request.json
                if data:
                    with create_session() as session:
                        var = models.Variable(key=form, val=json.dumps(data))
                        session.add(var)
                        session.commit()
                return ""
            else:
                return self.render(
                    'airflow/variables/{}.html'.format(form)
                )
        except Exception:
            # prevent XSS
            form = escape(form)
            return ("Error: form airflow/variables/{}.html "
                    "not found.").format(form), 404
@expose('/varimport', methods=['POST'])
@login_required
@wwwutils.action_logging
def varimport(self):
try:
d = json.load(UTF8_READER(request.files['file']))
except Exception as e:
flash("Missing file or syntax error: {}.".format(e))
else:
suc_count = fail_count = 0
for k, v in d.items():
try:
models.Variable.set(k, v, serialize_json=not isinstance(v, six.string_types))
except Exception as e:
logging.info('Variable import failed: {}'.format(repr(e)))
fail_count += 1
else:
suc_count += 1
flash("{} variable(s) successfully updated.".format(suc_count), 'info')
if fail_count:
flash(
"{} variables(s) failed to be updated.".format(fail_count), 'error')
return redirect('/admin/variable')
class HomeView(AirflowViewMixin, AdminIndexView):
    """Landing page: paginated, filterable list of DAGs plus flash banners
    for DAG and plugin import errors."""
    @expose("/")
    @login_required
    @provide_session
    def index(self, session=None):
        DM = models.DagModel
        # restrict the dags shown if filter_by_owner and current user is not superuser
        do_filter = FILTER_BY_OWNER and (not current_user.is_superuser())
        owner_mode = conf.get('webserver', 'OWNER_MODE').strip().lower()
        hide_paused_dags_by_default = conf.getboolean('webserver',
                                                      'hide_paused_dags_by_default')
        show_paused_arg = request.args.get('showPaused', 'None')

        def get_int_arg(value, default=0):
            # Parse ``value`` as an int, falling back to ``default``.
            try:
                return int(value)
            except ValueError:
                return default
        arg_current_page = request.args.get('page', '0')
        arg_search_query = request.args.get('search', None)
        dags_per_page = PAGE_SIZE
        current_page = get_int_arg(arg_current_page, default=0)
        if show_paused_arg.strip().lower() == 'false':
            hide_paused = True
        elif show_paused_arg.strip().lower() == 'true':
            hide_paused = False
        else:
            # No explicit request arg: fall back to the configured default.
            hide_paused = hide_paused_dags_by_default
        # read orm_dags from the db
        query = session.query(DM)
        if do_filter and owner_mode == 'ldapgroup':
            query = query.filter(
                ~DM.is_subdag,
                DM.is_active,
                DM.owners.in_(current_user.ldap_groups)
            )
        elif do_filter and owner_mode == 'user':
            query = query.filter(
                ~DM.is_subdag, DM.is_active,
                DM.owners == current_user.user.username
            )
        else:
            query = query.filter(
                ~DM.is_subdag, DM.is_active
            )
        # optionally filter out "paused" dags
        if hide_paused:
            query = query.filter(~DM.is_paused)
        if arg_search_query:
            # NOTE(review): this is an exact case-insensitive match, not a
            # substring search -- confirm that is intended.
            query = query.filter(sqla.func.lower(DM.dag_id) == arg_search_query.lower())
        query = query.order_by(DM.dag_id)
        start = current_page * dags_per_page
        end = start + dags_per_page
        dags = query.offset(start).limit(dags_per_page).all()
        import_errors = session.query(errors.ImportError).all()
        for ie in import_errors:
            flash(
                "Broken DAG: [{ie.filename}] {ie.stacktrace}".format(ie=ie),
                "error")
        from airflow.plugins_manager import import_errors as plugin_import_errors
        for filename, stacktrace in plugin_import_errors.items():
            # Bug fix: the format string previously hard-coded "(unknown)"
            # and silently ignored the ``filename`` argument passed below.
            flash(
                "Broken plugin: [{filename}] {stacktrace}".format(
                    stacktrace=stacktrace,
                    filename=filename),
                "error")
        num_of_all_dags = query.count()
        num_of_pages = int(math.ceil(num_of_all_dags / float(dags_per_page)))
        # Feed both dag ids and owners to the search box autocomplete.
        auto_complete_data = set()
        for row in query.with_entities(DM.dag_id, DM.owners):
            auto_complete_data.add(row.dag_id)
            auto_complete_data.add(row.owners)
        return self.render(
            'airflow/dags.html',
            dags=dags,
            hide_paused=hide_paused,
            current_page=current_page,
            search_query=arg_search_query if arg_search_query else '',
            page_size=dags_per_page,
            num_of_pages=num_of_pages,
            num_dag_from=min(start + 1, num_of_all_dags),
            num_dag_to=min(end, num_of_all_dags),
            num_of_all_dags=num_of_all_dags,
            paging=wwwutils.generate_pages(current_page, num_of_pages,
                                           search=arg_search_query,
                                           showPaused=not hide_paused),
            auto_complete_data=auto_complete_data)
class QueryView(wwwutils.DataProfilingMixin, AirflowViewMixin, BaseView):
    """Ad-hoc SQL query UI: run a row-limited query against any connection
    that can produce a hook, and render or download the result."""
    @expose('/', methods=['POST', 'GET'])
    @wwwutils.gzipped
    @provide_session
    def query(self, session=None):
        dbs = session.query(Connection).order_by(Connection.conn_id).all()
        session.expunge_all()
        db_choices = []
        # Only offer connections whose hook can actually be constructed.
        for db in dbs:
            try:
                if db.get_hook():
                    db_choices.append((db.conn_id, db.conn_id))
            except Exception:
                pass
        conn_id_str = request.form.get('conn_id')
        csv = request.form.get('csv') == "true"
        sql = request.form.get('sql')
        class QueryForm(Form):
            # Built per-request so the connection choices stay current.
            conn_id = SelectField("Layout", choices=db_choices)
            sql = TextAreaField("SQL", widget=wwwutils.AceEditorWidget())
        data = {
            'conn_id': conn_id_str,
            'sql': sql,
        }
        results = None
        has_data = False
        error = False
        if conn_id_str and request.method == 'POST':
            db = [db for db in dbs if db.conn_id == conn_id_str][0]
            try:
                hook = db.get_hook()
                # limit_sql wraps the query so at most QUERY_LIMIT rows come back.
                df = hook.get_pandas_df(wwwutils.limit_sql(sql, QUERY_LIMIT, conn_type=db.conn_type))
                # df = hook.get_pandas_df(sql)
                has_data = len(df) > 0
                df = df.fillna('')
                results = df.to_html(
                    classes=[
                        'table', 'table-bordered', 'table-striped', 'no-wrap'],
                    index=False,
                    na_rep='',
                ) if has_data else ''
            except Exception as e:
                flash(str(e), 'error')
                error = True
            if has_data and len(df) == QUERY_LIMIT:
                flash(
                    "Query output truncated at " + str(QUERY_LIMIT) +
                    " rows", 'info')
            if not has_data and error:
                flash('No data', 'error')
            if csv:
                # NOTE(review): if the query raised above, ``df`` is unbound
                # here and this raises NameError -- confirm intended behavior.
                return Response(
                    response=df.to_csv(index=False),
                    status=200,
                    mimetype="application/text")
        form = QueryForm(request.form, data=data)
        session.commit()
        return self.render(
            'airflow/query.html', form=form,
            title="Ad Hoc Query",
            results=results or '',
            has_data=has_data)
class AirflowModelView(AirflowViewMixin, ModelView):
    """Base flask-admin ModelView using Airflow's templates and page size."""
    list_template = 'airflow/model_list.html'
    edit_template = 'airflow/model_edit.html'
    create_template = 'airflow/model_create.html'
    column_display_actions = True
    page_size = PAGE_SIZE
class ModelViewOnly(wwwutils.LoginMixin, AirflowModelView):
    """
    Read-only variant of the base ModelView: browsing only, with
    create/edit/delete disabled.
    """
    named_filter_urls = True
    can_create = False
    can_edit = False
    can_delete = False
    column_display_pk = True
class PoolModelView(wwwutils.SuperUserMixin, AirflowModelView):
    """Admin CRUD over worker pools; the default pool cannot be deleted."""
    column_list = ('pool', 'slots', 'used_slots', 'queued_slots')
    column_formatters = dict(
        pool=pool_link, used_slots=fused_slots, queued_slots=fqueued_slots)
    named_filter_urls = True
    form_args = {
        'pool': {
            'validators': [
                validators.DataRequired(),
            ]
        }
    }
    def delete_model(self, model):
        # Refuse to delete the default pool; flash an error instead.
        if model.pool == models.Pool.DEFAULT_POOL_NAME:
            flash("default_pool cannot be deleted", 'error')
            return False
        return super(PoolModelView, self).delete_model(model)
class SlaMissModelView(wwwutils.SuperUserMixin, ModelViewOnly):
    """Read-only browser over SLA-miss records."""
    verbose_name_plural = "SLA misses"
    verbose_name = "SLA miss"
    column_list = (
        'dag_id', 'task_id', 'execution_date', 'email_sent', 'timestamp')
    column_formatters = dict(
        task_id=task_instance_link,
        execution_date=datetime_f,
        timestamp=datetime_f,
        dag_id=dag_link)
    named_filter_urls = True
    column_searchable_list = ('dag_id', 'task_id',)
    column_filters = (
        'dag_id', 'task_id', 'email_sent', 'timestamp', 'execution_date')
    filter_converter = wwwutils.UtcFilterConverter()
    # These columns are system-populated and must not be edited by hand.
    form_widget_args = {
        'email_sent': {'disabled': True},
        'timestamp': {'disabled': True},
    }
@provide_session
def _connection_ids(session=None):
    """Return (conn_id, conn_id) choice tuples for every distinct connection."""
    distinct_ids = session.query(Connection.conn_id).group_by(Connection.conn_id)
    choices = []
    for row in distinct_ids:
        choices.append((row.conn_id, row.conn_id))
    return choices
class ChartModelView(wwwutils.DataProfilingMixin, AirflowModelView):
    """Admin CRUD over Chart models (ad-hoc SQL-backed charts)."""
    verbose_name = "chart"
    verbose_name_plural = "charts"
    form_columns = (
        'label',
        'owner',
        'conn_id',
        'chart_type',
        'show_datatable',
        'x_is_date',
        'y_log_scale',
        'show_sql',
        'height',
        'sql_layout',
        'sql',
        'default_params',
    )
    column_list = (
        'label',
        'conn_id',
        'chart_type',
        'owner',
        'last_modified',
    )
    column_sortable_list = (
        'label',
        'conn_id',
        'chart_type',
        ('owner', 'owner.username'),
        'last_modified',
    )
    column_formatters = dict(label=label_link, last_modified=datetime_f)
    column_default_sort = ('last_modified', True)
    create_template = 'airflow/chart/create.html'
    edit_template = 'airflow/chart/edit.html'
    column_filters = ('label', 'owner.username', 'conn_id')
    column_searchable_list = ('owner.username', 'label', 'sql')
    column_descriptions = {
        'label': "Can include {{ templated_fields }} and {{ macros }}",
        'chart_type': "The type of chart to be displayed",
        'sql': "Can include {{ templated_fields }} and {{ macros }}.",
        'height': "Height of the chart, in pixels.",
        'conn_id': "Source database to run the query against",
        'x_is_date': (
            "Whether the X axis should be casted as a date field. Expect most "
            "intelligible date formats to get casted properly."
        ),
        'owner': (
            "The chart's owner, mostly used for reference and filtering in "
            "the list view."
        ),
        'show_datatable':
            "Whether to display an interactive data table under the chart.",
        'default_params': (
            'A dictionary of {"key": "values",} that define what the '
            'templated fields (parameters) values should be by default. '
            'To be valid, it needs to "eval" as a Python dict. '
            'The key values will show up in the url\'s querystring '
            'and can be altered there.'
        ),
        'show_sql': "Whether to display the SQL statement as a collapsible "
                    "section in the chart page.",
        'y_log_scale': "Whether to use a log scale for the Y axis.",
        'sql_layout': (
            "Defines the layout of the SQL that the application should "
            "expect. Depending on the tables you are sourcing from, it may "
            "make more sense to pivot / unpivot the metrics."
        ),
    }
    column_labels = {
        'sql': "SQL",
        'height': "Chart Height",
        'sql_layout': "SQL Layout",
        'show_sql': "Display the SQL Statement",
        'default_params': "Default Parameters",
    }
    form_choices = {
        'chart_type': [
            ('line', 'Line Chart'),
            ('spline', 'Spline Chart'),
            ('bar', 'Bar Chart'),
            ('column', 'Column Chart'),
            ('area', 'Overlapping Area Chart'),
            ('stacked_area', 'Stacked Area Chart'),
            ('percent_area', 'Percent Area Chart'),
            ('datatable', 'No chart, data table only'),
        ],
        'sql_layout': [
            ('series', 'SELECT series, x, y FROM ...'),
            ('columns', 'SELECT x, y (series 1), y (series 2), ... FROM ...'),
        ],
        # Evaluated once at class-definition (import) time.
        'conn_id': _connection_ids()
    }
    def on_model_change(self, form, model, is_created=True):
        # Track revision count, attribute ownership, and bump the
        # modification timestamp on every save.
        if model.iteration_no is None:
            model.iteration_no = 0
        else:
            model.iteration_no += 1
        if not model.user_id and current_user and hasattr(current_user, 'id'):
            model.user_id = current_user.id
        model.last_modified = timezone.utcnow()
# Maps a Chart model's chart_type to the nvd3 chart class used to render it.
chart_mapping = {
    'line': 'lineChart',
    'spline': 'lineChart',
    'bar': 'multiBarChart',
    'column': 'multiBarChart',
    'area': 'stackedAreaChart',
    'stacked_area': 'stackedAreaChart',
    'percent_area': 'stackedAreaChart',
    'datatable': 'datatable',
}
class KnownEventView(wwwutils.DataProfilingMixin, AirflowModelView):
    """Admin CRUD over known events (labelled date ranges)."""
    verbose_name = "known event"
    verbose_name_plural = "known events"
    form_columns = (
        'label',
        'event_type',
        'start_date',
        'end_date',
        'reported_by',
        'description',
    )
    form_args = {
        'label': {
            'validators': [
                validators.DataRequired(),
            ],
        },
        'event_type': {
            'validators': [
                validators.DataRequired(),
            ],
        },
        'start_date': {
            'validators': [
                validators.DataRequired(),
            ],
            'filters': [
                parse_datetime_f,
            ],
        },
        'end_date': {
            'validators': [
                validators.DataRequired(),
                # End must not precede start.
                GreaterEqualThan(fieldname='start_date'),
            ],
            'filters': [
                parse_datetime_f,
            ]
        },
        'reported_by': {
            'validators': [
                validators.DataRequired(),
            ],
        }
    }
    column_list = (
        'label',
        'event_type',
        'start_date',
        'end_date',
        'reported_by',
    )
    column_default_sort = ("start_date", True)
    column_sortable_list = (
        'label',
        # todo: yes this has a spelling error
        ('event_type', 'event_type.know_event_type'),
        'start_date',
        'end_date',
        ('reported_by', 'reported_by.username'),
    )
    filter_converter = wwwutils.UtcFilterConverter()
    form_overrides = dict(start_date=DateTimeField, end_date=DateTimeField)
class KnownEventTypeView(wwwutils.DataProfilingMixin, AirflowModelView):
    """Plain CRUD view over known event types; default ModelView behavior."""
    pass
# NOTE: For debugging / troubleshooting
# mv = KnownEventTypeView(
# models.KnownEventType,
# Session, name="Known Event Types", category="Manage")
# admin.add_view(mv)
# class DagPickleView(SuperUserMixin, ModelView):
# pass
# mv = DagPickleView(
# models.DagPickle,
# Session, name="Pickles", category="Manage")
# admin.add_view(mv)
class VariableView(wwwutils.DataProfilingMixin, AirflowModelView):
    """Admin CRUD over Airflow Variables, masking sensitive-looking values."""
    verbose_name = "Variable"
    verbose_name_plural = "Variables"
    list_template = 'airflow/variable_list.html'

    def hidden_field_formatter(view, context, model, name):
        # Mask values whose key matches the configured sensitive patterns.
        if wwwutils.should_hide_value_for_key(model.key):
            return Markup('*' * 8)
        val = getattr(model, name)
        if val:
            return val
        else:
            return Markup('<span class="label label-danger">Invalid</span>')
    form_columns = (
        'key',
        'val',
    )
    column_list = ('key', 'val', 'is_encrypted',)
    column_filters = ('key', 'val')
    column_searchable_list = ('key', 'val', 'is_encrypted',)
    column_default_sort = ('key', False)
    form_widget_args = {
        'is_encrypted': {'disabled': True},
        'val': {
            'rows': 20,
        }
    }
    form_args = {
        'key': {
            # Consistency fix: validators were declared with a set literal;
            # every other view in this module uses a list.
            'validators': [
                validators.DataRequired(),
            ],
        },
    }
    column_sortable_list = (
        'key',
        'val',
        'is_encrypted',
    )
    column_formatters = {
        'val': hidden_field_formatter,
    }

    # Default flask-admin export functionality doesn't handle serialized json
    @action('varexport', 'Export', None)
    @provide_session
    def action_varexport(self, ids, session=None):
        """Download the selected Variables as a single JSON attachment."""
        V = models.Variable
        qry = session.query(V).filter(V.id.in_(ids)).all()
        var_dict = {}
        d = json.JSONDecoder()
        for var in qry:
            # Preserve structured values; fall back to the raw string when
            # the stored value is not valid JSON.
            val = None
            try:
                val = d.decode(var.val)
            except Exception:
                val = var.val
            var_dict[var.key] = val
        response = make_response(json.dumps(var_dict, sort_keys=True, indent=4))
        response.headers["Content-Disposition"] = "attachment; filename=variables.json"
        return response

    def on_form_prefill(self, form, id):
        # Never echo a sensitive value back into the edit form.
        if wwwutils.should_hide_value_for_key(form.key.data):
            form.val.data = '*' * 8
class XComView(wwwutils.SuperUserMixin, AirflowModelView):
    """Admin CRUD over XCom records; values are serialized before saving."""
    verbose_name = "XCom"
    verbose_name_plural = "XComs"
    form_columns = (
        'key',
        'value',
        'execution_date',
        'task_id',
        'dag_id',
    )
    form_extra_fields = {
        'value': StringField('Value'),
    }
    form_args = {
        'execution_date': {
            'filters': [
                parse_datetime_f,
            ]
        }
    }
    column_filters = ('key', 'timestamp', 'execution_date', 'task_id', 'dag_id')
    column_searchable_list = ('key', 'timestamp', 'execution_date', 'task_id', 'dag_id')
    filter_converter = wwwutils.UtcFilterConverter()
    form_overrides = dict(execution_date=DateTimeField)
    def on_model_change(self, form, model, is_created):
        # Serialize the value the way the XCom machinery expects: pickle when
        # core.enable_xcom_pickling is set, JSON-encoded bytes otherwise.
        enable_pickling = configuration.getboolean('core', 'enable_xcom_pickling')
        if enable_pickling:
            model.value = pickle.dumps(model.value)
        else:
            try:
                model.value = json.dumps(model.value).encode('UTF-8')
            except ValueError:
                log = LoggingMixin().log
                log.error("Could not serialize the XCOM value into JSON. "
                          "If you are using pickles instead of JSON "
                          "for XCOM, then you need to enable pickle "
                          "support for XCOM in your airflow config.")
                raise
class JobModelView(ModelViewOnly):
    """Read-only browser over scheduler/backfill/worker job records."""
    verbose_name_plural = "jobs"
    verbose_name = "job"
    column_display_actions = False
    column_default_sort = ('start_date', True)
    column_filters = (
        'job_type', 'dag_id', 'state',
        'unixname', 'hostname', 'start_date', 'end_date', 'latest_heartbeat')
    column_formatters = dict(
        start_date=datetime_f,
        end_date=datetime_f,
        hostname=nobr_f,
        state=state_f,
        latest_heartbeat=datetime_f)
    filter_converter = wwwutils.UtcFilterConverter()
class DagRunModelView(ModelViewOnly):
    """Browse/create/edit DAG runs; state edits and bulk actions cascade the
    new state down to the run's task instances."""
    verbose_name_plural = "DAG Runs"
    can_edit = True
    can_create = True
    column_editable_list = ('state',)
    verbose_name = "dag run"
    column_default_sort = ('execution_date', True)
    form_choices = {
        '_state': [
            ('success', 'success'),
            ('running', 'running'),
            ('failed', 'failed'),
        ],
    }
    form_args = {
        'dag_id': {
            'validators': [
                validators.DataRequired(),
            ]
        },
        'execution_date': {
            'filters': [
                parse_datetime_f,
            ]
        }
    }
    column_list = (
        'state', 'dag_id', 'execution_date', 'run_id', 'external_trigger')
    column_filters = column_list
    filter_converter = wwwutils.UtcFilterConverter()
    column_searchable_list = ('dag_id', 'state', 'run_id')
    column_formatters = dict(
        execution_date=datetime_f,
        state=state_f,
        start_date=datetime_f,
        dag_id=dag_link,
        run_id=dag_run_link
    )
    form_overrides = dict(execution_date=DateTimeField)
    @action('new_delete', "Delete", "Are you sure you want to delete selected records?")
    @provide_session
    def action_new_delete(self, ids, session=None):
        # Bulk-delete the selected DagRun rows.
        deleted = set(session.query(models.DagRun)
                      .filter(models.DagRun.id.in_(ids))
                      .all())
        session.query(models.DagRun) \
            .filter(models.DagRun.id.in_(ids)) \
            .delete(synchronize_session='fetch')
        session.commit()
        # NOTE(review): dirty_ids is collected but never used -- confirm
        # whether follow-up bookkeeping was intended here.
        dirty_ids = []
        for row in deleted:
            dirty_ids.append(row.dag_id)
    @action('set_running', "Set state to 'running'", None)
    @provide_session
    def action_set_running(self, ids, session=None):
        """Bulk-set the selected DAG runs to RUNNING."""
        try:
            DR = models.DagRun
            count = 0
            dirty_ids = []
            for dr in session.query(DR).filter(DR.id.in_(ids)).all():
                dirty_ids.append(dr.dag_id)
                count += 1
                dr.state = State.RUNNING
                dr.start_date = timezone.utcnow()
            flash(
                "{count} dag runs were set to running".format(**locals()))
        except Exception as ex:
            if not self.handle_view_exception(ex):
                raise Exception("Ooops")
            flash('Failed to set state', 'error')
    @action('set_failed', "Set state to 'failed'",
            "All running task instances would also be marked as failed, are you sure?")
    @provide_session
    def action_set_failed(self, ids, session=None):
        """Bulk-fail the selected DAG runs, cascading to task instances."""
        try:
            DR = models.DagRun
            count = 0
            dirty_ids = []
            altered_tis = []
            for dr in session.query(DR).filter(DR.id.in_(ids)).all():
                dirty_ids.append(dr.dag_id)
                count += 1
                altered_tis += \
                    set_dag_run_state_to_failed(dagbag.get_dag(dr.dag_id),
                                                dr.execution_date,
                                                commit=True,
                                                session=session)
            altered_ti_count = len(altered_tis)
            flash(
                "{count} dag runs and {altered_ti_count} task instances "
                "were set to failed".format(**locals()))
        except Exception as ex:
            if not self.handle_view_exception(ex):
                raise Exception("Ooops")
            flash('Failed to set state', 'error')
    @action('set_success', "Set state to 'success'",
            "All task instances would also be marked as success, are you sure?")
    @provide_session
    def action_set_success(self, ids, session=None):
        """Bulk-succeed the selected DAG runs, cascading to task instances."""
        try:
            DR = models.DagRun
            count = 0
            dirty_ids = []
            altered_tis = []
            for dr in session.query(DR).filter(DR.id.in_(ids)).all():
                dirty_ids.append(dr.dag_id)
                count += 1
                altered_tis += \
                    set_dag_run_state_to_success(dagbag.get_dag(dr.dag_id),
                                                 dr.execution_date,
                                                 commit=True,
                                                 session=session)
            altered_ti_count = len(altered_tis)
            flash(
                "{count} dag runs and {altered_ti_count} task instances "
                "were set to success".format(**locals()))
        except Exception as ex:
            if not self.handle_view_exception(ex):
                raise Exception("Ooops")
            flash('Failed to set state', 'error')
    # Called after editing DagRun model in the UI.
    @provide_session
    def after_model_change(self, form, dagrun, is_created, session=None):
        # Propagate a state change made in the edit form down to the run's
        # task instances.
        altered_tis = []
        if dagrun.state == State.SUCCESS:
            altered_tis = set_dag_run_state_to_success(
                dagbag.get_dag(dagrun.dag_id),
                dagrun.execution_date,
                commit=True,
                session=session)
        elif dagrun.state == State.FAILED:
            altered_tis = set_dag_run_state_to_failed(
                dagbag.get_dag(dagrun.dag_id),
                dagrun.execution_date,
                commit=True,
                session=session)
        elif dagrun.state == State.RUNNING:
            altered_tis = set_dag_run_state_to_running(
                dagbag.get_dag(dagrun.dag_id),
                dagrun.execution_date,
                commit=True,
                session=session)
        altered_ti_count = len(altered_tis)
        flash(
            "1 dag run and {altered_ti_count} task instances "
            "were set to '{dagrun.state}'".format(**locals()))
class LogModelView(ModelViewOnly):
    """Read-only browser over the audit log table."""
    verbose_name_plural = "logs"
    verbose_name = "log"
    column_display_actions = False
    column_default_sort = ('dttm', True)
    column_filters = ('dag_id', 'task_id', 'execution_date', 'extra')
    filter_converter = wwwutils.UtcFilterConverter()
    column_formatters = dict(
        dttm=datetime_f, execution_date=datetime_f, dag_id=dag_link)
class TaskInstanceModelView(ModelViewOnly):
    """Browser over task instances with bulk state-change and clear actions."""
    verbose_name_plural = "task instances"
    verbose_name = "task instance"
    column_filters = (
        'state', 'dag_id', 'task_id', 'execution_date', 'hostname',
        'queue', 'pool', 'operator', 'start_date', 'end_date')
    filter_converter = wwwutils.UtcFilterConverter()
    named_filter_urls = True
    column_formatters = dict(
        log_url=log_url_formatter,
        task_id=task_instance_link,
        hostname=nobr_f,
        state=state_f,
        execution_date=datetime_f,
        start_date=datetime_f,
        end_date=datetime_f,
        queued_dttm=datetime_f,
        dag_id=dag_link,
        run_id=dag_run_link,
        duration=duration_f)
    column_searchable_list = ('dag_id', 'task_id', 'state')
    column_default_sort = ('job_id', True)
    form_choices = {
        'state': [
            ('success', 'success'),
            ('running', 'running'),
            ('failed', 'failed'),
        ],
    }
    column_list = (
        'state', 'dag_id', 'task_id', 'execution_date', 'operator',
        'start_date', 'end_date', 'duration', 'job_id', 'hostname',
        'unixname', 'priority_weight', 'queue', 'queued_dttm', 'try_number',
        'pool', 'log_url')
    page_size = PAGE_SIZE
@action('set_running', "Set state to 'running'", None)
def action_set_running(self, ids):
self.set_task_instance_state(ids, State.RUNNING)
@action('set_failed', "Set state to 'failed'", None)
def action_set_failed(self, ids):
self.set_task_instance_state(ids, State.FAILED)
@action('set_success', "Set state to 'success'", None)
def action_set_success(self, ids):
self.set_task_instance_state(ids, State.SUCCESS)
@action('set_retry', "Set state to 'up_for_retry'", None)
def action_set_retry(self, ids):
self.set_task_instance_state(ids, State.UP_FOR_RETRY)
@provide_session
@action('clear',
lazy_gettext('Clear'),
lazy_gettext(
'Are you sure you want to clear the state of the selected task instance(s)'
' and set their dagruns to the running state?'))
def action_clear(self, ids, session=None):
try:
TI = models.TaskInstance
dag_to_task_details = {}
dag_to_tis = {}
# Collect dags upfront as dagbag.get_dag() will reset the session
for id_str in ids:
task_id, dag_id, execution_date = iterdecode(id_str)
dag = dagbag.get_dag(dag_id)
task_details = dag_to_task_details.setdefault(dag, [])
task_details.append((task_id, execution_date))
for dag, task_details in dag_to_task_details.items():
for task_id, execution_date in task_details:
execution_date = parse_execution_date(execution_date)
ti = session.query(TI).filter(TI.task_id == task_id,
TI.dag_id == dag.dag_id,
TI.execution_date == execution_date).one()
tis = dag_to_tis.setdefault(dag, [])
tis.append(ti)
for dag, tis in dag_to_tis.items():
models.clear_task_instances(tis, session=session, dag=dag)
session.commit()
flash("{0} task instances have been cleared".format(len(ids)))
except Exception as ex:
if not self.handle_view_exception(ex):
raise Exception("Ooops")
flash('Failed to clear task instances', 'error')
@provide_session
def set_task_instance_state(self, ids, target_state, session=None):
try:
TI = models.TaskInstance
count = len(ids)
for id in ids:
task_id, dag_id, execution_date = iterdecode(id)
execution_date = parse_execution_date(execution_date)
ti = session.query(TI).filter(TI.task_id == task_id,
TI.dag_id == dag_id,
TI.execution_date == execution_date).one()
ti.state = target_state
session.commit()
flash(
"{count} task instances were set to '{target_state}'".format(**locals()))
except Exception as ex:
if not self.handle_view_exception(ex):
raise Exception("Ooops")
flash('Failed to set state', 'error')
def get_one(self, id):
"""
As a workaround for AIRFLOW-252, this method overrides Flask-Admin's ModelView.get_one().
TODO: this method should be removed once the below bug is fixed on Flask-Admin side.
https://github.com/flask-admin/flask-admin/issues/1226
"""
task_id, dag_id, execution_date = iterdecode(id)
execution_date = pendulum.parse(execution_date)
return self.session.query(self.model).get((task_id, dag_id, execution_date))
class ConnectionModelView(wwwutils.SuperUserMixin, AirflowModelView):
    """Admin CRUD view for Airflow connections (superusers only).

    Connection-type specific settings are exposed as individual form
    fields named ``extra__<conn_type>__<name>`` and folded back into the
    JSON ``extra`` column on save.
    """
    create_template = 'airflow/conn_create.html'
    edit_template = 'airflow/conn_edit.html'
    list_template = 'airflow/conn_list.html'
    form_columns = (
        'conn_id',
        'conn_type',
        'host',
        'schema',
        'login',
        'password',
        'port',
        'extra',
        'extra__jdbc__drv_path',
        'extra__jdbc__drv_clsname',
        'extra__google_cloud_platform__project',
        'extra__google_cloud_platform__key_path',
        'extra__google_cloud_platform__keyfile_dict',
        'extra__google_cloud_platform__scope',
        'extra__google_cloud_platform__num_retries',
        'extra__grpc__auth_type',
        'extra__grpc__credentials_pem_file',
        'extra__grpc__scopes',
    )
    verbose_name = "Connection"
    verbose_name_plural = "Connections"
    column_default_sort = ('conn_id', False)
    column_list = ('conn_id', 'conn_type', 'host', 'port', 'is_encrypted', 'is_extra_encrypted',)
    form_overrides = dict(_password=PasswordField, _extra=TextAreaField)
    form_args = dict(
        conn_id=dict(validators=[validators.DataRequired()])
    )
    form_widget_args = {
        'is_extra_encrypted': {'disabled': True},
        'is_encrypted': {'disabled': True},
    }
    # Used to customized the form, the forms elements get rendered
    # and results are stored in the extra field as json. All of these
    # need to be prefixed with extra__ and then the conn_type ___ as in
    # extra__{conn_type}__name. You can also hide form elements and rename
    # others from the connection_form.js file
    form_extra_fields = {
        'extra__jdbc__drv_path': StringField('Driver Path'),
        'extra__jdbc__drv_clsname': StringField('Driver Class'),
        'extra__google_cloud_platform__project': StringField('Project Id'),
        'extra__google_cloud_platform__key_path': StringField('Keyfile Path'),
        'extra__google_cloud_platform__keyfile_dict': PasswordField('Keyfile JSON'),
        'extra__google_cloud_platform__scope': StringField('Scopes (comma separated)'),
        'extra__google_cloud_platform__num_retries': IntegerField(
            'Number of Retries',
            validators=[
                validators.Optional(strip_whitespace=True),
                validators.NumberRange(min=0),
            ],
        ),
        'extra__grpc__auth_type': StringField('Grpc Auth Type'),
        'extra__grpc__credentials_pem_file': StringField('Credential Keyfile Path'),
        'extra__grpc__scopes': StringField('Scopes (comma separated)'),
    }
    form_choices = {
        'conn_type': Connection._types
    }
    def on_model_change(self, form, model, is_created):
        """Serialize the per-type extra form fields into ``model.extra``.

        Only connection types that use ``extra__*`` form fields are
        serialized; everything else keeps its raw ``extra`` value.
        """
        formdata = form.data
        # Fixed typo: was 'gprc', which never matched the 'grpc' conn_type,
        # so the extra__grpc__* fields were silently dropped on save.
        if formdata['conn_type'] in ['jdbc', 'google_cloud_platform', 'grpc']:
            extra = {
                key: formdata[key]
                for key in self.form_extra_fields.keys() if key in formdata}
            model.extra = json.dumps(extra)
    @classmethod
    def alert_fernet_key(cls):
        """Return True when no fernet key is configured (warn the user)."""
        fk = None
        try:
            fk = conf.get('core', 'fernet_key')
        except Exception:
            pass
        return fk is None
    @classmethod
    def is_secure(cls):
        """
        Used to display a message in the Connection list view making it clear
        that the passwords and `extra` field can't be encrypted.
        """
        is_secure = False
        try:
            import cryptography  # noqa F401
            conf.get('core', 'fernet_key')
            is_secure = True
        except Exception:
            pass
        return is_secure
    def on_form_prefill(self, form, id):
        """Populate the extra__* form fields from the JSON ``extra`` column."""
        try:
            d = json.loads(form.data.get('extra', '{}'))
        except Exception:
            d = {}
        for field in list(self.form_extra_fields.keys()):
            value = d.get(field, '')
            if value:
                field = getattr(form, field)
                field.data = value
class UserModelView(wwwutils.SuperUserMixin, AirflowModelView):
    """Superuser-only admin view over Airflow users."""
    verbose_name_plural = "Users"
    verbose_name = "User"
    column_default_sort = 'username'
class VersionView(wwwutils.SuperUserMixin, AirflowViewMixin, BaseView):
    @expose('/')
    def version(self):
        """Render the 'Version Info' page with package and git versions.

        Either value falls back to None (and logs the error) when it
        cannot be determined.
        """
        # Package version from the installed airflow distribution.
        try:
            airflow_version = airflow.__version__
        except Exception as e:
            logging.error(e)
            airflow_version = None
        # Git hash, written to AIRFLOW_HOME/airflow/git_version at build time.
        git_version = None
        git_version_file = os.path.join(
            settings.AIRFLOW_HOME, 'airflow', 'git_version')
        try:
            with open(git_version_file) as f:
                git_version = f.readline()
        except Exception as e:
            logging.error(e)
        return self.render(
            'airflow/version.html',
            title="Version Info",
            airflow_version=airflow_version,
            git_version=git_version)
class ConfigurationView(wwwutils.SuperUserMixin, AirflowViewMixin, BaseView):
    @expose('/')
    def conf(self):
        """Show airflow.cfg, highlighted or raw (?raw=true).

        When ``webserver.expose_config`` is off, a placeholder message is
        shown instead of the real file contents.
        """
        wants_raw = request.args.get('raw') == "true"
        title = "Airflow Configuration"
        subtitle = conf.AIRFLOW_CONFIG
        if conf.getboolean("webserver", "expose_config"):
            with open(conf.AIRFLOW_CONFIG, 'r') as f:
                config = f.read()
            # Flattened (section, key, value, source) rows for the table view.
            table = [
                (section, key, value, source)
                for section, parameters in conf.as_dict(True, True).items()
                for key, (value, source) in parameters.items()
            ]
        else:
            config = (
                "# Your Airflow administrator chose not to expose the "
                "configuration, most likely for security reasons.")
            table = None
        if wants_raw:
            return Response(
                response=config,
                status=200,
                mimetype="application/text")
        code_html = Markup(highlight(
            config,
            lexers.IniLexer(),  # Lexer call
            HtmlFormatter(noclasses=True))
        )
        return self.render(
            'airflow/config.html',
            pre_subtitle=settings.HEADER + " v" + airflow.__version__,
            code_html=code_html, title=title, subtitle=subtitle,
            table=table)
class DagModelView(wwwutils.SuperUserMixin, ModelView):
    """Superuser admin view over the ``DagModel`` table.

    Hides subdags and DAGs that are neither active nor paused.
    """
    column_list = ('dag_id', 'owners')
    column_editable_list = ('is_paused',)
    form_excluded_columns = ('is_subdag', 'is_active')
    column_searchable_list = ('dag_id',)
    column_filters = (
        'dag_id', 'owners', 'is_paused', 'is_active', 'is_subdag',
        'last_scheduler_run', 'last_expired')
    filter_converter = wwwutils.UtcFilterConverter()
    # Everything except is_paused is scheduler-owned and read-only here.
    form_widget_args = {
        'last_scheduler_run': {'disabled': True},
        'fileloc': {'disabled': True},
        'is_paused': {'disabled': True},
        'last_pickled': {'disabled': True},
        'pickle_id': {'disabled': True},
        'last_loaded': {'disabled': True},
        'last_expired': {'disabled': True},
        'pickle_size': {'disabled': True},
        'scheduler_lock': {'disabled': True},
        'owners': {'disabled': True},
    }
    column_formatters = dict(
        dag_id=dag_link,
    )
    can_delete = False
    can_create = False
    page_size = PAGE_SIZE
    list_template = 'airflow/list_dags.html'
    named_filter_urls = True
    def get_query(self):
        """
        Default filters for model
        """
        return super(DagModelView, self)\
            .get_query()\
            .filter(or_(models.DagModel.is_active, models.DagModel.is_paused))\
            .filter(~models.DagModel.is_subdag)
    def get_count_query(self):
        """
        Default filters for model
        """
        # Apply the same predicate as get_query() so the pagination count
        # matches the listed rows (previously this counted only is_active
        # DAGs and missed paused-but-inactive ones shown by get_query()).
        return super(DagModelView, self)\
            .get_count_query()\
            .filter(or_(models.DagModel.is_active, models.DagModel.is_paused))\
            .filter(~models.DagModel.is_subdag)
| 36.058896 | 103 | 0.574111 |
ace38165ccb99f54bc821bada5d2739c269051df | 90 | py | Python | Python_Exercises/Loop While/solution_exercise.py | pablogalve/Python-Learning | 40e296ba416dffc3f9c794c7770e03cc2c9a53d0 | [
"MIT"
] | 4 | 2020-01-27T13:46:19.000Z | 2022-02-15T15:11:50.000Z | Python_Exercises/Loop While/solution_exercise.py | pablogalve/Python-Learning | 40e296ba416dffc3f9c794c7770e03cc2c9a53d0 | [
"MIT"
] | null | null | null | Python_Exercises/Loop While/solution_exercise.py | pablogalve/Python-Learning | 40e296ba416dffc3f9c794c7770e03cc2c9a53d0 | [
"MIT"
] | null | null | null | num = int(input("Write a number"))
# Print the first ten multiples of the user-supplied number.
for factor in range(1, 11):
    print(factor * num)
ace381aa096e9b29593b973c679f4afefb31bcc7 | 19,752 | py | Python | Learning/~deep_training_amp.py | timucini/MLB-DeepLearning-Project | 2737e9cd32edefec8ec935f304d04206264ce349 | [
"MIT"
] | null | null | null | Learning/~deep_training_amp.py | timucini/MLB-DeepLearning-Project | 2737e9cd32edefec8ec935f304d04206264ce349 | [
"MIT"
] | null | null | null | Learning/~deep_training_amp.py | timucini/MLB-DeepLearning-Project | 2737e9cd32edefec8ec935f304d04206264ce349 | [
"MIT"
] | null | null | null | import pandas as pd
import tensorflow as tf
from pathlib import Path
from datetime import datetime
from tensorflow.keras import Input
from tensorflow.keras import backend as K
from tensorflow.keras.callbacks import EarlyStopping
from tensorflow.keras.models import Sequential, load_model
from tensorflow.keras.layers import Dense, Dropout, BatchNormalization
# --- Environment settings -------------------------------------------------
# Base directory holding the Data/, Logs/ and Models/ sub-folders.
path = Path(__file__).parent.absolute()/'Deep Training'
# Passed to pandas.read_csv as index_col (False = no index column in file).
targets_index = False
predictors_index = False
# Keras metric every model is compiled with and ranked by.
metric = 'binary_accuracy'
# Whether a smaller metric value is better (False: maximise the metric).
minimise = False
# --- Expanded settings ----------------------------------------------------
# File-name prefix selecting which data/log set this run uses.
name_data = 'none_'#''
# Worker id for the parallel predictor search; negative = single-worker run.
worker = -1#int(input('Enter Worker ID: '))
# --- GPU settings ----------------------------------------------------------
# Grow GPU memory on demand instead of reserving all of it upfront.
for gpu in tf.config.experimental.list_physical_devices('GPU'):
    tf.config.experimental.set_memory_growth(gpu, True)
# --- Parameter settings ----------------------------------------------------
# Keys describing a model architecture; a "blueprint" adds the predictor
# columns and a 16-character identifier in front of them.
model_keys = ['optimizer','layers','activations','dropouts']
blueprint_keys = ['predictors','identifier']+model_keys
test_blueprint = dict(zip(blueprint_keys, [['Home: Win ratio'],'04D1234567890','adam',[4,4],['tanh','None'],[0,0]]))
# --- Log settings -----------------------------------------------------------
log_keys = ['timestamp']+blueprint_keys+['dimensions','length','nodes','loss',metric,'time','epochs']
# Ranking used everywhere a "best" row is picked from a log (column order
# and matching ascending flags).
sort_fields = [metric, 'loss', 'epochs', 'nodes', 'time']
sort_conditions = [minimise, True, True, True, True]
predictor_log_path = path/'Logs'/(name_data+'predictor_evaluation_log.csv')
parameter_log_path = path/'Logs'/(name_data+'parameter_evaluation_log.csv')
re_predictor_log_path = path/'Logs'/(name_data+'re_predictor_evaluation_log.csv')
re_parameter_log_path = path/'Logs'/(name_data+'re_parameter_evaluation_log.csv')
# --- Model settings ---------------------------------------------------------
models_path = path/'Models'
# --- Data environment -------------------------------------------------------
# Modification time of the validation targets; used to decide whether a
# saved model is older than the data it was trained on.
data_date = datetime.fromtimestamp((path/'Data'/'Validation'/(name_data+'validation_targets.csv')).stat().st_mtime)
validation_targets = pd.read_csv(path/'Data'/'Validation'/(name_data+'validation_targets.csv'), index_col=targets_index)
validation_predictors = pd.read_csv(path/'Data'/'Validation'/(name_data+'validation_predictors.csv'), index_col=predictors_index)
training_targets = pd.read_csv(path/'Data'/'Training'/(name_data+'training_targets.csv'), index_col=targets_index)
training_predictors = pd.read_csv(path/'Data'/'Training'/(name_data+'training_predictors.csv'), index_col=predictors_index)
# One tenth of the training set per batch.
batch_size = len(training_targets)//10
#functions
def load_log(log_path):
    """Load an evaluation-log CSV and revive stringified list columns.

    Columns whose first value looks like a Python list literal
    (contains ``[`` and ``]``) are converted back into lists of strings;
    the ``layers`` and ``dropouts`` columns are further converted to
    lists of ints.

    :param log_path: path to the CSV log file.
    :return: the parsed ``pandas.DataFrame`` (unchanged when empty).
    """
    log = pd.read_csv(log_path, index_col=False)
    if log.empty:
        return log
    for column in log.loc[:, log.dtypes == object]:
        first = log[column][0]
        if first.find('[') > -1 and first.find(']') > -1:
            # Strip quotes and brackets, then split back into a list.
            # regex=False makes '[' and ']' literal characters instead of
            # regex metacharacters (which would raise re.error in pandas
            # versions where str.replace defaults to regex=True).
            log[column] = (log[column]
                           .str.replace("'", '', regex=False)
                           .str.replace(', ', ',', regex=False)
                           .str.replace('[', '', regex=False)
                           .str.replace(']', '', regex=False)
                           .str.split(','))
            if column == 'layers' or column == 'dropouts':
                # These columns hold integer lists (layer widths / dropout units).
                log[column] = pd.Series(
                    [[int(value) for value in element] for element in log[column].tolist()])
    return log
def get_identifier(predictor_sample):
    """Derive a deterministic 16-character id for a set of predictor names.

    The id starts with the zero-padded predictor count plus ``'D'``,
    followed by a zero-padded checksum over all names.  The checksum is a
    sum over the names, so the id is independent of their order.
    """
    def _char_total(text):
        length = len(text)
        return sum((position + 1) * 31 * ord(symbol) * 113 * length * 271
                   for position, symbol in enumerate(text))
    checksum = sum(_char_total(name) for name in predictor_sample)
    prefix = str(len(predictor_sample)).zfill(2) + 'D'
    return (prefix + str(checksum).zfill(16 - len(prefix)))[:16]
def training(blueprint, maximial_epochs, start_learning_rate=0.1, stop_learning_rate=0.1, patience=False, save=True):
    """Build, train and evaluate one model described by *blueprint*.

    Training runs in several rounds with shrinking epoch budgets and a
    geometrically decaying learning rate; after each round the new weights
    are kept only if the validation metric did not get worse.  Returns a
    log-row dict shaped like ``log_keys``.

    :param blueprint: dict with ``predictors``, ``identifier``,
        ``optimizer``, ``layers``, ``activations`` and ``dropouts``.
    :param maximial_epochs: epoch budget of the first (longest) round.
    :param start_learning_rate: learning rate of the first round.
    :param stop_learning_rate: learning rate of the last round.
    :param patience: early-stopping patience; falsy trains full rounds.
    :param save: persist the model when it beats the stored one with the
        same identifier (see ``only_best``).
    """
    #enviroment
    # One entry per training round; step size shrinks with the total
    # learning-rate decay ratio.
    epoch_range = range(maximial_epochs, 0, -round(maximial_epochs/(start_learning_rate/stop_learning_rate)**0.7))
    # Per-round multiplicative decay so the last round ends at
    # stop_learning_rate.
    learning_rate_decrease = (start_learning_rate/stop_learning_rate)**(1/max((len(epoch_range)-1,1)))
    learning_rate = start_learning_rate
    trained = []
    times = []
    #functions
    def get_model():
        """Assemble and compile the Sequential model from the blueprint."""
        #enviroment
        name = blueprint['identifier']
        #procedure
        # Layer names only carry the last 4 characters of the identifier.
        name = name[-4:]
        model = Sequential(name=name)
        model.add(Input(shape=(len(blueprint['predictors']),), name=('I'+name)))
        for index, nodes in enumerate(blueprint['layers']):
            activation = blueprint['activations'][index]
            if activation=='None':
                activation = None
            model.add(Dense(nodes, activation, kernel_initializer='ones', name=('D'+str(index)+'_'+name)))
            # Dropouts are stored as unit counts; convert to a rate here.
            if blueprint['dropouts'][index]>0:
                model.add(Dropout(blueprint['dropouts'][index]/nodes, name=('O'+str(index)+'_'+name)))
        model.add(BatchNormalization(name=('B'+name)))
        # Classification head for boolean targets, plain regression otherwise.
        if (validation_targets.dtypes==bool).any():
            activation = 'sigmoid'
            loss = 'binary_crossentropy'
        else:
            activation = None
            loss = 'MSE'
        model.add(Dense(len(validation_targets.columns), activation=activation, kernel_initializer='ones', name=('T'+name)))
        model.compile(optimizer=blueprint['optimizer'], loss=loss, metrics=[metric])
        return model
    def evaluating(model, patience, epochs):
        """Fit with early stopping, then evaluate on the validation split.

        Returns (metrics dict incl. wall time and trained epochs, model).
        """
        monitor = EarlyStopping(monitor=('val_'+metric),restore_best_weights=True, patience=patience)
        start = datetime.now()
        # Positional args: x, y, batch_size, epochs, verbose, callbacks.
        history = model.fit(training_predictors[blueprint['predictors']], training_targets, batch_size, epochs, 0, [monitor], validation_data=(validation_predictors[blueprint['predictors']], validation_targets))
        time = (datetime.now()-start).total_seconds()
        metrics = model.evaluate(validation_predictors[blueprint['predictors']], validation_targets, return_dict=True, verbose=0)
        metrics['time'] = time
        metrics['epochs'] = len(history.history[metric])
        return metrics, model
    def to_row(metrics):
        """Merge blueprint, derived sizes and *metrics* into one log row."""
        #enviroment
        row = {'timestamp':datetime.now()}
        #functions
        def deep_copy(row):
            """Copy the row, duplicating list values so callers can't
            mutate the blueprint's lists through the returned dict."""
            #enviroment
            copy = {}
            #procedure
            for key in row:
                if isinstance(row[key], list):
                    items = []
                    for item in row[key]:
                        items.append(item)
                    copy[key] = items
                else:
                    copy[key] = row[key]
            return copy
        #procedure
        row.update(blueprint)
        row['dimensions'] = len(blueprint['predictors'])
        row['length'] = len(blueprint['layers'])
        row['nodes'] = sum(blueprint['layers'])
        row.update(metrics)
        return deep_copy(row)
    def only_best(model, model_metric):
        """Save *model* unless a better one with the same id is on disk."""
        on_system = None
        for model_file in models_path.glob('*.h5'):
            if model_file.stem==blueprint['identifier']:
                on_system = load_model(model_file)
                break
        if on_system==None:
            model.save(path/'Models'/(blueprint['identifier']+'.h5'))
        else:
            on_system = on_system.evaluate(validation_predictors[blueprint['predictors']], validation_targets, return_dict=True, verbose=0)[metric]
            if model_metric>on_system:
                model.save(path/'Models'/(blueprint['identifier']+'.h5'))
    #procedure
    model = get_model()
    for epochs in epoch_range:
        model.optimizer.lr = learning_rate
        # Snapshot the weights so a round that hurts the metric can be
        # rolled back.
        backup = model.get_weights()
        image = model.evaluate(validation_predictors[blueprint['predictors']], validation_targets, return_dict=True, verbose=0)
        if patience:
            metrics, model = evaluating(model, patience, epochs)
        else:
            metrics, model = evaluating(model, epochs, epochs)
        if image[metric]<=metrics[metric]:
            trained.append(metrics['epochs'])
            times.append(metrics['time'])
        else:
            model.set_weights(backup)
        learning_rate = learning_rate*learning_rate_decrease
    metrics = model.evaluate(validation_predictors[blueprint['predictors']], validation_targets, return_dict=True, verbose=0)
    if save:
        only_best(model, metrics[metric])
    # Report only the accepted rounds' totals.
    metrics['time'] = sum(times)
    metrics['epochs'] = sum(trained)
    return to_row(metrics)
def predictor_evaluation(start_bias=0.5, start_nodes=10, minimal_node_increase=3, epsilon=8, start_learning_rate=0.1, stop_learning_rate=0.01, parameter_patience=20, node_multiplier=10):
    """Greedy beam search over predictor-column combinations.

    Starting from the empty set, each step tries adding every remaining
    predictor column, keeps the *epsilon* best results whose metric beats
    the current bias, and recurses on each of them.  Results are appended
    to ``predictor_log_path``; previously evaluated combinations are
    reused from that log.

    :param start_bias: minimum metric a first-level combination must reach.
    :param start_nodes: node budget for one-predictor models.
    :param minimal_node_increase: extra nodes granted per added dimension.
    :param epsilon: beam width (and number of parallel worker slots).
    :param parameter_patience: early-stopping patience passed through.
    :param node_multiplier: epochs budget = nodes * node_multiplier.
    """
    #functions
    def force_try(predictor_combination):
        """Recurse on one surviving combination with a grown node budget."""
        #enviroment
        dimension_lenght = len(predictor_combination['predictors'])
        nodes = max((dimension_lenght*minimal_node_increase)+start_nodes, (predictor_combination['nodes']/dimension_lenght)*(dimension_lenght+1))
        #procedure
        trace_predictor(predictor_combination['predictors'], predictor_combination[metric], round(nodes))
    def try_predictors(used_predictors, bias, nodes):
        """Evaluate adding each unused predictor; return the top results."""
        #enviroment
        tries = []
        maximial_epochs = nodes*node_multiplier
        #functions
        def append_tries(result, tries):
            """Keep *result* only if it beats *bias*; re-rank and trim."""
            #procedure
            if result[metric]>=bias:
                tries.append(result)
                return pd.DataFrame(tries).sort_values(by=sort_fields, ascending=sort_conditions).to_dict('records')[:epsilon]
            return tries
        def to_log(row):
            """Append *row* to the predictor log as one CSV line."""
            #functions
            def to_string():
                s = ''
                for value in row.values():
                    # Lists are quoted so load_log() can revive them.
                    if isinstance(value, list):
                        v = '"'+str(value)+'",'
                    else:
                        v = str(value)+','
                    s = s + v
                return s[0:-1]
            #procedure
            with open(predictor_log_path,'a') as log:
                log.write('\n')
                log.write(to_string())
        def check(identifier):
            """Return a reusable log row for *identifier*, or {} if unseen."""
            #functions
            def get_latest(backlog):
                """Reuse the logged best, retraining if the saved model
                predates the current data files."""
                #enviroment
                best = backlog.sort_values(by=sort_fields, ascending=sort_conditions)
                blueprint = best[blueprint_keys].to_dict('records')[0]
                #procedure
                for model_path in models_path.glob('*.h5'):
                    if model_path.stem==identifier:
                        model_date = datetime.fromtimestamp(model_path.stat().st_mtime)
                        if model_date<=data_date:
                            row = training(blueprint, maximial_epochs, stop_learning_rate, stop_learning_rate, parameter_patience)
                            to_log(row)
                            return row
                        else:
                            return best.to_dict('records')[0]
                # No saved model found: retrain from the logged blueprint.
                row = training(blueprint, maximial_epochs, stop_learning_rate, stop_learning_rate, parameter_patience)
                to_log(row)
                return row
            #procedure
            backlog = load_log(predictor_log_path)
            backlog = backlog[backlog['identifier']==identifier]
            if backlog.empty:
                return {}
            else:
                return get_latest(backlog)
        #procedure
        for predictor in validation_predictors.drop(columns=used_predictors).columns:
            predictor_sample = used_predictors+[predictor]
            identifier = get_identifier(predictor_sample)
            from_log = check(identifier)
            if not from_log:
                row = parameter_evaluation(predictor_sample, identifier, parameter_patience, maximial_epochs, nodes, start_learning_rate, stop_learning_rate)
                to_log(row)
                tries = append_tries(row, tries)
            else:
                tries = append_tries(from_log, tries)
        return tries
    def trace_predictor(used_predictors, bias, nodes):
        """One search step: report progress, evaluate, recurse on survivors."""
        #procedure
        print('Worker:    ', worker)
        print('Predictors:', used_predictors)
        print('Bias:      ', bias)
        print('Nodes:     ', nodes,'\n')
        tries = try_predictors(used_predictors, bias, nodes)
        print(pd.DataFrame(tries), '\n')
        if not tries:
            print('Dead end @:', used_predictors, '\n')
            return
        for predictor_combination in tries:
            force_try(predictor_combination)
    #procedure
    # A valid worker id picks one branch of the first level; otherwise the
    # whole beam is searched by this single process.
    if (worker>=0) and (worker<epsilon):
        predictor_combination = try_predictors([], start_bias, start_nodes)[worker]
        force_try(predictor_combination)
    else:
        trace_predictor([], start_bias, start_nodes)
def parameter_evaluation(predictors, identifier, parameter_patience, maximial_epochs, maximial_nodes, start_learning_rate, stop_learning_rate, output='flush', update=False):
    """Greedy architecture search for a fixed predictor set.

    Searches, in order: layer sizes, per-layer activations, per-layer
    dropouts, then the optimizer — always mutating the best blueprint
    found so far and skipping configurations already tried.

    :param predictors: predictor column names the model is built on.
    :param identifier: 16-char id of the predictor set (see get_identifier).
    :param maximial_epochs: epoch budget for a maximal-size model.
    :param maximial_nodes: total node budget across all layers.
    :param output: 'flush' appends to the parameter log and returns the
        final retrained row; 'records' returns all buffered rows.
    :param update: when True, print the rolling buffer for monitoring.
    """
    #enviroment
    buffer_log = []
    # Seed blueprint: a single 0-node layer with no activation/dropout.
    initial_blueprint_values = [predictors, identifier, 'adam', [0], ['None'], [0]]
    #functions
    def check(blueprint):
        """True when this architecture was already tried in this run."""
        #enviroment
        buffer_frame = pd.DataFrame(buffer_log).astype(str)
        model_values = pd.Series(blueprint).astype(str).loc[model_keys].tolist()
        #procedure
        if buffer_frame.empty:
            return False
        if update:
            print(buffer_frame[-16:])
        return buffer_frame[model_keys].isin(model_values).all(axis=1).any()
    def get_best(keys=blueprint_keys):
        """Return (a deep copy of) the best buffered row, limited to *keys*."""
        #enviroment
        buffer_frame = pd.DataFrame(buffer_log)
        #functions
        def deep_copy(row):
            """Copy the row, duplicating list values so later mutation of
            the returned blueprint can't corrupt the buffered rows."""
            #enviroment
            copy = {}
            #procedure
            for key in row:
                if isinstance(row[key], list):
                    items = []
                    for item in row[key]:
                        items.append(item)
                    copy[key] = items
                else:
                    copy[key] = row[key]
            return copy
        #procedure
        best = buffer_frame.sort_values(by=sort_fields, ascending=sort_conditions)[keys].to_dict('records')[0]
        return deep_copy(best)
    def get_size():
        """Grow the network layer by layer, widening each within budget."""
        #enviroment
        blueprint = dict(zip(blueprint_keys, initial_blueprint_values))
        #functions
        def get_duration(blueprint):
            """Scale the epoch budget linearly with the current node count."""
            #envoriment
            nodes = sum(blueprint['layers'])
            #procedure
            return parameter_patience+round((maximial_epochs-parameter_patience)*((nodes-1)/(maximial_nodes-1)))
        #procedure
        for length in range(maximial_nodes):
            for width in range(1, maximial_nodes-sum(blueprint['layers'])+1):
                blueprint['layers'][length] = width
                if not check(blueprint):
                    buffer_log.append(training(blueprint, maximial_epochs=get_duration(blueprint), patience=parameter_patience))
            blueprint = get_best()
            # Stop when the best model is shallower than the current depth,
            # ends in a single node, or the node budget is used up.
            if len(blueprint['layers'])<(length+1):
                return
            if blueprint['layers'][-1]==1:
                return
            if sum(blueprint['layers'])>=maximial_nodes:
                return
            blueprint['layers'] = blueprint['layers']+[0]
            blueprint['activations'] = blueprint['activations']+['None']
            blueprint['dropouts'] = blueprint['dropouts']+[0]
    def get_activations():
        """Try every candidate activation on each layer of the best model."""
        #enviroment
        possibilities = ['None','relu','selu','elu','tanh','softsign','softplus']
        #procedure
        for lenght in range(get_best(['length'])['length']):
            blueprint = get_best()
            for activation in possibilities:
                blueprint['activations'][lenght] = activation
                if not check(blueprint):
                    buffer_log.append(training(blueprint, get_best(['epochs'])['epochs'], patience=parameter_patience))
    def get_dropouts():
        """Try every dropout unit count (0..width-1) on each layer."""
        #procedure
        for length, width in enumerate(get_best(['layers'])['layers']):
            blueprint = get_best()
            for dropout in range(width):
                blueprint['dropouts'][length] = dropout
                if not check(blueprint):
                    buffer_log.append(training(blueprint, get_best(['epochs'])['epochs'], patience=parameter_patience))
    def get_optimizer():
        """Try every candidate optimizer on the best architecture."""
        #enviroment
        possibilities = ['adam','sgd','rmsprop','adadelta','adagrad','adamax','nadam']
        blueprint = get_best()
        #procedure
        for optimizer in possibilities:
            blueprint['optimizer'] = optimizer
            if not check(blueprint):
                buffer_log.append(training(blueprint, get_best(['epochs'])['epochs'], patience=parameter_patience))
    def flush_log():
        """Append all buffered rows to the parameter log CSV."""
        #enviroment
        buffer_frame = pd.DataFrame(buffer_log)
        #procedure
        buffer_frame.to_csv(parameter_log_path, header=False, index=False, mode='a')
    #procedure
    get_size()
    get_activations()
    get_dropouts()
    get_optimizer()
    if output=='flush':
        flush_log()
        return training(get_best(), maximial_epochs, start_learning_rate, stop_learning_rate)
    elif output=='records':
        buffer_log.append(training(get_best(), maximial_epochs, start_learning_rate, stop_learning_rate))
        return buffer_log
def re_predictor_evaluation(dimension_node_multiplier=10, epsilon=8, start_learning_rate=0.1, stop_learning_rate=0.01, parameter_patience=50, node_multiplier=5):
    """Re-run the architecture search on the best predictor sets found.

    Takes the top *epsilon* predictor sets from both evaluation logs, adds
    the union of all of them as one extra candidate, and runs a fresh
    parameter_evaluation for every set not yet present in the re-run log.

    :param dimension_node_multiplier: node budget per predictor dimension.
    :param node_multiplier: epoch budget = nodes * node_multiplier.
    """
    #enviroment
    predictor_log = load_log(predictor_log_path)
    parameter_log = load_log(parameter_log_path)
    #functions
    def check(identifier):
        """True when this predictor set was already re-evaluated."""
        re_log = load_log(re_predictor_log_path)['identifier'].tolist()
        return identifier in re_log
    #procedure
    # Top epsilon unique predictor sets across both logs.
    best_predictors = pd.concat((predictor_log, parameter_log)).sort_values(by=sort_fields, ascending=sort_conditions).drop_duplicates(subset=['identifier'], keep='first')[:epsilon]
    print(best_predictors)
    best_predictors = best_predictors['predictors'].tolist()
    # Also evaluate the union of every top set as one combined candidate.
    all_bests = []
    for best_set in best_predictors:
        all_bests = all_bests + best_set
    all_bests = list(set(all_bests))
    best_predictors.append(all_bests)
    for predictors in best_predictors:
        print(predictors)
        identifier = get_identifier(predictors)
        if check(identifier):
            continue
        nodes = len(predictors)*dimension_node_multiplier
        pd.DataFrame(parameter_evaluation(predictors, identifier, parameter_patience, nodes*node_multiplier, nodes, start_learning_rate, stop_learning_rate, output='records', update=True)).to_csv(re_predictor_log_path, header=False, index=False, mode='a')
def re_parameter_evaluation(parameter_patience=100, max_epochs=500, maximal_nodes=150, start_learning_rate=0.1, stop_learning_rate=0.01):
    """Final fine-tuning pass on the overall best re-evaluated predictor set.

    Runs one large parameter_evaluation on the winner of the re-run log
    and overwrites ``re_parameter_log_path`` with its full record list.
    """
    #enviroment
    re_predictor_log = load_log(re_predictor_log_path)
    #procedure
    # .iat[0,1] reads the second column of the top-ranked row — assumed to
    # be the 'predictors' list column; NOTE(review): fragile, depends on
    # the log's column order staying fixed.
    best = re_predictor_log.sort_values(by=sort_fields, ascending=sort_conditions).drop_duplicates(subset=['identifier'], keep='first').iat[0,1]
    pd.DataFrame(parameter_evaluation(best, get_identifier(best), parameter_patience, max_epochs, maximal_nodes, start_learning_rate, stop_learning_rate, output='records', update=True)).to_csv(re_parameter_log_path, index=False, mode='w')
#procedure
# Stage 1 (initial predictor search) is commented out — presumably already
# completed for this data set; re-enable to redo the search from scratch.
#predictor_evaluation(start_bias=0.5, start_nodes=10, minimal_node_increase=3, epsilon=8, start_learning_rate=0.1, stop_learning_rate=0.01, parameter_patience=20, node_multiplier=10)
# Stage 2: re-evaluate the best predictor sets with a bigger budget.
re_predictor_evaluation(dimension_node_multiplier=10, epsilon=8, start_learning_rate=0.1, stop_learning_rate=0.01, parameter_patience=50, node_multiplier=5)
# Stage 3: fine-tune the architecture of the overall best predictor set.
re_parameter_evaluation(parameter_patience=100, max_epochs=500, maximal_nodes=150, start_learning_rate=0.1, stop_learning_rate=0.01)
#print(pd.DataFrame([training(test_blueprint, 20, start_learning_rate=0.1, stop_learning_rate=0.1, patience=5, save=False)])) | 48.77037 | 255 | 0.635682 |
ace381ce1f733dc9f35f23bbf0f889d757a1a31d | 325 | py | Python | ContractorApp/migrations/0021_remove_job_slug.py | Paul-W-0/FindContractors | d4986a15073856b9cf236f9dac0fdc60bfe96aba | [
"BSD-2-Clause"
] | null | null | null | ContractorApp/migrations/0021_remove_job_slug.py | Paul-W-0/FindContractors | d4986a15073856b9cf236f9dac0fdc60bfe96aba | [
"BSD-2-Clause"
] | null | null | null | ContractorApp/migrations/0021_remove_job_slug.py | Paul-W-0/FindContractors | d4986a15073856b9cf236f9dac0fdc60bfe96aba | [
"BSD-2-Clause"
] | null | null | null | # Generated by Django 3.2.9 on 2021-12-24 17:12
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated migration: drop the ``slug`` field from ``Job``."""
    dependencies = [
        ('ContractorApp', '0020_alter_job_slug'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='job',
            name='slug',
        ),
    ]
| 18.055556 | 49 | 0.587692 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.