hexsha
stringlengths 40
40
| size
int64 5
2.06M
| ext
stringclasses 11
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 3
251
| max_stars_repo_name
stringlengths 4
130
| max_stars_repo_head_hexsha
stringlengths 40
78
| max_stars_repo_licenses
listlengths 1
10
| max_stars_count
int64 1
191k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 3
251
| max_issues_repo_name
stringlengths 4
130
| max_issues_repo_head_hexsha
stringlengths 40
78
| max_issues_repo_licenses
listlengths 1
10
| max_issues_count
int64 1
116k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 3
251
| max_forks_repo_name
stringlengths 4
130
| max_forks_repo_head_hexsha
stringlengths 40
78
| max_forks_repo_licenses
listlengths 1
10
| max_forks_count
int64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 1
1.05M
| avg_line_length
float64 1
1.02M
| max_line_length
int64 3
1.04M
| alphanum_fraction
float64 0
1
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
b9d2c04ffcb32d5c9ad6c0f626a368e22db97763
| 4,504
|
py
|
Python
|
tests/data/s3_scrape_config.py
|
kids-first/kf-api-study-creator
|
93a79b108b6474f9b4135ace06c89ddcf63dd257
|
[
"Apache-2.0"
] | 3
|
2019-05-04T02:07:28.000Z
|
2020-10-16T17:47:44.000Z
|
tests/data/s3_scrape_config.py
|
kids-first/kf-api-study-creator
|
93a79b108b6474f9b4135ace06c89ddcf63dd257
|
[
"Apache-2.0"
] | 604
|
2019-02-21T18:14:51.000Z
|
2022-02-10T08:13:54.000Z
|
tests/data/s3_scrape_config.py
|
kids-first/kf-api-study-creator
|
93a79b108b6474f9b4135ace06c89ddcf63dd257
|
[
"Apache-2.0"
] | null | null | null |
"""
This is an extract config intended for S3 object manifests produced by TBD.
To use it, you must import it in another extract config and override at least
the `source_data_url`. You may also append additional operations to the
`operations` list as well.
For example you could have the following in your extract config module:
from kf_ingest_packages.common.extract_configs.s3_object_info import *
source_data_url = 'file://../data/kf-seq-data-bcm-chung-s3-objects.tsv'
operations.append(
value_map(
in_col='Key',
out_col=CONCEPT.BIOSPECIMEN.ID,
m=lambda x: x
)
)
"""
import os
from kf_lib_data_ingest.common import constants
from kf_lib_data_ingest.common.constants import GENOMIC_FILE
from kf_lib_data_ingest.common.concept_schema import CONCEPT
from kf_lib_data_ingest.etl.extract.operations import (
keep_map,
row_map,
value_map,
constant_map,
)
def file_ext(x):
    """Return the longest recognized genomic file extension of *x*.

    Candidate extensions come from FILE_EXT_FORMAT_MAP and are matched
    with str.endswith. Returns None when no known extension matches.
    """
    candidates = [ext for ext in FILE_EXT_FORMAT_MAP if x.endswith(ext)]
    # Prefer the most specific match, e.g. "vcf.gz.tbi" over "tbi".
    return max(candidates, key=len) if candidates else None
# Map of recognized file extensions -> genomic file format constants.
# Keys are matched with str.endswith (see file_ext), so compound
# extensions ("vcf.gz.tbi") coexist with their shorter suffixes ("tbi").
FILE_EXT_FORMAT_MAP = {
    "fq": GENOMIC_FILE.FORMAT.FASTQ,
    "fastq": GENOMIC_FILE.FORMAT.FASTQ,
    "fq.gz": GENOMIC_FILE.FORMAT.FASTQ,
    "fastq.gz": GENOMIC_FILE.FORMAT.FASTQ,
    "bam": GENOMIC_FILE.FORMAT.BAM,
    "hgv.bam": GENOMIC_FILE.FORMAT.BAM,
    "cram": GENOMIC_FILE.FORMAT.CRAM,
    "bam.bai": GENOMIC_FILE.FORMAT.BAI,
    "bai": GENOMIC_FILE.FORMAT.BAI,
    "cram.crai": GENOMIC_FILE.FORMAT.CRAI,
    "crai": GENOMIC_FILE.FORMAT.CRAI,
    "g.vcf.gz": GENOMIC_FILE.FORMAT.GVCF,
    "g.vcf.gz.tbi": GENOMIC_FILE.FORMAT.TBI,
    "vcf.gz": GENOMIC_FILE.FORMAT.VCF,
    "vcf": GENOMIC_FILE.FORMAT.VCF,
    "vcf.gz.tbi": GENOMIC_FILE.FORMAT.TBI,
    "peddy.html": "html",
}
# Map of file format (or, for index files, the raw extension) -> data type.
# Tabix (`tbi`) entries are keyed by extension because gVCF and VCF indexes
# share the same TBI format constant but have different data types.
DATA_TYPES = {
    GENOMIC_FILE.FORMAT.FASTQ: GENOMIC_FILE.DATA_TYPE.UNALIGNED_READS,
    GENOMIC_FILE.FORMAT.BAM: GENOMIC_FILE.DATA_TYPE.ALIGNED_READS,
    GENOMIC_FILE.FORMAT.CRAM: GENOMIC_FILE.DATA_TYPE.ALIGNED_READS,
    GENOMIC_FILE.FORMAT.BAI: "Aligned Reads Index",
    GENOMIC_FILE.FORMAT.CRAI: "Aligned Reads Index",
    GENOMIC_FILE.FORMAT.VCF: "Variant Calls",
    GENOMIC_FILE.FORMAT.GVCF: "gVCF",
    "g.vcf.gz.tbi": "gVCF Index",
    "vcf.gz.tbi": "Variant Calls Index",
    "html": "Other",
}
def filter_df_by_file_ext(df):
    """Drop manifest rows whose S3 key has no recognized file extension.

    Adds the file-format column (resolved from the "Key" column) as a side
    effect, then keeps only rows where a format could be determined.
    """
    fmt_col = CONCEPT.GENOMIC_FILE.FILE_FORMAT
    df[fmt_col] = df["Key"].apply(file_format)
    return df[df[fmt_col].notnull()]
# Default source data URL; downstream extract configs are expected to
# override this (see module docstring).
source_data_url = (
    'https://localhost:5002/download/study/SD_ME0WME0W/'
    'file/SF_Y1JMXTTS/version/FV_4RYEMD71'
)
# Hook invoked by the ingest framework right after reading the source data.
do_after_read = filter_df_by_file_ext
def s3_url(row):
    """Build the s3:// URL for a manifest row's Bucket/Key pair."""
    bucket = row["Bucket"]
    key = row["Key"]
    return 's3://{}/{}'.format(bucket, key)
def file_format(x):
    """Resolve a genomic file's format constant from its extension.

    Returns None when the extension is not in FILE_EXT_FORMAT_MAP.
    """
    ext = file_ext(x)
    return FILE_EXT_FORMAT_MAP.get(ext)
def data_type(x):
    """
    Get genomic file data type by looking up file format in DATA_TYPES.
    However, if the file's extension has `tbi` in it, then use the file
    extension itself to do the data type lookup, since gVCF and VCF index
    files share the same TBI format but have distinct data types.

    :param x: file name or S3 key
    :return: data type string, or None when the extension is unrecognized
    """
    ext = file_ext(x)
    # file_ext() returns None for unrecognized extensions; the previous
    # `"tbi" in ext` would raise TypeError in that case.
    if ext is not None and "tbi" in ext:
        return DATA_TYPES.get(ext)
    return DATA_TYPES.get(file_format(x))
# Extract operations: map S3 inventory manifest columns to ingest concepts.
operations = [
    # Unique ID and URL list are both derived from the object's S3 URL
    row_map(out_col=CONCEPT.GENOMIC_FILE.ID, m=lambda row: s3_url(row)),
    row_map(
        out_col=CONCEPT.GENOMIC_FILE.URL_LIST, m=lambda row: [s3_url(row)]
    ),
    # File name is the last path component of the S3 key
    value_map(
        in_col="Key",
        out_col=CONCEPT.GENOMIC_FILE.FILE_NAME,
        m=lambda x: os.path.split(x)[-1],
    ),
    keep_map(in_col="Size", out_col=CONCEPT.GENOMIC_FILE.SIZE),
    # S3 ETag doubles as a content hash; strip the surrounding quotes
    value_map(
        in_col="ETag",
        out_col=CONCEPT.GENOMIC_FILE.HASH_DICT,
        m=lambda x: {constants.FILE.HASH.S3_ETAG.lower(): x.replace('"', "")},
    ),
    constant_map(
        out_col=CONCEPT.GENOMIC_FILE.AVAILABILITY,
        m=constants.GENOMIC_FILE.AVAILABILITY.IMMEDIATE,
    ),
    # FILE_FORMAT column was added by filter_df_by_file_ext (do_after_read)
    keep_map(
        in_col=CONCEPT.GENOMIC_FILE.FILE_FORMAT,
        out_col=CONCEPT.GENOMIC_FILE.FILE_FORMAT,
    ),
    value_map(
        in_col="Key",
        out_col=CONCEPT.GENOMIC_FILE.DATA_TYPE,
        m=lambda x: data_type(x),
    ),
]
| 27.463415
| 78
| 0.67984
|
b9d3222fd93bbc8ba199ba7a401394dc7531a2ff
| 665
|
py
|
Python
|
hard-gists/5c973ec1b5ab2e387646/snippet.py
|
jjhenkel/dockerizeme
|
eaa4fe5366f6b9adf74399eab01c712cacaeb279
|
[
"Apache-2.0"
] | 21
|
2019-07-08T08:26:45.000Z
|
2022-01-24T23:53:25.000Z
|
hard-gists/5c973ec1b5ab2e387646/snippet.py
|
jjhenkel/dockerizeme
|
eaa4fe5366f6b9adf74399eab01c712cacaeb279
|
[
"Apache-2.0"
] | 5
|
2019-06-15T14:47:47.000Z
|
2022-02-26T05:02:56.000Z
|
hard-gists/5c973ec1b5ab2e387646/snippet.py
|
jjhenkel/dockerizeme
|
eaa4fe5366f6b9adf74399eab01c712cacaeb279
|
[
"Apache-2.0"
] | 17
|
2019-05-16T03:50:34.000Z
|
2021-01-14T14:35:12.000Z
|
import bpy
from bpy.app.handlers import persistent
# Blender add-on metadata consumed by Blender's add-on manager.
bl_info = {
    "name": "Playback Once",
    "author": "Adhi Hargo",
    "version": (1, 0, 0),
    "blender": (2, 67, 3),
    "location": "",
    "description": "Playback once.",
    "warning": "",
    "wiki_url": "",
    "tracker_url": "",
    "category": "Animation"}
# NOTE(review): register() is not defined in this view -- presumably defined
# elsewhere in the full add-on module; confirm before running standalone.
if __name__ == "__main__":
    register()
| 22.931034
| 63
| 0.645113
|
b9d47acd47b8bd0babe955a7bbbde7c4d9080b36
| 688
|
py
|
Python
|
Py3Challenges/saves/challenges/c6_min.py
|
AlbertUnruh/Py3Challenges
|
52f03f157860f6464f0c1710bf051a8099c29ea2
|
[
"MIT"
] | 2
|
2022-02-13T04:57:10.000Z
|
2022-02-13T10:40:14.000Z
|
Py3Challenges/saves/challenges/c6_min.py
|
AlbertUnruh/Py3Challenges
|
52f03f157860f6464f0c1710bf051a8099c29ea2
|
[
"MIT"
] | null | null | null |
Py3Challenges/saves/challenges/c6_min.py
|
AlbertUnruh/Py3Challenges
|
52f03f157860f6464f0c1710bf051a8099c29ea2
|
[
"MIT"
] | null | null | null |
"""
To master this you should consider using the builtin-``min``-function.
"""
from ...challenge import Challenge
from random import randint
# Build a random list of 2..10 values, each in [1, 100].
x = []
for _ in range(randint(2, 10)):
    x.append(randint(1, 100))
intro = f"You have to print the lowest value of {', '.join(str(_) for _ in x[:-1])} and {x[-1]}. (values: x)"
# NOTE(review): validate_function is not defined in this view -- presumably
# defined elsewhere in the full module; confirm before use.
challenge = Challenge(
    intro=intro,
    validate_function=validate_function,
    help=__doc__,
    values={"x": x},
    capture_stdout=True,
)
| 22.193548
| 109
| 0.632267
|
b9d600352f466e38045c7614f4b0151d5eb8f878
| 4,625
|
py
|
Python
|
services/web/server/tests/unit/with_dbs/01/test_director_v2.py
|
mrnicegyu11/osparc-simcore
|
b6fa6c245dbfbc18cc74a387111a52de9b05d1f4
|
[
"MIT"
] | null | null | null |
services/web/server/tests/unit/with_dbs/01/test_director_v2.py
|
mrnicegyu11/osparc-simcore
|
b6fa6c245dbfbc18cc74a387111a52de9b05d1f4
|
[
"MIT"
] | 1
|
2021-11-29T13:38:09.000Z
|
2021-11-29T13:38:09.000Z
|
services/web/server/tests/unit/with_dbs/01/test_director_v2.py
|
mrnicegyu11/osparc-simcore
|
b6fa6c245dbfbc18cc74a387111a52de9b05d1f4
|
[
"MIT"
] | null | null | null |
# pylint:disable=unused-variable
# pylint:disable=unused-argument
# pylint:disable=redefined-outer-name
from typing import AsyncIterator
import pytest
from aioresponses import aioresponses
from faker import Faker
from hypothesis import HealthCheck, given, settings
from hypothesis import strategies as st
from models_library.clusters import ClusterID
from models_library.projects import ProjectID
from models_library.projects_pipeline import ComputationTask
from models_library.projects_state import RunningState
from models_library.users import UserID
from simcore_service_webserver import director_v2_api
from simcore_service_webserver.director_v2_models import (
ClusterCreate,
ClusterPatch,
ClusterPing,
)
| 30.833333
| 87
| 0.780973
|
b9d60ecc3068b2d42bc6110555d94274b9cac29c
| 930
|
py
|
Python
|
tools/py/heatmap.py
|
sriramreddyM/pLitter
|
e506777af0b8bbae411b474f5eacee91e8efea59
|
[
"MIT"
] | 5
|
2021-11-09T10:25:35.000Z
|
2022-03-30T03:57:46.000Z
|
tools/py/heatmap.py
|
sriramreddyM/pLitter
|
e506777af0b8bbae411b474f5eacee91e8efea59
|
[
"MIT"
] | null | null | null |
tools/py/heatmap.py
|
sriramreddyM/pLitter
|
e506777af0b8bbae411b474f5eacee91e8efea59
|
[
"MIT"
] | 1
|
2021-09-09T08:04:46.000Z
|
2021-09-09T08:04:46.000Z
|
'''
converts video to frames and saves images by different interval, or overlap, etc
'''
import folium
from folium import plugins
from folium.plugins import HeatMap
import csv
# class plitterMap():
# def __int__(self, file_path):
# self.data = file_path
# df = []
# with open(self.data) as f:
# reader = csv.reader(f)
# for row in reader:
# df_row = []
# df_row.append(row[0])
# df_row.append(row[0])
# df_row.append(row[0])
# df.append(row)
# self.tooltip = df[0][0]
# def loadMap():
# self.map = folium.Map(location=[float(row[1]), float(row[2])], zoom_start = 18)
# def loadGpsLoc():
# folium.Marker([float(row[1]), float(row[2])], popup="<i>"+row[0]+"</i>", tooltip=tooltip, icon=icon_circle).add_to(rangsit_map)
# rangsit_map
| 30
| 133
| 0.53871
|
b9d6dd8bd3445675e1356c10ac0bb61cd00aba81
| 3,027
|
py
|
Python
|
generator.py
|
Geoalert/emergency-mapping
|
96668e4e5aa2b520e5727536f7a8f4c262ee3da6
|
[
"MIT"
] | 3
|
2018-04-04T17:58:53.000Z
|
2021-10-14T08:50:13.000Z
|
generator.py
|
aeronetlab/map_augury
|
96668e4e5aa2b520e5727536f7a8f4c262ee3da6
|
[
"MIT"
] | null | null | null |
generator.py
|
aeronetlab/map_augury
|
96668e4e5aa2b520e5727536f7a8f4c262ee3da6
|
[
"MIT"
] | 1
|
2020-03-24T12:07:07.000Z
|
2020-03-24T12:07:07.000Z
|
import numpy as np
| 40.905405
| 116
| 0.570202
|
b9d71e12c5fdd4a3220a64251c8e0e2c9a302fe4
| 13,351
|
py
|
Python
|
awx/api/metadata.py
|
Avinesh/awx
|
6310a2edd890d6062a9f6bcdeb2b46c4b876c2bf
|
[
"Apache-2.0"
] | 1
|
2021-09-07T14:53:57.000Z
|
2021-09-07T14:53:57.000Z
|
awx/api/metadata.py
|
Avinesh/awx
|
6310a2edd890d6062a9f6bcdeb2b46c4b876c2bf
|
[
"Apache-2.0"
] | 2
|
2020-02-04T05:01:38.000Z
|
2020-02-18T06:44:52.000Z
|
awx/api/metadata.py
|
Avinesh/awx
|
6310a2edd890d6062a9f6bcdeb2b46c4b876c2bf
|
[
"Apache-2.0"
] | 1
|
2020-01-28T05:34:09.000Z
|
2020-01-28T05:34:09.000Z
|
# Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.
from collections import OrderedDict
# Django
from django.core.exceptions import PermissionDenied
from django.db.models.fields import PositiveIntegerField, BooleanField
from django.db.models.fields.related import ForeignKey
from django.http import Http404
from django.utils.encoding import force_text, smart_text
from django.utils.translation import ugettext_lazy as _
# Django REST Framework
from rest_framework import exceptions
from rest_framework import metadata
from rest_framework import serializers
from rest_framework.relations import RelatedField, ManyRelatedField
from rest_framework.fields import JSONField as DRFJSONField
from rest_framework.request import clone_request
# AWX
from awx.main.fields import JSONField, ImplicitRoleField
from awx.main.models import InventorySource, NotificationTemplate
from awx.main.scheduler.kubernetes import PodManager
| 43.630719
| 131
| 0.601004
|
b9d7834f2dd39b0c5b6da30b8ebfe19e7026adeb
| 1,985
|
py
|
Python
|
plugins/python/tasks.py
|
BBVA/deeptracy
|
40f4b6bba2bdd345e95e42d474c05fa90f15c3e9
|
[
"Apache-1.1"
] | 85
|
2017-09-22T10:48:51.000Z
|
2021-06-11T18:33:28.000Z
|
plugins/python/tasks.py
|
BBVA/deeptracy
|
40f4b6bba2bdd345e95e42d474c05fa90f15c3e9
|
[
"Apache-1.1"
] | 51
|
2017-10-17T10:16:16.000Z
|
2020-08-29T23:10:21.000Z
|
plugins/python/tasks.py
|
BBVA/deeptracy
|
40f4b6bba2bdd345e95e42d474c05fa90f15c3e9
|
[
"Apache-1.1"
] | 14
|
2017-11-20T10:20:16.000Z
|
2021-02-02T21:35:07.000Z
|
import json
from washer.worker.actions import AppendStdout, AppendStderr
from washer.worker.actions import CreateNamedLog, AppendToLog
from washer.worker.actions import SetProperty
from washer.worker.commands import washertask
| 28.357143
| 75
| 0.614106
|
b9d84b2b4c7d4cbbbf84bcb2ee37459c480a1a5e
| 715
|
py
|
Python
|
senity/utils/getSiteProfile.py
|
pkokkinos/senity
|
c6e41678620bef558cc3600929a8320ff2a285cf
|
[
"MIT"
] | 1
|
2017-10-26T12:30:04.000Z
|
2017-10-26T12:30:04.000Z
|
senity/utils/getSiteProfile.py
|
pkokkinos/senity
|
c6e41678620bef558cc3600929a8320ff2a285cf
|
[
"MIT"
] | null | null | null |
senity/utils/getSiteProfile.py
|
pkokkinos/senity
|
c6e41678620bef558cc3600929a8320ff2a285cf
|
[
"MIT"
] | null | null | null |
import json
import os
# get site profile
# get all site profile
#sites_folder = "sites"
#print getAllSiteProfiles(sites_folder)
| 23.833333
| 77
| 0.664336
|
b9d87f8b647f237794f75914da625ea130e200c3
| 5,959
|
py
|
Python
|
ppo_new/baseline.py
|
QingXinHu123/Lane_change_RL
|
06c70e6f58d3478669b56800028e320ca03f5222
|
[
"MIT"
] | 1
|
2022-03-17T03:40:57.000Z
|
2022-03-17T03:40:57.000Z
|
ppo_new/baseline.py
|
QingXinHu123/Lane_change_RL
|
06c70e6f58d3478669b56800028e320ca03f5222
|
[
"MIT"
] | null | null | null |
ppo_new/baseline.py
|
QingXinHu123/Lane_change_RL
|
06c70e6f58d3478669b56800028e320ca03f5222
|
[
"MIT"
] | null | null | null |
import os, sys
from env.LaneChangeEnv import LaneChangeEnv
import random
import numpy as np
# Make SUMO's bundled python tools importable; requires SUMO_HOME to be set.
if 'SUMO_HOME' in os.environ:
    tools = os.path.join(os.environ['SUMO_HOME'], 'tools')
    sys.path.append(tools)
    print('success')
else:
    sys.exit("please declare environment variable 'SUMO_HOME'")
import traci
# Number of evaluation episodes per TTC threshold.
NUM_EPS = 100
# Run SUMO headless (no GUI) during evaluation.
IS_GUI = False
# f = open('../data/baseline_evaluation/testseed2.csv', 'w+')
# safety_gap = 2
constraints_list = [3.0] # [1.0, 2.0, 3.0, 4.0, 5.0, 10.0, 20.0]
# Time-to-collision thresholds swept by the rule-based baseline policy.
ttcs = [0.1, 0.3, 0.5, 1, 2, 3]
# ttcs = [2]
gap = 0
# Per-threshold evaluation metrics, collected across all episodes.
reward_list = []
danger_rate_list = []
crash_rate_list = []
level_1_danger_list = []
level_2_danger_list = []
coll_rate_list = []
succ_rate_list = []
succ_len_list = []
# NOTE(review): evaluate_baseline is not defined in this view -- presumably
# defined elsewhere in the full module; confirm before running standalone.
for ttc in ttcs:
    ret_eval, danger_rate, crash_rate, level_1_danger_rate, level_2_danger_rate, coll_rate, success_rate, success_len = evaluate_baseline(NUM_EPS, ttc, gap, IS_GUI)
    reward_list.append(ret_eval)
    danger_rate_list.append(danger_rate)
    crash_rate_list.append(crash_rate)
    level_1_danger_list.append(level_1_danger_rate)
    level_2_danger_list.append(level_2_danger_rate)
    coll_rate_list.append(coll_rate)
    succ_rate_list.append(success_rate)
    succ_len_list.append(success_len)
print('reward: ', reward_list)
print('danger rate: ', danger_rate_list)
print('crash rate: ', crash_rate_list)
print('level-1-danger_rate: ', level_1_danger_list)
print('level-2-danger_rate: ', level_2_danger_list)
print('collison rate: ', coll_rate_list)
print('success rate: ', succ_rate_list)
print('sucess len: ', succ_len_list)
# Sample results from a previous run, kept for reference:
# reward: [-89.12552753359037, -69.84537459892903, -73.81562785829651, -148.23580687485645, -227.71842861064192, -229.9101089174337]
# danger rate: [2.13, 0.88, 0.77, 1.88, 3.82, 3.82]
# crash rate: [0.58, 0.33, 0.5, 1.24, 2.09, 2.09]
# level-1-danger_rate: [0.23, 0.09, 0.05, 0.14, 0.25, 0.25]
# level-2-danger_rate: [0.05, 0.03, 0.05, 0.12, 0.2, 0.2]
# collison rate: [0.0, 0.0, 0.02, 0.09, 0.14, 0.14]
# success rate: [0.99, 0.99, 0.9, 0.6, 0.08, 0.05]
# sucess len: [55.656565656565654, 62.43434343434343, 67.5, 90.1, 66.625, 73.4]
| 36.558282
| 164
| 0.659171
|
b9d8a3bc2867b57ba7db6ffd06a68bdf7372909c
| 1,261
|
py
|
Python
|
clean_data.py
|
toogy/pendigits-hmm
|
03382e1457941714439d40b67e53eaf117fe4d08
|
[
"MIT"
] | null | null | null |
clean_data.py
|
toogy/pendigits-hmm
|
03382e1457941714439d40b67e53eaf117fe4d08
|
[
"MIT"
] | null | null | null |
clean_data.py
|
toogy/pendigits-hmm
|
03382e1457941714439d40b67e53eaf117fe4d08
|
[
"MIT"
] | null | null | null |
import numpy as np
import pickle
from collections import defaultdict
from parsing import parser
from analysis import training
# NOTE(review): main() is not defined in this view -- presumably defined
# elsewhere in the full script; confirm before running.
if __name__ == '__main__':
    main()
| 24.25
| 72
| 0.704996
|
b9d992fc9c803eca7ba614c187b28cbfcef4b1f8
| 5,988
|
py
|
Python
|
scripts/commit_validation/commit_validation/commit_validation.py
|
cypherdotXd/o3de
|
bb90c4ddfe2d495e9c00ebf1e2650c6d603a5676
|
[
"Apache-2.0",
"MIT"
] | 8
|
2021-08-31T02:14:19.000Z
|
2021-12-28T19:20:59.000Z
|
scripts/commit_validation/commit_validation/commit_validation.py
|
cypherdotXd/o3de
|
bb90c4ddfe2d495e9c00ebf1e2650c6d603a5676
|
[
"Apache-2.0",
"MIT"
] | 8
|
2021-07-12T13:55:00.000Z
|
2021-10-04T14:53:21.000Z
|
scripts/commit_validation/commit_validation/commit_validation.py
|
cypherdotXd/o3de
|
bb90c4ddfe2d495e9c00ebf1e2650c6d603a5676
|
[
"Apache-2.0",
"MIT"
] | 1
|
2021-09-16T05:06:18.000Z
|
2021-09-16T05:06:18.000Z
|
#
# Copyright (c) Contributors to the Open 3D Engine Project.
# For complete copyright and license terms please see the LICENSE at the root of this distribution.
#
# SPDX-License-Identifier: Apache-2.0 OR MIT
#
#
import abc
import importlib
import os
import pkgutil
import re
import time
from typing import Dict, List, Tuple
VERBOSE = False
def validate_commit(commit: Commit, out_errors: List[str] = None, ignore_validators: List[str] = None) -> bool:
    """Validates a commit against all validators

    :param commit: The commit to validate
    :param out_errors: if not None, will populate with the list of errors given by the validators
    :param ignore_validators: Optional list of CommitValidator classes to ignore, by class name
    :return: True if there are no validation errors, and False otherwise
    """
    failed_count = 0
    passed_count = 0
    start_time = time.time()
    # Find all the validators in the validators package (recursively)
    validator_classes = []
    validators_dir = os.path.join(os.path.dirname(__file__), 'validators')
    for _, module_name, is_package in pkgutil.iter_modules([validators_dir]):
        if not is_package:
            module = importlib.import_module('commit_validation.validators.' + module_name)
            validator = module.get_validator()
            if ignore_validators and validator.__name__ in ignore_validators:
                print(f"Disabled validation for '{validator.__name__}'")
            else:
                validator_classes.append(validator)
    error_summary = {}
    # Process validators
    for validator_class in validator_classes:
        validator = validator_class()
        validator_name = validator.__class__.__name__
        error_list = []
        passed = validator.run(commit, errors = error_list)
        if passed:
            passed_count += 1
            print(f'{validator_name} PASSED')
        else:
            failed_count += 1
            print(f'{validator_name} FAILED')
        error_summary[validator_name] = error_list
        # BUG FIX: out_errors was documented as an output parameter but was
        # never populated; honor the documented contract.
        if out_errors is not None:
            out_errors.extend(error_list)
    end_time = time.time()
    if failed_count:
        print("VALIDATION FAILURE SUMMARY")
        for val_name in error_summary.keys():
            errors = error_summary[val_name]
            if errors:
                for error_message in errors:
                    first_line = True
                    for line in error_message.splitlines():
                        if first_line:
                            first_line = False
                            # Machine-parsable failure line for CI tooling
                            print(f'VALIDATOR_FAILED: {val_name} {line}')
                        else:
                            print(f'    {line}')  # extra detail lines do not need machine parsing
    stats_strs = []
    if failed_count > 0:
        stats_strs.append(f'{failed_count} failed')
    if passed_count > 0:
        stats_strs.append(f'{passed_count} passed')
    stats_str = ', '.join(stats_strs) + f' in {end_time - start_time:.2f}s'
    print()
    print(stats_str)
    return failed_count == 0
SOURCE_FILE_EXTENSIONS: Tuple[str, ...] = (
    '.c', '.cc', '.cpp', '.cxx', '.h', '.hpp', '.hxx', '.inl', '.m', '.mm', '.cs', '.java'
)
"""File extensions for compiled source code"""
SCRIPT_FILE_EXTENSIONS: Tuple[str, ...] = (
    '.py', '.lua', '.bat', '.cmd', '.sh', '.js'
)
"""File extensions for interpreted code"""
BUILD_FILE_EXTENSIONS: Tuple[str, ...] = (
    '.cmake',
)
"""File extensions for build files"""
SOURCE_AND_SCRIPT_FILE_EXTENSIONS: Tuple[str, ...] = SOURCE_FILE_EXTENSIONS + SCRIPT_FILE_EXTENSIONS + BUILD_FILE_EXTENSIONS
"""File extensions for both compiled and interpreted code"""
BUILD_FILE_PATTERNS: Tuple[re.Pattern, ...] = (
    re.compile(r'.*CMakeLists\.txt'),
    re.compile(r'.*Jenkinsfile')
)
"""File patterns for build files"""
# Regex patterns (rather than extensions) that also count as source/script
# files; currently just the build-file patterns.
SOURCE_AND_SCRIPT_FILE_PATTERNS: Tuple[re.Pattern, ...] = BUILD_FILE_PATTERNS
# Glob-style paths excluded from validation (third-party code, generated
# output, caches, and vendored tools).
EXCLUDED_VALIDATION_PATTERNS = [
    '*/.git/*',
    '*/3rdParty/*',
    '*/__pycache__/*',
    '*/External/*',
    'build',
    'Cache',
    '*/Code/Framework/AzCore/azgnmx/azgnmx/*',
    'Code/Tools/CryFXC',
    'Code/Tools/HLSLCrossCompiler',
    'Code/Tools/HLSLCrossCompilerMETAL',
    'Docs',
    'python/runtime',
    'restricted/*/Tools/*RemoteControl',
    'Tools/3dsmax',
    '*/user/Cache/*',
    '*/user/log/*',
]
| 31.68254
| 124
| 0.631096
|
b9db09c1d1c26d802117168878ef76954cf77560
| 3,360
|
py
|
Python
|
matrixprofile/algorithms/snippets.py
|
KSaiRahul21/matrixprofile
|
d8250e30d90ed0453bb7c35bb34ab0c04ae7b334
|
[
"Apache-2.0"
] | null | null | null |
matrixprofile/algorithms/snippets.py
|
KSaiRahul21/matrixprofile
|
d8250e30d90ed0453bb7c35bb34ab0c04ae7b334
|
[
"Apache-2.0"
] | null | null | null |
matrixprofile/algorithms/snippets.py
|
KSaiRahul21/matrixprofile
|
d8250e30d90ed0453bb7c35bb34ab0c04ae7b334
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
range = getattr(__builtins__, 'xrange', range)
# end of py2 compatability boilerplate
import numpy as np
from matrixprofile import core
from matrixprofile.algorithms.mpdist import mpdist_vector
def snippets(ts, snippet_size, num_snippets=2, window_size=None):
    """
    Summarize a time series by identifying N representative subsequences
    (snippets). Use this to find the typical patterns in your time series.

    Parameters
    ----------
    ts : array_like
        The time series.
    snippet_size : int
        The size of snippet desired.
    num_snippets : int, Default 2
        The number of snippets you would like to find.
    window_size : int, Default (snippet_size / 2)
        The window size used for the MPdist profile computation.

    Returns
    -------
    list : snippets
        A list of snippets as dictionary objects with keys
        'fraction' (fraction of the series the snippet represents),
        'index' (start index of the snippet) and
        'snippet' (the snippet values).

    Raises
    ------
    ValueError
        If snippet_size is not an int >= 4, if the series is shorter than
        2 * snippet_size, or if window_size >= snippet_size.
    """
    ts = core.to_np_array(ts).astype('d')
    n = len(ts)
    if not isinstance(snippet_size, int) or snippet_size < 4:
        raise ValueError('snippet_size must be an integer >= 4')
    if n < (2 * snippet_size):
        raise ValueError('Time series is too short relative to snippet length')
    if not window_size:
        window_size = int(np.floor(snippet_size / 2))
    if window_size >= snippet_size:
        raise ValueError('window_size must be smaller than snippet_size')
    # pad end of time series with zeros so it divides evenly into snippets
    num_zeros = int(snippet_size * np.ceil(n / snippet_size) - n)
    ts = np.append(ts, np.zeros(num_zeros))
    # compute an MPdist profile for every candidate snippet position
    indices = np.arange(0, len(ts) - snippet_size, snippet_size)
    distances = []
    for j, i in enumerate(indices):
        distance = mpdist_vector(ts, ts[i:(i + snippet_size - 1)], int(window_size))
        distances.append(distance)
    distances = np.array(distances)
    # find N snippets: greedily pick the candidate minimizing total coverage
    # distance, updating the running elementwise minimum (minis) each round.
    # NOTE(review): the loop variable `n` shadows the earlier series length.
    snippets = []
    minis = np.inf
    total_min = None
    for n in range(num_snippets):
        minims = np.inf
        for i in range(len(indices)):
            s = np.sum(np.minimum(distances[i, :], minis))
            if minims > s:
                minims = s
                index = i
        minis = np.minimum(distances[index, :], minis)
        actual_index = indices[index]
        snippet = ts[actual_index:actual_index + snippet_size]
        snippet_distance = distances[index]
        snippets.append({
            'index': actual_index,
            'snippet': snippet,
            'distance': snippet_distance
        })
        if isinstance(total_min, type(None)):
            total_min = snippet_distance
        else:
            total_min = np.minimum(total_min, snippet_distance)
    # compute the fraction of the series each snippet covers
    for snippet in snippets:
        mask = (snippet['distance'] <= total_min)
        snippet['fraction'] = mask.sum() / (len(ts) - snippet_size)
        # NOTE(review): subtracting the boolean mask from total_min mirrors
        # the reference implementation; confirm intent before changing.
        total_min = total_min - mask
        del snippet['distance']
    return snippets
| 29.734513
| 84
| 0.633036
|
b9db24edad8766b6e734d6a8a9c26aff6bb04235
| 2,360
|
py
|
Python
|
jina/logging/formatter.py
|
yk/jina
|
ab66e233e74b956390f266881ff5dc4e0110d3ff
|
[
"Apache-2.0"
] | 1
|
2020-12-23T12:34:00.000Z
|
2020-12-23T12:34:00.000Z
|
jina/logging/formatter.py
|
yk/jina
|
ab66e233e74b956390f266881ff5dc4e0110d3ff
|
[
"Apache-2.0"
] | null | null | null |
jina/logging/formatter.py
|
yk/jina
|
ab66e233e74b956390f266881ff5dc4e0110d3ff
|
[
"Apache-2.0"
] | null | null | null |
import json
import re
from copy import copy
from logging import Formatter
from .profile import used_memory
from ..helper import colored
| 34.705882
| 114
| 0.601695
|
b9db51239c1e9a509c29f6e80aebfb0363b62210
| 194
|
py
|
Python
|
atcoder/abc191/b.py
|
sugitanishi/competitive-programming
|
51af65fdce514ece12f8afbf142b809d63eefb5d
|
[
"MIT"
] | null | null | null |
atcoder/abc191/b.py
|
sugitanishi/competitive-programming
|
51af65fdce514ece12f8afbf142b809d63eefb5d
|
[
"MIT"
] | null | null | null |
atcoder/abc191/b.py
|
sugitanishi/competitive-programming
|
51af65fdce514ece12f8afbf142b809d63eefb5d
|
[
"MIT"
] | null | null | null |
import sys
sys.setrecursionlimit(10000000)
input=lambda : sys.stdin.readline().rstrip()
n,x=map(int,input().split())
a=list(map(int,input().split()))
aa=list(filter(lambda b:b!=x,a))
print(*aa)
| 24.25
| 44
| 0.71134
|
b9dc15c3ca6876833207138ba4d65fbd0be25acd
| 61,341
|
py
|
Python
|
tests/integration/test_streaming_e2e.py
|
cfogg/python-client
|
40e6891c8240e6b2acd5df538e622e9f15de43d6
|
[
"Apache-2.0"
] | null | null | null |
tests/integration/test_streaming_e2e.py
|
cfogg/python-client
|
40e6891c8240e6b2acd5df538e622e9f15de43d6
|
[
"Apache-2.0"
] | null | null | null |
tests/integration/test_streaming_e2e.py
|
cfogg/python-client
|
40e6891c8240e6b2acd5df538e622e9f15de43d6
|
[
"Apache-2.0"
] | null | null | null |
"""Streaming integration tests."""
# pylint:disable=no-self-use,invalid-name,too-many-arguments,too-few-public-methods,line-too-long
# pylint:disable=too-many-statements,too-many-locals,too-many-lines
import threading
import time
import json
from queue import Queue
from splitio.client.factory import get_factory
from tests.helpers.mockserver import SSEMockServer, SplitMockServer
try: # try to import python3 names. fallback to python2
from urllib.parse import parse_qs
except ImportError:
from urlparse import parse_qs
def make_split_change_event(change_number):
    """Build an SSE message announcing a SPLIT_UPDATE at *change_number*."""
    payload = json.dumps({
        'type': 'SPLIT_UPDATE',
        'changeNumber': change_number
    })
    envelope = {
        'id': 'TVUsxaabHs:0:0',
        'clientId': 'pri:MzM0ODI1MTkxMw==',
        'timestamp': change_number - 1,
        'encoding': 'json',
        'channel': 'MTYyMTcxOTQ4Mw==_MjA4MzczNDU1Mg==_splits',
        'data': payload,
    }
    return {'event': 'message', 'data': json.dumps(envelope)}
def make_split_kill_event(name, default_treatment, change_number):
    """Build an SSE message killing split *name* at *change_number*."""
    payload = json.dumps({
        'type': 'SPLIT_KILL',
        'splitName': name,
        'defaultTreatment': default_treatment,
        'changeNumber': change_number
    })
    envelope = {
        'id': 'TVUsxaabHs:0:0',
        'clientId': 'pri:MzM0ODI1MTkxMw==',
        'timestamp': change_number - 1,
        'encoding': 'json',
        'channel': 'MTYyMTcxOTQ4Mw==_MjA4MzczNDU1Mg==_splits',
        'data': payload,
    }
    return {'event': 'message', 'data': json.dumps(envelope)}
def make_initial_event():
    """Build the initial (id-only) event an SSE connection emits."""
    return dict(id='TVUsxaabHs:0:0')
def make_occupancy(channel, publishers):
    """Build an occupancy-metrics SSE event for *channel*."""
    metrics = json.dumps({'metrics': {'publishers': publishers}})
    envelope = {
        'id': 'aP6EuhrcUm:0:0',
        'timestamp': 1604325712734,
        'encoding': 'json',
        'channel': '[?occupancy=metrics.publishers]%s' % channel,
        'data': metrics,
        'name': '[meta]occupancy',
    }
    return {'event': 'message', 'data': json.dumps(envelope)}
def make_segment_change_event(name, change_number):
    """Build an SSE message announcing a SEGMENT_UPDATE for *name*."""
    payload = json.dumps({
        'type': 'SEGMENT_UPDATE',
        'segmentName': name,
        'changeNumber': change_number
    })
    envelope = {
        'id': 'TVUsxaabHs:0:0',
        'clientId': 'pri:MzM0ODI1MTkxMw==',
        'timestamp': change_number - 1,
        'encoding': 'json',
        'channel': 'MTYyMTcxOTQ4Mw==_MjA4MzczNDU1Mg==_segments',
        'data': payload,
    }
    return {'event': 'message', 'data': json.dumps(envelope)}
def make_control_event(control_type, timestamp):
    """Build a CONTROL SSE message on the primary control channel."""
    payload = json.dumps({
        'type': 'CONTROL',
        'controlType': control_type,
    })
    envelope = {
        'id': 'TVUsxaabHs:0:0',
        'clientId': 'pri:MzM0ODI1MTkxMw==',
        'timestamp': timestamp,
        'encoding': 'json',
        'channel': '[?occupancy=metrics.publishers]control_pri',
        'data': payload,
    }
    return {'event': 'message', 'data': json.dumps(envelope)}
def make_ably_error_event(code, status):
    """Build an Ably error SSE event with error *code* and HTTP *status*."""
    detail = {
        'message': 'Invalid accessToken in request: sarasa',
        'code': code,
        'statusCode': status,
        'href': 'https://help.ably.io/error/%d' % code,
    }
    return {'event': 'error', 'data': json.dumps(detail)}
def make_simple_split(name, cn, active, killed, default_treatment, tt, on):
    """Build a minimal split definition with one ALL_KEYS condition.

    The single condition routes 100% of traffic to 'on' when *on* is
    truthy, otherwise to 'off'.
    """
    matcher = {
        'matcherType': 'ALL_KEYS',
        'negate': False,
        'userDefinedSegmentMatcherData': None,
        'whitelistMatcherData': None,
    }
    partitions = [
        {'treatment': 'on' if on else 'off', 'size': 100},
        {'treatment': 'off' if on else 'on', 'size': 0},
    ]
    return {
        'trafficTypeName': tt,
        'name': name,
        'seed': 1699838640,
        'status': 'ACTIVE' if active else 'ARCHIVED',
        'changeNumber': cn,
        'killed': killed,
        'defaultTreatment': default_treatment,
        'conditions': [{
            'matcherGroup': {'combiner': 'AND', 'matchers': [matcher]},
            'partitions': partitions,
        }],
    }
def make_split_with_segment(name, cn, active, killed, default_treatment,
                            tt, on, segment):
    """Build a split definition gated on membership in *segment*.

    Members of the segment receive the 'on' treatment when *on* is
    truthy, otherwise 'off'.
    """
    matcher = {
        'matcherType': 'IN_SEGMENT',
        'negate': False,
        'userDefinedSegmentMatcherData': {'segmentName': segment},
        'whitelistMatcherData': None,
    }
    condition = {
        'matcherGroup': {'combiner': 'AND', 'matchers': [matcher]},
        'partitions': [{
            'treatment': 'on' if on else 'off',
            'size': 100
        }],
    }
    return {
        'trafficTypeName': tt,
        'name': name,
        'seed': cn,
        'status': 'ACTIVE' if active else 'ARCHIVED',
        'changeNumber': cn,
        'killed': killed,
        'defaultTreatment': default_treatment,
        'configurations': {
            'on': '{\'size\':15,\'test\':20}'
        },
        'conditions': [condition],
    }
| 43.137131
| 115
| 0.600887
|
b9dc3713922fc2f091f8ac06b4fabec4e905eb4d
| 1,647
|
py
|
Python
|
venues/abstract_venue.py
|
weezel/BandEventNotifier
|
55824ba26aba9882f46d1770ec5df592a5dc32bc
|
[
"0BSD"
] | null | null | null |
venues/abstract_venue.py
|
weezel/BandEventNotifier
|
55824ba26aba9882f46d1770ec5df592a5dc32bc
|
[
"0BSD"
] | 2
|
2020-02-10T19:37:47.000Z
|
2020-02-10T19:44:54.000Z
|
venues/abstract_venue.py
|
weezel/BandEventNotifier
|
55824ba26aba9882f46d1770ec5df592a5dc32bc
|
[
"0BSD"
] | null | null | null |
import re
from abc import ABC, abstractmethod
from typing import Any, Dict, Generator
# class AbstractVenue(metaclass=ABC):
| 27.45
| 66
| 0.57377
|
b9dcf24da986778ebcd29602d923908626cfea3c
| 4,263
|
py
|
Python
|
mtl/util/pipeline.py
|
vandurme/TFMTL
|
5958187900bdf67089a237c523b6caa899f63ac1
|
[
"Apache-2.0"
] | 10
|
2019-05-18T22:23:44.000Z
|
2022-01-25T15:24:45.000Z
|
mtl/util/pipeline.py
|
vandurme/TFMTL
|
5958187900bdf67089a237c523b6caa899f63ac1
|
[
"Apache-2.0"
] | 1
|
2020-01-07T15:24:16.000Z
|
2020-01-15T00:39:01.000Z
|
mtl/util/pipeline.py
|
vandurme/TFMTL
|
5958187900bdf67089a237c523b6caa899f63ac1
|
[
"Apache-2.0"
] | 1
|
2021-12-02T02:24:06.000Z
|
2021-12-02T02:24:06.000Z
|
# Copyright 2018 Johns Hopkins University. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from collections import namedtuple
import tensorflow as tf
from tensorflow.python.framework import sparse_tensor as sparse_tensor_lib
from tensorflow.python.ops import parsing_ops
# namedtuple for bucket_info object (used in Pipeline)
# func: a mapping from examples to tf.int64 keys
# pads: a set of tf shapes that correspond to padded examples
bucket_info = namedtuple("bucket_info", "func pads")
def int64_feature(value):
""" Takes a single int (e.g. 3) and converts it to a tf Feature """
return tf.train.Feature(int64_list=tf.train.Int64List(value=[value]))
def int64_list_feature(sequence):
""" Sequence of ints (e.g [1,2,3]) to TF feature """
return tf.train.Feature(int64_list=tf.train.Int64List(value=sequence))
| 34.658537
| 80
| 0.649073
|
b9dd82e962e13070a8526b2d4d0da1d0be6265ee
| 7,417
|
py
|
Python
|
src/py65/devices/mpu65c02.py
|
dabeaz/py65
|
62d790445018f0616508022912b67d8d64935a29
|
[
"BSD-3-Clause"
] | 5
|
2015-03-19T22:22:45.000Z
|
2020-05-15T18:26:59.000Z
|
src/py65/devices/mpu65c02.py
|
BigEd/py65
|
57d5e7191362006c1d6fa20662da3e4854f1b7c2
|
[
"BSD-3-Clause"
] | null | null | null |
src/py65/devices/mpu65c02.py
|
BigEd/py65
|
57d5e7191362006c1d6fa20662da3e4854f1b7c2
|
[
"BSD-3-Clause"
] | 3
|
2015-04-27T02:42:29.000Z
|
2021-07-16T20:50:23.000Z
|
from py65.devices import mpu6502
from py65.utils.devices import make_instruction_decorator
| 27.369004
| 71
| 0.58676
|
b9ddc98cf55e2bef4fcf498ec4787ca57bad46d0
| 5,623
|
py
|
Python
|
tests/test__io.py
|
soerendip/ms-mint
|
bf5f5d87d07a0d2108c6cd0d92c278f2ea762e58
|
[
"MIT"
] | 1
|
2021-09-03T04:02:25.000Z
|
2021-09-03T04:02:25.000Z
|
tests/test__io.py
|
soerendip/ms-mint
|
bf5f5d87d07a0d2108c6cd0d92c278f2ea762e58
|
[
"MIT"
] | 3
|
2020-09-29T21:43:39.000Z
|
2021-07-21T22:18:27.000Z
|
tests/test__io.py
|
soerendip/ms-mint
|
bf5f5d87d07a0d2108c6cd0d92c278f2ea762e58
|
[
"MIT"
] | 4
|
2019-11-14T13:25:24.000Z
|
2021-04-30T22:08:53.000Z
|
import pandas as pd
import shutil
import os
import io
from ms_mint.Mint import Mint
from pathlib import Path as P
from ms_mint.io import (
ms_file_to_df,
mzml_to_pandas_df_pyteomics,
convert_ms_file_to_feather,
convert_ms_file_to_parquet,
MZMLB_AVAILABLE,
)
from paths import (
TEST_MZML,
TEST_MZXML,
TEST_PARQUET,
TEST_MZMLB_POS,
TEST_MZML_POS,
TEST_MZML_NEG,
)
| 27.563725
| 81
| 0.634181
|
b9de795b7b1298f8cad5f30e914735224920a0f9
| 1,158
|
py
|
Python
|
core/views.py
|
moiyad/image
|
d4515ef3057794f38268a6887bfff157115f26f7
|
[
"MIT"
] | null | null | null |
core/views.py
|
moiyad/image
|
d4515ef3057794f38268a6887bfff157115f26f7
|
[
"MIT"
] | null | null | null |
core/views.py
|
moiyad/image
|
d4515ef3057794f38268a6887bfff157115f26f7
|
[
"MIT"
] | null | null | null |
from django.core.files.storage import FileSystemStorage
from django.shortcuts import render, redirect
from core.forms import DocumentForm
from core.models import Document
from media import image_cv2
| 30.473684
| 88
| 0.668394
|
b9df48f54330cde291fba9c3ce4e17b22e7c1da1
| 1,156
|
py
|
Python
|
python/verifair/benchmarks/fairsquare/M_BN_F_SVM_A_Q.py
|
obastani/verifair
|
1d5efea041330fa9fe8d59d976bdd3ef97aff417
|
[
"ECL-2.0",
"Apache-2.0"
] | 5
|
2019-11-05T20:40:40.000Z
|
2020-09-16T03:13:54.000Z
|
python/verifair/benchmarks/fairsquare/M_BN_F_SVM_A_Q.py
|
obastani/verifair
|
1d5efea041330fa9fe8d59d976bdd3ef97aff417
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
python/verifair/benchmarks/fairsquare/M_BN_F_SVM_A_Q.py
|
obastani/verifair
|
1d5efea041330fa9fe8d59d976bdd3ef97aff417
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
from .helper import *
| 33.028571
| 85
| 0.569204
|
b9dfea4e7beba7ec415b85a76c49ed3af214dec4
| 25,442
|
py
|
Python
|
ml4chem/atomistic/models/neuralnetwork.py
|
muammar/mlchem
|
365487c23ea3386657e178e56ab31adfe8d5d073
|
[
"BSD-3-Clause-LBNL"
] | 77
|
2019-08-05T17:30:22.000Z
|
2022-03-28T14:31:35.000Z
|
ml4chem/atomistic/models/neuralnetwork.py
|
muammar/ml4chem
|
365487c23ea3386657e178e56ab31adfe8d5d073
|
[
"BSD-3-Clause-LBNL"
] | 6
|
2019-07-31T18:59:38.000Z
|
2020-10-18T18:15:07.000Z
|
ml4chem/atomistic/models/neuralnetwork.py
|
muammar/mlchem
|
365487c23ea3386657e178e56ab31adfe8d5d073
|
[
"BSD-3-Clause-LBNL"
] | 15
|
2020-02-28T10:11:21.000Z
|
2021-12-01T13:45:33.000Z
|
import dask
import datetime
import logging
import time
import torch
import numpy as np
import pandas as pd
from collections import OrderedDict
from ml4chem.metrics import compute_rmse
from ml4chem.atomistic.models.base import DeepLearningModel, DeepLearningTrainer
from ml4chem.atomistic.models.loss import AtomicMSELoss
from ml4chem.optim.handler import get_optimizer, get_lr_scheduler, get_lr
from ml4chem.utils import convert_elapsed_time, get_chunks, get_number_of_parameters
from pprint import pformat
# Setting precision and starting logger object
torch.set_printoptions(precision=10)
logger = logging.getLogger()
| 33.742706
| 88
| 0.526924
|
b9e018d6290ebe7b0654b7e76a8df225914e3778
| 7,104
|
py
|
Python
|
hatsploit/core/db/db.py
|
EntySec/HatSploit
|
8e445804c252cc24e87888be2c2efc02750ce5ee
|
[
"MIT"
] | 139
|
2021-02-17T15:52:30.000Z
|
2022-03-30T14:50:42.000Z
|
hatsploit/core/db/db.py
|
YurinDoctrine/HatSploit
|
b1550323e08336ec057cbafb77003c22a3bbee91
|
[
"MIT"
] | 27
|
2021-03-24T17:14:30.000Z
|
2022-03-02T18:50:43.000Z
|
hatsploit/core/db/db.py
|
YurinDoctrine/HatSploit
|
b1550323e08336ec057cbafb77003c22a3bbee91
|
[
"MIT"
] | 85
|
2021-02-17T15:39:03.000Z
|
2022-03-07T09:08:58.000Z
|
#!/usr/bin/env python3
#
# MIT License
#
# Copyright (c) 2020-2022 EntySec
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
import json
import os
from hatsploit.core.cli.badges import Badges
from hatsploit.lib.config import Config
from hatsploit.lib.storage import LocalStorage
| 38.193548
| 86
| 0.639077
|
b9e0543df8f2ae150950f2a9787edb6296aac618
| 2,482
|
py
|
Python
|
bluesky/tests/test_simulators.py
|
NSLS-II/bluesky
|
b7d666e65cf4ef556fb46b744c33264c8e3f7507
|
[
"BSD-3-Clause"
] | 43
|
2015-08-04T20:13:41.000Z
|
2019-04-12T17:21:36.000Z
|
bluesky/tests/test_simulators.py
|
NSLS-II/bluesky
|
b7d666e65cf4ef556fb46b744c33264c8e3f7507
|
[
"BSD-3-Clause"
] | 966
|
2015-07-29T16:43:21.000Z
|
2019-05-09T21:02:28.000Z
|
bluesky/tests/test_simulators.py
|
NSLS-II/bluesky
|
b7d666e65cf4ef556fb46b744c33264c8e3f7507
|
[
"BSD-3-Clause"
] | 40
|
2015-07-29T16:42:41.000Z
|
2019-02-07T02:30:34.000Z
|
from bluesky.plans import scan
from bluesky.simulators import (print_summary, print_summary_wrapper,
summarize_plan,
check_limits,
plot_raster_path)
import pytest
from bluesky.plans import grid_scan
| 34
| 76
| 0.636583
|
b9e09def642ce98a753ac3053c44b1ba7d862f16
| 4,850
|
py
|
Python
|
shutTheBox/main.py
|
robi1467/shut-the-box
|
ed1a8f13bc74caa63361453e723768a9cbe1dac4
|
[
"MIT"
] | null | null | null |
shutTheBox/main.py
|
robi1467/shut-the-box
|
ed1a8f13bc74caa63361453e723768a9cbe1dac4
|
[
"MIT"
] | null | null | null |
shutTheBox/main.py
|
robi1467/shut-the-box
|
ed1a8f13bc74caa63361453e723768a9cbe1dac4
|
[
"MIT"
] | null | null | null |
import random
numbers_list = [1,2,3,4,5,6,7,8,9,10]
game_won = False
game_completed = False
#Stats
games_played = 0
games_won = 0
games_lost = 0
average_score = 0
total_score = 0
keep_playing = True
while keep_playing:
numbers_list = [1,2,3,4,5,6,7,8,9,10]
welcome()
roll_total = 0
while roll_total < 55:
dice_amount = 2
if all_less_than_7():
dice_amount = choose_dice_amount()
dice_total = dice_roll(dice_amount)
print("Your roll is: " + str(dice_total))
if check_lost_game(dice_total):
print("It is impossible to continue the game with this roll")
break
choose_number_to_drop(dice_total)
roll_total += dice_total
if roll_total == 55:
game_won = win_game()
if game_won:
print("Congrats you won!!!!")
games_played +=1
games_won +=1
else:
print("You lose, your score is " + str(score_game()))
print("Numbers remaining: " + str(numbers_list))
games_played += 1
games_lost += 1
total_score += score_game()
average_score = total_score/games_played
game_won = False
print("STATS:\n Games Played: " + str(games_played) + "\nGames Won: " + str(games_won) + "\nGames Lost: " + str(games_lost)
+ "\nAverage Score: " + str(average_score) + "\nTotal Score: " + str(total_score))
keep_playing_input()
| 28.034682
| 127
| 0.549897
|
b9e0c71df07f6cc03e495d11899558d7e577552a
| 3,803
|
py
|
Python
|
repos/system_upgrade/common/actors/systemfacts/tests/test_systemfacts_selinux.py
|
sm00th/leapp-repository
|
1c171ec3a5f9260a3c6f84a9b15cad78a875ac61
|
[
"Apache-2.0"
] | 21
|
2018-11-20T15:58:39.000Z
|
2022-03-15T19:57:24.000Z
|
repos/system_upgrade/common/actors/systemfacts/tests/test_systemfacts_selinux.py
|
sm00th/leapp-repository
|
1c171ec3a5f9260a3c6f84a9b15cad78a875ac61
|
[
"Apache-2.0"
] | 732
|
2018-11-21T18:33:26.000Z
|
2022-03-31T16:16:24.000Z
|
repos/system_upgrade/common/actors/systemfacts/tests/test_systemfacts_selinux.py
|
sm00th/leapp-repository
|
1c171ec3a5f9260a3c6f84a9b15cad78a875ac61
|
[
"Apache-2.0"
] | 85
|
2018-11-20T17:55:00.000Z
|
2022-03-29T09:40:31.000Z
|
import warnings
import pytest
from leapp.libraries.actor.systemfacts import get_selinux_status
from leapp.models import SELinuxFacts
no_selinux = False
try:
import selinux
except ImportError:
no_selinux = True
warnings.warn(
'Tests which uses `selinux` will be skipped'
' due to library unavailability.', ImportWarning)
reason_to_skip_msg = "Selinux is not available"
# FIXME: create valid tests...
class MockNoConfigFileOSError(object):
| 38.414141
| 83
| 0.674993
|
b9e1517b77ef8c0c8643211eb516389a83db60f8
| 2,608
|
py
|
Python
|
Phase-1/Python Basic 2/Day-24.py
|
emetowinner/python-challenges
|
520da69da0f2632deb1e81136d2b62d40555a4aa
|
[
"MIT"
] | 3
|
2020-05-21T20:19:40.000Z
|
2022-02-27T08:20:10.000Z
|
Phase-1/Python Basic 2/Day-24.py
|
emetowinner/python-challenges
|
520da69da0f2632deb1e81136d2b62d40555a4aa
|
[
"MIT"
] | null | null | null |
Phase-1/Python Basic 2/Day-24.py
|
emetowinner/python-challenges
|
520da69da0f2632deb1e81136d2b62d40555a4aa
|
[
"MIT"
] | 4
|
2020-05-12T16:41:52.000Z
|
2020-05-21T20:17:22.000Z
|
"""
1. Write a Python program to reverse only the vowels of a given string.
Sample Output:
w3resuorce
Python
Perl
ASU
2. Write a Python program to check whether a given integer is a palindrome or not.
Note: An integer is a palindrome when it reads the same backward as forward. Negative numbers are not palindromic.
Sample Output:
False
True
False
3. Write a Python program to remove the duplicate elements of a given array of numbers such that each element appear only once and return the new length of the given array.
Sample Output:
5
4
4. Write a Python program to calculate the maximum profit from selling and buying values of stock. An array of numbers represent the stock prices in chronological order.
For example, given [8, 10, 7, 5, 7, 15], the function will return 10, since the buying value of the stock is 5 dollars and sell value is 15 dollars.
Sample Output:
10
7
0
5. Write a Python program to remove all instances of a given value from a given array of integers and find the length of the new array.
For example, given [8, 10, 7, 5, 7, 15], the function will return 10, since the buying value of the stock is 5 dollars and sell value is 15 dollars.
Sample Output:
6
0
5
0
6. Write a Python program to find the starting and ending position of a given value in a given array of integers, sorted in ascending order.
If the target is not found in the array, return [0, 0].
Input: [5, 7, 7, 8, 8, 8] target value = 8
Output: [0, 5]
Input: [1, 3, 6, 9, 13, 14] target value = 4
Output: [0, 0]
Sample Output:
[0, 5]
[0, 0]
7. The price of a given stock on each day is stored in an array.
Write a Python program to find the maximum profit in one transaction i.e., buy one and sell one share of the stock from the given price value of the said array. You cannot sell a stock before you buy one.
Input (Stock price of each day): [224, 236, 247, 258, 259, 225]
Output: 35
Explanation:
236 - 224 = 12
247 - 224 = 23
258 - 224 = 34
259 - 224 = 35
225 - 224 = 1
247 - 236 = 11
258 - 236 = 22
259 - 236 = 23
225 - 236 = -11
258 - 247 = 11
259 - 247 = 12
225 - 247 = -22
259 - 258 = 1
225 - 258 = -33
225 - 259 = -34
8. Write a Python program to print a given N by M matrix of numbers line by line in forward > backwards > forward >... order.
Input matrix:
[[1, 2, 3,4],
[5, 6, 7, 8],
[0, 6, 2, 8],
[2, 3, 0, 2]]
Output:
1
2
3
4
8
7
6
5
0
6
2
8
2
0
3
2
9. Write a Python program to compute the largest product of three integers from a given list of integers.
Sample Output:
4000
8
120
10. Write a Python program to find the first missing positive integer that does not exist in a given list.
"""
| 25.821782
| 204
| 0.717408
|
b9e1d3ca3ecc29b35600c2af35a03fcf35a771c0
| 3,413
|
py
|
Python
|
etl/parsers/etw/Microsoft_Windows_IPxlatCfg.py
|
IMULMUL/etl-parser
|
76b7c046866ce0469cd129ee3f7bb3799b34e271
|
[
"Apache-2.0"
] | 104
|
2020-03-04T14:31:31.000Z
|
2022-03-28T02:59:36.000Z
|
etl/parsers/etw/Microsoft_Windows_IPxlatCfg.py
|
IMULMUL/etl-parser
|
76b7c046866ce0469cd129ee3f7bb3799b34e271
|
[
"Apache-2.0"
] | 7
|
2020-04-20T09:18:39.000Z
|
2022-03-19T17:06:19.000Z
|
etl/parsers/etw/Microsoft_Windows_IPxlatCfg.py
|
IMULMUL/etl-parser
|
76b7c046866ce0469cd129ee3f7bb3799b34e271
|
[
"Apache-2.0"
] | 16
|
2020-03-05T18:55:59.000Z
|
2022-03-01T10:19:28.000Z
|
# -*- coding: utf-8 -*-
"""
Microsoft-Windows-IPxlatCfg
GUID : 3e5ac668-af52-4c15-b99b-a3e7a6616ebd
"""
from construct import Int8sl, Int8ul, Int16ul, Int16sl, Int32sl, Int32ul, Int64sl, Int64ul, Bytes, Double, Float32l, Struct
from etl.utils import WString, CString, SystemTime, Guid
from etl.dtyp import Sid
from etl.parsers.etw.core import Etw, declare, guid
| 29.17094
| 123
| 0.699092
|
b9e2c12e3855c30001fd37ab610587d3e95c803d
| 535
|
py
|
Python
|
microservices/users/config.py
|
Levakin/sanic-test-app
|
d96a54a21f6d0d3b262bbc7bc75f5fa3b12c3b61
|
[
"Apache-2.0"
] | null | null | null |
microservices/users/config.py
|
Levakin/sanic-test-app
|
d96a54a21f6d0d3b262bbc7bc75f5fa3b12c3b61
|
[
"Apache-2.0"
] | null | null | null |
microservices/users/config.py
|
Levakin/sanic-test-app
|
d96a54a21f6d0d3b262bbc7bc75f5fa3b12c3b61
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
import os
from distutils.util import strtobool
| 31.470588
| 63
| 0.646729
|
b9e36baa14d5265769af32c8ed910969e39eaf3a
| 199
|
py
|
Python
|
semantic-python/test/fixtures/4-01-lambda-literals.py
|
Temurson/semantic
|
2e9cd2c006cec9a0328791e47d8c6d60af6d5a1b
|
[
"MIT"
] | 8,844
|
2019-05-31T15:47:12.000Z
|
2022-03-31T18:33:51.000Z
|
semantic-python/test/fixtures/4-01-lambda-literals.py
|
Qanora/semantic
|
b0eda9a61bbc690a342fb177cfc12eec8c1c001c
|
[
"MIT"
] | 401
|
2019-05-31T18:30:26.000Z
|
2022-03-31T16:32:29.000Z
|
semantic-python/test/fixtures/4-01-lambda-literals.py
|
Qanora/semantic
|
b0eda9a61bbc690a342fb177cfc12eec8c1c001c
|
[
"MIT"
] | 504
|
2019-05-31T17:55:03.000Z
|
2022-03-30T04:15:04.000Z
|
# CHECK-TREE: { const <- \x -> \y -> x; y <- const #true #true; z <- const #false #false; #record { const: const, y : y, z: z, }}
const = lambda x, y: x
y = const(True, True)
z = const(False, False)
| 39.8
| 129
| 0.557789
|
b9e379a95e3f4e855adb56ee1112dc1aa95e6a78
| 9,351
|
py
|
Python
|
main.py
|
mithi/semantic-segmentation
|
85e9df04397745e0c6ab252e30991fa9b514ec1a
|
[
"MIT"
] | 33
|
2017-08-24T16:38:15.000Z
|
2022-03-17T15:55:52.000Z
|
main.py
|
mithi/semantic-segmentation
|
85e9df04397745e0c6ab252e30991fa9b514ec1a
|
[
"MIT"
] | 3
|
2018-10-12T11:17:22.000Z
|
2019-05-30T09:49:11.000Z
|
main.py
|
mithi/semantic-segmentation
|
85e9df04397745e0c6ab252e30991fa9b514ec1a
|
[
"MIT"
] | 26
|
2017-09-17T09:09:52.000Z
|
2020-01-14T02:48:56.000Z
|
import tensorflow as tf
import os.path
import warnings
from distutils.version import LooseVersion
import glob
import helper
import project_tests as tests
#--------------------------
# USER-SPECIFIED DATA
#--------------------------
# Tune these parameters
NUMBER_OF_CLASSES = 2
IMAGE_SHAPE = (160, 576)
EPOCHS = 20
BATCH_SIZE = 1
LEARNING_RATE = 0.0001
DROPOUT = 0.75
# Specify these directory paths
DATA_DIRECTORY = './data'
RUNS_DIRECTORY = './runs'
TRAINING_DATA_DIRECTORY ='./data/data_road/training'
NUMBER_OF_IMAGES = len(glob.glob('./data/data_road/training/calib/*.*'))
VGG_PATH = './data/vgg'
all_training_losses = [] # Used for plotting to visualize if our training is going well given parameters
#--------------------------
# DEPENDENCY CHECK
#--------------------------
# Check TensorFlow Version
assert LooseVersion(tf.__version__) >= LooseVersion('1.0'), 'Please use TensorFlow version 1.0 or newer. You are using {}'.format(tf.__version__)
print('TensorFlow Version: {}'.format(tf.__version__))
# Check for a GPU
if not tf.test.gpu_device_name():
warnings.warn('No GPU found. Please use a GPU to train your neural network.')
else:
print('Default GPU Device: {}'.format(tf.test.gpu_device_name()))
#--------------------------
# PLACEHOLDER TENSORS
#--------------------------
correct_label = tf.placeholder(tf.float32, [None, IMAGE_SHAPE[0], IMAGE_SHAPE[1], NUMBER_OF_CLASSES])
learning_rate = tf.placeholder(tf.float32)
keep_prob = tf.placeholder(tf.float32)
#--------------------------
# FUNCTIONS
#--------------------------
def load_vgg(sess, vgg_path):
"""
Load Pretrained VGG Model into TensorFlow.
sess: TensorFlow Session
vgg_path: Path to vgg folder, containing "variables/" and "saved_model.pb"
return: Tuple of Tensors from VGG model (image_input, keep_prob, layer3, layer4, layer7)
"""
# load the model and weights
model = tf.saved_model.loader.load(sess, ['vgg16'], vgg_path)
# Get Tensors to be returned from graph
graph = tf.get_default_graph()
image_input = graph.get_tensor_by_name('image_input:0')
keep_prob = graph.get_tensor_by_name('keep_prob:0')
layer3 = graph.get_tensor_by_name('layer3_out:0')
layer4 = graph.get_tensor_by_name('layer4_out:0')
layer7 = graph.get_tensor_by_name('layer7_out:0')
return image_input, keep_prob, layer3, layer4, layer7
def conv_1x1(layer, layer_name):
""" Return the output of a 1x1 convolution of a layer """
return tf.layers.conv2d(inputs = layer,
filters = NUMBER_OF_CLASSES,
kernel_size = (1, 1),
strides = (1, 1),
name = layer_name)
def upsample(layer, k, s, layer_name):
""" Return the output of transpose convolution given kernel_size k and strides s """
return tf.layers.conv2d_transpose(inputs = layer,
filters = NUMBER_OF_CLASSES,
kernel_size = (k, k),
strides = (s, s),
padding = 'same',
name = layer_name)
def layers(vgg_layer3_out, vgg_layer4_out, vgg_layer7_out, num_classes = NUMBER_OF_CLASSES):
"""
Create the layers for a fully convolutional network. Build skip-layers using the vgg layers.
vgg_layerX_out: TF Tensor for VGG Layer X output
num_classes: Number of classes to classify
return: The Tensor for the last layer of output
"""
# Use a shorter variable name for simplicity
layer3, layer4, layer7 = vgg_layer3_out, vgg_layer4_out, vgg_layer7_out
# Apply a 1x1 convolution to encoder layers
layer3x = conv_1x1(layer = layer3, layer_name = "layer3conv1x1")
layer4x = conv_1x1(layer = layer4, layer_name = "layer4conv1x1")
layer7x = conv_1x1(layer = layer7, layer_name = "layer7conv1x1")
# Add decoder layers to the network with skip connections and upsampling
# Note: the kernel size and strides are the same as the example in Udacity Lectures
# Semantic Segmentation Scene Understanding Lesson 10-9: FCN-8 - Decoder
decoderlayer1 = upsample(layer = layer7x, k = 4, s = 2, layer_name = "decoderlayer1")
decoderlayer2 = tf.add(decoderlayer1, layer4x, name = "decoderlayer2")
decoderlayer3 = upsample(layer = decoderlayer2, k = 4, s = 2, layer_name = "decoderlayer3")
decoderlayer4 = tf.add(decoderlayer3, layer3x, name = "decoderlayer4")
decoderlayer_output = upsample(layer = decoderlayer4, k = 16, s = 8, layer_name = "decoderlayer_output")
return decoderlayer_output
def optimize(nn_last_layer, correct_label, learning_rate, num_classes = NUMBER_OF_CLASSES):
"""
Build the TensorFLow loss and optimizer operations.
nn_last_layer: TF Tensor of the last layer in the neural network
correct_label: TF Placeholder for the correct label image
learning_rate: TF Placeholder for the learning rate
num_classes: Number of classes to classify
return: Tuple of (logits, train_op, cross_entropy_loss)
"""
# Reshape 4D tensors to 2D, each row represents a pixel, each column a class
logits = tf.reshape(nn_last_layer, (-1, num_classes))
class_labels = tf.reshape(correct_label, (-1, num_classes))
# The cross_entropy_loss is the cost which we are trying to minimize to yield higher accuracy
cross_entropy = tf.nn.softmax_cross_entropy_with_logits(logits = logits, labels = class_labels)
cross_entropy_loss = tf.reduce_mean(cross_entropy)
# The model implements this operation to find the weights/parameters that would yield correct pixel labels
train_op = tf.train.AdamOptimizer(learning_rate).minimize(cross_entropy_loss)
return logits, train_op, cross_entropy_loss
def train_nn(sess, epochs, batch_size, get_batches_fn, train_op,
cross_entropy_loss, input_image,
correct_label, keep_prob, learning_rate):
"""
Train neural network and print out the loss during training.
sess: TF Session
epochs: Number of epochs
batch_size: Batch size
get_batches_fn: Function to get batches of training data. Call using get_batches_fn(batch_size)
train_op: TF Operation to train the neural network
cross_entropy_loss: TF Tensor for the amount of loss
input_image: TF Placeholder for input images
correct_label: TF Placeholder for label images
keep_prob: TF Placeholder for dropout keep probability
learning_rate: TF Placeholder for learning rate
"""
for epoch in range(EPOCHS):
losses, i = [], 0
for images, labels in get_batches_fn(BATCH_SIZE):
i += 1
feed = { input_image: images,
correct_label: labels,
keep_prob: DROPOUT,
learning_rate: LEARNING_RATE }
_, partial_loss = sess.run([train_op, cross_entropy_loss], feed_dict = feed)
print("---> iteration: ", i, " partial loss:", partial_loss)
losses.append(partial_loss)
training_loss = sum(losses) / len(losses)
all_training_losses.append(training_loss)
print("------------------")
print("epoch: ", epoch + 1, " of ", EPOCHS, "training loss: ", training_loss)
print("------------------")
def run():
""" Run a train a model and save output images resulting from the test image fed on the trained model """
# Get vgg model if we can't find it where it should be
helper.maybe_download_pretrained_vgg(DATA_DIRECTORY)
# A function to get batches
get_batches_fn = helper.gen_batch_function(TRAINING_DATA_DIRECTORY, IMAGE_SHAPE)
with tf.Session() as session:
# Returns the three layers, keep probability and input layer from the vgg architecture
image_input, keep_prob, layer3, layer4, layer7 = load_vgg(session, VGG_PATH)
# The resulting network architecture from adding a decoder on top of the given vgg model
model_output = layers(layer3, layer4, layer7, NUMBER_OF_CLASSES)
# Returns the output logits, training operation and cost operation to be used
# - logits: each row represents a pixel, each column a class
# - train_op: function used to get the right parameters to the model to correctly label the pixels
# - cross_entropy_loss: function outputting the cost which we are minimizing, lower cost should yield higher accuracy
logits, train_op, cross_entropy_loss = optimize(model_output, correct_label, learning_rate, NUMBER_OF_CLASSES)
# Initialize all variables
session.run(tf.global_variables_initializer())
session.run(tf.local_variables_initializer())
# Train the neural network
train_nn(session, EPOCHS, BATCH_SIZE, get_batches_fn,
train_op, cross_entropy_loss, image_input,
correct_label, keep_prob, learning_rate)
# Run the model with the test images and save each painted output image (roads painted green)
helper.save_inference_samples(RUNS_DIRECTORY, DATA_DIRECTORY, session, IMAGE_SHAPE, logits, keep_prob, image_input)
#--------------------------
# MAIN
#--------------------------
if __name__ == "__main__":
run_tests()
run() # Run a train a model and save output images resulting from the test image fed on the trained model
print(all_training_losses)
| 37.8583
| 146
| 0.69276
|
b9e38ca4d963e2aa4de106573e34682092b6337e
| 22,356
|
py
|
Python
|
tests/scanner/audit/log_sink_rules_engine_test.py
|
BrunoReboul/forseti-security
|
9d4a61b3e5a5d22a4330d15ddf61063fc9079071
|
[
"Apache-2.0"
] | null | null | null |
tests/scanner/audit/log_sink_rules_engine_test.py
|
BrunoReboul/forseti-security
|
9d4a61b3e5a5d22a4330d15ddf61063fc9079071
|
[
"Apache-2.0"
] | null | null | null |
tests/scanner/audit/log_sink_rules_engine_test.py
|
BrunoReboul/forseti-security
|
9d4a61b3e5a5d22a4330d15ddf61063fc9079071
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2018 The Forseti Security Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests the LogSinkRulesEngine."""
import unittest
import mock
from tests.unittest_utils import ForsetiTestCase
from tests.unittest_utils import get_datafile_path
from google.cloud.forseti.common.gcp_type.billing_account import BillingAccount
from google.cloud.forseti.common.gcp_type.folder import Folder
from google.cloud.forseti.common.gcp_type.log_sink import LogSink
from google.cloud.forseti.common.gcp_type.organization import Organization
from google.cloud.forseti.common.gcp_type.project import Project
from google.cloud.forseti.scanner.audit import log_sink_rules_engine as lsre
from google.cloud.forseti.scanner.audit.errors import InvalidRulesSchemaError
if __name__ == '__main__':
unittest.main()
| 40.79562
| 84
| 0.560834
|
b9e3fca3aec04c54b087304757154615d5a67e58
| 2,852
|
py
|
Python
|
backend/api/ulca-ums-service/user-management/utilities/orgUtils.py
|
agupta54/ulca
|
c1f570ac254ce2ac73f40c49716458f4f7cbaee2
|
[
"MIT"
] | 3
|
2022-01-12T06:51:51.000Z
|
2022-02-23T18:54:33.000Z
|
backend/api/ulca-ums-service/user-management/utilities/orgUtils.py
|
agupta54/ulca
|
c1f570ac254ce2ac73f40c49716458f4f7cbaee2
|
[
"MIT"
] | 6
|
2021-08-31T19:21:26.000Z
|
2022-01-03T05:53:42.000Z
|
backend/api/ulca-ums-service/user-management/utilities/orgUtils.py
|
agupta54/ulca
|
c1f570ac254ce2ac73f40c49716458f4f7cbaee2
|
[
"MIT"
] | 8
|
2021-08-12T08:07:49.000Z
|
2022-01-25T04:40:51.000Z
|
import uuid
from config import USR_ORG_MONGO_COLLECTION, USR_MONGO_COLLECTION
import db
from models.response import post_error
import logging
log = logging.getLogger('file')
| 41.333333
| 149
| 0.619565
|
b9e478ed385905aa26b48748e1fbf896e8ced766
| 4,299
|
py
|
Python
|
setup.py
|
AntonBiryukovUofC/diffvg
|
e081098f52b82bfd0b7e91114d289d65ef969a60
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
AntonBiryukovUofC/diffvg
|
e081098f52b82bfd0b7e91114d289d65ef969a60
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
AntonBiryukovUofC/diffvg
|
e081098f52b82bfd0b7e91114d289d65ef969a60
|
[
"Apache-2.0"
] | null | null | null |
# Adapted from https://github.com/pybind/cmake_example/blob/master/setup.py
import os
import re
import sys
import platform
import subprocess
import importlib
from sysconfig import get_paths
import importlib
from setuptools import setup, Extension
from setuptools.command.build_ext import build_ext
from setuptools.command.install import install
from distutils.sysconfig import get_config_var
from distutils.version import LooseVersion
torch_spec = importlib.util.find_spec("torch")
tf_spec = importlib.util.find_spec("tensorflow")
packages = []
build_with_cuda = False
if torch_spec is not None:
packages.append('pydiffvg')
import torch
if torch.cuda.is_available():
build_with_cuda = True
if tf_spec is not None and sys.platform != 'win32':
packages.append('pydiffvg_tensorflow')
if not build_with_cuda:
import tensorflow as tf
if tf.test.is_gpu_available(cuda_only=True, min_cuda_compute_capability=None):
build_with_cuda = True
if len(packages) == 0:
print('Error: PyTorch or Tensorflow must be installed. For Windows platform only PyTorch is supported.')
exit()
# Override build_with_cuda with environment variable
if 'DIFFVG_CUDA' in os.environ:
build_with_cuda = os.environ['DIFFVG_CUDA'] == '1'
setup(name='diffvg',
version='0.0.1',
install_requires=["svgpathtools"],
description='Differentiable Vector Graphics',
ext_modules=[CMakeExtension('diffvg', '', build_with_cuda)],
cmdclass=dict(build_ext=Build, install=install),
packages=packages,
zip_safe=False)
| 38.044248
| 109
| 0.601303
|
b9e64ab7c515862e0dec6a8272d8a276b9bd86b9
| 14,587
|
py
|
Python
|
robotpy_ext/common_drivers/navx/registerio.py
|
twinters007/robotpy-wpilib-utilities
|
d2e18c16fc97a469e0621521e0fbed0093610d6e
|
[
"MIT",
"BSD-3-Clause"
] | 2
|
2017-01-16T03:10:57.000Z
|
2017-01-16T03:11:00.000Z
|
robotpy_ext/common_drivers/navx/registerio.py
|
twinters007/robotpy-wpilib-utilities
|
d2e18c16fc97a469e0621521e0fbed0093610d6e
|
[
"MIT",
"BSD-3-Clause"
] | null | null | null |
robotpy_ext/common_drivers/navx/registerio.py
|
twinters007/robotpy-wpilib-utilities
|
d2e18c16fc97a469e0621521e0fbed0093610d6e
|
[
"MIT",
"BSD-3-Clause"
] | null | null | null |
# validated: 2017-02-19 DS c5e3a8a9b642 roborio/java/navx_frc/src/com/kauailabs/navx/frc/RegisterIO.java
#----------------------------------------------------------------------------
# Copyright (c) Kauai Labs 2015. All Rights Reserved.
#
# Created in support of Team 2465 (Kauaibots). Go Purple Wave!
#
# Open Source Software - may be modified and shared by FRC teams. Any
# modifications to this code must be accompanied by the \License.txt file
# in the root directory of the project
#----------------------------------------------------------------------------
from ._impl import AHRSProtocol, IMUProtocol, IMURegisters
from wpilib.timer import Timer
import logging
logger = logging.getLogger('navx')
__all__ = ['RegisterIO']
IO_TIMEOUT_SECONDS = 1.0
DELAY_OVERHEAD_SECONDS = 0.004
| 54.632959
| 159
| 0.676973
|
b9e6a0bf2a4d3e860c6eb607624b101a086157b4
| 12,517
|
py
|
Python
|
RigolWFM/channel.py
|
wvdv2002/RigolWFM
|
849a1130c9194f052eaf5582dfa67e7a5708a3a3
|
[
"BSD-3-Clause"
] | null | null | null |
RigolWFM/channel.py
|
wvdv2002/RigolWFM
|
849a1130c9194f052eaf5582dfa67e7a5708a3a3
|
[
"BSD-3-Clause"
] | null | null | null |
RigolWFM/channel.py
|
wvdv2002/RigolWFM
|
849a1130c9194f052eaf5582dfa67e7a5708a3a3
|
[
"BSD-3-Clause"
] | null | null | null |
#pylint: disable=invalid-name
#pylint: disable=too-many-instance-attributes
#pylint: disable=too-many-return-statements
#pylint: disable=too-many-statements
"""
Class structure and methods for an oscilloscope channel.
The idea is to collect all the relevant information from all the Rigol
scope waveforms into a single structure that can be handled in a uniform
and consistent manner.
Specifically this lets one just use
channel.times : numpy array of signal times
channel.volts : numpy array of signal voltages
or the stringification method to describe a channel
print(channel)
"""
from enum import Enum
import numpy as np
def best_scale(number):
"""Scale and units for a number with proper prefix."""
absnr = abs(number)
if absnr == 0:
return 1, ' '
if absnr < 0.99999999e-9:
return 1e12, 'p'
if absnr < 0.99999999e-6:
return 1e9, 'n'
if absnr < 0.99999999e-3:
return 1e6, ''
if absnr < 0.99999999:
return 1e3, 'm'
if absnr < 0.99999999e3:
return 1, ' '
if absnr < 0.99999999e6:
return 1e-3, 'k'
if absnr < 0.999999991e9:
return 1e-6, 'M'
return 1e-9, 'G'
def engineering_string(number, n_digits):
"""Format number with proper prefix."""
scale, prefix = best_scale(number)
fformat = "%%.%df %%s" % n_digits
s = fformat % (number * scale, prefix)
return s
def _channel_bytes(channel_number, w):
"""
Return right series of bytes for a channel for 1000Z scopes.
Waveform points are interleaved stored in memory when two or more
channels are saved. This unweaves them.
Args:
channel_number: the number of enabled channels before this one
w: original waveform object
Returns
byte array for specified channel
"""
offset = 0
if w.header.stride == 2: # byte pattern CHx CHy
# use odd bytes when this is the second enabled channel
if any([w.header.ch[i].enabled for i in range(channel_number-1)]):
offset = 1
elif w.header.stride == 4: # byte pattern CH4 CH3 CH2 CH1
offset = 4 - channel_number
data = np.frombuffer(w.data.raw, dtype=np.uint8)
raw_bytes = data[offset::w.header.stride]
return raw_bytes
| 36.176301
| 96
| 0.589199
|
b9e6a9be08cb7ae14c68608c944b95cbe6233b10
| 1,477
|
py
|
Python
|
configs/raubtierv2a/faster_rcnn_x101_64x4d_fpn_1x_raubtierv2a_nofreeze_4gpu.py
|
esf-bt2020/mmdetection
|
abc5fe060e0fcb716f845c85441be3741b22d3cf
|
[
"Apache-2.0"
] | null | null | null |
configs/raubtierv2a/faster_rcnn_x101_64x4d_fpn_1x_raubtierv2a_nofreeze_4gpu.py
|
esf-bt2020/mmdetection
|
abc5fe060e0fcb716f845c85441be3741b22d3cf
|
[
"Apache-2.0"
] | null | null | null |
configs/raubtierv2a/faster_rcnn_x101_64x4d_fpn_1x_raubtierv2a_nofreeze_4gpu.py
|
esf-bt2020/mmdetection
|
abc5fe060e0fcb716f845c85441be3741b22d3cf
|
[
"Apache-2.0"
] | null | null | null |
_base_ = '../faster_rcnn/faster_rcnn_x101_64x4d_fpn_1x_coco.py'
model = dict(
backbone=dict(
num_stages=4,
#frozen_stages=4
),
roi_head=dict(
bbox_head=dict(
num_classes=3
)
)
)
dataset_type = 'COCODataset'
classes = ('luchs', 'rotfuchs', 'wolf')
data = dict(
train=dict(
img_prefix='raubtierv2a/train/',
classes=classes,
ann_file='raubtierv2a/train/_annotations.coco.json'),
val=dict(
img_prefix='raubtierv2a/valid/',
classes=classes,
ann_file='raubtierv2a/valid/_annotations.coco.json'),
test=dict(
img_prefix='raubtierv2a/test/',
classes=classes,
ann_file='raubtierv2a/test/_annotations.coco.json'))
#optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.0001) #original (8x2=16)
optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) #(4x2=8) 4 GPUs
#optimizer = dict(type='SGD', lr=0.0025, momentum=0.9, weight_decay=0.0001) #(1x2=2)
total_epochs=24
evaluation = dict(classwise=True, interval=1, metric='bbox')
work_dir = '/media/storage1/projects/WilLiCam/checkpoint_workdir/raubtierv2a/faster_rcnn_x101_64x4d_fpn_1x_raubtierv2a_nofreeze_4gpu'
#http://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_64x4d_fpn_1x_coco/faster_rcnn_x101_64x4d_fpn_1x_coco_20200204-833ee192.pth
load_from = 'checkpoints/faster_rcnn_x101_64x4d_fpn_1x_coco_20200204-833ee192.pth'
| 26.375
| 151
| 0.704807
|
b9e6fcabd0b33c8ba893844382f8413f57f64840
| 262
|
py
|
Python
|
driver/python/setup.py
|
wbaweto/QConf
|
977a53d601eab2055fd8fb344b92f4026d178ad5
|
[
"BSD-2-Clause"
] | 2,056
|
2015-03-23T04:51:13.000Z
|
2022-03-20T11:57:36.000Z
|
driver/python/setup.py
|
xzz0329/QConf
|
f852f984de0b55bbca5bcb433a7be5af6383c449
|
[
"BSD-2-Clause"
] | 116
|
2015-03-25T01:32:39.000Z
|
2022-02-12T03:21:08.000Z
|
driver/python/setup.py
|
xzz0329/QConf
|
f852f984de0b55bbca5bcb433a7be5af6383c449
|
[
"BSD-2-Clause"
] | 634
|
2015-03-24T11:51:22.000Z
|
2022-01-28T04:22:19.000Z
|
from distutils.core import setup, Extension
setup(name = 'qconf_py', version = '1.2.2', ext_modules = [Extension('qconf_py', ['lib/python_qconf.cc'],
include_dirs=['/usr/local/include/qconf'],
extra_objects=['/usr/local/qconf/lib/libqconf.a']
)])
| 43.666667
| 105
| 0.683206
|
b9e707edd4da101ada4ff00b233330f2c2f9843e
| 148
|
py
|
Python
|
abc153/d.py
|
Lockdef/kyopro-code
|
2d943a87987af05122c556e173e5108a0c1c77c8
|
[
"MIT"
] | null | null | null |
abc153/d.py
|
Lockdef/kyopro-code
|
2d943a87987af05122c556e173e5108a0c1c77c8
|
[
"MIT"
] | null | null | null |
abc153/d.py
|
Lockdef/kyopro-code
|
2d943a87987af05122c556e173e5108a0c1c77c8
|
[
"MIT"
] | null | null | null |
h = int(input())
i = 1
a = 1
b = 1
c = 1
while h >= a:
a = 2 ** i
i += 1
s = 0
t = True
for j in range(1, i-1):
c += 2 ** j
print(c)
| 8.705882
| 23
| 0.398649
|
b9e96b262a690da4aaab0bf9584b51a15851826f
| 6,784
|
py
|
Python
|
demos/python/sdk_wireless_camera_control/open_gopro/demos/log_battery.py
|
Natureshadow/OpenGoPro
|
05110123cfbf6584288b813f2d4896d3a091480e
|
[
"MIT"
] | 210
|
2021-06-05T20:06:17.000Z
|
2022-03-31T18:13:17.000Z
|
demos/python/sdk_wireless_camera_control/open_gopro/demos/log_battery.py
|
Natureshadow/OpenGoPro
|
05110123cfbf6584288b813f2d4896d3a091480e
|
[
"MIT"
] | 73
|
2021-06-01T21:22:44.000Z
|
2022-03-31T18:33:24.000Z
|
demos/python/sdk_wireless_camera_control/open_gopro/demos/log_battery.py
|
Natureshadow/OpenGoPro
|
05110123cfbf6584288b813f2d4896d3a091480e
|
[
"MIT"
] | 70
|
2021-06-07T03:59:04.000Z
|
2022-03-26T10:51:15.000Z
|
# log_battery.py/Open GoPro, Version 2.0 (C) Copyright 2021 GoPro, Inc. (http://gopro.com/OpenGoPro).
# This copyright was auto-generated on Wed, Sep 1, 2021 5:05:45 PM
"""Example to continuously read the battery (with no Wifi connection)"""
import csv
import time
import logging
import argparse
import threading
from pathlib import Path
from datetime import datetime
from dataclasses import dataclass
from typing import Optional, Tuple, Literal, List
from rich.console import Console
from open_gopro import GoPro
from open_gopro.constants import StatusId
from open_gopro.util import setup_logging, set_logging_level
logger = logging.getLogger(__name__)
console = Console() # rich consoler printer
BarsType = Literal[0, 1, 2, 3]
SAMPLE_INDEX = 0
SAMPLES: List[Sample] = []
def dump_results_as_csv(location: Path) -> None:
"""Write all of the samples to a csv file
Args:
location (Path): File to write to
"""
console.print(f"Dumping results as CSV to {location}")
with open(location, mode="w") as f:
w = csv.writer(f, delimiter=",", quotechar='"', quoting=csv.QUOTE_MINIMAL)
w.writerow(["index", "time", "percentage", "bars"])
initial_time = SAMPLES[0].time
for s in SAMPLES:
w.writerow([s.index, (s.time - initial_time).seconds, s.percentage, s.bars])
def process_battery_notifications(gopro: GoPro, initial_bars: BarsType, initial_percentage: int) -> None:
"""Separate thread to continuously check for and store battery notifications.
If the CLI parameter was set to poll, this isn't used.
Args:
gopro (GoPro): instance to get updates from
initial_bars (BarsType): Initial bars level when notifications were enabled
initial_percentage (int): Initial percentage when notifications were enabled
"""
last_percentage = initial_percentage
last_bars = initial_bars
while True:
# Block until we receive an update
notification = gopro.get_update()
# Update data points if they have changed
last_percentage = (
notification.data[StatusId.INT_BATT_PER]
if StatusId.INT_BATT_PER in notification.data
else last_percentage
)
last_bars = (
notification.data[StatusId.BATT_LEVEL] if StatusId.BATT_LEVEL in notification.data else last_bars
)
# Append and print sample
global SAMPLE_INDEX
SAMPLES.append(Sample(index=SAMPLE_INDEX, percentage=last_percentage, bars=last_bars))
console.print(str(SAMPLES[-1]))
SAMPLE_INDEX += 1
def main() -> int:
"""Main program functionality
Returns:
int: program return code
"""
identifier, log_location, poll = parse_arguments()
global logger
logger = setup_logging(logger, log_location)
global SAMPLE_INDEX
gopro: Optional[GoPro] = None
return_code = 0
try:
with GoPro(identifier, enable_wifi=False) as gopro:
set_logging_level(logger, logging.ERROR)
# # Setup notifications if we are not polling
if poll is None:
console.print("Configuring battery notifications...")
# Enable notifications of the relevant battery statuses. Also store initial values.
bars = gopro.ble_status.batt_level.register_value_update().flatten
percentage = gopro.ble_status.int_batt_per.register_value_update().flatten
# Start a thread to handle asynchronous battery level notifications
threading.Thread(
target=process_battery_notifications, args=(gopro, bars, percentage), daemon=True
).start()
with console.status("[bold green]Receiving battery notifications until it dies..."):
# Sleep forever, allowing notification handler thread to deal with battery level notifications
while True:
time.sleep(1)
# Otherwise, poll
else:
with console.status("[bold green]Polling the battery until it dies..."):
while True:
SAMPLES.append(
Sample(
index=SAMPLE_INDEX,
percentage=gopro.ble_status.int_batt_per.get_value().flatten,
bars=gopro.ble_status.batt_level.get_value().flatten,
)
)
console.print(str(SAMPLES[-1]))
SAMPLE_INDEX += 1
time.sleep(poll)
except Exception as e: # pylint: disable=broad-except
logger.error(repr(e))
return_code = 1
except KeyboardInterrupt:
logger.warning("Received keyboard interrupt. Shutting down...")
finally:
if len(SAMPLES) > 0:
csv_location = Path(log_location.parent) / "battery_results.csv"
dump_results_as_csv(csv_location)
if gopro is not None:
gopro.close()
console.print("Exiting...")
return return_code # pylint: disable=lost-exception
def parse_arguments() -> Tuple[str, Path, Optional[int]]:
"""Parse command line arguments
Returns:
Tuple[str, Path, Path]: (identifier, path to save log, path to VLC)
"""
parser = argparse.ArgumentParser(
description="Connect to the GoPro via BLE only and continuously read the battery (either by polling or notifications)."
)
parser.add_argument(
"-i",
"--identifier",
type=str,
help="Last 4 digits of GoPro serial number, which is the last 4 digits of the default camera SSID. \
If not used, first discovered GoPro will be connected to",
default=None,
)
parser.add_argument(
"-l",
"--log",
type=Path,
help="Location to store detailed log",
default="log_battery.log",
)
parser.add_argument(
"-p",
"--poll",
type=int,
help="Set to poll the battery at a given interval. If not set, battery level will be notified instead. Defaults to notifications.",
default=None,
)
args = parser.parse_args()
return args.identifier, args.log, args.poll
if __name__ == "__main__":
main()
| 34.969072
| 139
| 0.627358
|
b9ea2a649f07b6a108f30b09b86010ae0b3acd70
| 47
|
py
|
Python
|
tumbleweed/models.py
|
mcroydon/django-tumbleweed
|
3f1eab2bf12350a91ca38165efec0c221a1fe69a
|
[
"BSD-3-Clause"
] | 1
|
2015-11-08T11:33:15.000Z
|
2015-11-08T11:33:15.000Z
|
tumbleweed/models.py
|
mcroydon/django-tumbleweed
|
3f1eab2bf12350a91ca38165efec0c221a1fe69a
|
[
"BSD-3-Clause"
] | null | null | null |
tumbleweed/models.py
|
mcroydon/django-tumbleweed
|
3f1eab2bf12350a91ca38165efec0c221a1fe69a
|
[
"BSD-3-Clause"
] | null | null | null |
# These are not the droids you are looking for.
| 47
| 47
| 0.765957
|
b9ea32c16e86b4071267eb26a711d79f81eaea56
| 2,925
|
py
|
Python
|
xos/hpc_observer/steps/sync_originserver.py
|
wathsalav/xos
|
f6bcaa37a948ee41729236afe7fce0802e002404
|
[
"Apache-2.0"
] | null | null | null |
xos/hpc_observer/steps/sync_originserver.py
|
wathsalav/xos
|
f6bcaa37a948ee41729236afe7fce0802e002404
|
[
"Apache-2.0"
] | null | null | null |
xos/hpc_observer/steps/sync_originserver.py
|
wathsalav/xos
|
f6bcaa37a948ee41729236afe7fce0802e002404
|
[
"Apache-2.0"
] | null | null | null |
import os
import sys
import base64
from django.db.models import F, Q
from xos.config import Config
from observer.syncstep import SyncStep
from core.models import Service
from hpc.models import ServiceProvider, ContentProvider, CDNPrefix, OriginServer
from util.logger import Logger, logging
# hpclibrary will be in steps/..
parentdir = os.path.join(os.path.dirname(__file__),"..")
sys.path.insert(0,parentdir)
from hpclib import HpcLibrary
logger = Logger(level=logging.INFO)
| 34.411765
| 229
| 0.654701
|
b9ea437d66df34d28efcf808ad16c896dadcac76
| 400
|
py
|
Python
|
main.py
|
aroxby/pixel-processor
|
9cfe260a085ced0883ce8b0a35c28020f4aa8737
|
[
"MIT"
] | null | null | null |
main.py
|
aroxby/pixel-processor
|
9cfe260a085ced0883ce8b0a35c28020f4aa8737
|
[
"MIT"
] | null | null | null |
main.py
|
aroxby/pixel-processor
|
9cfe260a085ced0883ce8b0a35c28020f4aa8737
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
from PIL import Image
if __name__ == '__main__':
main()
| 17.391304
| 69
| 0.6
|
b9eab80495274dd2446a7b029f17be91df29a452
| 1,539
|
py
|
Python
|
scipy/weave/examples/swig2_example.py
|
lesserwhirls/scipy-cwt
|
ee673656d879d9356892621e23ed0ced3d358621
|
[
"BSD-3-Clause"
] | 8
|
2015-10-07T00:37:32.000Z
|
2022-01-21T17:02:33.000Z
|
scipy/weave/examples/swig2_example.py
|
lesserwhirls/scipy-cwt
|
ee673656d879d9356892621e23ed0ced3d358621
|
[
"BSD-3-Clause"
] | null | null | null |
scipy/weave/examples/swig2_example.py
|
lesserwhirls/scipy-cwt
|
ee673656d879d9356892621e23ed0ced3d358621
|
[
"BSD-3-Clause"
] | 8
|
2015-05-09T14:23:57.000Z
|
2018-11-15T05:56:00.000Z
|
"""Simple example to show how to use weave.inline on SWIG2 wrapped
objects. SWIG2 refers to SWIG versions >= 1.3.
To run this example you must build the trivial SWIG2 extension called
swig2_ext. To do this you need to do something like this::
$ swig -c++ -python -I. -o swig2_ext_wrap.cxx swig2_ext.i
$ g++ -Wall -O2 -I/usr/include/python2.3 -fPIC -I. -c \
-o swig2_ext_wrap.os swig2_ext_wrap.cxx
$ g++ -shared -o _swig2_ext.so swig2_ext_wrap.os \
-L/usr/lib/python2.3/config
The files swig2_ext.i and swig2_ext.h are included in the same
directory that contains this file.
Note that weave's SWIG2 support works fine whether SWIG_COBJECT_TYPES
are used or not.
Author: Prabhu Ramachandran
Copyright (c) 2004, Prabhu Ramachandran
License: BSD Style.
"""
# Import our SWIG2 wrapped library
import swig2_ext
import scipy.weave as weave
from scipy.weave import swig2_spec, converters
# SWIG2 support is not enabled by default. We do this by adding the
# swig2 converter to the default list of converters.
converters.default.insert(0, swig2_spec.swig2_converter())
def test():
"""Instantiate the SWIG wrapped object and then call its method
from C++ using weave.inline
"""
a = swig2_ext.A()
b = swig2_ext.foo() # This will be an APtr instance.
b.thisown = 1 # Prevent memory leaks.
code = """a->f();
b->f();
"""
weave.inline(code, ['a', 'b'], include_dirs=['.'],
headers=['"swig2_ext.h"'], verbose=1)
if __name__ == "__main__":
test()
| 28.5
| 69
| 0.690058
|
b9eba9b75a6e45fee4cdfe3d81874f5e8476b939
| 1,951
|
py
|
Python
|
src/simplify.py
|
denghz/Probabilistic-Programming
|
fa505a75c4558e507fd3effd2737c63537bfe50d
|
[
"BSD-3-Clause"
] | null | null | null |
src/simplify.py
|
denghz/Probabilistic-Programming
|
fa505a75c4558e507fd3effd2737c63537bfe50d
|
[
"BSD-3-Clause"
] | null | null | null |
src/simplify.py
|
denghz/Probabilistic-Programming
|
fa505a75c4558e507fd3effd2737c63537bfe50d
|
[
"BSD-3-Clause"
] | null | null | null |
from wolframclient.language.expression import WLSymbol
from nnDiff import *
if __name__ == "__main__":
exp = sys.argv[1:]
if exp == []:
exp = ["Sin", "x"]
res = map(str,simplify(exp))
print(' '.join(res), file=sys.stderr)
| 27.097222
| 67
| 0.438237
|
b9ebcddd99e456fbeb39a0191aad31656c7f4943
| 856
|
py
|
Python
|
setup.py
|
EdWard680/python-firetv
|
4c02f79a1c8ae60a489297178d010a31545a3b5d
|
[
"MIT"
] | null | null | null |
setup.py
|
EdWard680/python-firetv
|
4c02f79a1c8ae60a489297178d010a31545a3b5d
|
[
"MIT"
] | null | null | null |
setup.py
|
EdWard680/python-firetv
|
4c02f79a1c8ae60a489297178d010a31545a3b5d
|
[
"MIT"
] | null | null | null |
from setuptools import setup
setup(
name='firetv',
version='1.0.7',
description='Communicate with an Amazon Fire TV device via ADB over a network.',
url='https://github.com/happyleavesaoc/python-firetv/',
license='MIT',
author='happyleaves',
author_email='happyleaves.tfr@gmail.com',
packages=['firetv'],
install_requires=['pycryptodome', 'rsa', 'adb-homeassistant', 'pure-python-adb-homeassistant'],
extras_require={
'firetv-server': ['Flask>=0.10.1', 'PyYAML>=3.12']
},
entry_points={
'console_scripts': [
'firetv-server = firetv.__main__:main'
]
},
classifiers=[
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3'
]
)
| 30.571429
| 99
| 0.613318
|
b9ec25017a264a5c2dd928342198ca509ad93675
| 893
|
py
|
Python
|
neo/io/exampleio.py
|
Mario-Kart-Felix/python-neo
|
951c97cf9eb56f5489da88940de920329e0f4c1b
|
[
"BSD-3-Clause"
] | 199
|
2015-01-20T13:49:13.000Z
|
2022-03-21T18:35:29.000Z
|
neo/io/exampleio.py
|
Mario-Kart-Felix/python-neo
|
951c97cf9eb56f5489da88940de920329e0f4c1b
|
[
"BSD-3-Clause"
] | 905
|
2015-01-07T09:21:15.000Z
|
2022-03-31T16:29:44.000Z
|
neo/io/exampleio.py
|
Mario-Kart-Felix/python-neo
|
951c97cf9eb56f5489da88940de920329e0f4c1b
|
[
"BSD-3-Clause"
] | 178
|
2015-01-05T12:34:39.000Z
|
2022-02-20T23:06:52.000Z
|
"""
neo.io have been split in 2 level API:
* neo.io: this API give neo object
* neo.rawio: this API give raw data as they are in files.
Developper are encourage to use neo.rawio.
When this is done the neo.io is done automagically with
this king of following code.
Author: sgarcia
"""
from neo.io.basefromrawio import BaseFromRaw
from neo.rawio.examplerawio import ExampleRawIO
| 28.806452
| 93
| 0.724524
|
b9ecb48aece2a2ca161d7bba9b3c95a928b2be7f
| 728
|
py
|
Python
|
scrapyproject/migrations/0003_auto_20170209_1025.py
|
sap9433/Distributed-Multi-User-Scrapy-System-with-a-Web-UI
|
0676f7599f288409d0faf7b6211c171ce8c46a7a
|
[
"MIT"
] | 108
|
2017-03-14T05:40:13.000Z
|
2022-03-03T12:35:49.000Z
|
scrapyproject/migrations/0003_auto_20170209_1025.py
|
sap9433/Distributed-Multi-User-Scrapy-System-with-a-Web-UI
|
0676f7599f288409d0faf7b6211c171ce8c46a7a
|
[
"MIT"
] | 8
|
2017-03-14T05:40:13.000Z
|
2018-10-13T07:07:29.000Z
|
scrapyproject/migrations/0003_auto_20170209_1025.py
|
sap9433/Distributed-Multi-User-Scrapy-System-with-a-Web-UI
|
0676f7599f288409d0faf7b6211c171ce8c46a7a
|
[
"MIT"
] | 43
|
2017-04-19T12:18:07.000Z
|
2021-11-25T09:37:17.000Z
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
| 24.266667
| 53
| 0.575549
|
b9eda494aa9f90de7b3474adbd78e46927f9990c
| 406
|
py
|
Python
|
src/cart/forms.py
|
cbsBiram/xarala__ssr
|
863e1362c786daa752b942b796f7a015211d2f1b
|
[
"FSFAP"
] | null | null | null |
src/cart/forms.py
|
cbsBiram/xarala__ssr
|
863e1362c786daa752b942b796f7a015211d2f1b
|
[
"FSFAP"
] | null | null | null |
src/cart/forms.py
|
cbsBiram/xarala__ssr
|
863e1362c786daa752b942b796f7a015211d2f1b
|
[
"FSFAP"
] | null | null | null |
from django import forms
from django.utils.translation import gettext_lazy as _
COURSE_QUANTITY_CHOICES = [(i, str(i)) for i in range(1, 21)]
| 27.066667
| 72
| 0.726601
|
b9eda5f604ec6cf197f2876a0f748c37ee805587
| 73
|
py
|
Python
|
patches/datasets/__init__.py
|
sflippl/patches
|
c19889e676e231af44669a01c61854e9e5791227
|
[
"MIT"
] | null | null | null |
patches/datasets/__init__.py
|
sflippl/patches
|
c19889e676e231af44669a01c61854e9e5791227
|
[
"MIT"
] | null | null | null |
patches/datasets/__init__.py
|
sflippl/patches
|
c19889e676e231af44669a01c61854e9e5791227
|
[
"MIT"
] | null | null | null |
"""Datasets of latent predictability tasks.
"""
from .pilgrimm import *
| 14.6
| 43
| 0.726027
|
b9edd7dbf25e820fdbc6faa76fd63ef5d9d3ec94
| 1,090
|
py
|
Python
|
appengine/components/tests/datastore_utils_properties_test.py
|
pombreda/swarming
|
c70f311f3db8f25752c793a0d7b36cf537d95580
|
[
"Apache-2.0"
] | null | null | null |
appengine/components/tests/datastore_utils_properties_test.py
|
pombreda/swarming
|
c70f311f3db8f25752c793a0d7b36cf537d95580
|
[
"Apache-2.0"
] | null | null | null |
appengine/components/tests/datastore_utils_properties_test.py
|
pombreda/swarming
|
c70f311f3db8f25752c793a0d7b36cf537d95580
|
[
"Apache-2.0"
] | 1
|
2021-12-06T03:37:36.000Z
|
2021-12-06T03:37:36.000Z
|
#!/usr/bin/env python
# Copyright 2014 The Swarming Authors. All rights reserved.
# Use of this source code is governed by the Apache v2.0 license that can be
# found in the LICENSE file.
import sys
import unittest
import test_env
test_env.setup_test_env()
from google.appengine.ext import ndb
from components.datastore_utils import properties
from support import test_case
if __name__ == '__main__':
if '-v' in sys.argv:
unittest.TestCase.maxDiff = None
unittest.main()
| 23.695652
| 76
| 0.713761
|
b9ef242e4a5b9cd66209cacaae0f38bad7d2a39e
| 128,492
|
py
|
Python
|
neutron/tests/unit/services/qos/test_qos_plugin.py
|
dangervon/neutron
|
06ce0c2c94d2256a8f6804a1eacb0733747dcf46
|
[
"Apache-2.0"
] | null | null | null |
neutron/tests/unit/services/qos/test_qos_plugin.py
|
dangervon/neutron
|
06ce0c2c94d2256a8f6804a1eacb0733747dcf46
|
[
"Apache-2.0"
] | null | null | null |
neutron/tests/unit/services/qos/test_qos_plugin.py
|
dangervon/neutron
|
06ce0c2c94d2256a8f6804a1eacb0733747dcf46
|
[
"Apache-2.0"
] | null | null | null |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
from unittest import mock
from keystoneauth1 import exceptions as ks_exc
import netaddr
from neutron_lib.api.definitions import qos
from neutron_lib.callbacks import events
from neutron_lib import constants as lib_constants
from neutron_lib import context
from neutron_lib import exceptions as lib_exc
from neutron_lib.exceptions import placement as pl_exc
from neutron_lib.exceptions import qos as qos_exc
from neutron_lib.objects import utils as obj_utils
from neutron_lib.plugins import constants as plugins_constants
from neutron_lib.plugins import directory
from neutron_lib.services.qos import constants as qos_consts
from neutron_lib.utils import net as net_utils
import os_resource_classes as orc
from oslo_config import cfg
from oslo_utils import uuidutils
import webob.exc
from neutron.exceptions import qos as neutron_qos_exc
from neutron.extensions import qos_pps_minimum_rule_alias
from neutron.extensions import qos_rules_alias
from neutron import manager
from neutron.objects import network as network_object
from neutron.objects import ports as ports_object
from neutron.objects.qos import policy as policy_object
from neutron.objects.qos import rule as rule_object
from neutron.services.qos import qos_plugin
from neutron.tests.unit.db import test_db_base_plugin_v2
from neutron.tests.unit.services.qos import base
DB_PLUGIN_KLASS = 'neutron.db.db_base_plugin_v2.NeutronDbPluginV2'
SERVICE_PLUGIN_KLASS = 'neutron.services.qos.qos_plugin.QoSPlugin'
class QoSRuleAliasTestExtensionManager(object):
class QoSRuleAliasMinimumPacketRateTestExtensionManager(object):
class TestQoSRuleAlias(test_db_base_plugin_v2.NeutronDbPluginV2TestCase):
def test_show_non_existing_rule(self):
for rule_type, rule_object_class in self.rule_objects.items():
rule_id = uuidutils.generate_uuid()
with mock.patch('neutron.objects.qos.rule.QosRule.get_object',
return_value=None):
resource = '%s/alias-%s-rules' % (qos.ALIAS,
rule_type.replace('_', '-'))
request = self.new_show_request(resource, rule_id, self.fmt)
res = request.get_response(self.ext_api)
self.assertEqual(webob.exc.HTTPNotFound.code, res.status_int)
class TestQoSRuleAliasMinimumPacketRate(TestQoSRuleAlias):
class TestQosPluginDB(base.BaseQosTestCase):
PORT_ID = 'f02f160e-1612-11ec-b2b8-bf60ab98186c'
QOS_MIN_BW_RULE_ID = '8bf8eb46-160e-11ec-8024-9f96be32099d'
# uuid -v5 f02f160e-1612-11ec-b2b8-bf60ab98186c
# 8bf8eb46-160e-11ec-8024-9f96be32099d
MIN_BW_REQUEST_GROUP_UUID = 'c8bc1b27-59a1-5135-aa33-aeecad6093f4'
MIN_BW_RP = 'd7bea120-1626-11ec-9148-c32debfcf0f6'
QOS_MIN_PPS_RULE_ID = '6ac5db7e-1626-11ec-8c7f-0b70dbb8a8eb'
# uuid -v5 f02f160e-1612-11ec-b2b8-bf60ab98186c
# 6ac5db7e-1626-11ec-8c7f-0b70dbb8a8eb
MIN_PPS_REQUEST_GROUP_UUID = '995008f4-f120-547a-b051-428b89076067'
MIN_PPS_RP = 'e16161f4-1626-11ec-a5a2-1fc9396e27cc'
| 45.742969
| 79
| 0.617556
|
b9ef252652f99c5c9feffaab6f06bdbb7fe7dd89
| 953
|
py
|
Python
|
covfefe/covfefe.py
|
fixator10/Trusty-cogs
|
3d47a63f562cb64eb44da6bb53cfe9f8324026e7
|
[
"MIT"
] | 148
|
2017-04-23T19:57:50.000Z
|
2022-03-12T06:59:58.000Z
|
covfefe/covfefe.py
|
mina9999/Trusty-cogs
|
a47de7c233f3c1802effd29f4a86f8a9b0e2b34a
|
[
"MIT"
] | 155
|
2018-01-01T13:27:45.000Z
|
2022-03-12T05:17:51.000Z
|
covfefe/covfefe.py
|
mina9999/Trusty-cogs
|
a47de7c233f3c1802effd29f4a86f8a9b0e2b34a
|
[
"MIT"
] | 221
|
2017-04-02T00:26:08.000Z
|
2022-03-26T15:06:54.000Z
|
import re
import discord
from redbot.core import commands
| 24.435897
| 79
| 0.541448
|
b9ef44f166a7664004d3feffe782db268867e247
| 1,487
|
py
|
Python
|
src/api/bkuser_core/audit/views.py
|
trueware/bk-user
|
8c633e0a3821beb839ed120c4514c5733e675862
|
[
"MIT"
] | null | null | null |
src/api/bkuser_core/audit/views.py
|
trueware/bk-user
|
8c633e0a3821beb839ed120c4514c5733e675862
|
[
"MIT"
] | null | null | null |
src/api/bkuser_core/audit/views.py
|
trueware/bk-user
|
8c633e0a3821beb839ed120c4514c5733e675862
|
[
"MIT"
] | 1
|
2021-12-31T06:48:41.000Z
|
2021-12-31T06:48:41.000Z
|
# -*- coding: utf-8 -*-
"""
TencentBlueKing is pleased to support the open source community by making -(Bk-User) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
from bkuser_core.common.viewset import AdvancedListAPIView, AdvancedModelViewSet
from . import serializers as local_serializers
from .models import GeneralLog, LogIn, ResetPassword
| 45.060606
| 115
| 0.799597
|
b9ef4b5c2209cb05949e60eccf8cd9158602e350
| 4,784
|
py
|
Python
|
exp_gqa/test.py
|
ronghanghu/gqa_single_hop_baseline
|
332d342da60dfefd40f2364d60215ed2f191aa2d
|
[
"BSD-2-Clause"
] | 19
|
2019-08-19T18:09:26.000Z
|
2021-08-29T15:58:30.000Z
|
exp_gqa/test.py
|
ronghanghu/gqa_single_hop_baseline
|
332d342da60dfefd40f2364d60215ed2f191aa2d
|
[
"BSD-2-Clause"
] | 1
|
2019-11-24T14:36:29.000Z
|
2019-12-11T08:33:12.000Z
|
exp_gqa/test.py
|
ronghanghu/gqa_single_hop_baseline
|
332d342da60dfefd40f2364d60215ed2f191aa2d
|
[
"BSD-2-Clause"
] | 1
|
2019-10-30T05:55:52.000Z
|
2019-10-30T05:55:52.000Z
|
import os
import numpy as np
import tensorflow as tf
from models_gqa.model import Model
from models_gqa.config import build_cfg_from_argparse
from util.gqa_train.data_reader import DataReader
import json
# Load config
cfg = build_cfg_from_argparse()
# Start session
os.environ["CUDA_VISIBLE_DEVICES"] = str(cfg.GPU_ID)
sess = tf.Session(config=tf.ConfigProto(
gpu_options=tf.GPUOptions(allow_growth=cfg.GPU_MEM_GROWTH)))
# Data files
imdb_file = cfg.IMDB_FILE % cfg.TEST.SPLIT_VQA
scene_graph_file = cfg.SCENE_GRAPH_FILE % \
cfg.TEST.SPLIT_VQA.replace('_balanced', '').replace('_all', '')
data_reader = DataReader(
imdb_file, shuffle=False, one_pass=True, batch_size=cfg.TEST.BATCH_SIZE,
T_encoder=cfg.T_ENCODER,
vocab_question_file=cfg.VOCAB_QUESTION_FILE,
vocab_answer_file=cfg.VOCAB_ANSWER_FILE,
feature_type=cfg.FEAT_TYPE,
spatial_feature_dir=cfg.SPATIAL_FEATURE_DIR,
objects_feature_dir=cfg.OBJECTS_FEATURE_DIR,
objects_max_num=cfg.W_FEAT,
scene_graph_file=scene_graph_file,
vocab_name_file=cfg.VOCAB_NAME_FILE,
vocab_attr_file=cfg.VOCAB_ATTR_FILE,
spatial_pos_enc_dim=cfg.SPATIAL_POS_ENC_DIM,
bbox_tile_num=cfg.BBOX_TILE_NUM)
num_vocab = data_reader.batch_loader.vocab_dict.num_vocab
num_choices = data_reader.batch_loader.answer_dict.num_vocab
# Inputs and model
input_seq_batch = tf.placeholder(tf.int32, [None, None])
seq_length_batch = tf.placeholder(tf.int32, [None])
image_feat_batch = tf.placeholder(
tf.float32, [None, cfg.H_FEAT, cfg.W_FEAT, cfg.D_FEAT])
image_valid_batch = tf.placeholder(
tf.float32, [None, cfg.H_FEAT, cfg.W_FEAT])
model = Model(
input_seq_batch, seq_length_batch, image_feat_batch, image_valid_batch,
num_vocab=num_vocab, num_choices=num_choices, is_training=False)
# Load snapshot
if cfg.TEST.USE_EMA:
ema = tf.train.ExponentialMovingAverage(decay=0.9) # decay doesn't matter
var_names = {
(ema.average_name(v) if v in model.params else v.op.name): v
for v in tf.global_variables()}
else:
var_names = {v.op.name: v for v in tf.global_variables()}
snapshot_file = cfg.TEST.SNAPSHOT_FILE % (cfg.EXP_NAME, cfg.TEST.ITER)
print('loading model snapshot from %s' % snapshot_file)
snapshot_saver = tf.train.Saver(var_names)
snapshot_saver.restore(sess, snapshot_file)
print('Done')
# Write results
result_dir = cfg.TEST.RESULT_DIR % (cfg.EXP_NAME, cfg.TEST.ITER)
os.makedirs(result_dir, exist_ok=True)
# Run test
answer_correct, num_questions = 0, 0
if cfg.TEST.OUTPUT_VQA_EVAL_PRED:
output_predictions = []
answer_word_list = data_reader.batch_loader.answer_dict.word_list
pred_file = os.path.join(
result_dir, 'gqa_eval_preds_%s_%s_%08d.json' % (
cfg.TEST.SPLIT_VQA, cfg.EXP_NAME, cfg.TEST.ITER))
for n_batch, batch in enumerate(data_reader.batches()):
if 'answer_label_batch' not in batch:
batch['answer_label_batch'] = -np.ones(
len(batch['qid_list']), np.int32)
if num_questions == 0:
print('imdb has no answer labels. Using dummy labels.\n\n'
'**The final accuracy will be zero (no labels provided)**\n')
vqa_scores_value = sess.run(model.vqa_scores, feed_dict={
input_seq_batch: batch['input_seq_batch'],
seq_length_batch: batch['seq_length_batch'],
image_feat_batch: batch['image_feat_batch'],
image_valid_batch: batch['image_valid_batch']})
# compute accuracy
vqa_labels = batch['answer_label_batch']
vqa_predictions = np.argmax(vqa_scores_value, axis=1)
answer_correct += np.sum(vqa_predictions == vqa_labels)
num_questions += len(vqa_labels)
accuracy = answer_correct / num_questions
if n_batch % 20 == 0:
print('exp: %s, iter = %d, accumulated accuracy on %s = %f (%d / %d)' %
(cfg.EXP_NAME, cfg.TEST.ITER, cfg.TEST.SPLIT_VQA,
accuracy, answer_correct, num_questions))
if cfg.TEST.OUTPUT_VQA_EVAL_PRED:
output_predictions.extend([
{"questionId": qId, "prediction": answer_word_list[p]}
for qId, p in zip(batch['qid_list'], vqa_predictions)])
with open(os.path.join(
result_dir, 'vqa_results_%s.txt' % cfg.TEST.SPLIT_VQA), 'w') as f:
print('\nexp: %s, iter = %d, final accuracy on %s = %f (%d / %d)' %
(cfg.EXP_NAME, cfg.TEST.ITER, cfg.TEST.SPLIT_VQA,
accuracy, answer_correct, num_questions))
print('exp: %s, iter = %d, final accuracy on %s = %f (%d / %d)' %
(cfg.EXP_NAME, cfg.TEST.ITER, cfg.TEST.SPLIT_VQA,
accuracy, answer_correct, num_questions), file=f)
if cfg.TEST.OUTPUT_VQA_EVAL_PRED:
with open(pred_file, 'w') as f:
json.dump(output_predictions, f, indent=2)
print('prediction file written to %s' % pred_file)
| 40.201681
| 79
| 0.713002
|
b9f15c3b93c89d0226f4f6b8fd7503987d856e88
| 542
|
py
|
Python
|
gamla/url_utils_test.py
|
hyroai/gamla
|
bfa05807685bd51cba7c4c9cc47f1f5e73e6f7ee
|
[
"MIT"
] | 17
|
2020-03-19T08:40:39.000Z
|
2022-03-06T14:43:35.000Z
|
gamla/url_utils_test.py
|
hyroai/gamla
|
bfa05807685bd51cba7c4c9cc47f1f5e73e6f7ee
|
[
"MIT"
] | 39
|
2020-04-13T16:52:43.000Z
|
2022-03-24T08:30:49.000Z
|
gamla/url_utils_test.py
|
uriva/gamla
|
65b450a7761cbc13bdbc7a4216003932f18cf433
|
[
"MIT"
] | 2
|
2019-11-13T15:13:51.000Z
|
2019-12-10T15:19:04.000Z
|
from gamla import url_utils
| 24.636364
| 67
| 0.586716
|
b9f25a250dce61318cad2d5bfa0bebb70f70d2dc
| 1,071
|
py
|
Python
|
examples/temp_feie_shetland.py
|
nilsmkMET/roppy
|
c68d698fa4970174af2c7f7137bd4a3e5983b644
|
[
"MIT"
] | null | null | null |
examples/temp_feie_shetland.py
|
nilsmkMET/roppy
|
c68d698fa4970174af2c7f7137bd4a3e5983b644
|
[
"MIT"
] | null | null | null |
examples/temp_feie_shetland.py
|
nilsmkMET/roppy
|
c68d698fa4970174af2c7f7137bd4a3e5983b644
|
[
"MIT"
] | null | null | null |
import numpy as np
from netCDF4 import Dataset
# Import development version of roppy
import sys
sys.path = ['..'] + sys.path
import roppy
# --- EDIT -----------------
# ROMS file
romsfile = 'data/ocean_avg_example.nc'
# Section definition
lon0, lat0 = -0.67, 60.75 # Shetland
lon1, lat1 = 4.72, 60.75 # Feie
# --- EDIT ------------------
# Make a grid object
f = Dataset(romsfile)
grd = roppy.SGrid(f)
# Get grid coordinates of end points
x0, y0 = grd.ll2xy(lon0, lat0)
x1, y1 = grd.ll2xy(lon1, lat1)
# Find nearest rho-points
i0, j0, i1, j1 = [int(round(v)) for v in x0, y0, x1, y1]
# Make a Section object
sec = roppy.linear_section(i0, i1, j0, j1, grd)
# Read in a 3D temperature field
temp = f.variables['temp'][0,:,:,:]
# Interpolate to the section
temp_sec = sec.sample3D(temp)
# Compute mean temperature along section
# using trapezoidal integration
print "mean tempeature = ", np.sum(sec.Area * temp_sec) / np.sum(sec.Area)
# TODO: Make a mean method in the Section class
# Usage: sec.mean(temp_sec)
# or even directly from 3D: sec.mean(temp)
| 22.787234
| 74
| 0.6676
|
b9f401385afbe018601c2bef20e53c9b587fb7df
| 485
|
py
|
Python
|
examples/test_scalar_field.py
|
gemini3d/pv-gemini
|
99dff15b43a2c93cbcb63d2f8946d425d0555ef3
|
[
"Apache-2.0"
] | null | null | null |
examples/test_scalar_field.py
|
gemini3d/pv-gemini
|
99dff15b43a2c93cbcb63d2f8946d425d0555ef3
|
[
"Apache-2.0"
] | null | null | null |
examples/test_scalar_field.py
|
gemini3d/pv-gemini
|
99dff15b43a2c93cbcb63d2f8946d425d0555ef3
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
"""
example of 3D scalar field
If you get this error, ParaView doesn't know your data file format:
TypeError: TestFileReadability argument %Id: %V
"""
from pathlib import Path
import argparse
import paraview.simple as pvs
p = argparse.ArgumentParser()
p.add_argument("fn", help="data file to load with paraview OpenDataFile()")
P = p.parse_args()
fn = Path(P.fn).expanduser()
if not fn.is_file():
raise FileNotFoundError(fn)
pvs.OpenDataFile(str(fn))
| 20.208333
| 75
| 0.740206
|
b9f4182f4b0683cbf4f51c72cef042f5acb55553
| 341
|
py
|
Python
|
src/cms/forms/languages/language_form.py
|
S10MC2015/cms-django
|
b08f2be60a9db6c8079ee923de2cd8912f550b12
|
[
"Apache-2.0"
] | null | null | null |
src/cms/forms/languages/language_form.py
|
S10MC2015/cms-django
|
b08f2be60a9db6c8079ee923de2cd8912f550b12
|
[
"Apache-2.0"
] | null | null | null |
src/cms/forms/languages/language_form.py
|
S10MC2015/cms-django
|
b08f2be60a9db6c8079ee923de2cd8912f550b12
|
[
"Apache-2.0"
] | null | null | null |
from django import forms
from ...models import Language
| 17.947368
| 52
| 0.548387
|
b9f437d2e63f9838da4ffa0491804e95e149a773
| 1,482
|
py
|
Python
|
search/forms.py
|
gregneagle/sal
|
74c583fb1c1b33d3201b308b147376b3dcaca33f
|
[
"Apache-2.0"
] | 2
|
2019-11-01T20:50:35.000Z
|
2021-01-13T22:02:55.000Z
|
search/forms.py
|
gregneagle/sal
|
74c583fb1c1b33d3201b308b147376b3dcaca33f
|
[
"Apache-2.0"
] | null | null | null |
search/forms.py
|
gregneagle/sal
|
74c583fb1c1b33d3201b308b147376b3dcaca33f
|
[
"Apache-2.0"
] | null | null | null |
from django import forms
from .models import *
from server.models import *
| 27.962264
| 87
| 0.609312
|
b9f59c6c5e552b8bde064c8fa9f25427a65b2006
| 158,531
|
py
|
Python
|
pysnmp-with-texts/InternetThruway-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 8
|
2019-05-09T17:04:00.000Z
|
2021-06-09T06:50:51.000Z
|
pysnmp-with-texts/InternetThruway-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 4
|
2019-05-31T16:42:59.000Z
|
2020-01-31T21:57:17.000Z
|
pysnmp-with-texts/InternetThruway-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 10
|
2019-04-30T05:51:36.000Z
|
2022-02-16T03:33:41.000Z
|
#
# PySNMP MIB module InternetThruway-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/InternetThruway-MIB
# Produced by pysmi-0.3.4 at Wed May 1 13:58:27 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, OctetString, Integer = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "OctetString", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueSizeConstraint, ConstraintsUnion, ValueRangeConstraint, ConstraintsIntersection, SingleValueConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueSizeConstraint", "ConstraintsUnion", "ValueRangeConstraint", "ConstraintsIntersection", "SingleValueConstraint")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
MibScalar, MibTable, MibTableRow, MibTableColumn, IpAddress, ObjectIdentity, Counter64, Gauge32, NotificationType, Bits, NotificationType, MibIdentifier, TimeTicks, enterprises, ModuleIdentity, iso, Integer32, Unsigned32, Counter32 = mibBuilder.importSymbols("SNMPv2-SMI", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "IpAddress", "ObjectIdentity", "Counter64", "Gauge32", "NotificationType", "Bits", "NotificationType", "MibIdentifier", "TimeTicks", "enterprises", "ModuleIdentity", "iso", "Integer32", "Unsigned32", "Counter32")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
nortel = MibIdentifier((1, 3, 6, 1, 4, 1, 562))
dialaccess = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 14))
csg = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 14, 2))
system = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 14, 2, 1))
components = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 14, 2, 2))
traps = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 14, 2, 3))
alarms = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 14, 2, 4))
ncServer = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 14, 2, 5))
ss7 = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 14, 2, 6))
omData = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 14, 2, 7))
disk = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 14, 2, 1, 1))
linkOMs = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 1))
maintenanceOMs = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 2))
callOMs = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 3))
trunkGroupOMs = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 4))
phoneNumberOMs = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 5))
systemOMs = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 6))
nasOMs = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 7))
partitionTable = MibTable((1, 3, 6, 1, 4, 1, 562, 14, 2, 1, 1, 1), )
if mibBuilder.loadTexts: partitionTable.setStatus('mandatory')
if mibBuilder.loadTexts: partitionTable.setDescription('The PartitionTable contains information about each disk partition on the CSG')
partitionTableEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 14, 2, 1, 1, 1, 1), ).setIndexNames((0, "InternetThruway-MIB", "partitionIndex"))
if mibBuilder.loadTexts: partitionTableEntry.setStatus('mandatory')
if mibBuilder.loadTexts: partitionTableEntry.setDescription('An entry in the PartitionTable. Indexed by partitionIndex')
partitionIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 1, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 4)))
if mibBuilder.loadTexts: partitionIndex.setStatus('mandatory')
if mibBuilder.loadTexts: partitionIndex.setDescription('Identifies partition number.')
partitionName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 1, 1, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: partitionName.setStatus('mandatory')
if mibBuilder.loadTexts: partitionName.setDescription('Identifies partition name.')
partitionPercentFull = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 1, 1, 1, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 100))).setMaxAccess("readonly")
if mibBuilder.loadTexts: partitionPercentFull.setStatus('mandatory')
if mibBuilder.loadTexts: partitionPercentFull.setDescription('Indicates (in Percent) how full the disk is.')
partitionMegsFree = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 1, 1, 1, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: partitionMegsFree.setStatus('mandatory')
if mibBuilder.loadTexts: partitionMegsFree.setDescription('Indicates how many Megabytes are free on the partition.')
partitionSpaceStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 1, 1, 1, 1, 5), PartitionSpaceStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: partitionSpaceStatus.setStatus('mandatory')
if mibBuilder.loadTexts: partitionSpaceStatus.setDescription('Indicates if there is currently a space alarm in progress.')
partitionSpaceKey = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 1, 1, 1, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: partitionSpaceKey.setStatus('mandatory')
if mibBuilder.loadTexts: partitionSpaceKey.setDescription('Unique indicator for the partition space alarm.')
partitionSpaceTimeStamp = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 1, 1, 1, 1, 7), TimeString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: partitionSpaceTimeStamp.setStatus('mandatory')
if mibBuilder.loadTexts: partitionSpaceTimeStamp.setDescription('Indicates the time of the last partitionSpaceStatus transition.')
componentTable = MibTable((1, 3, 6, 1, 4, 1, 562, 14, 2, 2, 10), )
if mibBuilder.loadTexts: componentTable.setStatus('mandatory')
if mibBuilder.loadTexts: componentTable.setDescription('The ComponentTable contains information about all the Components that should be running on the CSG.')
componentTableEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 14, 2, 2, 10, 1), ).setIndexNames((0, "InternetThruway-MIB", "componentIndex"))
if mibBuilder.loadTexts: componentTableEntry.setStatus('mandatory')
if mibBuilder.loadTexts: componentTableEntry.setDescription('An entry in the ComponentTable. componentTable entries are indexed by componentIndex, which is an integer. ')
componentIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 2, 10, 1, 1), ComponentIndex())
if mibBuilder.loadTexts: componentIndex.setStatus('mandatory')
if mibBuilder.loadTexts: componentIndex.setDescription('Identifies the component entry with an enumerated list.')
componentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 2, 10, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: componentName.setStatus('mandatory')
if mibBuilder.loadTexts: componentName.setDescription('Identifies component name.')
compSecsInCurrentState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 2, 10, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: compSecsInCurrentState.setStatus('mandatory')
if mibBuilder.loadTexts: compSecsInCurrentState.setDescription('Indicates how many seconds a component has been running in its current state. ')
compProvStateStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 2, 10, 1, 4), ComponentSysmanState()).setMaxAccess("readonly")
if mibBuilder.loadTexts: compProvStateStatus.setStatus('mandatory')
if mibBuilder.loadTexts: compProvStateStatus.setDescription('Indicates the current state of the particular CSG component. The states are one of the following: inProvisionedState(1), notInProvisionedState(2), unknown(3)')
compProvStateKey = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 2, 10, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: compProvStateKey.setStatus('mandatory')
if mibBuilder.loadTexts: compProvStateKey.setDescription('Unique indicator for the prov state alarm.')
compProvStateTimeStamp = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 2, 10, 1, 6), TimeString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: compProvStateTimeStamp.setStatus('mandatory')
if mibBuilder.loadTexts: compProvStateTimeStamp.setDescription('Indicates the time of the last state transition.')
compDebugStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 2, 10, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: compDebugStatus.setStatus('mandatory')
if mibBuilder.loadTexts: compDebugStatus.setDescription('Shows if the component is running with debug on.')
compDebugKey = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 2, 10, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: compDebugKey.setStatus('mandatory')
if mibBuilder.loadTexts: compDebugKey.setDescription('Unique indicator for the debug state.')
compDebugTimeStamp = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 2, 10, 1, 9), TimeString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: compDebugTimeStamp.setStatus('mandatory')
if mibBuilder.loadTexts: compDebugTimeStamp.setDescription('Indicates the time of the last debug transition.')
compRestartStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 2, 10, 1, 10), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: compRestartStatus.setStatus('mandatory')
if mibBuilder.loadTexts: compRestartStatus.setDescription('Shows if the component has had multiple restarts recently.')
compRestartKey = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 2, 10, 1, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: compRestartKey.setStatus('mandatory')
if mibBuilder.loadTexts: compRestartKey.setDescription('Unique indicator for the multi-restart of components.')
compRestartTimeStamp = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 2, 10, 1, 12), TimeString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: compRestartTimeStamp.setStatus('mandatory')
if mibBuilder.loadTexts: compRestartTimeStamp.setDescription('Indicates the time of the last restart flagging.')
linksetTable = MibTable((1, 3, 6, 1, 4, 1, 562, 14, 2, 6, 1), )
if mibBuilder.loadTexts: linksetTable.setStatus('mandatory')
if mibBuilder.loadTexts: linksetTable.setDescription('The linksetTable contains information about all the linksets on the CSG.')
linksetTableEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 14, 2, 6, 1, 1), ).setIndexNames((0, "InternetThruway-MIB", "linksetIndex"))
if mibBuilder.loadTexts: linksetTableEntry.setStatus('mandatory')
if mibBuilder.loadTexts: linksetTableEntry.setDescription('An entry in the linksetTable. Entries in the linkset table are indexed by linksetIndex, which is an integer.')
linksetIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 6, 1, 1, 1), Integer32())
if mibBuilder.loadTexts: linksetIndex.setStatus('mandatory')
if mibBuilder.loadTexts: linksetIndex.setDescription("Identifies the n'th position in the table.")
linksetId = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 6, 1, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: linksetId.setStatus('mandatory')
if mibBuilder.loadTexts: linksetId.setDescription('The id of the linkset to be used as index.')
linksetAdjPointcode = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 6, 1, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: linksetAdjPointcode.setStatus('mandatory')
if mibBuilder.loadTexts: linksetAdjPointcode.setDescription('The adjacent pointcode of the linkset.')
linksetState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 6, 1, 1, 4), LinksetState()).setMaxAccess("readonly")
if mibBuilder.loadTexts: linksetState.setStatus('mandatory')
if mibBuilder.loadTexts: linksetState.setDescription('The state of the linkset.')
linkTable = MibTable((1, 3, 6, 1, 4, 1, 562, 14, 2, 6, 2), )
if mibBuilder.loadTexts: linkTable.setStatus('mandatory')
if mibBuilder.loadTexts: linkTable.setDescription('The linkTable contains information about the links in a given linkset on the CSG.')
linkTableEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 14, 2, 6, 2, 1), ).setIndexNames((0, "InternetThruway-MIB", "linksetIndex"), (0, "InternetThruway-MIB", "linkIndex"))
if mibBuilder.loadTexts: linkTableEntry.setStatus('mandatory')
if mibBuilder.loadTexts: linkTableEntry.setDescription('An entry in the linkTable. Entries in the link table table are indexed by linksetIndex and linkIndex, which are both integers.')
linkIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 6, 2, 1, 1), Integer32())
if mibBuilder.loadTexts: linkIndex.setStatus('mandatory')
if mibBuilder.loadTexts: linkIndex.setDescription("Identifies the n'th position in the table.")
linkId = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 6, 2, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: linkId.setStatus('mandatory')
if mibBuilder.loadTexts: linkId.setDescription('The id of the link.')
linkHostname = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 6, 2, 1, 3), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: linkHostname.setStatus('mandatory')
if mibBuilder.loadTexts: linkHostname.setDescription('The hostname of the CSG to which this link is attached.')
linkCardDeviceName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 6, 2, 1, 4), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: linkCardDeviceName.setStatus('mandatory')
if mibBuilder.loadTexts: linkCardDeviceName.setDescription('The device name of the card upon which this link is hosted.')
linkState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 6, 2, 1, 5), LinkState()).setMaxAccess("readonly")
if mibBuilder.loadTexts: linkState.setStatus('mandatory')
if mibBuilder.loadTexts: linkState.setDescription('The state of the link.')
linkInhibitionState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 6, 2, 1, 6), LinkInhibitionState()).setMaxAccess("readonly")
if mibBuilder.loadTexts: linkInhibitionState.setStatus('mandatory')
if mibBuilder.loadTexts: linkInhibitionState.setDescription('The inhibition status of the link.')
linkCongestionState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 6, 2, 1, 7), LinkCongestionState()).setMaxAccess("readonly")
if mibBuilder.loadTexts: linkCongestionState.setStatus('mandatory')
if mibBuilder.loadTexts: linkCongestionState.setDescription('The congestion status of the link.')
linkAlignmentState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 6, 2, 1, 8), LinkAlignmentState()).setMaxAccess("readonly")
if mibBuilder.loadTexts: linkAlignmentState.setStatus('mandatory')
if mibBuilder.loadTexts: linkAlignmentState.setDescription('The alignment status of the link.')
linkNumMSUReceived = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 6, 2, 1, 9), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: linkNumMSUReceived.setStatus('mandatory')
if mibBuilder.loadTexts: linkNumMSUReceived.setDescription("This object supplies the number of MSU's received by the link.")
linkNumMSUDiscarded = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 6, 2, 1, 10), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: linkNumMSUDiscarded.setStatus('mandatory')
if mibBuilder.loadTexts: linkNumMSUDiscarded.setDescription("This object supplies the number of received MSU's discarded by the link.")
linkNumMSUTransmitted = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 6, 2, 1, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: linkNumMSUTransmitted.setStatus('mandatory')
if mibBuilder.loadTexts: linkNumMSUTransmitted.setDescription("This object supplies the number of MSU's transmitted by the link.")
linkNumSIFReceived = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 6, 2, 1, 12), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: linkNumSIFReceived.setStatus('mandatory')
if mibBuilder.loadTexts: linkNumSIFReceived.setDescription('This object supplies the number of SIF and SIO octets received by the link.')
linkNumSIFTransmitted = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 6, 2, 1, 13), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: linkNumSIFTransmitted.setStatus('mandatory')
if mibBuilder.loadTexts: linkNumSIFTransmitted.setDescription('This object supplies the number of SIF and SIO octects transmitted by the link.')
linkNumAutoChangeovers = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 6, 2, 1, 14), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: linkNumAutoChangeovers.setStatus('mandatory')
if mibBuilder.loadTexts: linkNumAutoChangeovers.setDescription('This object supplies the number of automatic changeovers undergone by the link.')
linkNumUnexpectedMsgs = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 6, 2, 1, 15), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: linkNumUnexpectedMsgs.setStatus('mandatory')
if mibBuilder.loadTexts: linkNumUnexpectedMsgs.setDescription('This object supplies the number of unexpected messages received by the link.')
routeTable = MibTable((1, 3, 6, 1, 4, 1, 562, 14, 2, 6, 3), )
if mibBuilder.loadTexts: routeTable.setStatus('mandatory')
if mibBuilder.loadTexts: routeTable.setDescription('The routeTable contains information about the routes provisioned in the CSG complex.')
routeTableEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 14, 2, 6, 3, 1), ).setIndexNames((0, "InternetThruway-MIB", "routeIndex"))
if mibBuilder.loadTexts: routeTableEntry.setStatus('mandatory')
if mibBuilder.loadTexts: routeTableEntry.setDescription('An entry in the routeTable. Entries in the route table are indexed by routeIndex, which is an integer.')
routeIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 6, 3, 1, 1), Integer32())
if mibBuilder.loadTexts: routeIndex.setStatus('mandatory')
if mibBuilder.loadTexts: routeIndex.setDescription("Identifies the n'th position in the table.")
routeId = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 6, 3, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: routeId.setStatus('mandatory')
if mibBuilder.loadTexts: routeId.setDescription('The unique identifier of the route.')
routeDestPointCode = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 6, 3, 1, 3), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: routeDestPointCode.setStatus('mandatory')
if mibBuilder.loadTexts: routeDestPointCode.setDescription('The destination point code associated with this route.')
routeState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 6, 3, 1, 4), RouteState()).setMaxAccess("readonly")
if mibBuilder.loadTexts: routeState.setStatus('mandatory')
if mibBuilder.loadTexts: routeState.setDescription('The current state of the route.')
routeRank = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 6, 3, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: routeRank.setStatus('mandatory')
if mibBuilder.loadTexts: routeRank.setDescription('Rank assigned to this route.')
routeLinksetId = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 6, 3, 1, 6), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: routeLinksetId.setStatus('mandatory')
if mibBuilder.loadTexts: routeLinksetId.setDescription('The linkset associated with this route.')
destinationTable = MibTable((1, 3, 6, 1, 4, 1, 562, 14, 2, 6, 4), )
if mibBuilder.loadTexts: destinationTable.setStatus('mandatory')
if mibBuilder.loadTexts: destinationTable.setDescription('The destinationTable contains information about the destinations provisioned in the CSG complex.')
destinationTableEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 14, 2, 6, 4, 1), ).setIndexNames((0, "InternetThruway-MIB", "destIndex"))
if mibBuilder.loadTexts: destinationTableEntry.setStatus('mandatory')
if mibBuilder.loadTexts: destinationTableEntry.setDescription('An entry in the destinationTable. Entries in the destination table are indexed by destIndex, which is an integer.')
destIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 6, 4, 1, 1), Integer32())
if mibBuilder.loadTexts: destIndex.setStatus('mandatory')
if mibBuilder.loadTexts: destIndex.setDescription("Identifies the n'th position in the table.")
destPointCode = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 6, 4, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: destPointCode.setStatus('mandatory')
if mibBuilder.loadTexts: destPointCode.setDescription('The destination point code of this destination.')
destState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 6, 4, 1, 3), DestinationState()).setMaxAccess("readonly")
if mibBuilder.loadTexts: destState.setStatus('mandatory')
if mibBuilder.loadTexts: destState.setDescription('The current state of the destination.')
destRuleId = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 6, 4, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: destRuleId.setStatus('mandatory')
if mibBuilder.loadTexts: destRuleId.setDescription('Rule Identifier (for the routing table to be used) for this destination.')
ncServerId = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 5, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ncServerId.setStatus('mandatory')
if mibBuilder.loadTexts: ncServerId.setDescription(' The ServerId attribute value of the node.')
ncServerName = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 5, 3), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ncServerName.setStatus('mandatory')
if mibBuilder.loadTexts: ncServerName.setDescription(' The ServerName attribute value of the node.')
ncHostName = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 5, 4), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ncHostName.setStatus('mandatory')
if mibBuilder.loadTexts: ncHostName.setDescription(' The HostName attribute value of the node.')
ncEthernetName = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 5, 5), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ncEthernetName.setStatus('mandatory')
if mibBuilder.loadTexts: ncEthernetName.setDescription(' The EthernetName attribute value of the node.')
ncEthernetIP = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 5, 6), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ncEthernetIP.setStatus('mandatory')
if mibBuilder.loadTexts: ncEthernetIP.setDescription(' The EthernetIP attribute value of the node.')
ncClusterName = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 5, 7), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ncClusterName.setStatus('mandatory')
if mibBuilder.loadTexts: ncClusterName.setDescription(' The ClusterName attribute value of the node.')
ncClusterIP = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 5, 8), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ncClusterIP.setStatus('mandatory')
if mibBuilder.loadTexts: ncClusterIP.setDescription(' The ClusterIP attribute value of the node.')
ncOperationalState = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 5, 9), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ncOperationalState.setStatus('mandatory')
if mibBuilder.loadTexts: ncOperationalState.setDescription(' The OperationalState of the node. Possible values are: UNKNOWN, ENABLED, ENABLED_NETDSC, ENABLED_NETPAR, DISABLED ')
ncStandbyState = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 5, 10), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ncStandbyState.setStatus('mandatory')
if mibBuilder.loadTexts: ncStandbyState.setDescription(' The StandbyState attribute value of the node. Possible values are: UNKNOWN, HOT_STANDBY, COLD_STANDBY, WARM_STANDBY, PROVIDING_SERVICE ')
ncAvailabilityState = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 5, 11), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ncAvailabilityState.setStatus('mandatory')
if mibBuilder.loadTexts: ncAvailabilityState.setDescription(' The AvailabilityState attribute value of the node. Possible values are: UNKNOWN, AVAILABLE, DEGRADED, OFFLINE ')
ncSoftwareVersion = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 5, 12), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ncSoftwareVersion.setStatus('mandatory')
if mibBuilder.loadTexts: ncSoftwareVersion.setDescription(' The SoftwareVersion attribute value of the node.')
ncUpgradeInProgress = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 5, 13), UpgradeInProgress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ncUpgradeInProgress.setStatus('mandatory')
if mibBuilder.loadTexts: ncUpgradeInProgress.setDescription(' The UpgradeInProgress attribute value of the node. Possible values are: 0 = UNKNOWN, 1 = ACTIVE, 2 = INACTIVE ')
hgAlarmTable = MibTable((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 10), )
if mibBuilder.loadTexts: hgAlarmTable.setStatus('mandatory')
if mibBuilder.loadTexts: hgAlarmTable.setDescription('The HgAlarmTable contains information about all the current HG alarms')
hgAlarmTableEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 10, 1), ).setIndexNames((0, "InternetThruway-MIB", "hgIndex"))
if mibBuilder.loadTexts: hgAlarmTableEntry.setStatus('mandatory')
if mibBuilder.loadTexts: hgAlarmTableEntry.setDescription('An entry in the HgAlarmTable. HgAlarmTable entries are indexed by componentIndex, which is an integer.')
hgIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 10, 1, 1), Integer32())
if mibBuilder.loadTexts: hgIndex.setStatus('mandatory')
if mibBuilder.loadTexts: hgIndex.setDescription("Identifies the n'th position in the table.")
hgName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 10, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hgName.setStatus('mandatory')
if mibBuilder.loadTexts: hgName.setDescription('The Home gateway to be used as index')
hgKey = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 10, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hgKey.setStatus('mandatory')
if mibBuilder.loadTexts: hgKey.setDescription('Unique identifier for the HgFailure alarm ')
hgAlarmTimeStamp = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 10, 1, 4), TimeString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hgAlarmTimeStamp.setStatus('mandatory')
if mibBuilder.loadTexts: hgAlarmTimeStamp.setDescription('Indicates the time of the HG Alarm.')
hgIPAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 10, 1, 5), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hgIPAddress.setStatus('mandatory')
if mibBuilder.loadTexts: hgIPAddress.setDescription('This object identifies the IP Address of the machine which sent the alarm.')
# nasAlarmTable (OID 1.3.6.1.4.1.562.14.2.4.11): one row per current NAS
# (Network Access Server) alarm, indexed by nasIndex.
nasAlarmTable = MibTable((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 11), )
if mibBuilder.loadTexts: nasAlarmTable.setStatus('mandatory')
if mibBuilder.loadTexts: nasAlarmTable.setDescription('The NasAlarmTable contains information about all the current NAS alarms')
nasAlarmTableEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 11, 1), ).setIndexNames((0, "InternetThruway-MIB", "nasIndex"))
if mibBuilder.loadTexts: nasAlarmTableEntry.setStatus('mandatory')
if mibBuilder.loadTexts: nasAlarmTableEntry.setDescription('An entry in the NasAlarmTable. NasAlarmTable entries are indexed by nasIndex, which is an integer.')
# Column .1: row index (not-accessible in effect; no MaxAccess set by generator).
nasIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 11, 1, 1), Integer32())
if mibBuilder.loadTexts: nasIndex.setStatus('mandatory')
if mibBuilder.loadTexts: nasIndex.setDescription("Identifies the n'th position in the table.")
# Column .2: NAS name.
nasName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 11, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nasName.setStatus('mandatory')
if mibBuilder.loadTexts: nasName.setDescription('The NAS Name')
# Column .3: unique alarm key.
nasKey = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 11, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nasKey.setStatus('mandatory')
if mibBuilder.loadTexts: nasKey.setDescription('Unique identifier for the NAS Failure alarm ')
# Column .4: alarm timestamp.
nasAlarmTimeStamp = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 11, 1, 4), TimeString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nasAlarmTimeStamp.setStatus('mandatory')
if mibBuilder.loadTexts: nasAlarmTimeStamp.setDescription('Indicates the time of the NAS Alarm.')
# Column .5: IP address of the alarm sender.
nasIPAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 11, 1, 5), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nasIPAddress.setStatus('mandatory')
if mibBuilder.loadTexts: nasIPAddress.setDescription('This object identifies the IP Address of the machine which sent the alarm.')
# Column .6: CSG complex the alarm is raised against.
nasCmplxName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 11, 1, 6), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nasCmplxName.setStatus('mandatory')
if mibBuilder.loadTexts: nasCmplxName.setDescription(' The complex which this alarm is raised against.')
# ss7LinkFailureAlarmTable (OID 1.3.6.1.4.1.562.14.2.4.12): alarms for SS7
# signalling link failures, indexed by lfIndex.
ss7LinkFailureAlarmTable = MibTable((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 12), )
if mibBuilder.loadTexts: ss7LinkFailureAlarmTable.setStatus('mandatory')
if mibBuilder.loadTexts: ss7LinkFailureAlarmTable.setDescription('The SS7LinkFailureAlarmTable contains alarms for SS7 link failures.')
ss7LinkFailureAlarmTableEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 12, 1), ).setIndexNames((0, "InternetThruway-MIB", "lfIndex"))
if mibBuilder.loadTexts: ss7LinkFailureAlarmTableEntry.setStatus('mandatory')
if mibBuilder.loadTexts: ss7LinkFailureAlarmTableEntry.setDescription('This object defines a row within the SS7 Link Failure Alarm Table. A row can be uniquely identified with the row index.')
# Column .1: row index.
lfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 12, 1, 1), Integer32())
if mibBuilder.loadTexts: lfIndex.setStatus('mandatory')
if mibBuilder.loadTexts: lfIndex.setDescription('Identifies the row number in the table.')
# Column .2: unique alarm key.
lfKey = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 12, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: lfKey.setStatus('mandatory')
if mibBuilder.loadTexts: lfKey.setDescription('Unique identifier for the alarm.')
# Column .3: IP address of the alarm sender.
lfIPAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 12, 1, 3), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: lfIPAddress.setStatus('mandatory')
if mibBuilder.loadTexts: lfIPAddress.setDescription('This object identifies the IP Address of the machine which sent the alarm.')
# Column .5: signalling link code (SLC) of the failed link.
# NOTE(review): sub-id .4 is skipped here in the generated OIDs -- presumably
# deliberate in the source MIB; verify against the original MIB module.
lfLinkCode = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 12, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: lfLinkCode.setStatus('mandatory')
if mibBuilder.loadTexts: lfLinkCode.setDescription('This object identifies the signalling link code (SLC) of the failed link.')
# Column .6: alarm timestamp.
lfTimeStamp = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 12, 1, 6), TimeString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: lfTimeStamp.setStatus('mandatory')
if mibBuilder.loadTexts: lfTimeStamp.setDescription('Indicates the time of the alarm.')
# Column .7: configured name of the alarm sender.
lfName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 12, 1, 7), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: lfName.setStatus('mandatory')
if mibBuilder.loadTexts: lfName.setDescription('Indicates the configured name for the machine which sent the alarm.')
# Column .8: hosting device description and slot number.
lfCardId = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 12, 1, 8), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: lfCardId.setStatus('mandatory')
if mibBuilder.loadTexts: lfCardId.setDescription('This object identifies the device that hosts the failed link. It provides a physical description of the device, as well as its slot number.')
# Column .9: linkset (adjacent point code) of the link.
lfLinkSet = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 12, 1, 9), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: lfLinkSet.setStatus('mandatory')
if mibBuilder.loadTexts: lfLinkSet.setDescription('This object identifies the linkset associated with the link via its adjacent point code.')
# ss7LinkCongestionAlarmTable (OID 1.3.6.1.4.1.562.14.2.4.13): alarms for
# congestion on an SS7 link, indexed by lcIndex.
ss7LinkCongestionAlarmTable = MibTable((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 13), )
if mibBuilder.loadTexts: ss7LinkCongestionAlarmTable.setStatus('mandatory')
if mibBuilder.loadTexts: ss7LinkCongestionAlarmTable.setDescription('The SS7LinkCongestionAlarmTable contains alarms to indicate congestion on an SS7 link.')
ss7LinkCongestionAlarmTableEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 13, 1), ).setIndexNames((0, "InternetThruway-MIB", "lcIndex"))
if mibBuilder.loadTexts: ss7LinkCongestionAlarmTableEntry.setStatus('mandatory')
if mibBuilder.loadTexts: ss7LinkCongestionAlarmTableEntry.setDescription('This object defines a row within the SS7 Link Congestion Alarm Table. A row can be uniquely identified with the row index.')
# Column .1: row index.
lcIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 13, 1, 1), Integer32())
if mibBuilder.loadTexts: lcIndex.setStatus('mandatory')
if mibBuilder.loadTexts: lcIndex.setDescription('Identifies the row number in the table.')
# Column .2: unique alarm key.
lcKey = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 13, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: lcKey.setStatus('mandatory')
if mibBuilder.loadTexts: lcKey.setDescription('Unique identifier for the alarm.')
# Column .3: IP address of the alarm sender.
lcIPAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 13, 1, 3), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: lcIPAddress.setStatus('mandatory')
if mibBuilder.loadTexts: lcIPAddress.setDescription('This object identifies the IP Address of the machine which sent the alarm.')
# Column .4: signalling link code (SLC) of the affected link.
lcLinkCode = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 13, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: lcLinkCode.setStatus('mandatory')
if mibBuilder.loadTexts: lcLinkCode.setDescription('This object identifies the signalling link code (SLC) of the affected link.')
# Column .5: alarm timestamp.
lcTimeStamp = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 13, 1, 5), TimeString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: lcTimeStamp.setStatus('mandatory')
if mibBuilder.loadTexts: lcTimeStamp.setDescription('Indicates the time of the alarm.')
# Column .6: configured name of the alarm sender.
lcName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 13, 1, 6), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: lcName.setStatus('mandatory')
if mibBuilder.loadTexts: lcName.setDescription('Indicates the configured name for the machine which sent the alarm.')
# Column .7: hosting device description and slot number.
lcCardId = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 13, 1, 7), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: lcCardId.setStatus('mandatory')
if mibBuilder.loadTexts: lcCardId.setDescription('This object identifies the device that hosts the failed link. It provides a physical description of the device, as well as its slot number.')
# Column .8: linkset (adjacent point code) of the link.
lcLinkSet = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 13, 1, 8), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: lcLinkSet.setStatus('mandatory')
if mibBuilder.loadTexts: lcLinkSet.setDescription('This object identifies the linkset associated with the link via its adjacent point code.')
# ss7ISUPFailureAlarmTable (OID 1.3.6.1.4.1.562.14.2.4.14): alarms for SS7
# ISUP protocol stack failures, indexed by ifIndex.
# NOTE(review): the generated index name 'ifIndex' shadows the well-known
# IF-MIB ifIndex only in name; the OID here is enterprise-specific.
ss7ISUPFailureAlarmTable = MibTable((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 14), )
if mibBuilder.loadTexts: ss7ISUPFailureAlarmTable.setStatus('mandatory')
if mibBuilder.loadTexts: ss7ISUPFailureAlarmTable.setDescription('The SS7ISUPFailureAlarmTable contains alarms for SS7 ISUP protocol stack failures.')
ss7ISUPFailureAlarmTableEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 14, 1), ).setIndexNames((0, "InternetThruway-MIB", "ifIndex"))
if mibBuilder.loadTexts: ss7ISUPFailureAlarmTableEntry.setStatus('mandatory')
if mibBuilder.loadTexts: ss7ISUPFailureAlarmTableEntry.setDescription('This object defines a row within the SS7 ISUP Failure Alarm Table. A row can be uniquely identified with the row index.')
# Column .1: row index.
ifIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 14, 1, 1), Integer32())
if mibBuilder.loadTexts: ifIndex.setStatus('mandatory')
if mibBuilder.loadTexts: ifIndex.setDescription('Identifies the row number in the table.')
# Column .2: unique alarm key.
ifKey = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 14, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ifKey.setStatus('mandatory')
if mibBuilder.loadTexts: ifKey.setDescription('Unique identifier for the alarm.')
# Column .3: IP address of the alarm sender.
ifIPAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 14, 1, 3), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ifIPAddress.setStatus('mandatory')
if mibBuilder.loadTexts: ifIPAddress.setDescription('This object identifies the IP Address of the machine which sent the alarm.')
# Column .4: alarm timestamp.
ifTimeStamp = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 14, 1, 4), TimeString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ifTimeStamp.setStatus('mandatory')
if mibBuilder.loadTexts: ifTimeStamp.setDescription('Indicates the time of the alarm.')
# Column .5: configured name of the alarm sender.
ifName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 14, 1, 5), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ifName.setStatus('mandatory')
if mibBuilder.loadTexts: ifName.setDescription('Indicates the configured name for the machine which sent the alarm.')
# ss7ISUPCongestionAlarmTable (OID 1.3.6.1.4.1.562.14.2.4.15): alarms for
# congestion within an ISUP protocol stack, indexed by icIndex.
ss7ISUPCongestionAlarmTable = MibTable((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 15), )
if mibBuilder.loadTexts: ss7ISUPCongestionAlarmTable.setStatus('mandatory')
if mibBuilder.loadTexts: ss7ISUPCongestionAlarmTable.setDescription('The SS7ISUPCongestionAlarmTable contains alarms to indicate congestion with an ISUP protocol stack.')
ss7ISUPCongestionAlarmTableEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 15, 1), ).setIndexNames((0, "InternetThruway-MIB", "icIndex"))
if mibBuilder.loadTexts: ss7ISUPCongestionAlarmTableEntry.setStatus('mandatory')
if mibBuilder.loadTexts: ss7ISUPCongestionAlarmTableEntry.setDescription('This object defines a row within the SS7 ISUP Congestion Alarm Table. A row can be uniquely identified with the row index.')
# Column .1: row index.
icIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 15, 1, 1), Integer32())
if mibBuilder.loadTexts: icIndex.setStatus('mandatory')
if mibBuilder.loadTexts: icIndex.setDescription('Identifies the row number in the table.')
# Column .2: unique alarm key.
icKey = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 15, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: icKey.setStatus('mandatory')
if mibBuilder.loadTexts: icKey.setDescription('Unique identifier for the alarm.')
# Column .3: IP address of the alarm sender.
icIPAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 15, 1, 3), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: icIPAddress.setStatus('mandatory')
if mibBuilder.loadTexts: icIPAddress.setDescription('This object identifies the IP Address of the machine which sent the alarm.')
# Column .4: congestion level -- 0 = Normal, 1 = Congestion.
icCongestionLevel = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 15, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: icCongestionLevel.setStatus('mandatory')
if mibBuilder.loadTexts: icCongestionLevel.setDescription('This object indicates the congestion level with an ISUP protocol stack. Possible congestion levels are: (0) Normal (1) Congestion ')
# Column .5: alarm timestamp.
icTimeStamp = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 15, 1, 5), TimeString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: icTimeStamp.setStatus('mandatory')
if mibBuilder.loadTexts: icTimeStamp.setDescription('Indicates the time of the alarm.')
# Column .6: configured name of the alarm sender.
icName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 15, 1, 6), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: icName.setStatus('mandatory')
if mibBuilder.loadTexts: icName.setDescription('Indicates the configured name for the machine which sent the alarm.')
# ss7MTP3CongestionAlarmTable (OID 1.3.6.1.4.1.562.14.2.4.16): alarms for
# congestion on an MTP3 link, indexed by mtp3Index.
ss7MTP3CongestionAlarmTable = MibTable((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 16), )
if mibBuilder.loadTexts: ss7MTP3CongestionAlarmTable.setStatus('mandatory')
if mibBuilder.loadTexts: ss7MTP3CongestionAlarmTable.setDescription('The SS7MTP3CongestionAlarmTable contains alarms to indicate congestion on an MTP3 link.')
ss7MTP3CongestionAlarmTableEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 16, 1), ).setIndexNames((0, "InternetThruway-MIB", "mtp3Index"))
if mibBuilder.loadTexts: ss7MTP3CongestionAlarmTableEntry.setStatus('mandatory')
if mibBuilder.loadTexts: ss7MTP3CongestionAlarmTableEntry.setDescription('This object defines a row within the SS7 MTP3 Congestion Alarm Table. A row can be uniquely identified with the row index.')
# Column .1: row index.
mtp3Index = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 16, 1, 1), Integer32())
if mibBuilder.loadTexts: mtp3Index.setStatus('mandatory')
if mibBuilder.loadTexts: mtp3Index.setDescription('Identifies the row number in the table.')
# Column .2: unique alarm key.
mtp3Key = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 16, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mtp3Key.setStatus('mandatory')
if mibBuilder.loadTexts: mtp3Key.setDescription('Unique identifier for the alarm.')
# Column .3: IP address of the alarm sender.
mtp3IPAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 16, 1, 3), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mtp3IPAddress.setStatus('mandatory')
if mibBuilder.loadTexts: mtp3IPAddress.setDescription('This object identifies the IP Address of the machine which sent the alarm.')
# Column .4: congestion level -- 0 Normal, 1 Minor, 2 Major, 3 Critical.
mtp3CongestionLevel = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 16, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mtp3CongestionLevel.setStatus('mandatory')
if mibBuilder.loadTexts: mtp3CongestionLevel.setDescription('This object indicates the congestion level on a problem SS7 Link. Possible congestion values are: (0) Normal (1) Minor (2) Major (3) Critical ')
# Column .5: alarm timestamp.
mtp3TimeStamp = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 16, 1, 5), TimeString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mtp3TimeStamp.setStatus('mandatory')
if mibBuilder.loadTexts: mtp3TimeStamp.setDescription('Indicates the time of the alarm.')
# Column .6: configured name of the alarm sender.
mtp3Name = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 16, 1, 6), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mtp3Name.setStatus('mandatory')
if mibBuilder.loadTexts: mtp3Name.setDescription('Represents the configured name of the machine which sent the alarm.')
# ss7MTP2TrunkFailureAlarmTable (OID 1.3.6.1.4.1.562.14.2.4.17): alarms for
# MTP2 trunk failures, indexed by mtp2Index.
ss7MTP2TrunkFailureAlarmTable = MibTable((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 17), )
if mibBuilder.loadTexts: ss7MTP2TrunkFailureAlarmTable.setStatus('mandatory')
if mibBuilder.loadTexts: ss7MTP2TrunkFailureAlarmTable.setDescription('The SS7MTP2TrunkFailureAlarmTable contains alarms to indicate MTP2 trunk failures.')
ss7MTP2TrunkFailureAlarmTableEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 17, 1), ).setIndexNames((0, "InternetThruway-MIB", "mtp2Index"))
if mibBuilder.loadTexts: ss7MTP2TrunkFailureAlarmTableEntry.setStatus('mandatory')
if mibBuilder.loadTexts: ss7MTP2TrunkFailureAlarmTableEntry.setDescription('This object defines a row within the SS7 MTP2 Failure Alarm Table. A row can be uniquely identified with the row index.')
# Column .1: row index.
mtp2Index = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 17, 1, 1), Integer32())
if mibBuilder.loadTexts: mtp2Index.setStatus('mandatory')
if mibBuilder.loadTexts: mtp2Index.setDescription('Identifies the row number in the table.')
# Column .2: unique alarm key.
mtp2Key = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 17, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mtp2Key.setStatus('mandatory')
if mibBuilder.loadTexts: mtp2Key.setDescription('Unique identifier for the alarm.')
# Column .3: IP address of the alarm sender.
mtp2IPAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 17, 1, 3), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mtp2IPAddress.setStatus('mandatory')
if mibBuilder.loadTexts: mtp2IPAddress.setDescription('This object identifies the IP Address of the machine which sent the alarm.')
# Column .4: configured name of the alarm sender.
mtp2Name = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 17, 1, 4), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mtp2Name.setStatus('mandatory')
if mibBuilder.loadTexts: mtp2Name.setDescription('This object identifies the configured name of the machine which sent the alarm.')
# Column .5: hosting device description and slot number.
mtp2CardId = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 17, 1, 5), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mtp2CardId.setStatus('mandatory')
if mibBuilder.loadTexts: mtp2CardId.setDescription('This object indicates the device upon which the affected trunk is hosted. The string contains a physical description of the device, as well as its slot number.')
# Column .6: alarm condition; uses the MTP2AlarmConditionType textual
# convention (declared elsewhere in this module).
mtp2AlarmCondition = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 17, 1, 6), MTP2AlarmConditionType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mtp2AlarmCondition.setStatus('mandatory')
if mibBuilder.loadTexts: mtp2AlarmCondition.setDescription('This object indicates which of the possible alarm conditions is in effect. Alarms are not nested: a new alarm is only reported if there is no current alarm condition.')
# Column .7: alarm timestamp.
mtp2TimeStamp = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 17, 1, 7), TimeString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mtp2TimeStamp.setStatus('mandatory')
if mibBuilder.loadTexts: mtp2TimeStamp.setDescription('Indicates the time of the alarm.')
# ss7LinksetFailureAlarmTable (OID 1.3.6.1.4.1.562.14.2.4.18): alarms for
# failure of a CSG linkset, indexed by lsFailureIndex.
ss7LinksetFailureAlarmTable = MibTable((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 18), )
if mibBuilder.loadTexts: ss7LinksetFailureAlarmTable.setStatus('mandatory')
if mibBuilder.loadTexts: ss7LinksetFailureAlarmTable.setDescription('The SS7LinksetFailureAlarmTable contains alarms to indicate failure on an CSG linkset.')
ss7LinksetFailureAlarmTableEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 18, 1), ).setIndexNames((0, "InternetThruway-MIB", "lsFailureIndex"))
if mibBuilder.loadTexts: ss7LinksetFailureAlarmTableEntry.setStatus('mandatory')
if mibBuilder.loadTexts: ss7LinksetFailureAlarmTableEntry.setDescription('This object defines a row within the SS7 Linkset Failure Alarm Table. A row can be uniquely identified with the row index.')
# Column .1: row index.
lsFailureIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 18, 1, 1), Integer32())
if mibBuilder.loadTexts: lsFailureIndex.setStatus('mandatory')
if mibBuilder.loadTexts: lsFailureIndex.setDescription('Identifies the row number in the table.')
# Column .2: unique alarm key.
lsFailureKey = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 18, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: lsFailureKey.setStatus('mandatory')
if mibBuilder.loadTexts: lsFailureKey.setDescription('Unique identifier for the alarm.')
# Column .3: IP address of the alarm sender.
lsFailureIPAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 18, 1, 3), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: lsFailureIPAddress.setStatus('mandatory')
if mibBuilder.loadTexts: lsFailureIPAddress.setDescription('This object identifies the IP Address of the machine which sent the alarm.')
# Column .4: configured name of the alarm sender.
lsFailureName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 18, 1, 4), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: lsFailureName.setStatus('mandatory')
if mibBuilder.loadTexts: lsFailureName.setDescription('Represents the configured name of the machine which sent the alarm.')
# Column .5: point code associated with the linkset.
lsFailurePointcode = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 18, 1, 5), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: lsFailurePointcode.setStatus('mandatory')
if mibBuilder.loadTexts: lsFailurePointcode.setDescription('This object indicates the pointcode associated with the linkset.')
# Column .6: alarm timestamp.
lsFailureTimeStamp = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 18, 1, 6), TimeString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: lsFailureTimeStamp.setStatus('mandatory')
if mibBuilder.loadTexts: lsFailureTimeStamp.setDescription('Indicates the time of the alarm.')
# ss7DestinationInaccessibleAlarmTable (OID 1.3.6.1.4.1.562.14.2.4.19):
# alarms for inaccessible signalling destinations, indexed by destInaccessIndex.
ss7DestinationInaccessibleAlarmTable = MibTable((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 19), )
if mibBuilder.loadTexts: ss7DestinationInaccessibleAlarmTable.setStatus('mandatory')
if mibBuilder.loadTexts: ss7DestinationInaccessibleAlarmTable.setDescription('The SS7DestinationAccessAlarmTable contains alarms which indicate inaccessible signalling destinations.')
ss7DestinationInaccessibleAlarmTableEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 19, 1), ).setIndexNames((0, "InternetThruway-MIB", "destInaccessIndex"))
if mibBuilder.loadTexts: ss7DestinationInaccessibleAlarmTableEntry.setStatus('mandatory')
if mibBuilder.loadTexts: ss7DestinationInaccessibleAlarmTableEntry.setDescription('This object defines a row within the SS7 Destination Inaccessible Alarm Table. A row can be uniquely identified with the row index.')
# Column .1: row index.
destInaccessIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 19, 1, 1), Integer32())
if mibBuilder.loadTexts: destInaccessIndex.setStatus('mandatory')
if mibBuilder.loadTexts: destInaccessIndex.setDescription('Identifies the row number in the table.')
# Column .2: unique alarm key.
destInaccessKey = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 19, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: destInaccessKey.setStatus('mandatory')
if mibBuilder.loadTexts: destInaccessKey.setDescription('Unique identifier for the alarm.')
# Column .3: IP address of the alarm sender.
destInaccessIPAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 19, 1, 3), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: destInaccessIPAddress.setStatus('mandatory')
if mibBuilder.loadTexts: destInaccessIPAddress.setDescription('This object identifies the IP Address of the machine which sent the alarm.')
# Column .4: configured name of the alarm sender.
destInaccessName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 19, 1, 4), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: destInaccessName.setStatus('mandatory')
if mibBuilder.loadTexts: destInaccessName.setDescription('Represents the configured name of the machine which sent the alarm.')
# Column .5: point code of the inaccessible destination.
destInaccessPointcode = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 19, 1, 5), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: destInaccessPointcode.setStatus('mandatory')
if mibBuilder.loadTexts: destInaccessPointcode.setDescription('This object indicates the point code of the inaccessible signalling destination.')
# Column .6: alarm timestamp.
destInaccessTimeStamp = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 19, 1, 6), TimeString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: destInaccessTimeStamp.setStatus('mandatory')
if mibBuilder.loadTexts: destInaccessTimeStamp.setDescription('Indicates the time of the alarm.')
# ss7DestinationCongestedAlarmTable (OID 1.3.6.1.4.1.562.14.2.4.20): alarms
# for congestion at a signalling destination, indexed by destCongestIndex.
ss7DestinationCongestedAlarmTable = MibTable((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 20), )
if mibBuilder.loadTexts: ss7DestinationCongestedAlarmTable.setStatus('mandatory')
if mibBuilder.loadTexts: ss7DestinationCongestedAlarmTable.setDescription('The SS7DestinationCongestedAlarmTable contains alarms to indicate congestion on the given destination.')
ss7DestinationCongestedAlarmTableEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 20, 1), ).setIndexNames((0, "InternetThruway-MIB", "destCongestIndex"))
if mibBuilder.loadTexts: ss7DestinationCongestedAlarmTableEntry.setStatus('mandatory')
if mibBuilder.loadTexts: ss7DestinationCongestedAlarmTableEntry.setDescription('This object defines a row within the SS7 Destination Congestion Table. A row can be uniquely identified with the row index.')
# Column .1: row index.
destCongestIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 20, 1, 1), Integer32())
if mibBuilder.loadTexts: destCongestIndex.setStatus('mandatory')
if mibBuilder.loadTexts: destCongestIndex.setDescription('Identifies the row number in the table.')
# Column .2: unique alarm key.
destCongestKey = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 20, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: destCongestKey.setStatus('mandatory')
if mibBuilder.loadTexts: destCongestKey.setDescription('Unique identifier for the alarm.')
# Column .3: IP address of the alarm sender.
destCongestIPAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 20, 1, 3), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: destCongestIPAddress.setStatus('mandatory')
if mibBuilder.loadTexts: destCongestIPAddress.setDescription('This object identifies the IP Address of the machine which sent the alarm.')
# Column .4: configured name of the alarm sender.
destCongestName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 20, 1, 4), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: destCongestName.setStatus('mandatory')
if mibBuilder.loadTexts: destCongestName.setDescription('Represents the configured name of the machine which sent the alarm.')
# Column .5: point code of the congested destination.
destCongestPointcode = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 20, 1, 5), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: destCongestPointcode.setStatus('mandatory')
if mibBuilder.loadTexts: destCongestPointcode.setDescription('This object indicates the pointcode of the congested destination.')
# Column .6: congestion level -- 0 Normal, 1 Minor, 2 Major, 3 Critical.
destCongestCongestionLevel = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 20, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: destCongestCongestionLevel.setStatus('mandatory')
if mibBuilder.loadTexts: destCongestCongestionLevel.setDescription('This object indicates the congestion level on a problem SS7 pointcode. Possible congestion values are: (0) Normal (1) Minor (2) Major (3) Critical ')
# Column .7: alarm timestamp.
destCongestTimeStamp = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 20, 1, 7), TimeString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: destCongestTimeStamp.setStatus('mandatory')
if mibBuilder.loadTexts: destCongestTimeStamp.setDescription('Indicates the time of the alarm.')
# ss7LinkAlignmentAlarmTable (OID 1.3.6.1.4.1.562.14.2.4.21): alarms for SS7
# link alignment problems, indexed by linkAlignIndex.  (The generated table
# description text says "congestion on the CSG"; that wording comes from the
# source MIB and is preserved verbatim.)
ss7LinkAlignmentAlarmTable = MibTable((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 21), )
if mibBuilder.loadTexts: ss7LinkAlignmentAlarmTable.setStatus('mandatory')
if mibBuilder.loadTexts: ss7LinkAlignmentAlarmTable.setDescription('The SS7LinkAlignmentAlarmTable contains alarms to indicate congestion on the CSG.')
ss7LinkAlignmentAlarmTableEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 21, 1), ).setIndexNames((0, "InternetThruway-MIB", "linkAlignIndex"))
if mibBuilder.loadTexts: ss7LinkAlignmentAlarmTableEntry.setStatus('mandatory')
if mibBuilder.loadTexts: ss7LinkAlignmentAlarmTableEntry.setDescription('This object defines a row within the SS7 Link Alignment Alarm Table. A row can be uniquely identified with the row index.')
# Column .1: row index.
linkAlignIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 21, 1, 1), Integer32())
if mibBuilder.loadTexts: linkAlignIndex.setStatus('mandatory')
if mibBuilder.loadTexts: linkAlignIndex.setDescription('Identifies the row number in the table.')
# Column .2: unique alarm key.
linkAlignKey = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 21, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: linkAlignKey.setStatus('mandatory')
if mibBuilder.loadTexts: linkAlignKey.setDescription('Unique identifier for the alarm.')
# Column .3: IP address of the alarm sender.
linkAlignIPAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 21, 1, 3), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: linkAlignIPAddress.setStatus('mandatory')
if mibBuilder.loadTexts: linkAlignIPAddress.setDescription('This object identifies the IP Address of the machine which sent the alarm.')
# Column .4: configured name of the alarm sender.
linkAlignName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 21, 1, 4), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: linkAlignName.setStatus('mandatory')
if mibBuilder.loadTexts: linkAlignName.setDescription('Represents the configured name of the machine which sent the alarm.')
# Column .5: signalling link code (SLC) of the affected link.
linkAlignLinkCode = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 21, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: linkAlignLinkCode.setStatus('mandatory')
if mibBuilder.loadTexts: linkAlignLinkCode.setDescription('This object identifies the signalling link code (SLC) of the affected link.')
# Column .6: alarm timestamp.
linkAlignTimeStamp = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 21, 1, 6), TimeString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: linkAlignTimeStamp.setStatus('mandatory')
if mibBuilder.loadTexts: linkAlignTimeStamp.setDescription('Indicates the time of the alarm.')
# Column .7: hosting device description and slot number.
linkAlignCardId = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 21, 1, 7), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: linkAlignCardId.setStatus('mandatory')
if mibBuilder.loadTexts: linkAlignCardId.setDescription('This object identifies the device that hosts the failed link. It provides a physical description of the device, as well as its slot number.')
# Column .8: linkset (adjacent point code) of the link.
linkAlignLinkSet = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 21, 1, 8), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: linkAlignLinkSet.setStatus('mandatory')
if mibBuilder.loadTexts: linkAlignLinkSet.setDescription('This object identifies the linkset associated with the link via its adjacent point code.')
# csgComplexStateTrapInfo (OID 1.3.6.1.4.1.562.14.2.4.22): scalar objects
# describing CSG complex state, carried as varbinds in complex-state traps.
# Covers the local node (cplxLoc*), its mate (cplxMate*), and overall status.
csgComplexStateTrapInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 22))
# .1: CLLI -- unique identifier of the CSG complex.
cplxName = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 22, 1), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cplxName.setStatus('mandatory')
if mibBuilder.loadTexts: cplxName.setDescription('CLLI, A unique identifier of the CSG Complex.')
# .2/.3: local node's Ethernet name and IP.
cplxLocEthernetName = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 22, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cplxLocEthernetName.setStatus('mandatory')
if mibBuilder.loadTexts: cplxLocEthernetName.setDescription(' The EthernetName attribute value of the node.')
cplxLocEthernetIP = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 22, 3), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cplxLocEthernetIP.setStatus('mandatory')
if mibBuilder.loadTexts: cplxLocEthernetIP.setDescription(' The EthernetIP attribute value of the node.')
# .4-.6: local node state strings (operational / standby / availability).
cplxLocOperationalState = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 22, 4), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cplxLocOperationalState.setStatus('mandatory')
if mibBuilder.loadTexts: cplxLocOperationalState.setDescription(' The OperationalState of the node. Possible values are: UNKNOWN, ENABLED, ENABLED_NETDSC, ENABLED_NETPAR, DISABLED ')
cplxLocStandbyState = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 22, 5), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cplxLocStandbyState.setStatus('mandatory')
if mibBuilder.loadTexts: cplxLocStandbyState.setDescription(' The StandbyState attribute value of the node. Possible values are: UNKNOWN, HOT_STANDBY, COLD_STANDBY, WARM_STANDBY, PROVIDING_SERVICE ')
cplxLocAvailabilityState = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 22, 6), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cplxLocAvailabilityState.setStatus('mandatory')
if mibBuilder.loadTexts: cplxLocAvailabilityState.setDescription(' The AvailabilityState attribute value of the node. Possible values are: UNKNOWN, AVAILABLE, DEGRADED, OFFLINE ')
# .7/.8: mate node's Ethernet name and IP.
cplxMateEthernetName = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 22, 7), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cplxMateEthernetName.setStatus('mandatory')
if mibBuilder.loadTexts: cplxMateEthernetName.setDescription(' The EthernetName attribute value of the mate node.')
cplxMateEthernetIP = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 22, 8), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cplxMateEthernetIP.setStatus('mandatory')
if mibBuilder.loadTexts: cplxMateEthernetIP.setDescription(' The EthernetIP attribute value of the mate node.')
# .9-.11: mate node state strings (operational / standby / availability).
cplxMateOperationalState = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 22, 9), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cplxMateOperationalState.setStatus('mandatory')
if mibBuilder.loadTexts: cplxMateOperationalState.setDescription(' The OperationalState of the mate node. Possible values are: UNKNOWN, ENABLED, ENABLED_NETDSC, ENABLED_NETPAR, DISABLED ')
cplxMateStandbyState = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 22, 10), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cplxMateStandbyState.setStatus('mandatory')
if mibBuilder.loadTexts: cplxMateStandbyState.setDescription(' The StandbyState attribute value of the mate node. Possible values are: UNKNOWN, HOT_STANDBY, COLD_STANDBY, WARM_STANDBY, PROVIDING_SERVICE ')
cplxMateAvailabilityState = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 22, 11), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cplxMateAvailabilityState.setStatus('mandatory')
if mibBuilder.loadTexts: cplxMateAvailabilityState.setDescription(' The AvailabilityState attribute value of the mate node. Possible values are: UNKNOWN, AVAILABLE, DEGRADED, OFFLINE ')
# .12: overall complex alarm status (NORMAL / MAJOR / CRITICAL).
cplxAlarmStatus = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 22, 12), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cplxAlarmStatus.setStatus('mandatory')
if mibBuilder.loadTexts: cplxAlarmStatus.setDescription('This object indicates the alarm status of the CSG Complex. Possible status are: NORMAL, MAJOR, CRITICAL ')
# lostServerAlarmTable (OID 1.3.6.1.4.1.562.14.2.5.1 -- note the .5 subtree,
# unlike the .4 alarm tables above): alarms for lost servers, indexed by
# lsIndex.  The table-level description string is empty in the source MIB.
lostServerAlarmTable = MibTable((1, 3, 6, 1, 4, 1, 562, 14, 2, 5, 1), )
if mibBuilder.loadTexts: lostServerAlarmTable.setStatus('mandatory')
if mibBuilder.loadTexts: lostServerAlarmTable.setDescription('')
lostServerAlarmTableEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 14, 2, 5, 1, 1), ).setIndexNames((0, "InternetThruway-MIB", "lsIndex"))
if mibBuilder.loadTexts: lostServerAlarmTableEntry.setStatus('mandatory')
if mibBuilder.loadTexts: lostServerAlarmTableEntry.setDescription('This object defines a row within the Lost Server Alarm Table. A row can be uniquely identified with the row index.')
# Column .1: row index.
lsIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 5, 1, 1, 1), Integer32())
if mibBuilder.loadTexts: lsIndex.setStatus('mandatory')
if mibBuilder.loadTexts: lsIndex.setDescription('Identifies the row number in the table.')
# Column .2: unique alarm key.
lsKey = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 5, 1, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: lsKey.setStatus('mandatory')
if mibBuilder.loadTexts: lsKey.setDescription('Unique identifier for the alarm.')
# Column .3: IP address of the lost machine.
lsIPAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 5, 1, 1, 3), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: lsIPAddress.setStatus('mandatory')
if mibBuilder.loadTexts: lsIPAddress.setDescription('This object identifies the IP Address of the machine which is lost.')
# Column .4: configured name associated with that IP address.
lsName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 5, 1, 1, 4), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: lsName.setStatus('mandatory')
if mibBuilder.loadTexts: lsName.setDescription('The configured name associated with the IP Address of the machine which sent the alarm.')
# Column .5: alarm timestamp.
lsTimeStamp = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 5, 1, 1, 5), TimeString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: lsTimeStamp.setStatus('mandatory')
if mibBuilder.loadTexts: lsTimeStamp.setDescription('Indicates the time of the alarm.')
# --- Alarm mask / status bit-mask scalars (OID subtree 1.3.6.1.4.1.562.14.2.4) ---
# Gauge32 bit masks: the two read-write masks (alarmMaskInt1/alarmMaskInt2)
# let a management station filter which alarm/event types the CSG emits
# (filtering is global, affecting all stations); the three read-only status
# words (alarmStatusInt1..3) report which alarms are currently raised.
# Bit-position meanings are enumerated in each object's description text.
alarmMaskInt1 = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 1), Gauge32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: alarmMaskInt1.setStatus('mandatory')
if mibBuilder.loadTexts: alarmMaskInt1.setDescription('The value of this bit mask reflects the current filtering policy of CSG events and alarms. Management stations which wish to not receive certain events or alarm types from the CSG can modify this value as needed. Note, however, that changes in the filtration policy affect what is received by all management stations. Initially, the bit mask is set so that all bits are in a false state. Each bit in a true state reflects a currently filtered event, alarm, or alarm type. The actual bit position meanings are given below. Bit 0 is LSB. Bits 0 = Generic Normal Alarm 1 = Generic Warning Alarm 2 = Generic Minor Alarm 3 = Generic Major Alarm 4 = Generic Critical Alarm 5 = Partition Space Alarm 6 = Home Gateway Failure Alarm 7 = Component Not In Provisioned State Alarm 8 = Component Debug On Alarm 9 = Component Multiple Restart Alarm 10 = Component Restart Warning 11 = NAS Registration Failure Warning 12 = NAS Failure Alarm 13 = File Deletion Warning 14 = File Backup Warning 15 = Sysman Restart Warning 16 = File Access Warning 17 = Home Gateway/NAS Provisioning Mismatch Warning 18 = SS7 Link Failure Alarm 19 = SS7 Link Congestion Alarm 20 = ISUP Failure Alarm 21 = ISUP Congestion Alarm 22 = SS7 FEP Congestion Alarm 23 = SS7 BEP Congestion Alarm 24 = High Availability Peer Contact Lost Alarm 25 = SS7 MTP3 Congestion Alarm 26 = SS7 MTP2 Trunk Failure Alarm 27 = SS7 Linkset Failure Alarm 28 = SS7 Destination Inaccessible Alarm 29 = SS7 Destination Congested Alarm 30 = SS7 Link Alignment Failure Alarm ')
# Status word 1: per-component bits for provisioned-state (0-15) and multi-restart (16-31) alarms.
alarmStatusInt1 = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 2), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: alarmStatusInt1.setStatus('mandatory')
if mibBuilder.loadTexts: alarmStatusInt1.setDescription('The value of this bit mask indicates that current status of CSG component alarms. Each components is represented by a single bit within the range occupied by each component alarm type. Each bit in a true state reflects a currently raised alarm. The actual bit position meanings are given below. Bit 0 is the LSB. Bits 0-15 = Component Not In Provisioned State Alarm 16-31 = Component Multi Restart Alarm ')
# Status word 2: debug-on, partition-space, gateway/NAS and SS7 alarm bits.
alarmStatusInt2 = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 3), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: alarmStatusInt2.setStatus('mandatory')
if mibBuilder.loadTexts: alarmStatusInt2.setDescription('The value of this bit mask indicates the current status of active CSG alarms. Component-related alarms occupy a range of bits: each bit within that range represents the alarm status for a particular component. Each bit in a true state reflects a currently raised alarm. The actual bit position meanings are given below. Bit 0 is the LSB. Bits 0-15 = Component Debug On Alarm 16-23 = Partition Space Alarm 24 = Home Gateway Failure Alarm 25 = NAS Failure Alarm 26 = SS7 Link Failure Alarm 27 = SS7 Link Congestion Alarm 28 = ISUP Failure Alarm 29 = ISUP Congestion Alarm 30 = High Availability Peer Contact Lost Alarm 31 = SS7 MTP3 Congestion Alarm ')
# Status word 3: remaining SS7 alarms plus complex-status and external-ethernet bits.
alarmStatusInt3 = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 4), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: alarmStatusInt3.setStatus('mandatory')
if mibBuilder.loadTexts: alarmStatusInt3.setDescription('The value of this bit mask indicates the current status of active CSG alarms. Each bit in a true state reflects a currently raised alarm. The actual bit position meanings are given below. Bit 0 is the LSB. Bits 0 = SS7 MTP2 Trunk Failure Alarm 1 = SS7 Linkset Failure Alarm 2 = SS7 Destination Inaccessible Alarm 3 = SS7 Destination Congestion Alarm 4 = SS7 Link Alignment Failure Alarm 5 = CSG Complex Status Alarm 6 = External Ethernet Alarm ')
# Mask word 2: additional filter bits (external ethernet, cluster-info retrieval).
alarmMaskInt2 = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 4, 5), Gauge32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: alarmMaskInt2.setStatus('mandatory')
if mibBuilder.loadTexts: alarmMaskInt2.setDescription('The value of this bit mask reflects the current additional filtering policy of CSG events and alarms. Management stations which wish to not receive certain events or alarm types from the CSG can modify this value as needed. Note, however, that changes in the filtration policy affect what is received by all management stations. Initially, the bit mask is set so that all bits are in a false state. Each bit in a true state reflects a currently filtered event, alarm, or alarm type. The actual bit position meanings are given below. Bit 0 is LSB. Bits 0 = External Ethernet Alarm 1 = Cluster Information retrieval Alarm ')
# --- Trap varbind objects (OID subtree 1.3.6.1.4.1.562.14.2.3) ---
# Scalars that exist only to be carried as variable bindings inside the
# NotificationType traps defined below (component name, file name, date,
# generic string payload, id key, IP address, configured name, timestamp).
trapCompName = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 3, 1), DisplayString())
if mibBuilder.loadTexts: trapCompName.setStatus('mandatory')
if mibBuilder.loadTexts: trapCompName.setDescription('OID for the Component name.')
trapFileName = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 3, 2), DisplayString())
if mibBuilder.loadTexts: trapFileName.setStatus('mandatory')
if mibBuilder.loadTexts: trapFileName.setDescription('OID for file Name.')
trapDate = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 3, 3), TimeString())
if mibBuilder.loadTexts: trapDate.setStatus('mandatory')
if mibBuilder.loadTexts: trapDate.setDescription('OID for the date.')
trapGenericStr1 = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 3, 4), DisplayString())
if mibBuilder.loadTexts: trapGenericStr1.setStatus('mandatory')
if mibBuilder.loadTexts: trapGenericStr1.setDescription('OID for the generic data.')
trapIdKey = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 3, 5), Integer32())
if mibBuilder.loadTexts: trapIdKey.setStatus('mandatory')
if mibBuilder.loadTexts: trapIdKey.setDescription('OID for the identification key.')
trapIPAddress = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 3, 6), IpAddress())
if mibBuilder.loadTexts: trapIPAddress.setStatus('mandatory')
if mibBuilder.loadTexts: trapIPAddress.setDescription('OID for IP address.')
trapName = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 3, 7), DisplayString())
if mibBuilder.loadTexts: trapName.setStatus('mandatory')
if mibBuilder.loadTexts: trapName.setDescription('OID for configured name associated with an IpAddress.')
trapTimeStamp = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 3, 8), DisplayString())
if mibBuilder.loadTexts: trapTimeStamp.setStatus('mandatory')
if mibBuilder.loadTexts: trapTimeStamp.setDescription('Indicates the time at which the alarm occurred.')
# --- Notification (trap) definitions, rooted at 1.3.6.1.4.1.562.14.2.3.0.x ---
# Alarm/clear trap pairs. The numeric suffix encodes the alarm family and,
# by convention visible in the pairs below, clears use a lower final digit
# (e.g. 1) than the corresponding raise (e.g. 4/5) — presumably severity
# coded in the last digit; verify against the original MIB if it matters.

# Disk partition space: clear (1001) / alarm (1004).
diskSpaceClear = NotificationType((1, 3, 6, 1, 4, 1, 562, 14, 2, 3) + (0,1001)).setObjects(("InternetThruway-MIB", "partitionSpaceKey"), ("InternetThruway-MIB", "partitionIndex"), ("InternetThruway-MIB", "partitionName"), ("InternetThruway-MIB", "partitionPercentFull"), ("InternetThruway-MIB", "partitionSpaceTimeStamp"))
if mibBuilder.loadTexts: diskSpaceClear.setDescription('The Trap generated when a disk partition has a space increase after a previously sent DiskSpaceAlarm.')
diskSpaceAlarm = NotificationType((1, 3, 6, 1, 4, 1, 562, 14, 2, 3) + (0,1004)).setObjects(("InternetThruway-MIB", "partitionSpaceKey"), ("InternetThruway-MIB", "partitionIndex"), ("InternetThruway-MIB", "partitionName"), ("InternetThruway-MIB", "partitionPercentFull"), ("InternetThruway-MIB", "partitionSpaceTimeStamp"))
if mibBuilder.loadTexts: diskSpaceAlarm.setDescription('The Trap generated when a disk partition is running out of space provisioned state.')
# External ethernet card: clear (1011) / major (1014) / critical (1015); no varbinds.
etherCardTrapClear = NotificationType((1, 3, 6, 1, 4, 1, 562, 14, 2, 3) + (0,1011))
if mibBuilder.loadTexts: etherCardTrapClear.setDescription(' The Trap generated when the external ethernet card becomes available.')
etherCardTrapMajor = NotificationType((1, 3, 6, 1, 4, 1, 562, 14, 2, 3) + (0,1014))
if mibBuilder.loadTexts: etherCardTrapMajor.setDescription('The Trap generated when the external ethernet card is down.')
etherCardTrapCritical = NotificationType((1, 3, 6, 1, 4, 1, 562, 14, 2, 3) + (0,1015))
if mibBuilder.loadTexts: etherCardTrapCritical.setDescription('The Trap generated when the external ethernet card is down.')
# Component debug on/off (2001/2002), provisioned-state (2011/2014), restart (2021/2024).
compDebugOff = NotificationType((1, 3, 6, 1, 4, 1, 562, 14, 2, 3) + (0,2001)).setObjects(("InternetThruway-MIB", "compDebugKey"), ("InternetThruway-MIB", "componentIndex"), ("InternetThruway-MIB", "componentName"), ("InternetThruway-MIB", "compDebugTimeStamp"))
if mibBuilder.loadTexts: compDebugOff.setDescription('The Trap generated when a Component turns off its debug info.')
compDebugOn = NotificationType((1, 3, 6, 1, 4, 1, 562, 14, 2, 3) + (0,2002)).setObjects(("InternetThruway-MIB", "compDebugKey"), ("InternetThruway-MIB", "componentIndex"), ("InternetThruway-MIB", "componentName"), ("InternetThruway-MIB", "compDebugTimeStamp"))
if mibBuilder.loadTexts: compDebugOn.setDescription('The Trap generated when a Component turns on its debug info.')
compStateClear = NotificationType((1, 3, 6, 1, 4, 1, 562, 14, 2, 3) + (0,2011)).setObjects(("InternetThruway-MIB", "compProvStateKey"), ("InternetThruway-MIB", "componentIndex"), ("InternetThruway-MIB", "componentName"), ("InternetThruway-MIB", "compProvStateStatus"), ("InternetThruway-MIB", "compSecsInCurrentState"), ("InternetThruway-MIB", "compProvStateTimeStamp"))
if mibBuilder.loadTexts: compStateClear.setDescription('The Trap generated when a component goes to its provisioned states after a CompStatusAlarm trap has been sent.')
compStateAlarm = NotificationType((1, 3, 6, 1, 4, 1, 562, 14, 2, 3) + (0,2014)).setObjects(("InternetThruway-MIB", "compProvStateKey"), ("InternetThruway-MIB", "componentIndex"), ("InternetThruway-MIB", "componentName"), ("InternetThruway-MIB", "compProvStateStatus"), ("InternetThruway-MIB", "compSecsInCurrentState"), ("InternetThruway-MIB", "compProvStateTimeStamp"))
if mibBuilder.loadTexts: compStateAlarm.setDescription("The Trap generated when a component is not in it's provisioned state.")
restartStateClear = NotificationType((1, 3, 6, 1, 4, 1, 562, 14, 2, 3) + (0,2021)).setObjects(("InternetThruway-MIB", "compRestartKey"), ("InternetThruway-MIB", "componentIndex"), ("InternetThruway-MIB", "componentName"), ("InternetThruway-MIB", "compRestartStatus"), ("InternetThruway-MIB", "compRestartTimeStamp"))
if mibBuilder.loadTexts: restartStateClear.setDescription('The Trap generated when a component goes to its provisioned states after a RestartStateAlarm trap has been sent.')
restartStateAlarm = NotificationType((1, 3, 6, 1, 4, 1, 562, 14, 2, 3) + (0,2024)).setObjects(("InternetThruway-MIB", "compRestartKey"), ("InternetThruway-MIB", "componentIndex"), ("InternetThruway-MIB", "componentName"), ("InternetThruway-MIB", "compRestartStatus"), ("InternetThruway-MIB", "compRestartTimeStamp"))
if mibBuilder.loadTexts: restartStateAlarm.setDescription('The Trap generated when a component restarts repeatedly.')
# SS7 alarms (3xxx): link failure/congestion, ISUP, FEP/BEP congestion,
# MTP3/MTP2, linkset, destination accessibility/congestion, link alignment.
ss7LinkFailureAlarm = NotificationType((1, 3, 6, 1, 4, 1, 562, 14, 2, 3) + (0,3004)).setObjects(("InternetThruway-MIB", "lfIndex"), ("InternetThruway-MIB", "lfKey"), ("InternetThruway-MIB", "lfIPAddress"), ("InternetThruway-MIB", "lfLinkCode"), ("InternetThruway-MIB", "lfName"), ("InternetThruway-MIB", "lfCardId"), ("InternetThruway-MIB", "lfLinkSet"), ("InternetThruway-MIB", "lfTimeStamp"))
if mibBuilder.loadTexts: ss7LinkFailureAlarm.setDescription('Trap generated for an SS7 link failure.')
ss7LinkFailureClear = NotificationType((1, 3, 6, 1, 4, 1, 562, 14, 2, 3) + (0,3001)).setObjects(("InternetThruway-MIB", "lfIndex"), ("InternetThruway-MIB", "lfKey"), ("InternetThruway-MIB", "lfIPAddress"), ("InternetThruway-MIB", "lfLinkCode"), ("InternetThruway-MIB", "lfName"), ("InternetThruway-MIB", "lfCardId"), ("InternetThruway-MIB", "lfLinkSet"), ("InternetThruway-MIB", "lfTimeStamp"))
if mibBuilder.loadTexts: ss7LinkFailureClear.setDescription('Trap generated to clear an SS7 link failure.')
ss7LinkCongestionAlarm = NotificationType((1, 3, 6, 1, 4, 1, 562, 14, 2, 3) + (0,3012)).setObjects(("InternetThruway-MIB", "lcIndex"), ("InternetThruway-MIB", "lcKey"), ("InternetThruway-MIB", "lcIPAddress"), ("InternetThruway-MIB", "lcLinkCode"), ("InternetThruway-MIB", "lcName"), ("InternetThruway-MIB", "lcCardId"), ("InternetThruway-MIB", "lcLinkSet"), ("InternetThruway-MIB", "lcTimeStamp"))
if mibBuilder.loadTexts: ss7LinkCongestionAlarm.setDescription('Trap generated for congestion on an SS7 Link.')
ss7LinkCongestionClear = NotificationType((1, 3, 6, 1, 4, 1, 562, 14, 2, 3) + (0,3011)).setObjects(("InternetThruway-MIB", "lcIndex"), ("InternetThruway-MIB", "lcKey"), ("InternetThruway-MIB", "lcIPAddress"), ("InternetThruway-MIB", "lcLinkCode"), ("InternetThruway-MIB", "lcName"), ("InternetThruway-MIB", "lcCardId"), ("InternetThruway-MIB", "lcLinkSet"), ("InternetThruway-MIB", "lcTimeStamp"))
if mibBuilder.loadTexts: ss7LinkCongestionClear.setDescription('Trap generated to indicate there is no longer congestion on an SS7 link.')
ss7ISUPFailureAlarm = NotificationType((1, 3, 6, 1, 4, 1, 562, 14, 2, 3) + (0,3025)).setObjects(("InternetThruway-MIB", "ifIndex"), ("InternetThruway-MIB", "ifKey"), ("InternetThruway-MIB", "ifIPAddress"), ("InternetThruway-MIB", "ifName"), ("InternetThruway-MIB", "ifTimeStamp"))
if mibBuilder.loadTexts: ss7ISUPFailureAlarm.setDescription('Trap generated to indicate ISUP failure.')
ss7ISUPFailureClear = NotificationType((1, 3, 6, 1, 4, 1, 562, 14, 2, 3) + (0,3021)).setObjects(("InternetThruway-MIB", "ifIndex"), ("InternetThruway-MIB", "ifKey"), ("InternetThruway-MIB", "ifIPAddress"), ("InternetThruway-MIB", "ifName"), ("InternetThruway-MIB", "ifTimeStamp"))
if mibBuilder.loadTexts: ss7ISUPFailureClear.setDescription('Trap generated to clear an ISUP failure alarm.')
ss7ISUPCongestionAlarm = NotificationType((1, 3, 6, 1, 4, 1, 562, 14, 2, 3) + (0,3033)).setObjects(("InternetThruway-MIB", "icIndex"), ("InternetThruway-MIB", "icKey"), ("InternetThruway-MIB", "icIPAddress"), ("InternetThruway-MIB", "icCongestionLevel"), ("InternetThruway-MIB", "icName"), ("InternetThruway-MIB", "icTimeStamp"))
if mibBuilder.loadTexts: ss7ISUPCongestionAlarm.setDescription('Trap generated to indicate congestion with the ISUP protocol stack.')
ss7ISUPCongestionClear = NotificationType((1, 3, 6, 1, 4, 1, 562, 14, 2, 3) + (0,3031)).setObjects(("InternetThruway-MIB", "icIndex"), ("InternetThruway-MIB", "icKey"), ("InternetThruway-MIB", "icIPAddress"), ("InternetThruway-MIB", "icCongestionLevel"), ("InternetThruway-MIB", "icName"), ("InternetThruway-MIB", "icTimeStamp"))
if mibBuilder.loadTexts: ss7ISUPCongestionClear.setDescription('Trap generated to indicate there is no longer congestion with the ISUP protocol stack.')
ss7FEPCongestionWarning = NotificationType((1, 3, 6, 1, 4, 1, 562, 14, 2, 3) + (0,3042)).setObjects(("InternetThruway-MIB", "trapIdKey"), ("InternetThruway-MIB", "trapIPAddress"), ("InternetThruway-MIB", "trapName"), ("InternetThruway-MIB", "trapTimeStamp"))
if mibBuilder.loadTexts: ss7FEPCongestionWarning.setDescription('Notification trap generated to indicate congestion encountered by the SS7 front-end process.')
ss7BEPCongestionWarning = NotificationType((1, 3, 6, 1, 4, 1, 562, 14, 2, 3) + (0,3052)).setObjects(("InternetThruway-MIB", "trapIdKey"), ("InternetThruway-MIB", "trapIPAddress"), ("InternetThruway-MIB", "trapName"), ("InternetThruway-MIB", "trapTimeStamp"))
if mibBuilder.loadTexts: ss7BEPCongestionWarning.setDescription('Notification trap generated to indicate congestion encountered by the SS7 back-end process.')
# MTP3 congestion severities share one varbind list: minor (3063) / major (3064) / critical (3065) / clear (3061).
ss7MTP3CongestionMinor = NotificationType((1, 3, 6, 1, 4, 1, 562, 14, 2, 3) + (0,3063)).setObjects(("InternetThruway-MIB", "mtp3Index"), ("InternetThruway-MIB", "mtp3Key"), ("InternetThruway-MIB", "mtp3IPAddress"), ("InternetThruway-MIB", "mtp3Name"), ("InternetThruway-MIB", "mtp3TimeStamp"))
if mibBuilder.loadTexts: ss7MTP3CongestionMinor.setDescription('Trap generated for MTP3 congestion.')
ss7MTP3CongestionMajor = NotificationType((1, 3, 6, 1, 4, 1, 562, 14, 2, 3) + (0,3064)).setObjects(("InternetThruway-MIB", "mtp3Index"), ("InternetThruway-MIB", "mtp3Key"), ("InternetThruway-MIB", "mtp3IPAddress"), ("InternetThruway-MIB", "mtp3Name"), ("InternetThruway-MIB", "mtp3TimeStamp"))
if mibBuilder.loadTexts: ss7MTP3CongestionMajor.setDescription('Trap generated for MTP3 congestion.')
ss7MTP3CongestionCritical = NotificationType((1, 3, 6, 1, 4, 1, 562, 14, 2, 3) + (0,3065)).setObjects(("InternetThruway-MIB", "mtp3Index"), ("InternetThruway-MIB", "mtp3Key"), ("InternetThruway-MIB", "mtp3IPAddress"), ("InternetThruway-MIB", "mtp3Name"), ("InternetThruway-MIB", "mtp3TimeStamp"))
if mibBuilder.loadTexts: ss7MTP3CongestionCritical.setDescription('Trap generated for MTP3 congestion.')
ss7MTP3CongestionClear = NotificationType((1, 3, 6, 1, 4, 1, 562, 14, 2, 3) + (0,3061)).setObjects(("InternetThruway-MIB", "mtp3Index"), ("InternetThruway-MIB", "mtp3Key"), ("InternetThruway-MIB", "mtp3IPAddress"), ("InternetThruway-MIB", "mtp3Name"), ("InternetThruway-MIB", "mtp3TimeStamp"))
if mibBuilder.loadTexts: ss7MTP3CongestionClear.setDescription('Trap generated to indicate there is no longer MTP3 congestion.')
# NOTE: the MTP2 alarm (3075) carries mtp2AlarmCondition; the clear (3071) does not.
ss7MTP2TrunkFailureAlarm = NotificationType((1, 3, 6, 1, 4, 1, 562, 14, 2, 3) + (0,3075)).setObjects(("InternetThruway-MIB", "mtp2Index"), ("InternetThruway-MIB", "mtp2Key"), ("InternetThruway-MIB", "mtp2IPAddress"), ("InternetThruway-MIB", "mtp2Name"), ("InternetThruway-MIB", "mtp2CardId"), ("InternetThruway-MIB", "mtp2AlarmCondition"), ("InternetThruway-MIB", "mtp2TimeStamp"))
if mibBuilder.loadTexts: ss7MTP2TrunkFailureAlarm.setDescription('Trap generated to indicate an MTP2 trunk failure condition.')
ss7MTP2TrunkFailureClear = NotificationType((1, 3, 6, 1, 4, 1, 562, 14, 2, 3) + (0,3071)).setObjects(("InternetThruway-MIB", "mtp2Index"), ("InternetThruway-MIB", "mtp2Key"), ("InternetThruway-MIB", "mtp2IPAddress"), ("InternetThruway-MIB", "mtp2Name"), ("InternetThruway-MIB", "mtp2CardId"), ("InternetThruway-MIB", "mtp2TimeStamp"))
if mibBuilder.loadTexts: ss7MTP2TrunkFailureClear.setDescription('Trap generated to clear an MTP2 trunk failure alarm.')
ss7LinksetFailureAlarm = NotificationType((1, 3, 6, 1, 4, 1, 562, 14, 2, 3) + (0,3085)).setObjects(("InternetThruway-MIB", "lsFailureIndex"), ("InternetThruway-MIB", "lsFailureKey"), ("InternetThruway-MIB", "lsFailureIPAddress"), ("InternetThruway-MIB", "lsFailureName"), ("InternetThruway-MIB", "lsFailurePointcode"), ("InternetThruway-MIB", "lsFailureTimeStamp"))
if mibBuilder.loadTexts: ss7LinksetFailureAlarm.setDescription('Trap generated to indicate a linkset failure.')
ss7LinksetFailureClear = NotificationType((1, 3, 6, 1, 4, 1, 562, 14, 2, 3) + (0,3081)).setObjects(("InternetThruway-MIB", "lsFailureIndex"), ("InternetThruway-MIB", "lsFailureKey"), ("InternetThruway-MIB", "lsFailureIPAddress"), ("InternetThruway-MIB", "lsFailureName"), ("InternetThruway-MIB", "lsFailurePointcode"), ("InternetThruway-MIB", "lsFailureTimeStamp"))
if mibBuilder.loadTexts: ss7LinksetFailureClear.setDescription('Trap generated to clear a linkset failure alarm.')
ss7DestinationInaccessible = NotificationType((1, 3, 6, 1, 4, 1, 562, 14, 2, 3) + (0,3092)).setObjects(("InternetThruway-MIB", "destInaccessIndex"), ("InternetThruway-MIB", "destInaccessKey"), ("InternetThruway-MIB", "destInaccessIPAddress"), ("InternetThruway-MIB", "destInaccessName"), ("InternetThruway-MIB", "destInaccessPointcode"), ("InternetThruway-MIB", "destInaccessTimeStamp"))
if mibBuilder.loadTexts: ss7DestinationInaccessible.setDescription('Trap generated to indicate that a signalling destination is inaccessible. A destination is considered inaccessible once Transfer Prohibited (TFP) messages are received which indicate that the route to that destination is prohibited.')
ss7DestinationAccessible = NotificationType((1, 3, 6, 1, 4, 1, 562, 14, 2, 3) + (0,3091)).setObjects(("InternetThruway-MIB", "destInaccessIndex"), ("InternetThruway-MIB", "destInaccessKey"), ("InternetThruway-MIB", "destInaccessIPAddress"), ("InternetThruway-MIB", "destInaccessName"), ("InternetThruway-MIB", "destInaccessPointcode"), ("InternetThruway-MIB", "destInaccessTimeStamp"))
if mibBuilder.loadTexts: ss7DestinationAccessible.setDescription('Trap generated to clear a destination inacessible alarm. An inaccessible signalling destination is considered accessible once Transfer Allowed (TFA) messages are sent along its prohibited signalling routes.')
ss7DestinationCongestedAlarm = NotificationType((1, 3, 6, 1, 4, 1, 562, 14, 2, 3) + (0,3103)).setObjects(("InternetThruway-MIB", "destCongestIndex"), ("InternetThruway-MIB", "destCongestKey"), ("InternetThruway-MIB", "destCongestIPAddress"), ("InternetThruway-MIB", "destCongestName"), ("InternetThruway-MIB", "destCongestPointcode"), ("InternetThruway-MIB", "destCongestCongestionLevel"), ("InternetThruway-MIB", "destCongestTimeStamp"))
if mibBuilder.loadTexts: ss7DestinationCongestedAlarm.setDescription('Trap generated to indicate congestion at an SS7 destination.')
ss7DestinationCongestedClear = NotificationType((1, 3, 6, 1, 4, 1, 562, 14, 2, 3) + (0,3101)).setObjects(("InternetThruway-MIB", "destCongestIndex"), ("InternetThruway-MIB", "destCongestKey"), ("InternetThruway-MIB", "destCongestIPAddress"), ("InternetThruway-MIB", "destCongestName"), ("InternetThruway-MIB", "destCongestPointcode"), ("InternetThruway-MIB", "destCongestCongestionLevel"), ("InternetThruway-MIB", "destCongestTimeStamp"))
if mibBuilder.loadTexts: ss7DestinationCongestedClear.setDescription('Trap generated to indicate that there is no longer congestion at an SS7 destination.')
ss7LinkAlignmentFailureAlarm = NotificationType((1, 3, 6, 1, 4, 1, 562, 14, 2, 3) + (0,3114)).setObjects(("InternetThruway-MIB", "linkAlignIndex"), ("InternetThruway-MIB", "linkAlignKey"), ("InternetThruway-MIB", "linkAlignIPAddress"), ("InternetThruway-MIB", "linkAlignName"), ("InternetThruway-MIB", "linkAlignLinkCode"), ("InternetThruway-MIB", "linkAlignCardId"), ("InternetThruway-MIB", "linkAlignLinkSet"), ("InternetThruway-MIB", "linkAlignTimeStamp"))
if mibBuilder.loadTexts: ss7LinkAlignmentFailureAlarm.setDescription('Trap generated to indicate alignment failure on a datalink.')
ss7LinkAlignmentFailureClear = NotificationType((1, 3, 6, 1, 4, 1, 562, 14, 2, 3) + (0,3111)).setObjects(("InternetThruway-MIB", "linkAlignIndex"), ("InternetThruway-MIB", "linkAlignKey"), ("InternetThruway-MIB", "linkAlignIPAddress"), ("InternetThruway-MIB", "linkAlignName"), ("InternetThruway-MIB", "linkAlignLinkCode"), ("InternetThruway-MIB", "linkAlignCardId"), ("InternetThruway-MIB", "linkAlignLinkSet"), ("InternetThruway-MIB", "linkAlignTimeStamp"))
if mibBuilder.loadTexts: ss7LinkAlignmentFailureClear.setDescription('Trap generated to clear a datalink alignment failure alarm.')
# Cluster / high-availability traps (4xxx): lost/found peer, state changes,
# CSG complex state severity levels, cluster-info retrieval failure.
ncLostServerTrap = NotificationType((1, 3, 6, 1, 4, 1, 562, 14, 2, 3) + (0,4014)).setObjects(("InternetThruway-MIB", "lsIndex"), ("InternetThruway-MIB", "lsKey"), ("InternetThruway-MIB", "lsName"), ("InternetThruway-MIB", "lsIPAddress"), ("InternetThruway-MIB", "lsTimeStamp"))
if mibBuilder.loadTexts: ncLostServerTrap.setDescription('This trap is generated when the CSG loses contact with its peer in the cluster. The variables in this trap identify the server that has been lost. The originator of this trap is implicitly defined.')
ncFoundServerTrap = NotificationType((1, 3, 6, 1, 4, 1, 562, 14, 2, 3) + (0,4011)).setObjects(("InternetThruway-MIB", "lsIndex"), ("InternetThruway-MIB", "lsKey"), ("InternetThruway-MIB", "lsName"), ("InternetThruway-MIB", "lsIPAddress"), ("InternetThruway-MIB", "lsTimeStamp"))
if mibBuilder.loadTexts: ncFoundServerTrap.setDescription('This trap is generated when the initially comes into contact with or regains contact with its peer in the cluster. The variables in this trap identify the server that has been found. The originator of this trap is implicitly defined.')
ncStateChangeTrap = NotificationType((1, 3, 6, 1, 4, 1, 562, 14, 2, 3) + (0,4022)).setObjects(("InternetThruway-MIB", "ncEthernetName"), ("InternetThruway-MIB", "ncEthernetIP"), ("InternetThruway-MIB", "ncOperationalState"), ("InternetThruway-MIB", "ncStandbyState"), ("InternetThruway-MIB", "ncAvailabilityState"))
if mibBuilder.loadTexts: ncStateChangeTrap.setDescription('This trap is generated when any of the state values change.')
# Complex state traps carry full local+mate state; only the trap suffix differs.
csgComplexStateTrapClear = NotificationType((1, 3, 6, 1, 4, 1, 562, 14, 2, 3) + (0,4031)).setObjects(("InternetThruway-MIB", "cplxName"), ("InternetThruway-MIB", "cplxLocEthernetName"), ("InternetThruway-MIB", "cplxLocEthernetIP"), ("InternetThruway-MIB", "cplxLocOperationalState"), ("InternetThruway-MIB", "cplxLocStandbyState"), ("InternetThruway-MIB", "cplxLocAvailabilityState"), ("InternetThruway-MIB", "cplxMateEthernetName"), ("InternetThruway-MIB", "cplxMateEthernetIP"), ("InternetThruway-MIB", "cplxMateOperationalState"), ("InternetThruway-MIB", "cplxMateStandbyState"), ("InternetThruway-MIB", "cplxMateAvailabilityState"), ("InternetThruway-MIB", "cplxAlarmStatus"))
if mibBuilder.loadTexts: csgComplexStateTrapClear.setDescription('This trap is generated when any of the state values change Severity is determined only by the operational and standby states of both servers.')
csgComplexStateTrapMajor = NotificationType((1, 3, 6, 1, 4, 1, 562, 14, 2, 3) + (0,4034)).setObjects(("InternetThruway-MIB", "cplxName"), ("InternetThruway-MIB", "cplxLocEthernetName"), ("InternetThruway-MIB", "cplxLocEthernetIP"), ("InternetThruway-MIB", "cplxLocOperationalState"), ("InternetThruway-MIB", "cplxLocStandbyState"), ("InternetThruway-MIB", "cplxLocAvailabilityState"), ("InternetThruway-MIB", "cplxMateEthernetName"), ("InternetThruway-MIB", "cplxMateEthernetIP"), ("InternetThruway-MIB", "cplxMateOperationalState"), ("InternetThruway-MIB", "cplxMateStandbyState"), ("InternetThruway-MIB", "cplxMateAvailabilityState"), ("InternetThruway-MIB", "cplxAlarmStatus"))
if mibBuilder.loadTexts: csgComplexStateTrapMajor.setDescription('This trap is generated when any of the state values change Severity is determined only by the operational and standby states of both servers.')
csgComplexStateTrapCritical = NotificationType((1, 3, 6, 1, 4, 1, 562, 14, 2, 3) + (0,4035)).setObjects(("InternetThruway-MIB", "cplxName"), ("InternetThruway-MIB", "cplxLocEthernetName"), ("InternetThruway-MIB", "cplxLocEthernetIP"), ("InternetThruway-MIB", "cplxLocOperationalState"), ("InternetThruway-MIB", "cplxLocStandbyState"), ("InternetThruway-MIB", "cplxLocAvailabilityState"), ("InternetThruway-MIB", "cplxMateEthernetName"), ("InternetThruway-MIB", "cplxMateEthernetIP"), ("InternetThruway-MIB", "cplxMateOperationalState"), ("InternetThruway-MIB", "cplxMateStandbyState"), ("InternetThruway-MIB", "cplxMateAvailabilityState"), ("InternetThruway-MIB", "cplxAlarmStatus"))
if mibBuilder.loadTexts: csgComplexStateTrapCritical.setDescription('This trap is generated when any of the state values change Severity is determined only by the operational and standby states of both servers.')
cisRetrievalFailureTrapMajor = NotificationType((1, 3, 6, 1, 4, 1, 562, 14, 2, 3) + (0,4044))
if mibBuilder.loadTexts: cisRetrievalFailureTrapMajor.setDescription('This trap is generated when the TruCluster ASE information retrieval attempts failed repeatedly. ')
# Generic text-message traps (9001-9005), one per severity, sharing the
# trapIdKey / trapGenericStr1 / trapTimeStamp varbinds.
genericNormal = NotificationType((1, 3, 6, 1, 4, 1, 562, 14, 2, 3) + (0,9001)).setObjects(("InternetThruway-MIB", "trapIdKey"), ("InternetThruway-MIB", "trapGenericStr1"), ("InternetThruway-MIB", "trapTimeStamp"))
if mibBuilder.loadTexts: genericNormal.setDescription('The Trap generated for generic normal priority text messages')
genericWarning = NotificationType((1, 3, 6, 1, 4, 1, 562, 14, 2, 3) + (0,9002)).setObjects(("InternetThruway-MIB", "trapIdKey"), ("InternetThruway-MIB", "trapGenericStr1"), ("InternetThruway-MIB", "trapTimeStamp"))
if mibBuilder.loadTexts: genericWarning.setDescription('The Trap generated for generic warning priority text messages')
genericMinor = NotificationType((1, 3, 6, 1, 4, 1, 562, 14, 2, 3) + (0,9003)).setObjects(("InternetThruway-MIB", "trapIdKey"), ("InternetThruway-MIB", "trapGenericStr1"), ("InternetThruway-MIB", "trapTimeStamp"))
if mibBuilder.loadTexts: genericMinor.setDescription('The Trap generated for generic minor priority text messages')
genericMajor = NotificationType((1, 3, 6, 1, 4, 1, 562, 14, 2, 3) + (0,9004)).setObjects(("InternetThruway-MIB", "trapIdKey"), ("InternetThruway-MIB", "trapGenericStr1"), ("InternetThruway-MIB", "trapTimeStamp"))
if mibBuilder.loadTexts: genericMajor.setDescription('The Trap generated for generic major priority text messages')
genericCritical = NotificationType((1, 3, 6, 1, 4, 1, 562, 14, 2, 3) + (0,9005)).setObjects(("InternetThruway-MIB", "trapIdKey"), ("InternetThruway-MIB", "trapGenericStr1"), ("InternetThruway-MIB", "trapTimeStamp"))
if mibBuilder.loadTexts: genericCritical.setDescription('The Trap generated for generic critical priority text messages')
# Home Gateway (9011/9014) and NAS (9021/9024) status clear/alarm pairs.
hgStatusClear = NotificationType((1, 3, 6, 1, 4, 1, 562, 14, 2, 3) + (0,9011)).setObjects(("InternetThruway-MIB", "hgKey"), ("InternetThruway-MIB", "hgIndex"), ("InternetThruway-MIB", "hgName"), ("InternetThruway-MIB", "hgIPAddress"), ("InternetThruway-MIB", "hgAlarmTimeStamp"))
if mibBuilder.loadTexts: hgStatusClear.setDescription('The Trap generated when a Home Gateway Status returns to normal after having previously been in the failed status.')
hgStatusAlarm = NotificationType((1, 3, 6, 1, 4, 1, 562, 14, 2, 3) + (0,9014)).setObjects(("InternetThruway-MIB", "hgKey"), ("InternetThruway-MIB", "hgIndex"), ("InternetThruway-MIB", "hgName"), ("InternetThruway-MIB", "hgIPAddress"), ("InternetThruway-MIB", "hgAlarmTimeStamp"))
if mibBuilder.loadTexts: hgStatusAlarm.setDescription('The Trap generated when a Home Gateway is indicated to be failed.')
nasStatusClear = NotificationType((1, 3, 6, 1, 4, 1, 562, 14, 2, 3) + (0,9021)).setObjects(("InternetThruway-MIB", "nasKey"), ("InternetThruway-MIB", "nasIndex"), ("InternetThruway-MIB", "nasName"), ("InternetThruway-MIB", "nasIPAddress"), ("InternetThruway-MIB", "nasAlarmTimeStamp"), ("InternetThruway-MIB", "nasCmplxName"))
if mibBuilder.loadTexts: nasStatusClear.setDescription('The Trap generated when a rapport registers after having previously been in the failed status.')
nasStatusAlarm = NotificationType((1, 3, 6, 1, 4, 1, 562, 14, 2, 3) + (0,9024)).setObjects(("InternetThruway-MIB", "nasKey"), ("InternetThruway-MIB", "nasIndex"), ("InternetThruway-MIB", "nasName"), ("InternetThruway-MIB", "nasIPAddress"), ("InternetThruway-MIB", "nasAlarmTimeStamp"), ("InternetThruway-MIB", "nasCmplxName"))
if mibBuilder.loadTexts: nasStatusAlarm.setDescription('The Trap generated when a Nas is indicated to be failed.')
# --- linkOMTable: per-signaling-link operational measurements ------------------
# Table OID 1.3.6.1.4.1.562.14.2.7.1.1; rows are indexed by
# (linksetIndex, linkIndex), both defined elsewhere in this MIB module.
# All columns are read-only; Counter32 columns are cumulative event counts.
linkOMTable = MibTable((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 1, 1), )
if mibBuilder.loadTexts: linkOMTable.setStatus('mandatory')
if mibBuilder.loadTexts: linkOMTable.setDescription('The LinkTable contains information about each signaling link on the CSG')
linkOMTableEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 1, 1, 1), ).setIndexNames((0, "InternetThruway-MIB", "linksetIndex"), (0, "InternetThruway-MIB", "linkIndex"))
if mibBuilder.loadTexts: linkOMTableEntry.setStatus('mandatory')
if mibBuilder.loadTexts: linkOMTableEntry.setDescription('An entry in the LinkTable. Indexed by linkIndex')
# Identity columns (link id and owning linkset id).
linkOMId = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 1, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: linkOMId.setStatus('mandatory')
if mibBuilder.loadTexts: linkOMId.setDescription('The id of the link.')
linkOMSetId = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 1, 1, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: linkOMSetId.setStatus('mandatory')
if mibBuilder.loadTexts: linkOMSetId.setDescription('The id of the linkset.')
# Event counters: failures, congestion episodes, inhibits, MSU traffic,
# and remote processor outages reported for this link.
linkFailures = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 1, 1, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: linkFailures.setStatus('mandatory')
if mibBuilder.loadTexts: linkFailures.setDescription('Number of times this signaling link has failed.')
linkCongestions = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 1, 1, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: linkCongestions.setStatus('mandatory')
if mibBuilder.loadTexts: linkCongestions.setDescription('Number of times this signaling link has Congested.')
linkInhibits = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 1, 1, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: linkInhibits.setStatus('mandatory')
if mibBuilder.loadTexts: linkInhibits.setDescription('Number of times this signaling link has been inhibited.')
linkTransmittedMSUs = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 1, 1, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: linkTransmittedMSUs.setStatus('mandatory')
if mibBuilder.loadTexts: linkTransmittedMSUs.setDescription('Number of messages sent on this signaling link.')
linkReceivedMSUs = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 1, 1, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: linkReceivedMSUs.setStatus('mandatory')
if mibBuilder.loadTexts: linkReceivedMSUs.setDescription('Number of messages received on this signaling link.')
# NOTE(review): 'outgaes' below is a typo carried over from the source MIB's
# DESCRIPTION clause; left as-is so the module matches the compiled MIB.
linkRemoteProcOutages = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 1, 1, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: linkRemoteProcOutages.setStatus('mandatory')
if mibBuilder.loadTexts: linkRemoteProcOutages.setDescription('Number of times the remote processor outgaes have been reported.')
# --- Signaling timer expiry scalars (OID subtree ...14.2.7.2) ------------------
# One Counter32 per protocol timer (BLA/RLC/UBA/RSA — presumably ISUP
# supervision timers; confirm against the source MIB), counting expiries.
bLATimerExpiries = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 2, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bLATimerExpiries.setStatus('mandatory')
if mibBuilder.loadTexts: bLATimerExpiries.setDescription('Number of times the BLA timer has expired.')
rLCTimerExpiries = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 2, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rLCTimerExpiries.setStatus('mandatory')
if mibBuilder.loadTexts: rLCTimerExpiries.setDescription('Number of times the RLC timer has expired.')
uBATimerExpiries = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 2, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uBATimerExpiries.setStatus('mandatory')
if mibBuilder.loadTexts: uBATimerExpiries.setDescription('Number of times the UBA timer has expired.')
rSATimerExpiries = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 2, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rSATimerExpiries.setStatus('mandatory')
if mibBuilder.loadTexts: rSATimerExpiries.setDescription('Number of times the RSA timer has expired.')
# --- System-wide outgoing-call OM scalars (OID subtree ...14.2.7.3, cols 1-12) -
# Attempt / completion / rejection counters for outgoing call legs, plus
# cumulative and currently-active gauges (modelled as Counter32 by the MIB).
outCallAttempts = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 3, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: outCallAttempts.setStatus('mandatory')
if mibBuilder.loadTexts: outCallAttempts.setDescription('Total number of outgoing call legs attempted.')
outCallNormalCompletions = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 3, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: outCallNormalCompletions.setStatus('mandatory')
if mibBuilder.loadTexts: outCallNormalCompletions.setDescription('Total number of outgoing call legs completed normally.')
outCallAbnormalCompletions = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 3, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: outCallAbnormalCompletions.setStatus('mandatory')
if mibBuilder.loadTexts: outCallAbnormalCompletions.setDescription('Total number of outgoing call legs completed abnormally.')
# Rejection counters, broken down by release cause.
userBusyOutCallRejects = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 3, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: userBusyOutCallRejects.setStatus('mandatory')
if mibBuilder.loadTexts: userBusyOutCallRejects.setDescription('Total Number of outgoing call legs rejected due to user busy signal.')
tempFailOutCallRejects = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 3, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tempFailOutCallRejects.setStatus('mandatory')
if mibBuilder.loadTexts: tempFailOutCallRejects.setDescription('Total Number of outgoing call legs rejected due to temporary failure.')
userUnavailableOutCallRejects = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 3, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: userUnavailableOutCallRejects.setStatus('mandatory')
if mibBuilder.loadTexts: userUnavailableOutCallRejects.setDescription('Total number of outgoing call legs failed due to user not available.')
abnormalReleaseOutCallRejects = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 3, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: abnormalReleaseOutCallRejects.setStatus('mandatory')
if mibBuilder.loadTexts: abnormalReleaseOutCallRejects.setDescription('Total Number of outgoing call legs rejected due to abnormal release.')
otherOutCallRejects = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 3, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: otherOutCallRejects.setStatus('mandatory')
if mibBuilder.loadTexts: otherOutCallRejects.setDescription('Total Number of outgoing call legs rejected due to other reasons.')
# NOTE(review): 'Cumulatvie' below is a typo preserved from the source MIB.
cumulativeActiveOutCalls = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 3, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cumulativeActiveOutCalls.setStatus('mandatory')
if mibBuilder.loadTexts: cumulativeActiveOutCalls.setDescription('Cumulatvie Number of outgoing call legs active so far.')
currentlyActiveOutCalls = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 3, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: currentlyActiveOutCalls.setStatus('mandatory')
if mibBuilder.loadTexts: currentlyActiveOutCalls.setDescription('Total Number of outgoing call legs currently active.')
currentlyActiveDigitalOutCalls = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 3, 11), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: currentlyActiveDigitalOutCalls.setStatus('mandatory')
if mibBuilder.loadTexts: currentlyActiveDigitalOutCalls.setDescription('Total Number of outgoing digital call legs currently active.')
currentlyActiveAnalogOutCalls = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 3, 12), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: currentlyActiveAnalogOutCalls.setStatus('mandatory')
if mibBuilder.loadTexts: currentlyActiveAnalogOutCalls.setDescription('Total Number of outgoing analog call legs currently active.')
# --- System-wide incoming-call OM scalars (OID subtree ...14.2.7.3, cols 13-24)
# Mirror of the outgoing-call scalars above, for incoming call legs.
inCallAttempts = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 3, 13), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: inCallAttempts.setStatus('mandatory')
if mibBuilder.loadTexts: inCallAttempts.setDescription('Total number of incoming call legs attempted.')
inCallNormalCompletions = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 3, 14), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: inCallNormalCompletions.setStatus('mandatory')
if mibBuilder.loadTexts: inCallNormalCompletions.setDescription('Total number of incoming call legs completed normally.')
inCallAbnormalCompletions = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 3, 15), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: inCallAbnormalCompletions.setStatus('mandatory')
if mibBuilder.loadTexts: inCallAbnormalCompletions.setDescription('Total number of incoming call legs completed abnormally.')
# Rejection counters, broken down by release cause.
userBusyInCallRejects = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 3, 16), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: userBusyInCallRejects.setStatus('mandatory')
if mibBuilder.loadTexts: userBusyInCallRejects.setDescription('Total Number of incoming call legs rejected due to user busy signal.')
tempFailInCallRejects = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 3, 17), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tempFailInCallRejects.setStatus('mandatory')
if mibBuilder.loadTexts: tempFailInCallRejects.setDescription('Total Number of incoming call legs rejected due to temporary failure.')
userUnavailableInCallRejects = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 3, 18), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: userUnavailableInCallRejects.setStatus('mandatory')
if mibBuilder.loadTexts: userUnavailableInCallRejects.setDescription('Total number of incoming call legs failed due to user not available.')
abnormalReleaseInCallRejects = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 3, 19), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: abnormalReleaseInCallRejects.setStatus('mandatory')
if mibBuilder.loadTexts: abnormalReleaseInCallRejects.setDescription('Total Number of incoming call legs rejected due to abnormal release.')
otherInCallRejects = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 3, 20), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: otherInCallRejects.setStatus('mandatory')
if mibBuilder.loadTexts: otherInCallRejects.setDescription('Total Number of incoming call legs rejected due to other reasons.')
# NOTE(review): 'Cumulatvie' below is a typo preserved from the source MIB.
cumulativeActiveInCalls = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 3, 21), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cumulativeActiveInCalls.setStatus('mandatory')
if mibBuilder.loadTexts: cumulativeActiveInCalls.setDescription('Cumulatvie Number of incoming call legs active so far.')
currentlyActiveInCalls = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 3, 22), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: currentlyActiveInCalls.setStatus('mandatory')
if mibBuilder.loadTexts: currentlyActiveInCalls.setDescription('Total Number of incoming call legs currently active.')
currentlyActiveDigitalInCalls = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 3, 23), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: currentlyActiveDigitalInCalls.setStatus('mandatory')
if mibBuilder.loadTexts: currentlyActiveDigitalInCalls.setDescription('Total Number of incoming digital call legs currently active.')
currentlyActiveAnalogInCalls = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 3, 24), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: currentlyActiveAnalogInCalls.setStatus('mandatory')
if mibBuilder.loadTexts: currentlyActiveAnalogInCalls.setDescription('Total Number of incoming analog call legs currently active.')
# --- trunkCallOMTable: per-trunk-group call OMs (OID ...14.2.7.4.1) ------------
# Same attempt/completion/reject/active breakdown as the system-wide scalars,
# but indexed per trunk group (trunkCallOMIndex), plus trunk occupancy and
# CCS (Centum Call Seconds) traffic measurements.
trunkCallOMTable = MibTable((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 4, 1), )
if mibBuilder.loadTexts: trunkCallOMTable.setStatus('mandatory')
if mibBuilder.loadTexts: trunkCallOMTable.setDescription('The TrunkCallOMTable contains call related OMs on a trunk group basis')
trunkCallOMTableEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 4, 1, 1), ).setIndexNames((0, "InternetThruway-MIB", "trunkCallOMIndex"))
if mibBuilder.loadTexts: trunkCallOMTableEntry.setStatus('mandatory')
if mibBuilder.loadTexts: trunkCallOMTableEntry.setDescription('An entry in the TrunkCallOMTable. Indexed by trunkCallOMIndex.')
# Index column: not-accessible by convention, hence no setMaxAccess here.
trunkCallOMIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 4, 1, 1, 1), Integer32())
if mibBuilder.loadTexts: trunkCallOMIndex.setStatus('mandatory')
if mibBuilder.loadTexts: trunkCallOMIndex.setDescription('Identifies a trunk group index.')
trunkGroupCLLI = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 4, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: trunkGroupCLLI.setStatus('mandatory')
if mibBuilder.loadTexts: trunkGroupCLLI.setDescription('The Common Language Location Identifier(CLLI), a unique alphanumeric value to identify this Trunk Group.')
numberOfCircuits = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 4, 1, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: numberOfCircuits.setStatus('mandatory')
if mibBuilder.loadTexts: numberOfCircuits.setDescription('Total Number of Circuits provisioned against this trunk group.')
# Outgoing call-leg counters (columns 4-15).
trunkOutCallAttempts = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 4, 1, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: trunkOutCallAttempts.setStatus('mandatory')
if mibBuilder.loadTexts: trunkOutCallAttempts.setDescription('Total number of outgoing call legs attempted.')
trunkOutCallNormalCompletions = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 4, 1, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: trunkOutCallNormalCompletions.setStatus('mandatory')
if mibBuilder.loadTexts: trunkOutCallNormalCompletions.setDescription('Total number of outgoing call legs completed normally.')
trunkOutCallAbnormalCompletions = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 4, 1, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: trunkOutCallAbnormalCompletions.setStatus('mandatory')
if mibBuilder.loadTexts: trunkOutCallAbnormalCompletions.setDescription('Total number of outgoing call legs completed abnormally.')
trunkUserBusyOutCallRejects = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 4, 1, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: trunkUserBusyOutCallRejects.setStatus('mandatory')
if mibBuilder.loadTexts: trunkUserBusyOutCallRejects.setDescription('Total Number of outgoing call legs rejected due to user busy signal.')
trunkTempFailOutCallRejects = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 4, 1, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: trunkTempFailOutCallRejects.setStatus('mandatory')
if mibBuilder.loadTexts: trunkTempFailOutCallRejects.setDescription('Total Number of outgoing call legs rejected due to temporary failure.')
trunkUserUnavailableOutCallRejects = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 4, 1, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: trunkUserUnavailableOutCallRejects.setStatus('mandatory')
if mibBuilder.loadTexts: trunkUserUnavailableOutCallRejects.setDescription('Total number of outgoing call legs failed due to user not available.')
trunkAbnormalReleaseOutCallRejects = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 4, 1, 1, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: trunkAbnormalReleaseOutCallRejects.setStatus('mandatory')
if mibBuilder.loadTexts: trunkAbnormalReleaseOutCallRejects.setDescription('Total Number of outgoing call legs rejected due to abnormal release.')
trunkOtherOutCallRejects = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 4, 1, 1, 11), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: trunkOtherOutCallRejects.setStatus('mandatory')
if mibBuilder.loadTexts: trunkOtherOutCallRejects.setDescription('Total Number of outgoing call legs rejected due to other reasons.')
# NOTE(review): 'Cumulatvie' typos below are preserved from the source MIB.
trunkCumulativeActiveOutCalls = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 4, 1, 1, 12), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: trunkCumulativeActiveOutCalls.setStatus('mandatory')
if mibBuilder.loadTexts: trunkCumulativeActiveOutCalls.setDescription('Cumulatvie Number of outgoing call legs active so far.')
trunkCurrentlyActiveOutCalls = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 4, 1, 1, 13), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: trunkCurrentlyActiveOutCalls.setStatus('mandatory')
if mibBuilder.loadTexts: trunkCurrentlyActiveOutCalls.setDescription('Total Number of outgoing call legs currently active.')
trunkCurrentlyActiveDigitalOutCalls = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 4, 1, 1, 14), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: trunkCurrentlyActiveDigitalOutCalls.setStatus('mandatory')
if mibBuilder.loadTexts: trunkCurrentlyActiveDigitalOutCalls.setDescription('Total Number of outgoing digital call legs currently active.')
trunkCurrentlyActiveAnalogOutCalls = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 4, 1, 1, 15), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: trunkCurrentlyActiveAnalogOutCalls.setStatus('mandatory')
if mibBuilder.loadTexts: trunkCurrentlyActiveAnalogOutCalls.setDescription('Total Number of outgoing analog call legs currently active.')
# Incoming call-leg counters (columns 16-27), mirroring the outgoing set.
trunkInCallAttempts = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 4, 1, 1, 16), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: trunkInCallAttempts.setStatus('mandatory')
if mibBuilder.loadTexts: trunkInCallAttempts.setDescription('Total number of incoming call legs attempted.')
trunkInCallNormalCompletions = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 4, 1, 1, 17), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: trunkInCallNormalCompletions.setStatus('mandatory')
if mibBuilder.loadTexts: trunkInCallNormalCompletions.setDescription('Total number of incoming call legs completed normally.')
trunkInCallAbnormalCompletions = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 4, 1, 1, 18), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: trunkInCallAbnormalCompletions.setStatus('mandatory')
if mibBuilder.loadTexts: trunkInCallAbnormalCompletions.setDescription('Total number of incoming call legs completed abnormally.')
trunkUserBusyInCallRejects = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 4, 1, 1, 19), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: trunkUserBusyInCallRejects.setStatus('mandatory')
if mibBuilder.loadTexts: trunkUserBusyInCallRejects.setDescription('Total Number of incoming call legs rejected due to user busy signal.')
trunkTempFailInCallRejects = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 4, 1, 1, 20), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: trunkTempFailInCallRejects.setStatus('mandatory')
if mibBuilder.loadTexts: trunkTempFailInCallRejects.setDescription('Total Number of incoming call legs rejected due to temporary failure.')
trunkUserUnavailableInCallRejects = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 4, 1, 1, 21), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: trunkUserUnavailableInCallRejects.setStatus('mandatory')
if mibBuilder.loadTexts: trunkUserUnavailableInCallRejects.setDescription('Total number of incoming call legs failed due to user not available.')
trunkAbnormalReleaseInCallRejects = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 4, 1, 1, 22), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: trunkAbnormalReleaseInCallRejects.setStatus('mandatory')
if mibBuilder.loadTexts: trunkAbnormalReleaseInCallRejects.setDescription('Total Number of incoming call legs rejected due to abnormal release.')
trunkOtherInCallRejects = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 4, 1, 1, 23), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: trunkOtherInCallRejects.setStatus('mandatory')
if mibBuilder.loadTexts: trunkOtherInCallRejects.setDescription('Total Number of incoming call legs rejected due to other reasons.')
trunkCumulativeActiveInCalls = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 4, 1, 1, 24), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: trunkCumulativeActiveInCalls.setStatus('mandatory')
if mibBuilder.loadTexts: trunkCumulativeActiveInCalls.setDescription('Cumulatvie Number of incoming call legs active so far.')
trunkCurrentlyActiveInCalls = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 4, 1, 1, 25), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: trunkCurrentlyActiveInCalls.setStatus('mandatory')
if mibBuilder.loadTexts: trunkCurrentlyActiveInCalls.setDescription('Total Number of incoming call legs currently active.')
trunkCurrentlyActiveDigitalInCalls = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 4, 1, 1, 26), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: trunkCurrentlyActiveDigitalInCalls.setStatus('mandatory')
if mibBuilder.loadTexts: trunkCurrentlyActiveDigitalInCalls.setDescription('Total Number of incoming digital call legs currently active.')
trunkCurrentlyActiveAnalogInCalls = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 4, 1, 1, 27), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: trunkCurrentlyActiveAnalogInCalls.setStatus('mandatory')
if mibBuilder.loadTexts: trunkCurrentlyActiveAnalogInCalls.setDescription('Total Number of incoming analog call legs currently active.')
# Aggregate gauges and CCS (Centum Call Seconds) occupancy/traffic columns.
trunkAllActiveCalls = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 4, 1, 1, 28), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: trunkAllActiveCalls.setStatus('mandatory')
if mibBuilder.loadTexts: trunkAllActiveCalls.setDescription('Total number of currently active call legs (all type).')
trunkOccupancyPerCCS = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 4, 1, 1, 29), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: trunkOccupancyPerCCS.setStatus('mandatory')
if mibBuilder.loadTexts: trunkOccupancyPerCCS.setDescription('Trunk occupancy in Centum Call Seconds.')
trafficInCCSs = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 4, 1, 1, 30), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: trafficInCCSs.setStatus('mandatory')
if mibBuilder.loadTexts: trafficInCCSs.setDescription('Scanned om for tgms that are call busy')
trafficInCCSIncomings = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 4, 1, 1, 31), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: trafficInCCSIncomings.setStatus('mandatory')
if mibBuilder.loadTexts: trafficInCCSIncomings.setDescription('Scanned Om on tgms with an incoming call.')
localBusyInCCSs = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 4, 1, 1, 32), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: localBusyInCCSs.setStatus('mandatory')
if mibBuilder.loadTexts: localBusyInCCSs.setDescription('Scanned om for tgms that are locally blocked.')
# NOTE(review): 'remoteley' typo below is preserved from the source MIB.
remoteBusyInCCSs = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 4, 1, 1, 33), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: remoteBusyInCCSs.setStatus('mandatory')
if mibBuilder.loadTexts: remoteBusyInCCSs.setDescription('Scanned om for tgms that are remoteley blocked.')
# --- nasCallOMTable: per-NAS call OMs (OID ...14.2.7.7.1) ----------------------
# Same attempt/completion/reject/active breakdown as the trunk table, but
# indexed per NAS (nasCallOMIndex), plus port-usage high/low-water columns.
nasCallOMTable = MibTable((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 7, 1), )
if mibBuilder.loadTexts: nasCallOMTable.setStatus('mandatory')
if mibBuilder.loadTexts: nasCallOMTable.setDescription('The NasCallOMTable contains call related OMs on a nas basis')
nasCallOMTableEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 7, 1, 1), ).setIndexNames((0, "InternetThruway-MIB", "nasCallOMIndex"))
if mibBuilder.loadTexts: nasCallOMTableEntry.setStatus('mandatory')
if mibBuilder.loadTexts: nasCallOMTableEntry.setDescription('An entry in the NasCallOMTable. Indexed by nasCallOMIndex.')
# Index column: not-accessible by convention, hence no setMaxAccess here.
nasCallOMIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 7, 1, 1, 1), Integer32())
if mibBuilder.loadTexts: nasCallOMIndex.setStatus('mandatory')
if mibBuilder.loadTexts: nasCallOMIndex.setDescription('Identifies a nas Call OM .')
nasName1 = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 7, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nasName1.setStatus('mandatory')
if mibBuilder.loadTexts: nasName1.setDescription('A unique alphanumeric value to identify this Nas.')
numberOfPorts = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 7, 1, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: numberOfPorts.setStatus('mandatory')
if mibBuilder.loadTexts: numberOfPorts.setDescription('Total Number of Ports provisioned against this nas.')
# Outgoing call-leg counters (columns 4-15).
nasOutCallAttempts = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 7, 1, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nasOutCallAttempts.setStatus('mandatory')
if mibBuilder.loadTexts: nasOutCallAttempts.setDescription('Total number of outgoing call legs attempted.')
nasOutCallNormalCompletions = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 7, 1, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nasOutCallNormalCompletions.setStatus('mandatory')
if mibBuilder.loadTexts: nasOutCallNormalCompletions.setDescription('Total number of outgoing call legs completed normally.')
nasOutCallAbnormalCompletions = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 7, 1, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nasOutCallAbnormalCompletions.setStatus('mandatory')
if mibBuilder.loadTexts: nasOutCallAbnormalCompletions.setDescription('Total number of outgoing call legs completed abnormally.')
nasUserBusyOutCallRejects = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 7, 1, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nasUserBusyOutCallRejects.setStatus('mandatory')
if mibBuilder.loadTexts: nasUserBusyOutCallRejects.setDescription('Total Number of outgoing call legs rejected due to user busy signal.')
nasTempFailOutCallRejects = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 7, 1, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nasTempFailOutCallRejects.setStatus('mandatory')
if mibBuilder.loadTexts: nasTempFailOutCallRejects.setDescription('Total Number of outgoing call legs rejected due to temporary failure.')
nasUserUnavailableOutCallRejects = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 7, 1, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nasUserUnavailableOutCallRejects.setStatus('mandatory')
if mibBuilder.loadTexts: nasUserUnavailableOutCallRejects.setDescription('Total number of outgoing call legs failed due to user not available.')
nasAbnormalReleaseOutCallRejects = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 7, 1, 1, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nasAbnormalReleaseOutCallRejects.setStatus('mandatory')
if mibBuilder.loadTexts: nasAbnormalReleaseOutCallRejects.setDescription('Total Number of outgoing call legs rejected due to abnormal release.')
nasOtherOutCallRejects = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 7, 1, 1, 11), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nasOtherOutCallRejects.setStatus('mandatory')
if mibBuilder.loadTexts: nasOtherOutCallRejects.setDescription('Total Number of outgoing call legs rejected due to other reasons.')
# NOTE(review): 'Cumulatvie' typos below are preserved from the source MIB.
nasCumulativeActiveOutCalls = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 7, 1, 1, 12), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nasCumulativeActiveOutCalls.setStatus('mandatory')
if mibBuilder.loadTexts: nasCumulativeActiveOutCalls.setDescription('Cumulatvie Number of outgoing call legs active so far.')
nasCurrentlyActiveOutCalls = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 7, 1, 1, 13), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nasCurrentlyActiveOutCalls.setStatus('mandatory')
if mibBuilder.loadTexts: nasCurrentlyActiveOutCalls.setDescription('Total Number of outgoing call legs currently active.')
nasCurrentlyActiveDigitalOutCalls = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 7, 1, 1, 14), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nasCurrentlyActiveDigitalOutCalls.setStatus('mandatory')
if mibBuilder.loadTexts: nasCurrentlyActiveDigitalOutCalls.setDescription('Total Number of outgoing digital call legs currently active.')
nasCurrentlyActiveAnalogOutCalls = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 7, 1, 1, 15), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nasCurrentlyActiveAnalogOutCalls.setStatus('mandatory')
if mibBuilder.loadTexts: nasCurrentlyActiveAnalogOutCalls.setDescription('Total Number of outgoing analog call legs currently active.')
# Incoming call-leg counters (columns 16-27), mirroring the outgoing set.
nasInCallAttempts = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 7, 1, 1, 16), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nasInCallAttempts.setStatus('mandatory')
if mibBuilder.loadTexts: nasInCallAttempts.setDescription('Total number of incoming call legs attempted.')
nasInCallNormalCompletions = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 7, 1, 1, 17), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nasInCallNormalCompletions.setStatus('mandatory')
if mibBuilder.loadTexts: nasInCallNormalCompletions.setDescription('Total number of incoming call legs completed normally.')
nasInCallAbnormalCompletions = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 7, 1, 1, 18), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nasInCallAbnormalCompletions.setStatus('mandatory')
if mibBuilder.loadTexts: nasInCallAbnormalCompletions.setDescription('Total number of incoming call legs completed abnormally.')
nasUserBusyInCallRejects = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 7, 1, 1, 19), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nasUserBusyInCallRejects.setStatus('mandatory')
if mibBuilder.loadTexts: nasUserBusyInCallRejects.setDescription('Total Number of incoming call legs rejected due to user busy signal.')
nasTempFailInCallRejects = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 7, 1, 1, 20), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nasTempFailInCallRejects.setStatus('mandatory')
if mibBuilder.loadTexts: nasTempFailInCallRejects.setDescription('Total Number of incoming call legs rejected due to temporary failure.')
nasUserUnavailableInCallRejects = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 7, 1, 1, 21), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nasUserUnavailableInCallRejects.setStatus('mandatory')
if mibBuilder.loadTexts: nasUserUnavailableInCallRejects.setDescription('Total number of incoming call legs failed due to user not available.')
nasAbnormalReleaseInCallRejects = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 7, 1, 1, 22), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nasAbnormalReleaseInCallRejects.setStatus('mandatory')
if mibBuilder.loadTexts: nasAbnormalReleaseInCallRejects.setDescription('Total Number of incoming call legs rejected due to abnormal release.')
nasOtherInCallRejects = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 7, 1, 1, 23), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nasOtherInCallRejects.setStatus('mandatory')
if mibBuilder.loadTexts: nasOtherInCallRejects.setDescription('Total Number of incoming call legs rejected due to other reasons.')
nasCumulativeActiveInCalls = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 7, 1, 1, 24), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nasCumulativeActiveInCalls.setStatus('mandatory')
if mibBuilder.loadTexts: nasCumulativeActiveInCalls.setDescription('Cumulatvie Number of incoming call legs active so far.')
nasCurrentlyActiveInCalls = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 7, 1, 1, 25), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nasCurrentlyActiveInCalls.setStatus('mandatory')
if mibBuilder.loadTexts: nasCurrentlyActiveInCalls.setDescription('Total Number of incoming call legs currently active.')
nasCurrentlyActiveDigitalInCalls = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 7, 1, 1, 26), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nasCurrentlyActiveDigitalInCalls.setStatus('mandatory')
if mibBuilder.loadTexts: nasCurrentlyActiveDigitalInCalls.setDescription('Total Number of incoming digital call legs currently active.')
nasCurrentlyActiveAnalogInCalls = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 7, 1, 1, 27), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nasCurrentlyActiveAnalogInCalls.setStatus('mandatory')
if mibBuilder.loadTexts: nasCurrentlyActiveAnalogInCalls.setDescription('Total Number of incoming analog call legs currently active.')
# Aggregate gauge and port-usage high/low-water marks since last restart.
nasAllActiveCalls = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 7, 1, 1, 28), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nasAllActiveCalls.setStatus('mandatory')
if mibBuilder.loadTexts: nasAllActiveCalls.setDescription('Total number of currently active call legs (all type).')
nasMaxPortsUsed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 7, 1, 1, 29), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nasMaxPortsUsed.setStatus('mandatory')
if mibBuilder.loadTexts: nasMaxPortsUsed.setDescription('Maximum number of ports used in this nas since the last system restart.')
nasMinPortsUsed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 7, 1, 1, 30), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nasMinPortsUsed.setStatus('mandatory')
if mibBuilder.loadTexts: nasMinPortsUsed.setDescription('Minimum number of ports used in this nas since the last system restart.')
# Column 31 continues past this chunk; its loadTexts lines follow elsewhere.
nasCurrentlyInUsePorts = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 7, 1, 1, 31), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nasCurrentlyInUsePorts.setStatus('mandatory')
if mibBuilder.loadTexts: nasCurrentlyInUsePorts.setDescription('Number of ports currently in use.')
phoneCallOMTable = MibTable((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 5, 1), )
if mibBuilder.loadTexts: phoneCallOMTable.setStatus('mandatory')
if mibBuilder.loadTexts: phoneCallOMTable.setDescription('The PhoneCallOMTable contains call related OMs on a phone number basis')
phoneCallOMTableEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 5, 1, 1), ).setIndexNames((0, "InternetThruway-MIB", "phoneCallOMIndex"))
if mibBuilder.loadTexts: phoneCallOMTableEntry.setStatus('mandatory')
if mibBuilder.loadTexts: phoneCallOMTableEntry.setDescription('An entry in the PhoneCallOMTable. Indexed by phoneGroupIndex.')
phoneCallOMIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 5, 1, 1, 1), Integer32())
if mibBuilder.loadTexts: phoneCallOMIndex.setStatus('mandatory')
if mibBuilder.loadTexts: phoneCallOMIndex.setDescription('Uniquely identifies an entry in this table.')
phoneNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 5, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: phoneNumber.setStatus('mandatory')
if mibBuilder.loadTexts: phoneNumber.setDescription('Phone number for the underlying Call OM record.')
phoneDialCallAttempts = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 5, 1, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: phoneDialCallAttempts.setStatus('mandatory')
if mibBuilder.loadTexts: phoneDialCallAttempts.setDescription('Total number of dial calls attempted.')
phoneDialCallNormalCompletions = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 5, 1, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: phoneDialCallNormalCompletions.setStatus('mandatory')
if mibBuilder.loadTexts: phoneDialCallNormalCompletions.setDescription('Total number of dial calls completed normally.')
phoneDialCallAbnormalCompletions = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 5, 1, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: phoneDialCallAbnormalCompletions.setStatus('mandatory')
if mibBuilder.loadTexts: phoneDialCallAbnormalCompletions.setDescription('Total number of dial calls completed abnormally.')
phoneUserBusyDialCallRejects = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 5, 1, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: phoneUserBusyDialCallRejects.setStatus('mandatory')
if mibBuilder.loadTexts: phoneUserBusyDialCallRejects.setDescription('Total Number of dial calls rejected due to user busy signal.')
phoneTempFailDialCallRejects = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 5, 1, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: phoneTempFailDialCallRejects.setStatus('mandatory')
if mibBuilder.loadTexts: phoneTempFailDialCallRejects.setDescription('Total Number of dial calls rejected due to temporary failure.')
phoneUserUnavailableDialCallRejects = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 5, 1, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: phoneUserUnavailableDialCallRejects.setStatus('mandatory')
if mibBuilder.loadTexts: phoneUserUnavailableDialCallRejects.setDescription('Total number of dial calls failed due to user not available.')
phoneAbnormalReleaseDialCallRejects = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 5, 1, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: phoneAbnormalReleaseDialCallRejects.setStatus('mandatory')
if mibBuilder.loadTexts: phoneAbnormalReleaseDialCallRejects.setDescription('Total Number of dial calls rejected due to abnormal release.')
phoneOtherDialCallRejects = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 5, 1, 1, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: phoneOtherDialCallRejects.setStatus('mandatory')
if mibBuilder.loadTexts: phoneOtherDialCallRejects.setDescription('Total Number of dial calls rejected due to other reasons.')
phoneCumulativeActiveDialCalls = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 5, 1, 1, 11), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: phoneCumulativeActiveDialCalls.setStatus('mandatory')
if mibBuilder.loadTexts: phoneCumulativeActiveDialCalls.setDescription('Cumulatvie Number of dial calls active so far.')
phoneCurrentlyActiveDialCalls = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 5, 1, 1, 12), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: phoneCurrentlyActiveDialCalls.setStatus('mandatory')
if mibBuilder.loadTexts: phoneCurrentlyActiveDialCalls.setDescription('Total Number of dial calls currently active.')
phoneCurrentlyActiveDigitalDialCalls = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 5, 1, 1, 13), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: phoneCurrentlyActiveDigitalDialCalls.setStatus('mandatory')
if mibBuilder.loadTexts: phoneCurrentlyActiveDigitalDialCalls.setDescription('Total Number of digital dial calls currently active.')
phoneCurrentlyActiveAnalogDialCalls = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 5, 1, 1, 14), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: phoneCurrentlyActiveAnalogDialCalls.setStatus('mandatory')
if mibBuilder.loadTexts: phoneCurrentlyActiveAnalogDialCalls.setDescription('Total Number of analog dial calls currently active.')
csgComplexCLLI = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 6, 1), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: csgComplexCLLI.setStatus('mandatory')
if mibBuilder.loadTexts: csgComplexCLLI.setDescription('A unique identifier of the CSG Complex.')
serverHostName = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 6, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: serverHostName.setStatus('mandatory')
if mibBuilder.loadTexts: serverHostName.setDescription('Host Name of this server.')
serverIpAddress = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 6, 3), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: serverIpAddress.setStatus('mandatory')
if mibBuilder.loadTexts: serverIpAddress.setDescription('IP address of this server.')
serverCLLI = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 6, 4), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: serverCLLI.setStatus('mandatory')
if mibBuilder.loadTexts: serverCLLI.setDescription('A unique identifier of this server (common in the telco world).')
mateServerHostName = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 6, 5), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mateServerHostName.setStatus('mandatory')
if mibBuilder.loadTexts: mateServerHostName.setDescription('Host Name of this server.')
mateServerIpAddress = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 6, 6), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mateServerIpAddress.setStatus('mandatory')
if mibBuilder.loadTexts: mateServerIpAddress.setDescription('IP address of this server.')
serverMemSize = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 6, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: serverMemSize.setStatus('mandatory')
if mibBuilder.loadTexts: serverMemSize.setDescription('Memory size in mega bytes of this server.')
provisionedDPCs = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 6, 9), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: provisionedDPCs.setStatus('mandatory')
if mibBuilder.loadTexts: provisionedDPCs.setDescription('Number of destination point codes provisioned.')
provisionedCircuits = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 6, 10), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: provisionedCircuits.setStatus('mandatory')
if mibBuilder.loadTexts: provisionedCircuits.setDescription('Number of circuits provisioned.')
inserviceCircuits = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 6, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: inserviceCircuits.setStatus('mandatory')
if mibBuilder.loadTexts: inserviceCircuits.setDescription('Number of circuits in service. This number goes up or down at any given time.')
provisionedNASes = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 6, 12), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: provisionedNASes.setStatus('mandatory')
if mibBuilder.loadTexts: provisionedNASes.setDescription('Number of NASes provisioned.')
aliveNASes = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 6, 13), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: aliveNASes.setStatus('mandatory')
if mibBuilder.loadTexts: aliveNASes.setDescription('Number of NASes known to be alive. This number goes up or down at any given time.')
inserviceNASes = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 6, 14), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: inserviceNASes.setStatus('mandatory')
if mibBuilder.loadTexts: inserviceNASes.setDescription('Number of NASes in service. This number goes up or down at any given time.')
provsionedCards = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 6, 15), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: provsionedCards.setStatus('mandatory')
if mibBuilder.loadTexts: provsionedCards.setDescription('Number of NAS cards provisioned.')
inserviceCards = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 6, 16), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: inserviceCards.setStatus('mandatory')
if mibBuilder.loadTexts: inserviceCards.setDescription('Number of NAS cards in service. This number goes up or down at any given time.')
provisionedPorts = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 6, 17), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: provisionedPorts.setStatus('mandatory')
if mibBuilder.loadTexts: provisionedPorts.setDescription('Number of ports provisioned.')
inservicePorts = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 6, 18), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: inservicePorts.setStatus('mandatory')
if mibBuilder.loadTexts: inservicePorts.setDescription('Number of ports in service. This number goes up or down at any given time.')
userCPUUsage = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 6, 19), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: userCPUUsage.setStatus('mandatory')
if mibBuilder.loadTexts: userCPUUsage.setDescription('Percentage of CPU used in user domain. Survman computes this value in every 600 seconds. The value stored in the MIB will be the last one computed.')
systemCPUUsage = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 6, 20), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemCPUUsage.setStatus('mandatory')
if mibBuilder.loadTexts: systemCPUUsage.setDescription('Percentage of CPU used in system domain in this server. Survman computes this value in every 600 seconds. The value stored in the MIB will be the last one computed.')
totalCPUUsage = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 6, 21), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: totalCPUUsage.setStatus('mandatory')
if mibBuilder.loadTexts: totalCPUUsage.setDescription('Percentage of CPU used in total in this server Survman computes this value in every 600 seconds. The value stored in the MIB will be the last one computed.')
maxCPUUsage = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 6, 22), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: maxCPUUsage.setStatus('mandatory')
if mibBuilder.loadTexts: maxCPUUsage.setDescription('High water measurement. Maximum CPU Usage (%) in this server. Survman computes this value in every 600 seconds. The value stored in the MIB will be the last one computed.')
avgLoad = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 6, 23), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: avgLoad.setStatus('mandatory')
if mibBuilder.loadTexts: avgLoad.setDescription('Average CPU load factor in this server. Survman computes this value in every 600 seconds. The value stored in the MIB will be the last one computed.')
systemCallRate = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 6, 24), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemCallRate.setStatus('mandatory')
if mibBuilder.loadTexts: systemCallRate.setDescription('System Call rate (per second) in this Cserver. Survman computes this value in every 600 seconds. The value stored in the MIB will be the one computed the last time.')
contextSwitchRate = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 6, 25), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: contextSwitchRate.setStatus('mandatory')
if mibBuilder.loadTexts: contextSwitchRate.setDescription('Process context switching rate (per second) in this server. Survman computes this value in every 600 seconds. The value stored in the MIB will be the one computed the last time.')
lastUpdateOMFile = MibScalar((1, 3, 6, 1, 4, 1, 562, 14, 2, 7, 6, 26), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: lastUpdateOMFile.setStatus('mandatory')
if mibBuilder.loadTexts: lastUpdateOMFile.setDescription('Name of the last updated OM file.')
mibBuilder.exportSymbols("InternetThruway-MIB", cplxMateStandbyState=cplxMateStandbyState, icTimeStamp=icTimeStamp, linkAlignLinkSet=linkAlignLinkSet, nasUserUnavailableInCallRejects=nasUserUnavailableInCallRejects, phoneCallOMIndex=phoneCallOMIndex, linkOMs=linkOMs, hgIPAddress=hgIPAddress, ss7MTP2TrunkFailureAlarmTableEntry=ss7MTP2TrunkFailureAlarmTableEntry, componentIndex=componentIndex, lsIPAddress=lsIPAddress, RouteState=RouteState, ncLostServerTrap=ncLostServerTrap, LinksetState=LinksetState, nasCallOMTableEntry=nasCallOMTableEntry, trunkOccupancyPerCCS=trunkOccupancyPerCCS, routeIndex=routeIndex, destCongestPointcode=destCongestPointcode, userBusyOutCallRejects=userBusyOutCallRejects, componentTable=componentTable, routeTable=routeTable, ss7DestinationAccessible=ss7DestinationAccessible, ss7LinkCongestionAlarmTableEntry=ss7LinkCongestionAlarmTableEntry, partitionSpaceTimeStamp=partitionSpaceTimeStamp, lcKey=lcKey, nasCurrentlyActiveDigitalOutCalls=nasCurrentlyActiveDigitalOutCalls, destInaccessPointcode=destInaccessPointcode, trunkUserUnavailableOutCallRejects=trunkUserUnavailableOutCallRejects, LinkAlignmentState=LinkAlignmentState, trunkOtherOutCallRejects=trunkOtherOutCallRejects, ss7ISUPFailureAlarmTableEntry=ss7ISUPFailureAlarmTableEntry, trunkOtherInCallRejects=trunkOtherInCallRejects, lcLinkSet=lcLinkSet, lsKey=lsKey, ss7FEPCongestionWarning=ss7FEPCongestionWarning, ss7BEPCongestionWarning=ss7BEPCongestionWarning, provisionedPorts=provisionedPorts, nasUserUnavailableOutCallRejects=nasUserUnavailableOutCallRejects, phoneDialCallNormalCompletions=phoneDialCallNormalCompletions, ss7DestinationCongestedAlarm=ss7DestinationCongestedAlarm, cplxMateAvailabilityState=cplxMateAvailabilityState, totalCPUUsage=totalCPUUsage, provsionedCards=provsionedCards, compDebugStatus=compDebugStatus, provisionedNASes=provisionedNASes, trafficInCCSs=trafficInCCSs, currentlyActiveInCalls=currentlyActiveInCalls, cplxMateOperationalState=cplxMateOperationalState, 
nasStatusAlarm=nasStatusAlarm, nasAlarmTableEntry=nasAlarmTableEntry, PartitionSpaceStatus=PartitionSpaceStatus, linksetTableEntry=linksetTableEntry, rSATimerExpiries=rSATimerExpiries, mtp2CardId=mtp2CardId, compStateAlarm=compStateAlarm, mtp2Key=mtp2Key, ss7DestinationInaccessibleAlarmTable=ss7DestinationInaccessibleAlarmTable, nasIPAddress=nasIPAddress, inserviceNASes=inserviceNASes, linkOMTableEntry=linkOMTableEntry, nasUserBusyInCallRejects=nasUserBusyInCallRejects, lcIPAddress=lcIPAddress, hgName=hgName, phoneNumberOMs=phoneNumberOMs, linkNumSIFReceived=linkNumSIFReceived, numberOfPorts=numberOfPorts, lsName=lsName, lfCardId=lfCardId, icIndex=icIndex, provisionedDPCs=provisionedDPCs, lfIPAddress=lfIPAddress, lostServerAlarmTableEntry=lostServerAlarmTableEntry, mateServerIpAddress=mateServerIpAddress, cumulativeActiveOutCalls=cumulativeActiveOutCalls, nasCurrentlyActiveOutCalls=nasCurrentlyActiveOutCalls, nasInCallAbnormalCompletions=nasInCallAbnormalCompletions, lsFailureTimeStamp=lsFailureTimeStamp, alarmStatusInt1=alarmStatusInt1, csgComplexStateTrapClear=csgComplexStateTrapClear, partitionPercentFull=partitionPercentFull, systemCPUUsage=systemCPUUsage, destPointCode=destPointCode, destInaccessIndex=destInaccessIndex, nasTempFailInCallRejects=nasTempFailInCallRejects, destinationTable=destinationTable, destinationTableEntry=destinationTableEntry, trunkGroupCLLI=trunkGroupCLLI, nasName=nasName, TimeString=TimeString, currentlyActiveDigitalInCalls=currentlyActiveDigitalInCalls, linksetTable=linksetTable, cplxLocEthernetName=cplxLocEthernetName, genericWarning=genericWarning, phoneDialCallAttempts=phoneDialCallAttempts, ss7MTP2TrunkFailureAlarm=ss7MTP2TrunkFailureAlarm, compRestartKey=compRestartKey, linkAlignIPAddress=linkAlignIPAddress, nasCurrentlyActiveInCalls=nasCurrentlyActiveInCalls, linkFailures=linkFailures, ss7MTP3CongestionCritical=ss7MTP3CongestionCritical, contextSwitchRate=contextSwitchRate, nasCumulativeActiveOutCalls=nasCumulativeActiveOutCalls, 
compDebugKey=compDebugKey, rLCTimerExpiries=rLCTimerExpiries, routeId=routeId, userUnavailableInCallRejects=userUnavailableInCallRejects, outCallNormalCompletions=outCallNormalCompletions, linkHostname=linkHostname, nasCallOMTable=nasCallOMTable, compProvStateStatus=compProvStateStatus, phoneOtherDialCallRejects=phoneOtherDialCallRejects, cisRetrievalFailureTrapMajor=cisRetrievalFailureTrapMajor, maintenanceOMs=maintenanceOMs, trunkOutCallAttempts=trunkOutCallAttempts, phoneNumber=phoneNumber, icName=icName, ncSoftwareVersion=ncSoftwareVersion, linkIndex=linkIndex, ss7DestinationCongestedAlarmTableEntry=ss7DestinationCongestedAlarmTableEntry, ifTimeStamp=ifTimeStamp, partitionSpaceStatus=partitionSpaceStatus, linkCardDeviceName=linkCardDeviceName, maxCPUUsage=maxCPUUsage, mateServerHostName=mateServerHostName, linkNumMSUDiscarded=linkNumMSUDiscarded, inCallAbnormalCompletions=inCallAbnormalCompletions, ncServerId=ncServerId, serverCLLI=serverCLLI, inservicePorts=inservicePorts, ncEthernetName=ncEthernetName, nasMaxPortsUsed=nasMaxPortsUsed, lsTimeStamp=lsTimeStamp, ss7LinkAlignmentFailureClear=ss7LinkAlignmentFailureClear, phoneCurrentlyActiveDialCalls=phoneCurrentlyActiveDialCalls, phoneUserUnavailableDialCallRejects=phoneUserUnavailableDialCallRejects, csg=csg, ncFoundServerTrap=ncFoundServerTrap, systemOMs=systemOMs, ncClusterIP=ncClusterIP, compSecsInCurrentState=compSecsInCurrentState, abnormalReleaseInCallRejects=abnormalReleaseInCallRejects, nasCumulativeActiveInCalls=nasCumulativeActiveInCalls, ncAvailabilityState=ncAvailabilityState, inserviceCards=inserviceCards, trunkCumulativeActiveOutCalls=trunkCumulativeActiveOutCalls, linkAlignTimeStamp=linkAlignTimeStamp, hgAlarmTableEntry=hgAlarmTableEntry, trunkUserBusyInCallRejects=trunkUserBusyInCallRejects, csgComplexCLLI=csgComplexCLLI, linkAlignLinkCode=linkAlignLinkCode, destState=destState, ifIndex=ifIndex, ss7LinksetFailureAlarmTable=ss7LinksetFailureAlarmTable, uBATimerExpiries=uBATimerExpiries, 
ss7DestinationCongestedAlarmTable=ss7DestinationCongestedAlarmTable, alarmMaskInt1=alarmMaskInt1, lfKey=lfKey, lastUpdateOMFile=lastUpdateOMFile, linkAlignCardId=linkAlignCardId, genericNormal=genericNormal, lfLinkCode=lfLinkCode, lcTimeStamp=lcTimeStamp, nasStatusClear=nasStatusClear, currentlyActiveDigitalOutCalls=currentlyActiveDigitalOutCalls, LinkCongestionState=LinkCongestionState, nasKey=nasKey, cplxLocOperationalState=cplxLocOperationalState, linkNumMSUTransmitted=linkNumMSUTransmitted, linkCongestions=linkCongestions, ncStandbyState=ncStandbyState, ss7ISUPCongestionAlarmTable=ss7ISUPCongestionAlarmTable, nasOtherOutCallRejects=nasOtherOutCallRejects, linkInhibitionState=linkInhibitionState, genericMinor=genericMinor, hgAlarmTable=hgAlarmTable, ncOperationalState=ncOperationalState, phoneCurrentlyActiveAnalogDialCalls=phoneCurrentlyActiveAnalogDialCalls, trunkUserUnavailableInCallRejects=trunkUserUnavailableInCallRejects, UpgradeInProgress=UpgradeInProgress, alarms=alarms, compDebugTimeStamp=compDebugTimeStamp, cplxMateEthernetIP=cplxMateEthernetIP, trunkCallOMIndex=trunkCallOMIndex, lfName=lfName, userBusyInCallRejects=userBusyInCallRejects, linkRemoteProcOutages=linkRemoteProcOutages, trapGenericStr1=trapGenericStr1, linkAlignKey=linkAlignKey, genericCritical=genericCritical, abnormalReleaseOutCallRejects=abnormalReleaseOutCallRejects, ncServer=ncServer, compProvStateTimeStamp=compProvStateTimeStamp, ss7LinkAlignmentAlarmTableEntry=ss7LinkAlignmentAlarmTableEntry, mtp3Name=mtp3Name, destCongestKey=destCongestKey, hgStatusClear=hgStatusClear, trapName=trapName, userCPUUsage=userCPUUsage, linkOMTable=linkOMTable, ss7ISUPFailureAlarm=ss7ISUPFailureAlarm, ss7MTP3CongestionMinor=ss7MTP3CongestionMinor, partitionIndex=partitionIndex, genericMajor=genericMajor, lcLinkCode=lcLinkCode, alarmMaskInt2=alarmMaskInt2, ncStateChangeTrap=ncStateChangeTrap, ss7MTP3CongestionAlarmTable=ss7MTP3CongestionAlarmTable, remoteBusyInCCSs=remoteBusyInCCSs, 
csgComplexStateTrapInfo=csgComplexStateTrapInfo, aliveNASes=aliveNASes, destCongestIPAddress=destCongestIPAddress, trunkGroupOMs=trunkGroupOMs, otherOutCallRejects=otherOutCallRejects, lsFailurePointcode=lsFailurePointcode, trapFileName=trapFileName, ss7LinkAlignmentAlarmTable=ss7LinkAlignmentAlarmTable, destIndex=destIndex, destCongestName=destCongestName, nasCurrentlyInUsePorts=nasCurrentlyInUsePorts, systemCallRate=systemCallRate, mtp2TimeStamp=mtp2TimeStamp, linkNumUnexpectedMsgs=linkNumUnexpectedMsgs, trapCompName=trapCompName, linkNumSIFTransmitted=linkNumSIFTransmitted, ncEthernetIP=ncEthernetIP, nortel=nortel, tempFailOutCallRejects=tempFailOutCallRejects, inserviceCircuits=inserviceCircuits, destInaccessIPAddress=destInaccessIPAddress, linksetState=linksetState, cplxLocAvailabilityState=cplxLocAvailabilityState, nasOutCallAbnormalCompletions=nasOutCallAbnormalCompletions, ss7LinkFailureAlarmTable=ss7LinkFailureAlarmTable, ss7LinkCongestionAlarm=ss7LinkCongestionAlarm, restartStateClear=restartStateClear, alarmStatusInt2=alarmStatusInt2, trunkCurrentlyActiveDigitalInCalls=trunkCurrentlyActiveDigitalInCalls, ss7ISUPCongestionClear=ss7ISUPCongestionClear, lfIndex=lfIndex, linkTableEntry=linkTableEntry, mtp2Name=mtp2Name, mtp3IPAddress=mtp3IPAddress, ncUpgradeInProgress=ncUpgradeInProgress, nasOutCallAttempts=nasOutCallAttempts, lfLinkSet=lfLinkSet, provisionedCircuits=provisionedCircuits, partitionTable=partitionTable, ss7LinkCongestionAlarmTable=ss7LinkCongestionAlarmTable, serverMemSize=serverMemSize, ss7LinkFailureClear=ss7LinkFailureClear, trunkInCallAttempts=trunkInCallAttempts, mtp2Index=mtp2Index, trapIdKey=trapIdKey, phoneCallOMTableEntry=phoneCallOMTableEntry, ss7LinksetFailureAlarm=ss7LinksetFailureAlarm)
mibBuilder.exportSymbols("InternetThruway-MIB", icIPAddress=icIPAddress, trunkCumulativeActiveInCalls=trunkCumulativeActiveInCalls, lfTimeStamp=lfTimeStamp, ss7LinkFailureAlarm=ss7LinkFailureAlarm, partitionMegsFree=partitionMegsFree, compStateClear=compStateClear, lsFailureIndex=lsFailureIndex, cumulativeActiveInCalls=cumulativeActiveInCalls, ss7LinksetFailureClear=ss7LinksetFailureClear, linksetId=linksetId, linkOMSetId=linkOMSetId, hgKey=hgKey, csgComplexStateTrapCritical=csgComplexStateTrapCritical, linkNumMSUReceived=linkNumMSUReceived, ss7LinksetFailureAlarmTableEntry=ss7LinksetFailureAlarmTableEntry, partitionName=partitionName, icKey=icKey, ss7MTP3CongestionMajor=ss7MTP3CongestionMajor, icCongestionLevel=icCongestionLevel, trunkCurrentlyActiveOutCalls=trunkCurrentlyActiveOutCalls, avgLoad=avgLoad, compDebugOff=compDebugOff, nasCurrentlyActiveDigitalInCalls=nasCurrentlyActiveDigitalInCalls, destCongestIndex=destCongestIndex, restartStateAlarm=restartStateAlarm, trunkInCallAbnormalCompletions=trunkInCallAbnormalCompletions, trunkAbnormalReleaseInCallRejects=trunkAbnormalReleaseInCallRejects, linksetIndex=linksetIndex, mtp2IPAddress=mtp2IPAddress, ifIPAddress=ifIPAddress, lsFailureName=lsFailureName, nasAlarmTimeStamp=nasAlarmTimeStamp, trafficInCCSIncomings=trafficInCCSIncomings, nasTempFailOutCallRejects=nasTempFailOutCallRejects, routeState=routeState, DestinationState=DestinationState, linkInhibits=linkInhibits, compRestartTimeStamp=compRestartTimeStamp, ss7MTP3CongestionClear=ss7MTP3CongestionClear, nasInCallNormalCompletions=nasInCallNormalCompletions, MTP2AlarmConditionType=MTP2AlarmConditionType, linkId=linkId, ss7ISUPFailureClear=ss7ISUPFailureClear, componentName=componentName, lcCardId=lcCardId, nasOMs=nasOMs, disk=disk, nasIndex=nasIndex, trunkCurrentlyActiveAnalogOutCalls=trunkCurrentlyActiveAnalogOutCalls, ncClusterName=ncClusterName, trunkCurrentlyActiveDigitalOutCalls=trunkCurrentlyActiveDigitalOutCalls, routeDestPointCode=routeDestPointCode, 
LinkState=LinkState, nasAlarmTable=nasAlarmTable, destCongestTimeStamp=destCongestTimeStamp, cplxAlarmStatus=cplxAlarmStatus, lsIndex=lsIndex, ss7=ss7, nasAbnormalReleaseOutCallRejects=nasAbnormalReleaseOutCallRejects, currentlyActiveOutCalls=currentlyActiveOutCalls, ComponentIndex=ComponentIndex, hgIndex=hgIndex, lostServerAlarmTable=lostServerAlarmTable, localBusyInCCSs=localBusyInCCSs, currentlyActiveAnalogOutCalls=currentlyActiveAnalogOutCalls, ss7LinkCongestionClear=ss7LinkCongestionClear, ss7DestinationCongestedClear=ss7DestinationCongestedClear, mtp3CongestionLevel=mtp3CongestionLevel, callOMs=callOMs, tempFailInCallRejects=tempFailInCallRejects, lcIndex=lcIndex, trunkOutCallAbnormalCompletions=trunkOutCallAbnormalCompletions, phoneUserBusyDialCallRejects=phoneUserBusyDialCallRejects, ss7ISUPCongestionAlarm=ss7ISUPCongestionAlarm, linkAlignIndex=linkAlignIndex, inCallNormalCompletions=inCallNormalCompletions, ifName=ifName, currentlyActiveAnalogInCalls=currentlyActiveAnalogInCalls, routeRank=routeRank, phoneDialCallAbnormalCompletions=phoneDialCallAbnormalCompletions, phoneTempFailDialCallRejects=phoneTempFailDialCallRejects, otherInCallRejects=otherInCallRejects, routeTableEntry=routeTableEntry, trapDate=trapDate, userUnavailableOutCallRejects=userUnavailableOutCallRejects, trapIPAddress=trapIPAddress, cplxMateEthernetName=cplxMateEthernetName, phoneCallOMTable=phoneCallOMTable, serverIpAddress=serverIpAddress, trunkTempFailOutCallRejects=trunkTempFailOutCallRejects, compRestartStatus=compRestartStatus, nasOutCallNormalCompletions=nasOutCallNormalCompletions, ss7DestinationInaccessible=ss7DestinationInaccessible, bLATimerExpiries=bLATimerExpiries, trunkAllActiveCalls=trunkAllActiveCalls, destInaccessName=destInaccessName, system=system, nasOtherInCallRejects=nasOtherInCallRejects, cplxName=cplxName, trunkUserBusyOutCallRejects=trunkUserBusyOutCallRejects, nasInCallAttempts=nasInCallAttempts, lcName=lcName, 
nasCurrentlyActiveAnalogOutCalls=nasCurrentlyActiveAnalogOutCalls, dialaccess=dialaccess, trapTimeStamp=trapTimeStamp, trunkCurrentlyActiveInCalls=trunkCurrentlyActiveInCalls, linkNumAutoChangeovers=linkNumAutoChangeovers, diskSpaceClear=diskSpaceClear, omData=omData, linkAlignName=linkAlignName, nasName1=nasName1, ss7LinkFailureAlarmTableEntry=ss7LinkFailureAlarmTableEntry, etherCardTrapMajor=etherCardTrapMajor, LinkInhibitionState=LinkInhibitionState, components=components, linkCongestionState=linkCongestionState, ss7MTP3CongestionAlarmTableEntry=ss7MTP3CongestionAlarmTableEntry, destInaccessKey=destInaccessKey, trunkCallOMTable=trunkCallOMTable, alarmStatusInt3=alarmStatusInt3, ss7ISUPCongestionAlarmTableEntry=ss7ISUPCongestionAlarmTableEntry, ifKey=ifKey, serverHostName=serverHostName, compProvStateKey=compProvStateKey, nasMinPortsUsed=nasMinPortsUsed, etherCardTrapCritical=etherCardTrapCritical, ComponentSysmanState=ComponentSysmanState, trunkCurrentlyActiveAnalogInCalls=trunkCurrentlyActiveAnalogInCalls, nasAbnormalReleaseInCallRejects=nasAbnormalReleaseInCallRejects, ss7ISUPFailureAlarmTable=ss7ISUPFailureAlarmTable, mtp2AlarmCondition=mtp2AlarmCondition, trunkOutCallNormalCompletions=trunkOutCallNormalCompletions, etherCardTrapClear=etherCardTrapClear, linkTransmittedMSUs=linkTransmittedMSUs, cplxLocEthernetIP=cplxLocEthernetIP, traps=traps, ncServerName=ncServerName, phoneCumulativeActiveDialCalls=phoneCumulativeActiveDialCalls, partitionTableEntry=partitionTableEntry, linkOMId=linkOMId, csgComplexStateTrapMajor=csgComplexStateTrapMajor, ncHostName=ncHostName, numberOfCircuits=numberOfCircuits, linkTable=linkTable, ss7MTP2TrunkFailureAlarmTable=ss7MTP2TrunkFailureAlarmTable, trunkInCallNormalCompletions=trunkInCallNormalCompletions, linkAlignmentState=linkAlignmentState, outCallAbnormalCompletions=outCallAbnormalCompletions, nasCallOMIndex=nasCallOMIndex, phoneAbnormalReleaseDialCallRejects=phoneAbnormalReleaseDialCallRejects, destRuleId=destRuleId, 
nasCmplxName=nasCmplxName, lsFailureIPAddress=lsFailureIPAddress, partitionSpaceKey=partitionSpaceKey, ss7DestinationInaccessibleAlarmTableEntry=ss7DestinationInaccessibleAlarmTableEntry, hgStatusAlarm=hgStatusAlarm, inCallAttempts=inCallAttempts, linkState=linkState, ss7MTP2TrunkFailureClear=ss7MTP2TrunkFailureClear, nasAllActiveCalls=nasAllActiveCalls, compDebugOn=compDebugOn, destCongestCongestionLevel=destCongestCongestionLevel, mtp3Key=mtp3Key, linkReceivedMSUs=linkReceivedMSUs, ss7LinkAlignmentFailureAlarm=ss7LinkAlignmentFailureAlarm, linksetAdjPointcode=linksetAdjPointcode, routeLinksetId=routeLinksetId, phoneCurrentlyActiveDigitalDialCalls=phoneCurrentlyActiveDigitalDialCalls, nasCurrentlyActiveAnalogInCalls=nasCurrentlyActiveAnalogInCalls, trunkTempFailInCallRejects=trunkTempFailInCallRejects, trunkCallOMTableEntry=trunkCallOMTableEntry, diskSpaceAlarm=diskSpaceAlarm, mtp3Index=mtp3Index, nasUserBusyOutCallRejects=nasUserBusyOutCallRejects, lsFailureKey=lsFailureKey, hgAlarmTimeStamp=hgAlarmTimeStamp, mtp3TimeStamp=mtp3TimeStamp, componentTableEntry=componentTableEntry, outCallAttempts=outCallAttempts, cplxLocStandbyState=cplxLocStandbyState, trunkAbnormalReleaseOutCallRejects=trunkAbnormalReleaseOutCallRejects, destInaccessTimeStamp=destInaccessTimeStamp)
| 127.231942
| 9,703
| 0.781809
|
b9f67833672023bef782862284907976acb9371f
| 2,216
|
py
|
Python
|
newsparser.py
|
antoreep-jana/BBC-News-Analyzer
|
0a6e54ddf4baefa4532213c5e6f60e712ff3a1ca
|
[
"MIT"
] | 1
|
2021-12-27T12:57:07.000Z
|
2021-12-27T12:57:07.000Z
|
newsparser.py
|
antoreep-jana/BBC-News-Analyzer
|
0a6e54ddf4baefa4532213c5e6f60e712ff3a1ca
|
[
"MIT"
] | null | null | null |
newsparser.py
|
antoreep-jana/BBC-News-Analyzer
|
0a6e54ddf4baefa4532213c5e6f60e712ff3a1ca
|
[
"MIT"
] | null | null | null |
from bs4 import BeautifulSoup as bs
import requests
parsed = BBC("https://www.bbc.co.uk/news/world-europe-49345912")
#print(parsed.title)
#print(parsed.link)
#print(parsed.author)
#print(parsed.date)
#print(parsed.title)
#print(parsed.body)
#print(parsed.images)
#print(parsed.body)
| 28.410256
| 98
| 0.564982
|
b9f73f41171ea9b93f4f79bc336c9fe6927dba89
| 2,044
|
py
|
Python
|
SIR_model-Copy.Caroline.1.py
|
Caroline-Odevall/final-project-team-18
|
fbf00ae4ec554dee9245a9834ff4108b3d339842
|
[
"MIT"
] | null | null | null |
SIR_model-Copy.Caroline.1.py
|
Caroline-Odevall/final-project-team-18
|
fbf00ae4ec554dee9245a9834ff4108b3d339842
|
[
"MIT"
] | null | null | null |
SIR_model-Copy.Caroline.1.py
|
Caroline-Odevall/final-project-team-18
|
fbf00ae4ec554dee9245a9834ff4108b3d339842
|
[
"MIT"
] | null | null | null |
# In[42]:
from scipy.integrate import odeint
import numpy as np
import matplotlib.pyplot as plt
# In[43]:
# describe the model
# In[44]:
# describe the parameters
N = 2283 #Totala befolkningen N=s(t)+I(t)+R(t)
D = 4.0 #infections last four days
gamma = 1.0 / D #Reoval rate (Hur mnga som tillfrisknar)
delta = 1.0 / 5.0 #incubation period of five days
R_0 = 2.5 #Reproduktionstalet
beta = R_0 * gamma #r_0=beta/gamma. antal som smittas per infekterad och per tid (beror pa virusets egenskaper samt hur vi beter oss).
S0, E0, I0, R0 = N-1, 1, 0, 0 # initial conditions: one infected, rest susceptible
#Rt = R0 * S(t)/Ntot* (1 b). b = effekt av policy och beteendeforandringar
# In[45]:
t = np.linspace(0, 99, 100) # Grid of time points (in days)
y0 = S0, E0, I0, R0 # Initial conditions vector
# Integrate the SIR equations over the time grid, t.
ret = odeint(deriv, y0, t, args=(N, beta, gamma, delta))
S, E, I, R = ret.T
# In[46]:
# plot the graph
# In[47]:
plotsir(t, S, E, I, R)
# In[ ]:
| 24.333333
| 137
| 0.630137
|
b9f778c162a3f42e748bada544f1b060ab9f29ed
| 466
|
py
|
Python
|
Condicionales anidados.py
|
gcardosov/PythonAprendeOrg
|
0cad81f0a584c98389ca729a337d30581780e520
|
[
"MIT"
] | 1
|
2018-03-07T05:26:12.000Z
|
2018-03-07T05:26:12.000Z
|
Condicionales anidados.py
|
gcardosov/PythonAprendeOrg
|
0cad81f0a584c98389ca729a337d30581780e520
|
[
"MIT"
] | null | null | null |
Condicionales anidados.py
|
gcardosov/PythonAprendeOrg
|
0cad81f0a584c98389ca729a337d30581780e520
|
[
"MIT"
] | null | null | null |
pregunta = input('trabajas desde casa? ')
if pregunta == True:
print 'Eres afortunado'
if pregunta == False:
print 'Trabajas fuera de casa'
tiempo = input('Cuantos minutos haces al trabajo: ')
if tiempo == 0:
print 'trabajas desde casa'
elif tiempo <=20:
print 'Es poco tiempo'
elif tiempo >= 21 and tiempo <=45:
print 'Es un tiempo razonable'
else:
print 'Busca otras rutas'
| 20.26087
| 57
| 0.583691
|
b9f8215f5040fa71b2646d52a053545a92c3fd12
| 1,681
|
py
|
Python
|
app/middleware/cache_headers.py
|
Niclnx/service-stac
|
ad9129a7130d09b2bed387d8e82575eb86fdfa7b
|
[
"BSD-3-Clause"
] | 9
|
2020-08-17T11:01:48.000Z
|
2022-01-17T22:24:13.000Z
|
app/middleware/cache_headers.py
|
Niclnx/service-stac
|
ad9129a7130d09b2bed387d8e82575eb86fdfa7b
|
[
"BSD-3-Clause"
] | 100
|
2020-08-14T05:56:40.000Z
|
2022-03-01T22:39:58.000Z
|
app/middleware/cache_headers.py
|
Niclnx/service-stac
|
ad9129a7130d09b2bed387d8e82575eb86fdfa7b
|
[
"BSD-3-Clause"
] | 3
|
2020-09-02T14:01:07.000Z
|
2021-07-27T06:30:26.000Z
|
import logging
import re
from urllib.parse import urlparse
from django.conf import settings
from django.utils.cache import add_never_cache_headers
from django.utils.cache import patch_cache_control
from django.utils.cache import patch_response_headers
logger = logging.getLogger(__name__)
STAC_BASE = settings.STAC_BASE
STAC_BASE_V = settings.STAC_BASE_V
| 32.960784
| 96
| 0.662701
|
b9f87264f50f9243a592053fcbe97aca0b8c2377
| 2,818
|
py
|
Python
|
mmdet/models/detectors/knowledge_distilling/kd_single_stage.py
|
anorthman/mmdetection
|
52e28154364f0e19d11c206bb357d88f29fc4a2d
|
[
"Apache-2.0"
] | 5
|
2019-06-11T11:08:54.000Z
|
2021-03-25T10:06:01.000Z
|
mmdet/models/detectors/knowledge_distilling/kd_single_stage.py
|
anorthman/mmdetection
|
52e28154364f0e19d11c206bb357d88f29fc4a2d
|
[
"Apache-2.0"
] | null | null | null |
mmdet/models/detectors/knowledge_distilling/kd_single_stage.py
|
anorthman/mmdetection
|
52e28154364f0e19d11c206bb357d88f29fc4a2d
|
[
"Apache-2.0"
] | 1
|
2019-06-11T11:08:55.000Z
|
2019-06-11T11:08:55.000Z
|
# author huangchuanhong
import torch
from mmcv.runner import load_checkpoint
from ..base import BaseDetector
from ..single_stage import SingleStageDetector
from ...registry import DETECTORS
from ...builder import build_detector
| 42.059701
| 112
| 0.551455
|
b9f8cb65181ebad752b9a810d28cc601137f1877
| 4,518
|
py
|
Python
|
metaworld/envs/mujoco/sawyer_xyz/v2/sawyer_dial_turn_v2.py
|
yiwc/robotics-world
|
48efda3a8ea6741b35828b02860f45753252e376
|
[
"MIT"
] | 681
|
2019-09-09T19:34:37.000Z
|
2022-03-31T12:17:58.000Z
|
metaworld/envs/mujoco/sawyer_xyz/v2/sawyer_dial_turn_v2.py
|
yiwc/robotics-world
|
48efda3a8ea6741b35828b02860f45753252e376
|
[
"MIT"
] | 212
|
2019-09-18T14:43:44.000Z
|
2022-03-27T22:21:00.000Z
|
metaworld/envs/mujoco/sawyer_xyz/v2/sawyer_dial_turn_v2.py
|
yiwc/robotics-world
|
48efda3a8ea6741b35828b02860f45753252e376
|
[
"MIT"
] | 157
|
2019-09-12T05:06:05.000Z
|
2022-03-29T14:47:24.000Z
|
import numpy as np
from gym.spaces import Box
from metaworld.envs import reward_utils
from metaworld.envs.asset_path_utils import full_v2_path_for
from metaworld.envs.mujoco.sawyer_xyz.sawyer_xyz_env import SawyerXYZEnv, _assert_task_is_set
| 31.816901
| 93
| 0.599823
|
b9fa33196acc6d33e769b7c8e96ca3b00aeee8cc
| 237
|
wsgi
|
Python
|
obviforum/obviforum.wsgi
|
dcloutman/obviforum
|
31af0cf6f2e243bff9d920276831415bd5210cf9
|
[
"MIT"
] | null | null | null |
obviforum/obviforum.wsgi
|
dcloutman/obviforum
|
31af0cf6f2e243bff9d920276831415bd5210cf9
|
[
"MIT"
] | null | null | null |
obviforum/obviforum.wsgi
|
dcloutman/obviforum
|
31af0cf6f2e243bff9d920276831415bd5210cf9
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import sys
import logging
import os
logging.basicConfig(stream=sys.stderr)
file_dir = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(0, file_dir)
from main import app as application
from main import db
| 23.7
| 54
| 0.797468
|
b9fa7c6bd7a253ee2a588381042c5dfd3d99cb96
| 2,560
|
py
|
Python
|
yezdi/parser/parser.py
|
ragsagar/yezdi
|
5b97bedc56d5af7f28b244a0d7c0c8259f643102
|
[
"MIT"
] | 1
|
2021-04-27T20:07:42.000Z
|
2021-04-27T20:07:42.000Z
|
yezdi/parser/parser.py
|
ragsagar/yezdi
|
5b97bedc56d5af7f28b244a0d7c0c8259f643102
|
[
"MIT"
] | null | null | null |
yezdi/parser/parser.py
|
ragsagar/yezdi
|
5b97bedc56d5af7f28b244a0d7c0c8259f643102
|
[
"MIT"
] | null | null | null |
from yezdi.lexer.token import TokenType
from yezdi.parser.ast import Program, Statement, Participant, Title, LineStatement
| 32
| 86
| 0.640625
|
b9fae34b418d8854a4b364f1044c114896456110
| 1,050
|
py
|
Python
|
scripts/check_categories.py
|
oberron/entolusis
|
209e1e245d8e501e5e6ea2f52dd5b0da7d886f5c
|
[
"MIT"
] | null | null | null |
scripts/check_categories.py
|
oberron/entolusis
|
209e1e245d8e501e5e6ea2f52dd5b0da7d886f5c
|
[
"MIT"
] | null | null | null |
scripts/check_categories.py
|
oberron/entolusis
|
209e1e245d8e501e5e6ea2f52dd5b0da7d886f5c
|
[
"MIT"
] | null | null | null |
# list categories in category folder
from os import walk
from os.path import abspath,join, pardir
categories_folder = abspath(join(__file__,pardir,pardir,"category"))
post_folder = abspath(join(__file__,pardir,pardir,"_posts"))
site_categories = []
for root,directories,files in walk(categories_folder):
for f in files:
site_categories.append(f.split(".md")[0])
site_categories = set(site_categories)
for root,directories,files in walk(post_folder):
for f in files:
with open(join(root,f),'r',encoding="utf-8") as fi:
lines = fi.readlines()
for l in lines:
if l.find("categories")==0:
categories = l.split(":")[1]
for c in [" ","[","]","\n"]:
categories = categories.replace(c,"")
categories=categories.split(",")
if len(set(categories)-site_categories)>0:
print(f,set(categories)-site_categories)
break
print("done")
| 36.206897
| 68
| 0.578095
|
b9fb43e9d0e20574f25b444b461b284752a17b4c
| 5,311
|
py
|
Python
|
docsrc/makedoc.py
|
syoyo/soloud
|
cce88a2408a4b1e88ccbc75de9897b39bc3e7dda
|
[
"Libpng",
"Zlib"
] | 1
|
2019-11-25T11:32:09.000Z
|
2019-11-25T11:32:09.000Z
|
docsrc/makedoc.py
|
syoyo/soloud
|
cce88a2408a4b1e88ccbc75de9897b39bc3e7dda
|
[
"Libpng",
"Zlib"
] | null | null | null |
docsrc/makedoc.py
|
syoyo/soloud
|
cce88a2408a4b1e88ccbc75de9897b39bc3e7dda
|
[
"Libpng",
"Zlib"
] | null | null | null |
#!/usr/bin/env python3
""" builds documentation files from multimarkdown (mmd) source
to various formats, including the web site and pdf.
"""
import subprocess
import glob
import os
import sys
import time
import shutil
src = [
"intro.mmd",
"downloads.mmd",
"quickstart.mmd",
"faq.mmd",
"dirstruct.mmd",
"premake.mmd",
"legal.mmd",
"concepts.mmd",
"concepts3d.mmd",
"voicemanagement.mmd",
"examples.mmd",
"foreign_interface.mmd",
"c_api.mmd",
"python_api.mmd",
"ruby_api.mmd",
"rpgmaker_api.mmd",
"bmx_api.mmd",
"gamemaker_api.mmd",
"cs_api.mmd",
"d_api.mmd",
"codegen.mmd",
"basics.mmd",
"attributes.mmd",
"faders.mmd",
"voicegroups.mmd",
"coremisc.mmd",
"core3d.mmd",
"audiosource.mmd",
"newsoundsources.mmd",
"wav.mmd",
"wavstream.mmd",
"speech.mmd",
"sfxr.mmd",
"modplug.mmd",
"monotone.mmd",
"tedsid.mmd",
"vizsn.mmd",
"vic.mmd",
"filters.mmd",
"biquadfilter.mmd",
"echofilter.mmd",
"lofifilter.mmd",
"flangerfilter.mmd",
"dcremovalfilter.mmd",
"fftfilter.mmd",
"bassboostfilter.mmd",
"waveshaperfilter.mmd",
"mixbus.mmd",
"queue.mmd",
"collider.mmd",
"attenuator.mmd",
"file.mmd",
"backends.mmd"
]
website_only = [
"downloads.mmd"
]
unknown = 0
for file in glob.glob("*.mmd"):
if file not in src:
unknown = 1
print(file + " not included in docs!")
if unknown:
print("Add the new files to makedoc.py, soloud.tex and htmlpre.txt.")
sys.exit()
datestring = time.strftime("%Y%m%d")
if not os.path.exists(datestring + "/web"):
os.makedirs(datestring + "/web")
if not os.path.exists("temp/"):
os.makedirs("temp/")
print("- -- --- -- - Generating single-file HTML docs")
callp = ["pandoc", "-s", "-t", "html5", "-f", "markdown-smart", "--metadata", 'title="SoLoud ' + datestring + '"', "-H", "singlehtml_head.txt", "-B", "singlehtml_body.txt", "--toc", "--self-contained", "--default-image-extension=png", "-o", datestring + "/soloud_" + datestring + ".html"]
for x in src:
if x not in website_only:
callp.append(x)
subprocess.call(callp)
print("- -- --- -- - Generating web site")
for x in src:
subprocess.call(["pandoc", "--template=html.pandoc", "-f", "markdown-smart", "--metadata", 'title="SoLoud ' + datestring + ' ' + x[:len(x)-4] + '"', "-B", "htmlpre.txt", "-A", "htmlpost.txt", "--default-image-extension=png", x, "-o", datestring + "/web/" + x[:len(x)-3]+"html.bak"])
with open(datestring + "/web/" + x[:len(x)-3]+"html", "w") as file_out:
with open(datestring + "/web/" + x[:len(x)-3]+"html.bak", "r") as file_in:
for line in file_in:
file_out.write(line.replace('code>', 'code>\n').replace('::','::<wbr>').replace('\xc2','').replace('\xa0',''))
if x == "intro.mmd":
if os.path.isfile(datestring + "/web/index.html"):
os.remove(datestring + "/web/index.html")
os.rename(datestring + "/web/intro.html", datestring + "/web/index.html")
print("- -- --- -- - Generating epub")
callp = ["pandoc", "-N", "--toc", "--epub-cover-image=images/cover.png", "-t", "epub3", "--default-image-extension=png", "-f", "markdown-smart", "--css=epub.css", "--epub-metadata=metadata.xml", "-o", datestring + "/soloud_" + datestring + ".epub", "title.txt"]
for x in src:
if x not in website_only:
callp.append(x)
subprocess.call(callp)
print("- -- --- -- - Converting epub -> mobi (kindlegen_output.txt)")
with open('kindlegen_output.txt', 'w') as outfile:
subprocess.call(["kindlegen", datestring + "/soloud_" + datestring + ".epub", "-c2"], stdout=outfile)
print("- -- --- -- - Generating LaTex")
for x in src:
if x not in website_only:
subprocess.call(["pandoc", "-t", "latex", "--listings", "--default-image-extension=pdf", "--top-level-division=chapter", x, "-o", "temp/" + x[:len(x)-3]+"tex.orig"])
with open("temp/" + x[:len(x)-3]+"tex", "w") as file_out:
with open("temp/" + x[:len(x)-3]+"tex.orig", "r") as file_in:
for line in file_in:
file_out.write(line.replace('\\begin{longtable}[]{@{}ll@{}}', '\\begin{tabulary}{\\textwidth}{lJ}').replace('\\begin{longtable}[]{@{}lll@{}}', '\\begin{tabulary}{\\textwidth}{lJJ}').replace('\\begin{longtable}[]{@{}llll@{}}', '\\begin{tabulary}{\\textwidth}{lJJJ}').replace('\\endhead','').replace('\\end{longtable}','\\end{tabulary}'))
print("- -- --- -- - Generating pdf (xelatex_output.txt)")
with open('xelatex_output.txt', 'w') as outfile:
subprocess.call(["xelatex", "SoLoud.tex"], stdout=outfile)
print("- -- --- -- - Generating pdf pass 2..")
subprocess.call(["xelatex", "SoLoud.tex"], stdout=outfile)
shutil.move("SoLoud.pdf", datestring + "/soloud_" + datestring + ".pdf")
print("- -- --- -- - Cleanup..")
tempsuffix = ["aux", "toc", "out", "log", "lg", "4ct", "4tc", "idv", "tmp", "xdv", "xref", "bak"]
for suffix in tempsuffix:
for file in glob.glob("*."+suffix):
os.remove(file)
for file in glob.glob(datestring + "/web/*."+suffix):
os.remove(file)
for file in glob.glob("temp/*"):
os.remove(file)
os.rmdir("temp")
print("- -- --- -- - Done - " + datestring)
| 34.940789
| 356
| 0.583129
|
b9fc3dd10a80beed547f86b535cfadc6f817e0e2
| 4,872
|
tac
|
Python
|
6 复试/2 笔试/4 编译原理/hw/2016_黄家晖_PA/550405220_4_decaf_PA3/TestCases/S3/output/t9.tac
|
ladike/912_project
|
5178c1c93ac6ca30ffc72dd689f5c6932704b4ab
|
[
"MIT"
] | 1
|
2022-03-02T16:05:49.000Z
|
2022-03-02T16:05:49.000Z
|
6 复试/2 笔试/4 编译原理/hw/2016_黄家晖_PA/550405220_4_decaf_PA3/TestCases/S3/output/t9.tac
|
ladike/912_project
|
5178c1c93ac6ca30ffc72dd689f5c6932704b4ab
|
[
"MIT"
] | null | null | null |
6 复试/2 笔试/4 编译原理/hw/2016_黄家晖_PA/550405220_4_decaf_PA3/TestCases/S3/output/t9.tac
|
ladike/912_project
|
5178c1c93ac6ca30ffc72dd689f5c6932704b4ab
|
[
"MIT"
] | null | null | null |
VTABLE(_Main) {
<empty>
Main
_Main.COPY;
}
VTABLE(_Base) {
<empty>
Base
_Base.COPY;
}
VTABLE(_Sub1) {
_Base
Sub1
_Sub1.COPY;
}
VTABLE(_Sub2) {
_Base
Sub2
_Sub2.COPY;
}
VTABLE(_Sub3) {
_Sub1
Sub3
_Sub3.COPY;
}
VTABLE(_Sub4) {
_Sub3
Sub4
_Sub4.COPY;
}
FUNCTION(_Main_New) {
memo ''
_Main_New:
_T1 = 4
parm _T1
_T2 = call _Alloc
_T3 = VTBL <_Main>
*(_T2 + 0) = _T3
return _T2
}
FUNCTION(_Main.COPY) {
memo '_T4:4'
_Main.COPY:
_T5 = 4
parm _T5
_T6 = call _Alloc
_T7 = VTBL <_Main>
*(_T6 + 0) = _T7
return _T6
}
FUNCTION(_Base_New) {
memo ''
_Base_New:
_T8 = 4
parm _T8
_T9 = call _Alloc
_T10 = VTBL <_Base>
*(_T9 + 0) = _T10
return _T9
}
FUNCTION(_Base.COPY) {
memo '_T11:4'
_Base.COPY:
_T12 = 4
parm _T12
_T13 = call _Alloc
_T14 = VTBL <_Base>
*(_T13 + 0) = _T14
return _T13
}
FUNCTION(_Sub1_New) {
memo ''
_Sub1_New:
_T15 = 4
parm _T15
_T16 = call _Alloc
_T17 = VTBL <_Sub1>
*(_T16 + 0) = _T17
return _T16
}
FUNCTION(_Sub1.COPY) {
memo '_T18:4'
_Sub1.COPY:
_T19 = 4
parm _T19
_T20 = call _Alloc
_T21 = VTBL <_Sub1>
*(_T20 + 0) = _T21
return _T20
}
FUNCTION(_Sub2_New) {
memo ''
_Sub2_New:
_T22 = 4
parm _T22
_T23 = call _Alloc
_T24 = VTBL <_Sub2>
*(_T23 + 0) = _T24
return _T23
}
FUNCTION(_Sub2.COPY) {
memo '_T25:4'
_Sub2.COPY:
_T26 = 4
parm _T26
_T27 = call _Alloc
_T28 = VTBL <_Sub2>
*(_T27 + 0) = _T28
return _T27
}
FUNCTION(_Sub3_New) {
memo ''
_Sub3_New:
_T29 = 4
parm _T29
_T30 = call _Alloc
_T31 = VTBL <_Sub3>
*(_T30 + 0) = _T31
return _T30
}
FUNCTION(_Sub3.COPY) {
memo '_T32:4'
_Sub3.COPY:
_T33 = 4
parm _T33
_T34 = call _Alloc
_T35 = VTBL <_Sub3>
*(_T34 + 0) = _T35
return _T34
}
FUNCTION(_Sub4_New) {
memo ''
_Sub4_New:
_T36 = 4
parm _T36
_T37 = call _Alloc
_T38 = VTBL <_Sub4>
*(_T37 + 0) = _T38
return _T37
}
FUNCTION(_Sub4.COPY) {
memo '_T39:4'
_Sub4.COPY:
_T40 = 4
parm _T40
_T41 = call _Alloc
_T42 = VTBL <_Sub4>
*(_T41 + 0) = _T42
return _T41
}
FUNCTION(main) {
memo ''
main:
_T48 = call _Base_New
_T43 = _T48
_T49 = call _Sub1_New
_T44 = _T49
_T50 = call _Sub2_New
_T45 = _T50
_T51 = call _Sub3_New
_T46 = _T51
_T52 = call _Sub4_New
_T47 = _T52
parm _T43
call _Main.printType
parm _T44
call _Main.printType
parm _T45
call _Main.printType
parm _T46
call _Main.printType
parm _T47
call _Main.printType
_T43 = _T47
parm _T43
call _Main.printType
_T54 = VTBL <_Sub1>
_T55 = *(_T43 + 0)
_L22:
_T53 = (_T54 == _T55)
if (_T53 != 0) branch _L23
_T55 = *(_T55 + 0)
if (_T55 != 0) branch _L22
_T56 = "Decaf runtime error: "
parm _T56
call _PrintString
_T57 = *(_T43 + 0)
_T58 = *(_T57 + 4)
parm _T58
call _PrintString
_T59 = " cannot be cast to "
parm _T59
call _PrintString
_T60 = VTBL <_Sub1>
_T61 = *(_T60 + 4)
parm _T61
call _PrintString
_T62 = "\n"
parm _T62
call _PrintString
call _Halt
_L23:
_T44 = _T43
parm _T44
call _Main.printType
}
FUNCTION(_Main.printType) {
memo '_T0:4'
_Main.printType:
_T64 = VTBL <_Sub4>
_T65 = *(_T0 + 0)
_L24:
_T63 = (_T64 == _T65)
if (_T63 != 0) branch _L25
_T65 = *(_T65 + 0)
if (_T65 != 0) branch _L24
_T63 = 0
_L25:
if (_T63 == 0) branch _L26
_T66 = "Sub4\n"
parm _T66
call _PrintString
branch _L27
_L26:
_T68 = VTBL <_Sub3>
_T69 = *(_T0 + 0)
_L28:
_T67 = (_T68 == _T69)
if (_T67 != 0) branch _L29
_T69 = *(_T69 + 0)
if (_T69 != 0) branch _L28
_T67 = 0
_L29:
if (_T67 == 0) branch _L30
_T70 = "Sub3\n"
parm _T70
call _PrintString
branch _L31
_L30:
_T72 = VTBL <_Sub2>
_T73 = *(_T0 + 0)
_L32:
_T71 = (_T72 == _T73)
if (_T71 != 0) branch _L33
_T73 = *(_T73 + 0)
if (_T73 != 0) branch _L32
_T71 = 0
_L33:
if (_T71 == 0) branch _L34
_T74 = "Sub2\n"
parm _T74
call _PrintString
branch _L35
_L34:
_T76 = VTBL <_Sub1>
_T77 = *(_T0 + 0)
_L36:
_T75 = (_T76 == _T77)
if (_T75 != 0) branch _L37
_T77 = *(_T77 + 0)
if (_T77 != 0) branch _L36
_T75 = 0
_L37:
if (_T75 == 0) branch _L38
_T78 = "Sub1\n"
parm _T78
call _PrintString
branch _L39
_L38:
_T80 = VTBL <_Base>
_T81 = *(_T0 + 0)
_L40:
_T79 = (_T80 == _T81)
if (_T79 != 0) branch _L41
_T81 = *(_T81 + 0)
if (_T81 != 0) branch _L40
_T79 = 0
_L41:
if (_T79 == 0) branch _L42
_T82 = "Base\n"
parm _T82
call _PrintString
_L42:
_L39:
_L35:
_L31:
_L27:
}
| 15.76699
| 34
| 0.555829
|
b9fc6312cdae3331d02a69bbbf58d767d486a41b
| 1,361
|
py
|
Python
|
arch/api/base/utils/party.py
|
yzjba/FATE
|
9a6d252da637b2583a0f8a51f6cb4c615850bab9
|
[
"Apache-2.0"
] | 32
|
2020-06-12T08:39:58.000Z
|
2022-03-20T06:57:08.000Z
|
arch/api/base/utils/party.py
|
ErikSun2020/FATE
|
bdda535c7d8a974fc2c43102837964b7da199730
|
[
"Apache-2.0"
] | 10
|
2020-11-13T18:55:48.000Z
|
2022-02-10T02:00:12.000Z
|
arch/api/base/utils/party.py
|
ErikSun2020/FATE
|
bdda535c7d8a974fc2c43102837964b7da199730
|
[
"Apache-2.0"
] | 16
|
2020-06-12T06:51:46.000Z
|
2022-03-29T10:23:42.000Z
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
| 30.244444
| 79
| 0.683321
|
b9fd9ad743eac1d46c7b5d951facfcfa09dbb1bf
| 3,778
|
py
|
Python
|
src/sentry/options/defaults.py
|
faulkner/sentry
|
f9dd4d0d7c683632cf02810c03bd42d7051ad010
|
[
"BSD-3-Clause"
] | null | null | null |
src/sentry/options/defaults.py
|
faulkner/sentry
|
f9dd4d0d7c683632cf02810c03bd42d7051ad010
|
[
"BSD-3-Clause"
] | null | null | null |
src/sentry/options/defaults.py
|
faulkner/sentry
|
f9dd4d0d7c683632cf02810c03bd42d7051ad010
|
[
"BSD-3-Clause"
] | null | null | null |
"""
sentry.options.defaults
~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import, print_function
from sentry.logging import LoggingFormat
from sentry.options import (
FLAG_IMMUTABLE, FLAG_NOSTORE, FLAG_PRIORITIZE_DISK, FLAG_REQUIRED, FLAG_ALLOW_EMPTY,
register,
)
from sentry.utils.types import Dict, String, Sequence
# Cache
# register('cache.backend', flags=FLAG_NOSTORE)
# register('cache.options', type=Dict, flags=FLAG_NOSTORE)
# System
register('system.admin-email', flags=FLAG_REQUIRED)
register('system.support-email', flags=FLAG_ALLOW_EMPTY | FLAG_PRIORITIZE_DISK)
register('system.security-email', flags=FLAG_ALLOW_EMPTY | FLAG_PRIORITIZE_DISK)
register('system.databases', type=Dict, flags=FLAG_NOSTORE)
# register('system.debug', default=False, flags=FLAG_NOSTORE)
register('system.rate-limit', default=0, flags=FLAG_ALLOW_EMPTY | FLAG_PRIORITIZE_DISK)
register('system.secret-key', flags=FLAG_NOSTORE)
# Absolute URL to the sentry root directory. Should not include a trailing slash.
register('system.url-prefix', ttl=60, grace=3600, flags=FLAG_REQUIRED | FLAG_PRIORITIZE_DISK)
register('system.root-api-key', flags=FLAG_PRIORITIZE_DISK)
register('system.logging-format', default=LoggingFormat.HUMAN, flags=FLAG_NOSTORE)
# Redis
register(
'redis.clusters',
type=Dict,
default={
'default': {
'hosts': {
0: {
'host': '127.0.0.1',
'port': 6379,
}
},
},
},
flags=FLAG_NOSTORE | FLAG_IMMUTABLE
)
register('redis.options', type=Dict, flags=FLAG_NOSTORE)
# symbolizer specifics
register('dsym.cache-path', type=String, default='/tmp/sentry-dsym-cache')
# Mail
register('mail.backend', default='smtp', flags=FLAG_NOSTORE)
register('mail.host', default='localhost', flags=FLAG_REQUIRED | FLAG_PRIORITIZE_DISK)
register('mail.port', default=25, flags=FLAG_REQUIRED | FLAG_PRIORITIZE_DISK)
register('mail.username', flags=FLAG_REQUIRED | FLAG_ALLOW_EMPTY | FLAG_PRIORITIZE_DISK)
register('mail.password', flags=FLAG_REQUIRED | FLAG_ALLOW_EMPTY | FLAG_PRIORITIZE_DISK)
register('mail.use-tls', default=False, flags=FLAG_REQUIRED | FLAG_PRIORITIZE_DISK)
register('mail.subject-prefix', default='[Sentry] ', flags=FLAG_PRIORITIZE_DISK)
register('mail.from', default='root@localhost', flags=FLAG_REQUIRED | FLAG_PRIORITIZE_DISK)
register('mail.list-namespace', type=String, default='localhost', flags=FLAG_NOSTORE)
register('mail.enable-replies', default=False, flags=FLAG_PRIORITIZE_DISK)
register('mail.reply-hostname', default='', flags=FLAG_ALLOW_EMPTY | FLAG_PRIORITIZE_DISK)
register('mail.mailgun-api-key', default='', flags=FLAG_ALLOW_EMPTY | FLAG_PRIORITIZE_DISK)
# SMS
register('sms.twilio-account', default='', flags=FLAG_ALLOW_EMPTY | FLAG_PRIORITIZE_DISK)
register('sms.twilio-token', default='', flags=FLAG_ALLOW_EMPTY | FLAG_PRIORITIZE_DISK)
register('sms.twilio-number', default='', flags=FLAG_ALLOW_EMPTY | FLAG_PRIORITIZE_DISK)
# U2F
register('u2f.app-id', default='', flags=FLAG_ALLOW_EMPTY | FLAG_PRIORITIZE_DISK)
register('u2f.facets', default=(), type=Sequence,
flags=FLAG_ALLOW_EMPTY | FLAG_PRIORITIZE_DISK)
register('auth.ip-rate-limit', default=0, flags=FLAG_ALLOW_EMPTY | FLAG_PRIORITIZE_DISK)
register('auth.user-rate-limit', default=0, flags=FLAG_ALLOW_EMPTY | FLAG_PRIORITIZE_DISK)
register('api.rate-limit.org-create', default=5, flags=FLAG_ALLOW_EMPTY | FLAG_PRIORITIZE_DISK)
# Filestore
register('filestore.backend', default='filesystem', flags=FLAG_NOSTORE)
register('filestore.options', default={'location': '/tmp/sentry-files'}, flags=FLAG_NOSTORE)
| 43.425287
| 95
| 0.75172
|
b9ff46cab163507c14f9b26bf086ce4979f54a2c
| 4,972
|
py
|
Python
|
tools/unidatadownload.py
|
henryiii/backrefs
|
ec82844098bc3bdc7bcaa61b32f80271e6a73da6
|
[
"MIT"
] | null | null | null |
tools/unidatadownload.py
|
henryiii/backrefs
|
ec82844098bc3bdc7bcaa61b32f80271e6a73da6
|
[
"MIT"
] | null | null | null |
tools/unidatadownload.py
|
henryiii/backrefs
|
ec82844098bc3bdc7bcaa61b32f80271e6a73da6
|
[
"MIT"
] | null | null | null |
"""Download `Unicodedata` files."""
from __future__ import unicode_literals
import os
import zipfile
import codecs
from urllib.request import urlopen
__version__ = '2.2.0'
HOME = os.path.dirname(os.path.abspath(__file__))
def zip_unicode(output, version):
"""Zip the Unicode files."""
zipper = zipfile.ZipFile(os.path.join(output, 'unicodedata', '%s.zip' % version), 'w', zipfile.ZIP_DEFLATED)
target = os.path.join(output, 'unicodedata', version)
print('Zipping %s.zip...' % version)
for root, dirs, files in os.walk(target):
for file in files:
if file.endswith('.txt'):
zipper.write(os.path.join(root, file), arcname=file)
def unzip_unicode(output, version):
"""Unzip the Unicode files."""
unzipper = zipfile.ZipFile(os.path.join(output, 'unicodedata', '%s.zip' % version))
target = os.path.join(output, 'unicodedata', version)
print('Unzipping %s.zip...' % version)
os.makedirs(target)
for f in unzipper.namelist():
# Do I need backslash on windows? Or is it forward as well?
unzipper.extract(f, target)
def download_unicodedata(version, output=HOME, no_zip=False):
"""Download Unicode data scripts and blocks."""
ver = tuple([int(x) for x in version.split('.')])
files = [
'UnicodeData.txt',
'Scripts.txt',
'Blocks.txt',
'PropList.txt',
'DerivedCoreProperties.txt',
'DerivedNormalizationProps.txt',
'CompositionExclusions.txt',
'PropertyValueAliases.txt',
'PropertyAliases.txt',
'EastAsianWidth.txt',
'LineBreak.txt',
'HangulSyllableType.txt',
'DerivedAge.txt',
'auxiliary/WordBreakProperty.txt',
'auxiliary/SentenceBreakProperty.txt',
'auxiliary/GraphemeBreakProperty.txt',
'extracted/DerivedDecompositionType.txt',
'extracted/DerivedNumericType.txt',
'extracted/DerivedNumericValues.txt',
'extracted/DerivedJoiningType.txt',
'extracted/DerivedJoiningGroup.txt',
'extracted/DerivedCombiningClass.txt',
'emoji/emoji-data.txt'
]
files.append('ScriptExtensions.txt')
files.append('IndicPositionalCategory.txt')
files.append('IndicSyllabicCategory.txt')
files.append('BidiBrackets.txt')
if ver >= (11, 0, 0):
files.append('VerticalOrientation.txt')
http_url = 'http://www.unicode.org/Public/%s/ucd/' % version
ftp_url = 'ftp://ftp.unicode.org/Public/%s/ucd/' % version
destination = os.path.join(output, 'unicodedata', version)
if not os.path.exists(destination):
os.makedirs(destination)
zip_data = not no_zip
for f in files:
file_location = os.path.join(destination, os.path.basename(f))
retrieved = False
if not os.path.exists(file_location):
for url in (ftp_url, http_url):
furl = url + f
try:
print('Downloading: %s --> %s' % (furl, file_location))
response = urlopen(furl, timeout=30)
data = response.read()
except Exception:
print('Failed: %s' % url)
continue
with codecs.open(file_location, 'w', encoding='utf-8') as uf:
uf.write(data.decode('utf-8'))
retrieved = True
break
if not retrieved:
print('Failed to acquire all needed Unicode files!')
break
else:
retrieved = True
print('Skipping: found %s' % file_location)
if not retrieved:
zip_data = False
break
if zip_data and not os.path.exists(os.path.join(output, 'unicodedata', '%s.zip' % version)):
zip_unicode(output, version)
def get_unicodedata(version, output=HOME, no_zip=False):
"""Ensure we have Unicode data to generate Unicode tables."""
target = os.path.join(output, 'unicodedata', version)
zip_target = os.path.join(output, 'unicodedata', '%s.zip' % version)
if not os.path.exists(target) and os.path.exists(zip_target):
unzip_unicode(output, version)
# Download missing files if any. Zip if required.
download_unicodedata(version, output, no_zip)
if __name__ == '__main__':
import argparse
import unicodedata
parser = argparse.ArgumentParser(prog='unidatadownload', description='Generate a unicode property table.')
parser.add_argument('--version', action='version', version="%(prog)s " + __version__)
parser.add_argument('--output', default=HOME, help='Output file.')
parser.add_argument('--unicode-version', default=None, help='Force a specific Unicode version.')
args = parser.parse_args()
if args.unicode_version is None:
version = unicodedata.unidata_version
else:
version = args.unicode_version
get_unicodedata(version, output=args.output)
| 32.927152
| 112
| 0.627715
|
b9ffb7c6fff3e245dc8ea1ea786cc6f60c2d4cde
| 2,427
|
py
|
Python
|
generator/cache/cache.py
|
biarmic/OpenCache
|
bb9e110e434deb83900de328cc76b63901ba582f
|
[
"BSD-3-Clause"
] | 5
|
2021-09-15T18:29:49.000Z
|
2022-03-26T04:41:01.000Z
|
generator/cache/cache.py
|
VLSIDA/OpenCache
|
0e79bf353c68d57dcc49d78178b12fd0b468f19a
|
[
"BSD-3-Clause"
] | null | null | null |
generator/cache/cache.py
|
VLSIDA/OpenCache
|
0e79bf353c68d57dcc49d78178b12fd0b468f19a
|
[
"BSD-3-Clause"
] | null | null | null |
# See LICENSE for licensing information.
#
# Copyright (c) 2021 Regents of the University of California and The Board
# of Regents for the Oklahoma Agricultural and Mechanical College
# (acting for and on behalf of Oklahoma State University)
# All rights reserved.
#
import debug
import datetime
from policy import associativity
from globals import OPTS, print_time
| 33.246575
| 86
| 0.646477
|
b9ffed8a41299969ab07da01999635758df5ba4f
| 11,469
|
py
|
Python
|
utils/data_loader.py
|
elieser1101/loglizer
|
985c5f582fbbe4d6365184086ac091134a5b5d07
|
[
"MIT"
] | null | null | null |
utils/data_loader.py
|
elieser1101/loglizer
|
985c5f582fbbe4d6365184086ac091134a5b5d07
|
[
"MIT"
] | null | null | null |
utils/data_loader.py
|
elieser1101/loglizer
|
985c5f582fbbe4d6365184086ac091134a5b5d07
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'Shilin He'
import pandas as pd
import os
import numpy as np
def hdfs_data_loader(para):
""" load the log sequence matrix and labels from the file path.
Args:
--------
para: the parameters dictionary
Returns:
--------
raw_data: log sequences matrix
label_data: labels matrix
"""
file_path = para['path'] + para['log_seq_file_name']
label_path = para['path'] + para['label_file_name']
# load log sequence matrix
pre_df = pd.read_csv(file_path, nrows=1, header=None, delimiter=r'\s+')
columns = pre_df.columns.tolist()
# remove the last column of block name
use_cols = columns[:-1]
data_df = pd.read_csv(file_path, delimiter=r'\s+', header=None, usecols =use_cols, dtype =int)
raw_data = data_df.as_matrix()
# load lables
label_df = pd.read_csv(label_path, delimiter=r'\s+', header=None, usecols = [0], dtype =int) # usecols must be a list
label_data = label_df.as_matrix()
print("The raw data shape is {} and label shape is {}".format(raw_data.shape, label_data.shape))
assert raw_data.shape[0] == label_data.shape[0]
print('The number of anomaly instances is %d' % sum(label_data))
return raw_data, label_data
def bgl_data_loader(para):
""" load the logs and the log_event_mapping from the file path.
Args:
--------
para: the parameters dictionary
Returns:
--------
raw_data: list of (label, time)
event_mapping_data: a list of event index, where each row index indicates a corresponding log
"""
file_path = para['path'] + para['log_file_name']
event_mapping_path = para['path'] + para['log_event_mapping']
# load data
data_df = pd.read_csv(file_path, delimiter=r'\s+', header=None, names = ['label','time'], usecols = para['select_column']) #, parse_dates = [1], date_parser=dateparse)
# convert to date time format
data_df['time'] = pd.to_datetime(data_df['time'], format="%Y-%m-%d-%H.%M.%S.%f")
# calculate the time interval since the start time
data_df['seconds_since'] = (data_df['time']-data_df['time'][0]).dt.total_seconds().astype(int)
# get the label for each log
data_df['label'] = (data_df['label'] != '-').astype(int)
raw_data = data_df[['label','seconds_since']].as_matrix()
# load the event mapping list
event_mapping = pd.read_csv(event_mapping_path, delimiter=r'\s+', header=None, usecols = [0], dtype =int)
event_mapping_data = event_mapping.as_matrix()
print("The raw data shape is {} and label shape is {}".format(raw_data.shape, event_mapping_data.shape))
assert raw_data.shape[0] == event_mapping_data.shape[0]
print('The number of anomaly logs is %d, but it requires further processing' % sum(raw_data[:, 0]))
return raw_data, event_mapping_data
def bgl_preprocess_data(para, raw_data, event_mapping_data):
    """Split BGL logs into sliding time windows and build per-window features.

    Args:
    --------
    para: parameters dictionary; uses 'save_path', 'window_size' (hours)
        and 'step_size' (hours) — both are multiplied by 3600 below.
    raw_data: array of (label, seconds_since) rows, one per log line
    event_mapping_data: a list of event index, where each row index indicates a corresponding log

    Returns:
    --------
    event_count_matrix: one row per sliding window, one column per event type,
        holding occurrence counts
    labels: one entry per window; 1 if any log line in the window is anomalous
    """
    # create the directory for saving the sliding windows (start_index, end_index), which can be directly loaded in future running
    if not os.path.exists(para['save_path']):
        os.mkdir(para['save_path'])
    log_size = raw_data.shape[0]
    # cache file name encodes the window/step sizes so different settings don't collide
    sliding_file_path = para['save_path']+'sliding_'+str(para['window_size'])+'h_'+str(para['step_size'])+'h.csv'
    #=================divide into sliding windows=============#
    start_end_index_list = [] # list of tuples, tuple contains two number, which represent the start and end of sliding time window
    label_data, time_data = raw_data[:,0], raw_data[:, 1]
    if not os.path.exists(sliding_file_path):
        # split into sliding window
        start_time = time_data[0]
        start_index = 0
        end_index = 0
        # get the first start, end index, end time
        for cur_time in time_data:
            if cur_time < start_time + para['window_size']*3600:
                end_index += 1
                end_time = cur_time
            else:
                start_end_pair=tuple((start_index,end_index))
                start_end_index_list.append(start_end_pair)
                break
        # move the start and end index until next sliding window
        while end_index < log_size:
            start_time = start_time + para['step_size']*3600
            end_time = end_time + para['step_size']*3600
            # NOTE(review): `i+=1` / `j+=1` below do not advance the loop — Python's
            # `for` rebinds the loop variable each iteration.  If the scan finishes
            # without hitting `break`, i/j stop at the range's last index (one short
            # of the apparent intent) — looks like an off-by-one inherited from the
            # original loglizer code; confirm before changing.
            for i in range(start_index,end_index):
                if time_data[i] < start_time:
                    i+=1
                else:
                    break
            for j in range(end_index, log_size):
                if time_data[j] < end_time:
                    j+=1
                else:
                    break
            start_index = i
            end_index = j
            start_end_pair = tuple((start_index, end_index))
            start_end_index_list.append(start_end_pair)
        inst_number = len(start_end_index_list)
        print('there are %d instances (sliding windows) in this dataset\n'%inst_number)
        # persist the window boundaries so future runs can skip the split above
        np.savetxt(sliding_file_path,start_end_index_list,delimiter=',',fmt='%d')
    else:
        print('Loading start_end_index_list from file')
        # NOTE(review): DataFrame.as_matrix() was removed in pandas 1.0; this branch
        # only works on pandas < 1.0 (newer versions would need `.values`).
        start_end_index_list = pd.read_csv(sliding_file_path, header=None).as_matrix()
        inst_number = len(start_end_index_list)
        print('there are %d instances (sliding windows) in this dataset' % inst_number)
    # get all the log indexes in each time window by ranging from start_index to end_index
    expanded_indexes_list=[]
    for t in range(inst_number):
        index_list = []
        expanded_indexes_list.append(index_list)
    for i in range(inst_number):
        start_index = start_end_index_list[i][0]
        end_index = start_end_index_list[i][1]
        for l in range(start_index, end_index):
            expanded_indexes_list[i].append(l)
    event_mapping_data = [row[0] for row in event_mapping_data]
    event_num = len(list(set(event_mapping_data)))
    print('There are %d log events'%event_num)
    #=================get labels and event count of each sliding window =============#
    labels = []
    event_count_matrix = np.zeros((inst_number,event_num))
    for j in range(inst_number):
        label = 0 #0 represent success, 1 represent failure
        for k in expanded_indexes_list[j]:
            event_index = event_mapping_data[k]
            event_count_matrix[j, event_index] += 1
            # any anomalous line marks the whole window as anomalous
            # (the `continue` below is a no-op: it is the loop body's last statement)
            if label_data[k]:
                label = 1
                continue
        labels.append(label)
    assert inst_number == len(labels)
    print("Among all instances, %d are anomalies"%sum(labels))
    assert event_count_matrix.shape[0] == len(labels)
    return event_count_matrix, labels
def deepia_data_loader(para):
    """ load the logs and the log_event_mapping from the file path.

    Args:
    --------
    para: the parameters dictionary, with keys:
        'path': directory containing the input files
        'log_file_name': whitespace-delimited raw log file whose selected
            columns are the month name, day and clock time
        'log_event_mapping': file whose first column is the event index of each log line
        'select_column': the three column indexes to read as (month, day, hour)

    Returns:
    --------
    raw_data: numpy array with one (seconds_since) column, one row per log line
    event_mapping_data: numpy array of event indexes, one row per log line
    """
    file_path = para['path'] + para['log_file_name']
    event_mapping_path = para['path'] + para['log_event_mapping']
    # load data
    data_df = pd.read_csv(file_path, delimiter=r'\s+', header=None, names=['month', 'day', 'hour'],
                          usecols=para['select_column'])
    # stringify each column so they can be joined into one parseable timestamp
    data_df = data_df[['month', 'day', 'hour']].apply(lambda x: list(map(str, x)))
    data_df['time'] = data_df[['month', 'day', 'hour']].apply(lambda x: '-'.join(x), axis=1)
    data_df['time'] = pd.to_datetime(data_df['time'], format="%b-%d-%H:%M:%S")
    # calculate the time interval since the start time
    data_df['seconds_since'] = (data_df['time'] - data_df['time'][0]).dt.total_seconds().astype(int)
    # .values replaces DataFrame.as_matrix(), which was removed in pandas 1.0
    raw_data = data_df[['seconds_since']].values
    # load the event mapping list
    event_mapping = pd.read_csv(event_mapping_path, delimiter=r'\s+', header=None, usecols=[0], dtype=int)
    event_mapping_data = event_mapping.values
    print("The raw data shape is {} and label shape is {}".format(raw_data.shape, event_mapping_data.shape))
    assert raw_data.shape[0] == event_mapping_data.shape[0]
    return raw_data, event_mapping_data
def deepia_preprocess_data(para, raw_data, event_mapping_data):
    """Split logs into sliding time windows and build an event count matrix.

    Unlike bgl_preprocess_data this variant has no per-line labels, so it
    returns only the count matrix.

    Args:
    --------
    para: parameters dictionary; uses 'save_path', 'window_size' (hours)
        and 'step_size' (hours) — both are multiplied by 3600 below.
    raw_data: array with one (seconds_since) column, one row per log line
    event_mapping_data: a list of event index, where each row index indicates a corresponding log

    Returns:
    --------
    event_count_matrix: one row per sliding window, one column per event type,
        holding occurrence counts
    """
    # create the directory for saving the sliding windows (start_index, end_index), which can be directly loaded in future running
    if not os.path.exists(para['save_path']):
        os.mkdir(para['save_path'])
    log_size = raw_data.shape[0]
    # cache file name encodes the window/step sizes so different settings don't collide
    sliding_file_path = para['save_path']+'sliding_'+str(para['window_size'])+'h_'+str(para['step_size'])+'h.csv'
    #=================divide into sliding windows=============#
    start_end_index_list = [] # list of tuples, tuple contains two number, which represent the start and end of sliding time window
    time_data = raw_data[:,0]
    if not os.path.exists(sliding_file_path):
        # split into sliding window
        start_time = time_data[0]
        start_index = 0
        end_index = 0
        # get the first start, end index, end time
        for cur_time in time_data:
            if cur_time < start_time + para['window_size']*3600:
                end_index += 1
                end_time = cur_time
            else:
                start_end_pair=tuple((start_index,end_index))
                start_end_index_list.append(start_end_pair)
                break
        # move the start and end index until next sliding window
        while end_index < log_size:
            start_time = start_time + para['step_size']*3600
            end_time = end_time + para['step_size']*3600
            # NOTE(review): `i+=1` / `j+=1` below do not advance the loop — Python's
            # `for` rebinds the loop variable each iteration.  If the scan finishes
            # without hitting `break`, i/j stop at the range's last index (one short
            # of the apparent intent) — same quirk as in bgl_preprocess_data; confirm
            # before changing.
            for i in range(start_index,end_index):
                if time_data[i] < start_time:
                    i+=1
                else:
                    break
            for j in range(end_index, log_size):
                if time_data[j] < end_time:
                    j+=1
                else:
                    break
            start_index = i
            end_index = j
            start_end_pair = tuple((start_index, end_index))
            start_end_index_list.append(start_end_pair)
        inst_number = len(start_end_index_list)
        print('there are %d instances (sliding windows) in this dataset\n'%inst_number)
        # persist the window boundaries so future runs can skip the split above
        np.savetxt(sliding_file_path,start_end_index_list,delimiter=',',fmt='%d')
    else:
        print('Loading start_end_index_list from file')
        # NOTE(review): DataFrame.as_matrix() was removed in pandas 1.0; this branch
        # only works on pandas < 1.0 (newer versions would need `.values`).
        start_end_index_list = pd.read_csv(sliding_file_path, header=None).as_matrix()
        inst_number = len(start_end_index_list)
        print('there are %d instances (sliding windows) in this dataset' % inst_number)
    # get all the log indexes in each time window by ranging from start_index to end_index
    expanded_indexes_list=[]
    for t in range(inst_number):
        index_list = []
        expanded_indexes_list.append(index_list)
    for i in range(inst_number):
        start_index = start_end_index_list[i][0]
        end_index = start_end_index_list[i][1]
        for l in range(start_index, end_index):
            expanded_indexes_list[i].append(l)
    event_mapping_data = [row[0] for row in event_mapping_data]
    event_num = len(list(set(event_mapping_data)))
    print('There are %d log events'%event_num)
    #=================get labels and event count of each sliding window =============#
    event_count_matrix = np.zeros((inst_number,event_num))
    for j in range(inst_number):
        for k in expanded_indexes_list[j]:
            event_index = event_mapping_data[k]
            event_count_matrix[j, event_index] += 1
    #print("Among all instances, %d are anomalies"%sum(labels))
    return event_count_matrix
| 38.877966
| 168
| 0.717674
|
6a00c6e63b457a75c0424a247757123821cb24fb
| 1,230
|
py
|
Python
|
aspx2url/aspx2url.py
|
marcocucinato/aspx2url
|
985a0e51865bb7be15618155ff9844730c2eaaf6
|
[
"MIT"
] | null | null | null |
aspx2url/aspx2url.py
|
marcocucinato/aspx2url
|
985a0e51865bb7be15618155ff9844730c2eaaf6
|
[
"MIT"
] | null | null | null |
aspx2url/aspx2url.py
|
marcocucinato/aspx2url
|
985a0e51865bb7be15618155ff9844730c2eaaf6
|
[
"MIT"
] | null | null | null |
from __future__ import print_function
import re, sys, glob, getopt, os
# Script entry point.  NOTE(review): `main` is not defined in this excerpt —
# confirm it is defined earlier in the full module.
if __name__ == '__main__':
    main()
| 29.285714
| 77
| 0.530081
|
6a00d6b8c83e85268bd294d4e512d54f000cfc8a
| 2,843
|
py
|
Python
|
pytype/tests/py2/test_stdlib.py
|
souravbadami/pytype
|
804fa97e7f9208df2711976085a96f756b3949e6
|
[
"Apache-2.0"
] | 1
|
2020-04-20T02:55:21.000Z
|
2020-04-20T02:55:21.000Z
|
pytype/tests/py2/test_stdlib.py
|
doc22940/pytype
|
4772ad6fe89f4df75ae3d08e7374f68074175d4a
|
[
"Apache-2.0"
] | null | null | null |
pytype/tests/py2/test_stdlib.py
|
doc22940/pytype
|
4772ad6fe89f4df75ae3d08e7374f68074175d4a
|
[
"Apache-2.0"
] | null | null | null |
"""Tests of selected stdlib functions."""
from pytype.tests import test_base
test_base.main(globals(), __name__ == "__main__")
| 21.869231
| 61
| 0.518115
|
6a00f01d33da6c470fd1f865044516a818d9c018
| 88
|
py
|
Python
|
smmips/__init__.py
|
oicr-gsi/pysmmips
|
ccf209b13862a5533a11fbe02e80d3265ccef313
|
[
"MIT"
] | null | null | null |
smmips/__init__.py
|
oicr-gsi/pysmmips
|
ccf209b13862a5533a11fbe02e80d3265ccef313
|
[
"MIT"
] | null | null | null |
smmips/__init__.py
|
oicr-gsi/pysmmips
|
ccf209b13862a5533a11fbe02e80d3265ccef313
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Tue Oct 20 16:04:52 2020
@author: rjovelin
"""
| 11
| 35
| 0.579545
|
6a00f65f8d9c6385beccc2cbd3c37ef660b0dc52
| 6,343
|
py
|
Python
|
tarentsocialwall/MongoDBClient.py
|
tarent/socialwall-backend
|
2f09b8ccdd62a15daaa281d6ff568cb6ef749ab6
|
[
"MIT"
] | null | null | null |
tarentsocialwall/MongoDBClient.py
|
tarent/socialwall-backend
|
2f09b8ccdd62a15daaa281d6ff568cb6ef749ab6
|
[
"MIT"
] | null | null | null |
tarentsocialwall/MongoDBClient.py
|
tarent/socialwall-backend
|
2f09b8ccdd62a15daaa281d6ff568cb6ef749ab6
|
[
"MIT"
] | 2
|
2019-08-06T14:14:44.000Z
|
2019-08-06T14:21:19.000Z
|
import random
from datetime import datetime
from passlib.handlers.sha2_crypt import sha256_crypt
from pymongo import MongoClient
from pymongo.errors import ConnectionFailure
from tarentsocialwall.SocialPost import SocialPost
from tarentsocialwall.User import User
from tarentsocialwall.Util import Util
| 33.036458
| 100
| 0.631405
|
6a0102385be6299942545100e581de23300db9a4
| 76,697
|
py
|
Python
|
src/mount_efs/__init__.py
|
Sodki/efs-utils
|
493d9ea0dde93b560519b184219f6f71e32a8fcf
|
[
"MIT"
] | null | null | null |
src/mount_efs/__init__.py
|
Sodki/efs-utils
|
493d9ea0dde93b560519b184219f6f71e32a8fcf
|
[
"MIT"
] | null | null | null |
src/mount_efs/__init__.py
|
Sodki/efs-utils
|
493d9ea0dde93b560519b184219f6f71e32a8fcf
|
[
"MIT"
] | 12
|
2020-10-22T03:47:51.000Z
|
2022-03-19T18:09:59.000Z
|
#!/usr/bin/env python
#
# Copyright 2017-2018 Amazon.com, Inc. and its affiliates. All Rights Reserved.
#
# Licensed under the MIT License. See the LICENSE accompanying this file
# for the specific language governing permissions and limitations under
# the License.
#
#
# Copy this script to /sbin/mount.efs and make sure it is executable.
#
# You will be able to mount an EFS file system by its short name, by adding it
# to /etc/fstab. The syntax of an fstab entry is:
#
# [Device] [Mount Point] [File System Type] [Options] [Dump] [Pass]
#
# Add an entry like this:
#
# fs-deadbeef /mount_point efs _netdev 0 0
#
# Using the 'efs' type will cause '/sbin/mount.efs' to be called by 'mount -a'
# for this file system. The '_netdev' option tells the init system that the
# 'efs' type is a networked file system type. This has been tested with systemd
# (Amazon Linux 2, CentOS 7, RHEL 7, Debian 9, and Ubuntu 16.04), and upstart
# (Amazon Linux 2017.09).
#
# Once there is an entry in fstab, the file system can be mounted with:
#
# sudo mount /mount_point
#
# The script will add recommended mount options, if not provided in fstab.
import base64
import errno
import hashlib
import hmac
import json
import logging
import os
import pwd
import random
import re
import socket
import subprocess
import sys
import threading
import time
from contextlib import contextmanager
from datetime import datetime, timedelta
from logging.handlers import RotatingFileHandler
try:
import ConfigParser
from ConfigParser import NoOptionError, NoSectionError
except ImportError:
from configparser import ConfigParser, NoOptionError, NoSectionError
try:
from urllib.parse import quote_plus
except ImportError:
from urllib import quote_plus
try:
from urllib2 import URLError, HTTPError, build_opener, urlopen, Request, HTTPHandler
from urllib import urlencode
except ImportError:
from urllib.request import urlopen, Request
from urllib.error import URLError, HTTPError
from urllib.parse import urlencode
try:
import botocore.session
from botocore.exceptions import ClientError, NoCredentialsError, EndpointConnectionError
BOTOCORE_PRESENT = True
except ImportError:
BOTOCORE_PRESENT = False
VERSION = '1.28.2'
SERVICE = 'elasticfilesystem'
CONFIG_FILE = '/etc/amazon/efs/efs-utils.conf'
CONFIG_SECTION = 'mount'
CLIENT_INFO_SECTION = 'client-info'
CLIENT_SOURCE_STR_LEN_LIMIT = 100
CLOUDWATCH_LOG_SECTION = 'cloudwatch-log'
DEFAULT_CLOUDWATCH_LOG_GROUP = '/aws/efs/utils'
DEFAULT_RETENTION_DAYS = 14
# Cloudwatchlog agent dict includes cloudwatchlog botocore client, cloudwatchlog group name, cloudwatchlog stream name
CLOUDWATCHLOG_AGENT = None
LOG_DIR = '/var/log/amazon/efs'
LOG_FILE = 'mount.log'
STATE_FILE_DIR = '/var/run/efs'
PRIVATE_KEY_FILE = '/etc/amazon/efs/privateKey.pem'
DATE_ONLY_FORMAT = '%Y%m%d'
SIGV4_DATETIME_FORMAT = '%Y%m%dT%H%M%SZ'
CERT_DATETIME_FORMAT = '%y%m%d%H%M%SZ'
AWS_CREDENTIALS_FILE = os.path.expanduser(os.path.join('~' + pwd.getpwuid(os.getuid()).pw_name, '.aws', 'credentials'))
AWS_CONFIG_FILE = os.path.expanduser(os.path.join('~' + pwd.getpwuid(os.getuid()).pw_name, '.aws', 'config'))
CA_CONFIG_BODY = """dir = %s
RANDFILE = $dir/database/.rand
[ ca ]
default_ca = local_ca
[ local_ca ]
database = $dir/database/index.txt
serial = $dir/database/serial
private_key = %s
cert = $dir/certificate.pem
new_certs_dir = $dir/certs
default_md = sha256
preserve = no
policy = efsPolicy
x509_extensions = v3_ca
[ efsPolicy ]
CN = supplied
[ req ]
prompt = no
distinguished_name = req_distinguished_name
[ req_distinguished_name ]
CN = %s
%s
%s
%s
"""
# SigV4 Auth
ALGORITHM = 'AWS4-HMAC-SHA256'
AWS4_REQUEST = 'aws4_request'
HTTP_REQUEST_METHOD = 'GET'
CANONICAL_URI = '/'
CANONICAL_HEADERS_DICT = {
'host': '%s'
}
CANONICAL_HEADERS = '\n'.join(['%s:%s' % (k, v) for k, v in sorted(CANONICAL_HEADERS_DICT.items())])
SIGNED_HEADERS = ';'.join(CANONICAL_HEADERS_DICT.keys())
REQUEST_PAYLOAD = ''
FS_ID_RE = re.compile('^(?P<fs_id>fs-[0-9a-f]+)$')
EFS_FQDN_RE = re.compile(r'^(?P<fs_id>fs-[0-9a-f]+)\.efs\.(?P<region>[a-z0-9-]+)\.(?P<dns_name_suffix>[a-z0-9.]+)$')
AP_ID_RE = re.compile('^fsap-[0-9a-f]{17}$')
CREDENTIALS_KEYS = ['AccessKeyId', 'SecretAccessKey', 'Token']
ECS_URI_ENV = 'AWS_CONTAINER_CREDENTIALS_RELATIVE_URI'
ECS_TASK_METADATA_API = 'http://169.254.170.2'
WEB_IDENTITY_ROLE_ARN_ENV = 'AWS_ROLE_ARN'
WEB_IDENTITY_TOKEN_FILE_ENV = 'AWS_WEB_IDENTITY_TOKEN_FILE'
STS_ENDPOINT_URL = 'https://sts.amazonaws.com/'
INSTANCE_METADATA_TOKEN_URL = 'http://169.254.169.254/latest/api/token'
INSTANCE_METADATA_SERVICE_URL = 'http://169.254.169.254/latest/dynamic/instance-identity/document/'
INSTANCE_IAM_URL = 'http://169.254.169.254/latest/meta-data/iam/security-credentials/'
SECURITY_CREDS_ECS_URI_HELP_URL = 'https://docs.aws.amazon.com/AmazonECS/latest/developerguide/task-iam-roles.html'
SECURITY_CREDS_WEBIDENTITY_HELP_URL = 'https://docs.aws.amazon.com/eks/latest/userguide/iam-roles-for-service-accounts.html'
SECURITY_CREDS_IAM_ROLE_HELP_URL = 'https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/iam-roles-for-amazon-ec2.html'
DEFAULT_STUNNEL_VERIFY_LEVEL = 2
DEFAULT_STUNNEL_CAFILE = '/etc/amazon/efs/efs-utils.crt'
NOT_BEFORE_MINS = 15
NOT_AFTER_HOURS = 3
EFS_ONLY_OPTIONS = [
'accesspoint',
'awscredsuri',
'awsprofile',
'cafile',
'iam',
'netns',
'noocsp',
'ocsp',
'tls',
'tlsport',
'verify'
]
UNSUPPORTED_OPTIONS = [
'capath'
]
STUNNEL_GLOBAL_CONFIG = {
'fips': 'no',
'foreground': 'yes',
'socket': [
'l:SO_REUSEADDR=yes',
'a:SO_BINDTODEVICE=lo',
],
}
STUNNEL_EFS_CONFIG = {
'client': 'yes',
'accept': '127.0.0.1:%s',
'connect': '%s:2049',
'sslVersion': 'TLSv1.2',
'renegotiation': 'no',
'TIMEOUTbusy': '20',
'TIMEOUTclose': '0',
'TIMEOUTidle': '70',
'delay': 'yes',
}
WATCHDOG_SERVICE = 'amazon-efs-mount-watchdog'
SYSTEM_RELEASE_PATH = '/etc/system-release'
OS_RELEASE_PATH = '/etc/os-release'
RHEL8_RELEASE_NAME = 'Red Hat Enterprise Linux release 8'
CENTOS8_RELEASE_NAME = 'CentOS Linux release 8'
FEDORA_RELEASE_NAME = 'Fedora release'
SUSE_RELEASE_NAME = 'openSUSE Leap'
SKIP_NO_LIBWRAP_RELEASES = [RHEL8_RELEASE_NAME, CENTOS8_RELEASE_NAME, FEDORA_RELEASE_NAME, SUSE_RELEASE_NAME]
def get_region_from_legacy_dns_format(config):
    """
    Fall back to extracting the target region from the legacy 'dns_name_format'
    config value. Only intended for when the region is not present in the
    config file and the metadata calls have failed.
    """
    fmt = config.get(CONFIG_SECTION, 'dns_name_format')
    if '{region}' not in fmt:
        parts = fmt.split('.')
        # With a '{dns_name_suffix}' placeholder the region sits second from
        # the end; with a literal 'amazonaws.com' suffix it sits third.
        if '{dns_name_suffix}' in fmt:
            return parts[-2]
        if 'amazonaws.com' in fmt:
            return parts[-3]
    raise Exception('Region not found in dns_name_format')
def get_aws_security_credentials(use_iam, awsprofile=None, aws_creds_uri=None):
    """
    Resolve AWS security credentials (access key ID, secret access key and
    optional session token), walking the same provider chain as the AWS SDKs:
    explicit ECS credentials URI, named profile, ECS environment variable,
    web identity (e.g. EKS IRSA), then the instance metadata service.
    Returns (None, None) when IAM auth is disabled.
    """
    if not use_iam:
        return None, None

    # An explicit ECS-generated credentials URI takes priority over everything else.
    if aws_creds_uri:
        return get_aws_security_credentials_from_ecs(aws_creds_uri, True)

    # Next, a named profile in ~/.aws/credentials or ~/.aws/config.
    if awsprofile:
        return get_aws_security_credentials_from_awsprofile(awsprofile, True)

    # ECS task role advertised through the environment.
    if ECS_URI_ENV in os.environ:
        creds, source = get_aws_security_credentials_from_ecs(os.environ[ECS_URI_ENV], False)
        if creds and source:
            return creds, source

    # AssumeRoleWithWebIdentity (e.g. IAM Roles for Service Accounts on EKS).
    if WEB_IDENTITY_ROLE_ARN_ENV in os.environ and WEB_IDENTITY_TOKEN_FILE_ENV in os.environ:
        creds, source = get_aws_security_credentials_from_webidentity(
            os.environ[WEB_IDENTITY_ROLE_ARN_ENV],
            os.environ[WEB_IDENTITY_TOKEN_FILE_ENV],
            False
        )
        if creds and source:
            return creds, source

    # Finally, the IAM role attached to this instance via the metadata service.
    iam_role_name = get_iam_role_name()
    if iam_role_name:
        creds, source = get_aws_security_credentials_from_instance_metadata(iam_role_name)
        if creds and source:
            return creds, source

    error_msg = 'AWS Access Key ID and Secret Access Key are not found in AWS credentials file (%s), config file (%s), ' \
                'from ECS credentials relative uri, or from the instance security credentials service' % \
                (AWS_CREDENTIALS_FILE, AWS_CONFIG_FILE)
    fatal_error(error_msg, error_msg)
def write_stunnel_config_file(config, state_file_dir, fs_id, mountpoint, tls_port, dns_name, verify_level, ocsp_enabled,
                              options, log_dir=LOG_DIR, cert_details=None):
    """
    Serializes stunnel configuration to a file. Unfortunately this does not conform to Python's config file format, so we have to
    hand-serialize it.

    Returns the path of the written stunnel config file.  The file combines the
    module-level STUNNEL_GLOBAL_CONFIG / STUNNEL_EFS_CONFIG templates with the
    per-mount values (local accept port, remote DNS name, verify level,
    optional client cert/key and OCSP setting).
    """
    mount_filename = get_mount_specific_filename(fs_id, mountpoint, tls_port)
    global_config = dict(STUNNEL_GLOBAL_CONFIG)
    # Debug logging goes either to the configured file (with {fs_id} expanded)
    # or to a per-mount log under log_dir.
    if config.getboolean(CONFIG_SECTION, 'stunnel_debug_enabled'):
        global_config['debug'] = 'debug'
        if config.has_option(CONFIG_SECTION, 'stunnel_logs_file'):
            global_config['output'] = config.get(CONFIG_SECTION, 'stunnel_logs_file').replace('{fs_id}', fs_id)
        else:
            global_config['output'] = os.path.join(log_dir, '%s.stunnel.log' % mount_filename)
    efs_config = dict(STUNNEL_EFS_CONFIG)
    efs_config['accept'] = efs_config['accept'] % tls_port
    efs_config['connect'] = efs_config['connect'] % dns_name
    efs_config['verify'] = verify_level
    if verify_level > 0:
        add_stunnel_ca_options(efs_config, config, options)
    if cert_details:
        efs_config['cert'] = cert_details['certificate']
        efs_config['key'] = cert_details['privateKey']
    # Older stunnel builds lack checkHost/OCSPaia; fail the mount rather than
    # silently skipping the requested TLS control.
    check_host_supported, ocsp_aia_supported = get_version_specific_stunnel_options()
    tls_controls_message = 'WARNING: Your client lacks sufficient controls to properly enforce TLS. Please upgrade stunnel, ' \
                           'or disable "%%s" in %s.\nSee %s for more detail.' % (CONFIG_FILE,
                           'https://docs.aws.amazon.com/console/efs/troubleshooting-tls')
    if config.getboolean(CONFIG_SECTION, 'stunnel_check_cert_hostname'):
        if check_host_supported:
            efs_config['checkHost'] = dns_name
        else:
            fatal_error(tls_controls_message % 'stunnel_check_cert_hostname')
    # Only use the config setting if the override is not set
    if ocsp_enabled:
        if ocsp_aia_supported:
            efs_config['OCSPaia'] = 'yes'
        else:
            fatal_error(tls_controls_message % 'stunnel_check_cert_validity')
    # Some distributions ship stunnel without libwrap support; skip the option there.
    system_release_version = get_system_release_version()
    if not any(release in system_release_version for release in SKIP_NO_LIBWRAP_RELEASES):
        efs_config['libwrap'] = 'no'
    stunnel_config = '\n'.join(serialize_stunnel_config(global_config) + serialize_stunnel_config(efs_config, 'efs'))
    logging.debug('Writing stunnel configuration:\n%s', stunnel_config)
    stunnel_config_file = os.path.join(state_file_dir, 'stunnel-config.%s' % mount_filename)
    with open(stunnel_config_file, 'w') as f:
        f.write(stunnel_config)
    return stunnel_config_file
def write_tls_tunnel_state_file(fs_id, mountpoint, tls_port, tunnel_pid, command, files, state_file_dir, cert_details=None):
    """
    Persist the TLS tunnel state (pid, command, associated files and optional
    cert details) as JSON and return the state file's name.  The name is
    prefixed with '~' to mark it temporary; the caller renames it to the
    non-temporary form after a successful mount.
    """
    temp_state_file = '~' + get_mount_specific_filename(fs_id, mountpoint, tls_port)

    tunnel_state = {'pid': tunnel_pid, 'cmd': command, 'files': files}
    if cert_details:
        tunnel_state.update(cert_details)

    state_path = os.path.join(state_file_dir, temp_state_file)
    with open(state_path, 'w') as out:
        json.dump(tunnel_state, out)

    return temp_state_file
def poll_tunnel_process(tunnel_proc, fs_id, mount_completed):
    """
    Watch the stunnel process while the mount attempt is in flight, checking
    its health every 0.5s so a dead tunnel fails the mount quickly.  This runs
    off the main thread, so a failed health check exits via os._exit.
    """
    while True:
        if mount_completed.is_set():
            break
        try:
            test_tunnel_process(tunnel_proc, fs_id)
        except SystemExit as exc:
            # Not the main thread: sys.exit would only end this thread.
            os._exit(exc.code)
        mount_completed.wait(0.5)
def parse_arguments_early_exit(args=None):
    """Handle flags that short-circuit before any mount work: --help/-h and --version."""
    if args is None:
        args = sys.argv

    flags = args[1:]
    if '-h' in flags or '--help' in flags:
        usage(out=sys.stdout, exit_code=0)

    if '--version' in flags:
        sys.stdout.write('%s Version: %s\n' % (args[0], VERSION))
        sys.exit(0)
def parse_arguments(config, args=None):
    """Parse mount arguments and return (fs_id, path, mountpoint, options)."""
    if args is None:
        args = sys.argv

    argc = len(args)
    fsname = args[1] if argc > 1 else None
    mountpoint = args[2] if argc > 2 else None
    options = {}
    # '-o' must not be the last token: its value is the element that follows it.
    if argc > 4 and '-o' in args[:-1]:
        options = parse_options(args[args.index('-o') + 1])

    if not fsname or not mountpoint:
        usage(out=sys.stderr)

    fs_id, path = match_device(config, fsname)
    return fs_id, path, mountpoint, options
def get_private_key_path():
    """Return the path of the client private key file.

    Wrapped for mocking purposes in unit tests.
    """
    # Indirection over the module-level constant so tests can patch the path.
    return PRIVATE_KEY_FILE
def create_ca_conf(config_path, common_name, directory, private_key, date,
                   region, fs_id, security_credentials, ap_id, client_info):
    """Populate ca/req configuration file with fresh configurations at every mount since SigV4 signature can change.

    Writes the rendered openssl config to config_path and returns its body.
    The SigV4 auth section is only included when security_credentials are
    supplied; the client-info section only when client_info is supplied.
    """
    public_key_path = os.path.join(directory, 'publicKey.pem')
    ca_extension_body = ca_extension_builder(ap_id, security_credentials, fs_id, client_info)
    efs_client_auth_body = efs_client_auth_builder(public_key_path, security_credentials['AccessKeyId'],
                                                  security_credentials['SecretAccessKey'], date, region, fs_id,
                                                  security_credentials['Token']) if security_credentials else ''
    efs_client_info_body = efs_client_info_builder(client_info) if client_info else ''
    # CA_CONFIG_BODY is a positional '%' template — the argument order below must
    # match its placeholders exactly.
    full_config_body = CA_CONFIG_BODY % (directory, private_key, common_name, ca_extension_body,
                                         efs_client_auth_body, efs_client_info_body)
    with open(config_path, 'w') as f:
        f.write(full_config_body)
    return full_config_body
def subprocess_call(cmd, error_message):
    """Run an external command (e.g. openssl), retrying up to three times.

    Returns (stdout, stderr) bytes on success; on persistent failure reports
    a fatal error that includes the last stderr output.
    """
    attempts = 3
    for _ in range(attempts):
        proc = subprocess.Popen(cmd.split(), stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=True)
        output, err = proc.communicate()
        returncode = proc.poll()
        if returncode == 0:
            return output, err
        logging.error('Command %s failed, rc=%s, stdout="%s", stderr="%s"' % (cmd, returncode, output, err), exc_info=True)
        try:
            proc.kill()
        except OSError:
            # The process may already have exited; nothing left to clean up.
            pass
    final_message = '%s, error is: %s' % (error_message, err)
    fatal_error(final_message, final_message)
def ca_dirs_check(config, database_dir, certs_dir):
    """Ensure the mount's CA database and certs directories exist, creating
    them (with any missing intermediate directories) when absent."""
    for required_dir in (database_dir, certs_dir):
        if not os.path.exists(required_dir):
            create_required_directory(config, required_dir)
def ca_supporting_files_check(index_path, index_attr_path, serial_path, rand_path):
    """Create any missing openssl ca/req support files, leaving existing ones untouched."""
    def _ensure(path, contents):
        # Only (re)create the file when it is absent; never clobber state.
        if os.path.isfile(path):
            return
        with open(path, 'w+') as handle:
            if contents:
                handle.write(contents)

    _ensure(index_path, '')
    _ensure(index_attr_path, 'unique_subject = no')
    _ensure(serial_path, '00')
    _ensure(rand_path, '')
def get_utc_now():
    """Return the current (naive) UTC time; isolated here so unit tests can patch it."""
    utc_now = datetime.utcnow()
    return utc_now
def create_canonical_request(public_key_hash, date, access_key, region, fs_id, session_token=None):
    """
    Build the SigV4 Canonical Request string.
    See https://docs.aws.amazon.com/general/latest/gr/sigv4-create-canonical-request.html
    """
    formatted_datetime = date.strftime(SIGV4_DATETIME_FORMAT)
    credential = quote_plus(access_key + '/' + get_credential_scope(date, region))

    payload_digest = hashlib.sha256()
    payload_digest.update(REQUEST_PAYLOAD.encode())

    # The canonical request is the newline-joined sequence of: HTTP method,
    # URI, query string, canonical headers, signed-header list, payload hash.
    return '\n'.join([
        HTTP_REQUEST_METHOD,
        CANONICAL_URI,
        create_canonical_query_string(public_key_hash, credential, formatted_datetime, session_token),
        CANONICAL_HEADERS % fs_id,
        SIGNED_HEADERS,
        payload_digest.hexdigest(),
    ])
def create_string_to_sign(canonical_request, date, region):
    """
    Build the SigV4 String to Sign.
    See https://docs.aws.amazon.com/general/latest/gr/sigv4-create-string-to-sign.html
    """
    request_digest = hashlib.sha256()
    request_digest.update(canonical_request.encode())

    # Algorithm, timestamp, credential scope and the canonical request's hash,
    # joined by newlines.
    return '\n'.join([
        ALGORITHM,
        date.strftime(SIGV4_DATETIME_FORMAT),
        get_credential_scope(date, region),
        request_digest.hexdigest(),
    ])
def calculate_signature(string_to_sign, date, secret_access_key, region):
    """
    Derive the SigV4 signing key and sign the string-to-sign.
    See https://docs.aws.amazon.com/general/latest/gr/sigv4-calculate-signature.html
    """
    # Successively HMAC the date, region, service and terminator over the
    # 'AWS4'-prefixed secret to derive the signing key.
    signing_key = ('AWS4' + secret_access_key).encode('utf-8')
    for component in (date.strftime(DATE_ONLY_FORMAT), region, SERVICE, 'aws4_request'):
        signing_key = _sign(signing_key, component).digest()

    return _sign(signing_key, string_to_sign).hexdigest()
def match_device(config, device):
    """Return the EFS id and the remote path to mount.

    Accepts either a bare file-system id ('fs-deadbeef', optionally with
    ':/path') or a DNS name that must be a CNAME resolving to this file
    system's EFS mount-target DNS name.  Resolution failures are fatal.
    """
    try:
        remote, path = device.split(':', 1)
    except ValueError:
        # No ':' in the device string — mount the file system root.
        remote = device
        path = '/'
    if FS_ID_RE.match(remote):
        return remote, path
    try:
        primary, secondaries, _ = socket.gethostbyname_ex(remote)
        hostnames = list(filter(lambda e: e is not None, [primary] + secondaries))
    except socket.gaierror:
        create_default_cloudwatchlog_agent_if_not_exist(config)
        fatal_error(
            'Failed to resolve "%s" - check that the specified DNS name is a CNAME record resolving to a valid EFS DNS '
            'name' % remote,
            'Failed to resolve "%s"' % remote
        )
    if not hostnames:
        create_default_cloudwatchlog_agent_if_not_exist(config)
        fatal_error(
            'The specified domain name "%s" did not resolve to an EFS mount target' % remote
        )
    # for/else: the else branch runs only when no resolved hostname produced a
    # successful `return` above (i.e. nothing matched a valid EFS DNS name).
    for hostname in hostnames:
        efs_fqdn_match = EFS_FQDN_RE.match(hostname)
        if efs_fqdn_match:
            fs_id = efs_fqdn_match.group('fs_id')
            expected_dns_name = get_dns_name(config, fs_id)
            # check that the DNS name of the mount target matches exactly the DNS name the CNAME resolves to
            if hostname == expected_dns_name:
                return fs_id, path
    else:
        create_default_cloudwatchlog_agent_if_not_exist(config)
        fatal_error('The specified CNAME "%s" did not resolve to a valid DNS name for an EFS mount target. '
                    'Please refer to the EFS documentation for mounting with DNS names for examples: %s'
                    % (remote, 'https://docs.aws.amazon.com/efs/latest/ug/mounting-fs-mount-cmd-dns-name.html'))
# Script entry point.
if '__main__' == __name__:
    main()
| 38.560583
| 130
| 0.680535
|
6a01fe7f065ff8fbb40e8cf44137b52463e1417c
| 1,010
|
py
|
Python
|
upcfcardsearch/c8.py
|
ProfessorSean/Kasutamaiza
|
7a69a69258f67bbb88bebbac6da4e6e1434947e6
|
[
"MIT"
] | null | null | null |
upcfcardsearch/c8.py
|
ProfessorSean/Kasutamaiza
|
7a69a69258f67bbb88bebbac6da4e6e1434947e6
|
[
"MIT"
] | null | null | null |
upcfcardsearch/c8.py
|
ProfessorSean/Kasutamaiza
|
7a69a69258f67bbb88bebbac6da4e6e1434947e6
|
[
"MIT"
] | null | null | null |
import discord
from discord.ext import commands
from discord.utils import get
| 43.913043
| 195
| 0.687129
|
6a023f8c8af70de4e0b8e937c5773e7da489fab5
| 2,627
|
py
|
Python
|
SVMmodel_withSKF.py
|
tameney22/DCI-Capstone
|
6f59541f16030bfa3f0a706fd9f0e4394e1ee974
|
[
"MIT"
] | null | null | null |
SVMmodel_withSKF.py
|
tameney22/DCI-Capstone
|
6f59541f16030bfa3f0a706fd9f0e4394e1ee974
|
[
"MIT"
] | null | null | null |
SVMmodel_withSKF.py
|
tameney22/DCI-Capstone
|
6f59541f16030bfa3f0a706fd9f0e4394e1ee974
|
[
"MIT"
] | null | null | null |
"""
This script is where the preprocessed data is used to train the SVM model to
perform the classification. I am using Stratified K-Fold Cross Validation to
prevent bias and/or any imbalance that could affect the model's accuracy.
REFERENCE: https://medium.com/@bedigunjit/simple-guide-to-text-classification-nlp-using-svm-and-naive-bayes-with-python-421db3a72d34
"""
import numpy as np
import pandas as pd
from sklearn import model_selection, svm
from sklearn.metrics import accuracy_score
from sklearn.preprocessing import LabelEncoder
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.model_selection import StratifiedKFold
# Open preproccessed csv
df = pd.read_csv("preprocessed.csv", index_col=0)
print(df.head())
print("SPLITTING TRAIN-TEST")
x = df["Text"]
y = df["PublicationTitle"]
train_x, test_x, train_y, test_y = model_selection.train_test_split(
df["Text"], df["PublicationTitle"], test_size=0.3)
# Label encode the target variable to transform categorical data of string
# type into numerical values the model can understand
encoder = LabelEncoder()
# train_y = encoder.fit_transform(train_y)
# test_y = encoder.fit_transform(test_y)
# Word vectorization
# turning a collection of text documents into numerical feature vectors
# We are using Term Frequency - Inverse Document
tfidf_vect = TfidfVectorizer(max_features=5000)
tfidf_vect.fit(df["Text"])
# train_x_tfidf = tfidf_vect.transform(train_x)
# test_x_tfidf = tfidf_vect.transform(test_x)
x_tfidf = tfidf_vect.transform(df["Text"])
y = encoder.fit_transform(y)
# print(tfidf_vect.vocabulary_)
# Fit the training dataset to the classifier
print("TRAINING THE MODEL")
SVM = svm.SVC(C=1.0, kernel='linear', degree=3, gamma='auto')
skf = StratifiedKFold(n_splits=10, shuffle=True, random_state=1)
accuracies = []
fold = 1
for train_idx, test_idx in skf.split(x, y):
print("Working on fold", fold)
x_train_fold, x_test_fold = x_tfidf[train_idx], x_tfidf[test_idx]
y_train_fold, y_test_fold = y[train_idx], y[test_idx]
SVM.fit(x_train_fold, y_train_fold)
acc = SVM.score(x_test_fold, y_test_fold)
print("Acc", fold, ":", acc)
accuracies.append(acc)
fold += 1
print("ACCURACIES:", accuracies)
print("Max Accuracy:", np.max(accuracies))
print("Min Accuracy:", np.min(accuracies))
print("Mean of Accuracies:", np.mean(accuracies))
print("STD of Accuracies:", np.std(accuracies))
# print("RUNNING TEST PREDICTIONS")
# predictions = SVM.predict(test_x_tfidf)
# # Calculate accuracy score
# accuracy = accuracy_score(test_y, predictions)
# print("Accuracy:", str(accuracy * 100) + "%")
| 31.650602
| 132
| 0.760183
|
6a046c97530ca1a780b9dd23d59ddcc8df166fa3
| 263
|
py
|
Python
|
red_dwarf/entrypoints/project_management.py
|
JesseMaitland/red-dwarf
|
f606ada43f4be72c5cab61049182b58c0c348602
|
[
"MIT"
] | null | null | null |
red_dwarf/entrypoints/project_management.py
|
JesseMaitland/red-dwarf
|
f606ada43f4be72c5cab61049182b58c0c348602
|
[
"MIT"
] | null | null | null |
red_dwarf/entrypoints/project_management.py
|
JesseMaitland/red-dwarf
|
f606ada43f4be72c5cab61049182b58c0c348602
|
[
"MIT"
] | null | null | null |
from rsterm import EntryPoint
from red_dwarf.project import provide_project_context, ProjectContext
| 23.909091
| 69
| 0.790875
|
6a048666edf3e5d75a0ded13639990b1d6bed2e8
| 33,554
|
py
|
Python
|
src/consensus.py
|
dschwoerer/samscripts
|
caee697e96a0639b7a4f9db02f70f4fd92b39ef9
|
[
"MIT"
] | null | null | null |
src/consensus.py
|
dschwoerer/samscripts
|
caee697e96a0639b7a4f9db02f70f4fd92b39ef9
|
[
"MIT"
] | null | null | null |
src/consensus.py
|
dschwoerer/samscripts
|
caee697e96a0639b7a4f9db02f70f4fd92b39ef9
|
[
"MIT"
] | null | null | null |
#! /usr/bin/env python
# Copyright Ivan Sovic, 2015. www.sovic.org
#
# Creates a pileup from a given SAM/BAM file, and calls consensus bases (or variants).
import os
import sys
import operator
import subprocess
if __name__ == "__main__":
# if (len(sys.argv) < 5):
# sys.stderr.write('Usage:\n');
# sys.stderr.write('\t%s <reference_file_path> coverage_threshold <collective_output_file> <{sb}am_file_1> [<{sb}am_file_2> <{sb}am_file_3> ...]\n' % sys.argv[0]);
# sys.stderr.write('\t(If <collective_output_file> is equal to "-", no files will be written to disk.)\n');
# exit(1);
if len(sys.argv) < 5:
sys.stderr.write("Usage:\n")
sys.stderr.write(
"\t%s <reference_file_path> coverage_threshold <output_prefix> <{sb}am_file_> [position]\n"
% sys.argv[0]
)
sys.stderr.write(
'\t(If <collective_output_file> is equal to "-", no files will be written to disk.)\n'
)
sys.stderr.write(
'\tPosition parameter is a string specifying "chromosome:start-end"\n\n'
)
exit(1)
reference_file = sys.argv[1]
coverage_threshold = int(sys.argv[2])
output_prefix = sys.argv[3]
sam_file = sys.argv[4]
bed_position = ""
if len(sys.argv) > 5:
bed_position = sys.argv[5]
# sys.stderr.write('bed_position: "%s"\n\n' % bed_position);
processes = []
if output_prefix == "-":
output_prefix = os.path.splitext(sam_file)[0]
main(sam_file, reference_file, coverage_threshold, output_prefix, 0, bed_position)
# if (output_prefix != '-'):
# CollectSummaries([sam_file], output_prefix, output_prefix + '.variant.sum');
| 39.755924
| 436
| 0.550933
|
6a049ff78a91de998072b637d1639d25a433a194
| 5,867
|
py
|
Python
|
web/addons/account_payment/wizard/account_payment_populate_statement.py
|
diogocs1/comps
|
63df07f6cf21c41e4527c06e2d0499f23f4322e7
|
[
"Apache-2.0"
] | null | null | null |
web/addons/account_payment/wizard/account_payment_populate_statement.py
|
diogocs1/comps
|
63df07f6cf21c41e4527c06e2d0499f23f4322e7
|
[
"Apache-2.0"
] | null | null | null |
web/addons/account_payment/wizard/account_payment_populate_statement.py
|
diogocs1/comps
|
63df07f6cf21c41e4527c06e2d0499f23f4322e7
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from lxml import etree
from openerp.osv import fields, osv
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| 48.891667
| 250
| 0.592466
|
6a04d1fd425aed6effcc3e48e1eb103f0872ab5a
| 3,621
|
py
|
Python
|
libqtile/widget/imapwidget.py
|
akloster/qtile
|
bd21d0744e177b8ca01ac129081472577d53ed66
|
[
"MIT"
] | 1
|
2021-04-05T07:15:37.000Z
|
2021-04-05T07:15:37.000Z
|
libqtile/widget/imapwidget.py
|
akloster/qtile
|
bd21d0744e177b8ca01ac129081472577d53ed66
|
[
"MIT"
] | 1
|
2022-02-27T12:17:27.000Z
|
2022-02-27T12:17:27.000Z
|
libqtile/widget/imapwidget.py
|
akloster/qtile
|
bd21d0744e177b8ca01ac129081472577d53ed66
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright (c) 2015 David R. Andersen
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import imaplib
import re
import keyring
from libqtile.log_utils import logger
from libqtile.widget import base
| 38.521277
| 98
| 0.67219
|
6a04e4f203740a253735948c968506f6632354e6
| 2,486
|
py
|
Python
|
game/views/tests/game_definition_view_test.py
|
dimadk24/english-fight-api
|
506a3eb2cb4cb91203b1e023b5248c27975df075
|
[
"MIT"
] | null | null | null |
game/views/tests/game_definition_view_test.py
|
dimadk24/english-fight-api
|
506a3eb2cb4cb91203b1e023b5248c27975df075
|
[
"MIT"
] | null | null | null |
game/views/tests/game_definition_view_test.py
|
dimadk24/english-fight-api
|
506a3eb2cb4cb91203b1e023b5248c27975df075
|
[
"MIT"
] | null | null | null |
from rest_framework.response import Response
from rest_framework.test import APIClient
from game.models import GameDefinition, AppUser
| 35.514286
| 77
| 0.79284
|
6a051324d6c23235da009880d6bcb0d30ed4d8dc
| 315
|
py
|
Python
|
2-Python-Fundamentals (Jan 2021)/Course-Exercises-and-Exams/08-Text-Processing/01_Lab/02-Repeat-Strings.py
|
karolinanikolova/SoftUni-Software-Engineering
|
7891924956598b11a1e30e2c220457c85c40f064
|
[
"MIT"
] | null | null | null |
2-Python-Fundamentals (Jan 2021)/Course-Exercises-and-Exams/08-Text-Processing/01_Lab/02-Repeat-Strings.py
|
karolinanikolova/SoftUni-Software-Engineering
|
7891924956598b11a1e30e2c220457c85c40f064
|
[
"MIT"
] | null | null | null |
2-Python-Fundamentals (Jan 2021)/Course-Exercises-and-Exams/08-Text-Processing/01_Lab/02-Repeat-Strings.py
|
karolinanikolova/SoftUni-Software-Engineering
|
7891924956598b11a1e30e2c220457c85c40f064
|
[
"MIT"
] | null | null | null |
# 2. Repeat Strings
# Write a Program That Reads a list of strings. Each string is repeated N times, where N is the length of the string. Print the concatenated string.
strings = input().split()
output_string = ""
for string in strings:
N = len(string)
output_string += string * N
print(output_string)
| 22.5
| 148
| 0.71746
|
6a05188139a9d21e9e36ba6e3d3eb0801c8187c7
| 416
|
py
|
Python
|
cloudkeeperV1/plugins/cleanup_aws_loadbalancers/test/test_args.py
|
mesosphere/cloudkeeper
|
11be262df5874c1033cfec9964bba1596cab6a36
|
[
"Apache-2.0"
] | 99
|
2020-04-15T22:56:34.000Z
|
2021-06-13T15:04:55.000Z
|
cloudkeeperV1/plugins/cleanup_aws_loadbalancers/test/test_args.py
|
mesosphere/cloudkeeper
|
11be262df5874c1033cfec9964bba1596cab6a36
|
[
"Apache-2.0"
] | null | null | null |
cloudkeeperV1/plugins/cleanup_aws_loadbalancers/test/test_args.py
|
mesosphere/cloudkeeper
|
11be262df5874c1033cfec9964bba1596cab6a36
|
[
"Apache-2.0"
] | 14
|
2020-04-14T22:13:59.000Z
|
2021-04-05T16:42:31.000Z
|
from cklib.args import get_arg_parser, ArgumentParser
from cloudkeeper_plugin_cleanup_aws_loadbalancers import CleanupAWSLoadbalancersPlugin
| 37.818182
| 86
| 0.834135
|
6a0593a2d9f168fbcc460c2d82964c99ec312e4a
| 911
|
py
|
Python
|
mayan/apps/metadata/migrations/0011_auto_20180917_0645.py
|
prezi/mayan-edms
|
e9bc10a056c3379b57115c6e83022f48c6298e1d
|
[
"Apache-2.0"
] | 4
|
2019-02-17T08:35:42.000Z
|
2019-03-28T06:02:11.000Z
|
mayan/apps/metadata/migrations/0011_auto_20180917_0645.py
|
zhoubear/mayan-edms
|
e9bc10a056c3379b57115c6e83022f48c6298e1d
|
[
"Apache-2.0"
] | 1
|
2018-10-11T13:01:34.000Z
|
2018-10-11T13:01:34.000Z
|
mayan/apps/metadata/migrations/0011_auto_20180917_0645.py
|
prezi/mayan-edms
|
e9bc10a056c3379b57115c6e83022f48c6298e1d
|
[
"Apache-2.0"
] | 3
|
2019-01-29T13:21:57.000Z
|
2019-10-27T03:20:15.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.11 on 2018-09-17 06:45
from __future__ import unicode_literals
from django.db import migrations, models
| 35.038462
| 199
| 0.657519
|
6a0724ca0ed93e378a29473e0b6b5911cc4be4e6
| 944
|
py
|
Python
|
algorithm/dfs/boj_1260.py
|
ruslanlvivsky/python-algorithm
|
2b49bed33cd0e95b8a1e758008191f4392b3f667
|
[
"MIT"
] | 3
|
2021-07-18T14:40:24.000Z
|
2021-08-14T18:08:13.000Z
|
algorithm/dfs/boj_1260.py
|
jinsuSang/python-algorithm
|
524849a0a7e71034d329fef63c4f384930334177
|
[
"MIT"
] | null | null | null |
algorithm/dfs/boj_1260.py
|
jinsuSang/python-algorithm
|
524849a0a7e71034d329fef63c4f384930334177
|
[
"MIT"
] | null | null | null |
N, M, V = map(int, input().strip().split())
visited = [False] * (N + 1)
graph = [[] for _ in range(N + 1)]
for i in range(M):
a, b = map(int, input().strip().split())
graph[a].append(b)
graph[b].append(a)
for i in range(1, N + 1):
graph[i].sort()
dfs(V)
visited = [False] * (N + 1)
print()
bfs(V)
| 19.265306
| 44
| 0.470339
|
6a07aa532405a92d53e9ed5f46dcbcbd7a845cfa
| 634
|
py
|
Python
|
redirector.py
|
UKPLab/DiGAT
|
b044648a6c79428872a778908d3a8a689f0ac3e6
|
[
"Apache-2.0"
] | 8
|
2016-06-22T17:02:45.000Z
|
2020-11-16T23:46:13.000Z
|
redirector.py
|
UKPLab/DiGAT
|
b044648a6c79428872a778908d3a8a689f0ac3e6
|
[
"Apache-2.0"
] | null | null | null |
redirector.py
|
UKPLab/DiGAT
|
b044648a6c79428872a778908d3a8a689f0ac3e6
|
[
"Apache-2.0"
] | 1
|
2019-02-25T04:40:04.000Z
|
2019-02-25T04:40:04.000Z
|
from google.appengine.ext import webapp
from google.appengine.ext.webapp.util import run_wsgi_app
__author__ = "Artem Vovk, Roland Kluge, and Christian Kirschner"
__copyright__ = "Copyright 2013-2015 UKP TU Darmstadt"
__credits__ = ["Artem Vovk", "Roland Kluge", "Christian Kirschner"]
__license__ = "ASL"
application = webapp.WSGIApplication(
[('/.*', Redirector)],
debug=True)
if __name__ == "__main__":
main()
| 22.642857
| 67
| 0.705047
|
6a07ddc6734dd5ce8f0853fa4326c144429dfb84
| 5,214
|
py
|
Python
|
imgaug/augmenters/flip.py
|
pAoenix/image-Augmented
|
4acaa7dc48c6167c1716e39e9e78b1cea2067b4a
|
[
"MIT"
] | 1
|
2020-12-30T02:48:40.000Z
|
2020-12-30T02:48:40.000Z
|
imgaug/augmenters/flip.py
|
pAoenix/image-Augmented
|
4acaa7dc48c6167c1716e39e9e78b1cea2067b4a
|
[
"MIT"
] | null | null | null |
imgaug/augmenters/flip.py
|
pAoenix/image-Augmented
|
4acaa7dc48c6167c1716e39e9e78b1cea2067b4a
|
[
"MIT"
] | 2
|
2020-01-14T14:29:49.000Z
|
2021-02-20T07:47:02.000Z
|
"""
Augmenters that apply mirroring/flipping operations to images.
Do not import directly from this file, as the categorization is not final.
Use instead ::
from imgaug import augmenters as iaa
and then e.g. ::
seq = iaa.Sequential([
iaa.Fliplr((0.0, 1.0)),
iaa.Flipud((0.0, 1.0))
])
List of augmenters:
* Fliplr
* Flipud
"""
from __future__ import print_function, division, absolute_import
from .. import parameters as iap
import numpy as np
import six.moves as sm
from .meta import Augmenter
| 32.185185
| 103
| 0.652091
|
6a0b84b7b59fd4b039d379ec665100c80b070e0d
| 1,347
|
py
|
Python
|
2. Add Two Numbers DC(12-1-21).py
|
Dharaneeshwar/Leetcode
|
cc3ed07f6ac5f4d6e3f60c57a94a06a8be2f5287
|
[
"MIT"
] | 4
|
2020-11-17T05:24:24.000Z
|
2021-06-14T21:01:45.000Z
|
2. Add Two Numbers DC(12-1-21).py
|
Dharaneeshwar/Leetcode
|
cc3ed07f6ac5f4d6e3f60c57a94a06a8be2f5287
|
[
"MIT"
] | null | null | null |
2. Add Two Numbers DC(12-1-21).py
|
Dharaneeshwar/Leetcode
|
cc3ed07f6ac5f4d6e3f60c57a94a06a8be2f5287
|
[
"MIT"
] | null | null | null |
# Time Complexity - O(n) ; Space Complexity - O(n)
| 31.325581
| 76
| 0.400148
|
6a0b98cc37e3d3bfecf8eba880eba829290a251c
| 1,862
|
py
|
Python
|
deepgp_dsvi/demos/step_function.py
|
dks28/Deep-Gaussian-Process
|
a7aace43e78aae81468849aee7d172742e6ecf86
|
[
"MIT"
] | 21
|
2020-03-07T15:40:13.000Z
|
2021-11-05T07:49:24.000Z
|
deepgp_dsvi/demos/step_function.py
|
dks28/Deep-Gaussian-Process
|
a7aace43e78aae81468849aee7d172742e6ecf86
|
[
"MIT"
] | 3
|
2021-02-03T13:32:45.000Z
|
2021-07-17T16:07:06.000Z
|
src/demos/step_function.py
|
FelixOpolka/Deep-Gaussian-Process
|
40181f210d7b09863c321d1a90335be77233df80
|
[
"MIT"
] | 2
|
2020-08-10T14:02:28.000Z
|
2020-12-28T16:03:09.000Z
|
import numpy as np
import tensorflow as tf
import matplotlib.pyplot as plt
from gpflow.kernels import White, RBF
from gpflow.likelihoods import Gaussian
from deep_gp import DeepGP
np.random.seed(0)
tf.random.set_seed(0)
if __name__ == '__main__':
Xs, X_train, Y_train, Z = get_data()
dgp = make_deep_GP(3, X_train, Y_train, Z)
optimizer = tf.optimizers.Adam(learning_rate=0.01, epsilon=1e-08)
for _ in range(1500):
with tf.GradientTape(watch_accessed_variables=False) as tape:
tape.watch(dgp.trainable_variables)
objective = -dgp.elbo((X_train, Y_train))
gradients = tape.gradient(objective, dgp.trainable_variables)
optimizer.apply_gradients(zip(gradients, dgp.trainable_variables))
print(f"ELBO: {-objective.numpy()}")
samples, _, _ = dgp.predict_all_layers(Xs, num_samples=50, full_cov=True)
plt.plot(Xs, samples[-1].numpy()[:, :, 0].T, color='r', alpha=0.3)
plt.title('Deep Gaussian Process')
plt.scatter(X_train, Y_train)
plt.show()
| 31.033333
| 77
| 0.645005
|
6a0bd26d528523a33d941c1d0799a814a2b95dcf
| 5,343
|
py
|
Python
|
metaspace/engine/sm/engine/annotation_lithops/moldb_pipeline.py
|
METASPACE2020/METASPACE
|
e1acd9a409f84a78eed7ca9713258c09b0e137ca
|
[
"Apache-2.0"
] | 32
|
2018-08-13T15:49:42.000Z
|
2022-01-17T18:32:19.000Z
|
metaspace/engine/sm/engine/annotation_lithops/moldb_pipeline.py
|
METASPACE2020/METASPACE
|
e1acd9a409f84a78eed7ca9713258c09b0e137ca
|
[
"Apache-2.0"
] | 624
|
2018-07-02T15:18:22.000Z
|
2022-03-30T08:10:35.000Z
|
metaspace/engine/sm/engine/annotation_lithops/moldb_pipeline.py
|
METASPACE2020/METASPACE
|
e1acd9a409f84a78eed7ca9713258c09b0e137ca
|
[
"Apache-2.0"
] | 6
|
2021-01-10T22:24:30.000Z
|
2022-03-16T19:14:37.000Z
|
from __future__ import annotations
import json
import logging
from contextlib import contextmanager, ExitStack
from typing import List, Dict
import pandas as pd
from lithops.storage import Storage
from lithops.storage.utils import CloudObject, StorageNoSuchKeyError
from sm.engine.annotation_lithops.build_moldb import (
build_moldb,
InputMolDb,
DbFDRData,
)
from sm.engine.annotation_lithops.calculate_centroids import (
calculate_centroids,
validate_centroids,
)
from sm.engine.annotation_lithops.executor import Executor
from sm.engine.annotation_lithops.io import (
CObj,
save_cobj,
iter_cobjects_with_prefetch,
deserialize,
)
from sm.engine.annotation_lithops.utils import jsonhash
from sm.engine.utils.db_mutex import DBMutex
from sm.engine.ds_config import DSConfig
from sm.engine.annotation.isocalc_wrapper import IsocalcWrapper
logger = logging.getLogger('annotation-pipeline')
| 36.59589
| 99
| 0.668351
|
6a0dc9555ac01260e856ab868bd3c294497c065f
| 2,830
|
py
|
Python
|
gui/main_window/node_editor/items/connector_top_item.py
|
anglebinbin/Barista-tool
|
2d51507fb3566881923f0b273127f59d23ed317f
|
[
"MIT"
] | 1
|
2020-02-11T19:05:17.000Z
|
2020-02-11T19:05:17.000Z
|
gui/main_window/node_editor/items/connector_top_item.py
|
anglebinbin/Barista-tool
|
2d51507fb3566881923f0b273127f59d23ed317f
|
[
"MIT"
] | null | null | null |
gui/main_window/node_editor/items/connector_top_item.py
|
anglebinbin/Barista-tool
|
2d51507fb3566881923f0b273127f59d23ed317f
|
[
"MIT"
] | null | null | null |
from PyQt5.QtWidgets import QMenu
from gui.main_window.node_editor.items.connector_item import ConnectorItem
| 44.21875
| 118
| 0.673852
|