content stringlengths 5 1.05M |
|---|
# the kafka consumer, used primarily for sentiment analysis.
# this script reads the kafka stream, produced by our various producers.
# the stream is read continuously, and each entry is assigned to a time window.
# the idea is to have 60-second (1 minute) windows to which data is assigned.
# this allows us to categorize live data by the minute.
# This is necessary because we want to observe only real-time data, but also store a timestamp when the data was captured (with a minute precision).
# Another benefit of such a system is that we can process data in bulk. Instead of doing an insert/write/update query on the database within the producers (i.e. when new data is scraped), we do it in a batch.
# This is for performance benefits, batch processing is much faster and consumes less resources than the "basic" way.
# Depending on the flux of data, you may not see its benefits if there's too little data throughput, but once demand goes up you won't have a bottleneck with the database.
# You'll also notice that the data is categorized as either "social" or "news" type.
# Each type is processed in a different manner with a different purpose.
# The reason is simple: news articles are of a different kind than social media chatter.
import sys
import os
sys.path.insert(0, '/home/cryptopredicted/')
from mysettings import CRYPTO_socialKeywords, createLogger, kafkaServerEndPoint, kafkaAuth, kafkaTopic_mentionsSocial, kafkaTopic_mentionsNews
import DAL
from WordStatDict import DICT
import traceback
import html
import nltk
from langdetect import detect
import json
import time
import threading
import json
from datetime import datetime
from kafka import KafkaConsumer, TopicPartition
# Tuning knobs for the windowed batch processing implemented below.
max_window_seconds = 60 # aggregate & update in a one minute window
max_buck_len = 1000 # unless the bucket reaches a threshold then we should flush it prematurely
MAX_MENTIONS_EXTENDED_PER_WINDOW = 500000 # how many mentions to keep within a single buffer/bucket (max_window_seconds || max_buck_len)
# Initialize loggers and the database connection at import time.  If this
# fails there is nothing useful the consumer can do, so report and abort.
try:
    _log = createLogger("consumerK_info", "consumerK_info")
    _logErr = createLogger("consumerK_error", "consumerK_error")
    client = DAL.openConnection()
except Exception as ex:
    # BUG FIX: the original called log()/logErr() here, but those functions
    # are defined further down the file and do not yet exist when this
    # module-level block runs, so any startup failure raised a NameError
    # that masked the real error.  Report directly and abort instead.
    print("exception during startup")
    print(str(ex))
    print(traceback.format_exc())
    sys.exit(1)
def log(*params):
    """Echo every argument to stdout and record it at INFO level."""
    for message in params:
        print(message)
        _log.info(message)
def logErr(*params):
    """Echo every argument to stdout and record it at CRITICAL level."""
    for message in params:
        print(message)
        _logErr.critical(message)
def main():
    """Consume the social/news Kafka topics and batch messages into
    per-minute buckets, handing each full bucket to a worker thread.

    A bucket is flushed when the 60-second window rolls over (the message
    timestamp maps onto a new window) or when it reaches ``max_buck_len``.
    """
    topics = (kafkaTopic_mentionsSocial, kafkaTopic_mentionsNews)
    consumer = KafkaConsumer(*topics, group_id='consumerK_GRP', bootstrap_servers=kafkaServerEndPoint, sasl_mechanism=kafkaAuth['sasl_mechanism'], sasl_plain_username=kafkaAuth['sasl_plain_username'], sasl_plain_password=kafkaAuth['sasl_plain_password'] )
    prev_ts = None  # window timestamp of the previously consumed message
    bucket_social = []
    bucket_news = []
    threads = []  # shared list of live workers (workers pop it when done)
    for msg in consumer:
        # log("partition: " + str(msg.partition))
        try:
            # Kafka timestamps are milliseconds; floor to the window start.
            ts = msg.timestamp/1000
            ts -= ts%(max_window_seconds)
            val = json.loads(msg.value.decode('utf8'))
            if prev_ts != None:
                # Flush a bucket when the window changed or it grew too big.
                if (ts != prev_ts and len(bucket_social) > 0) or len(bucket_social) >= max_buck_len:
                    log("bucket_social: " + str(len(bucket_social)))
                    th = processor_social(prev_ts, bucket_social, threads)
                    threads.append(th)
                    th.start()
                    bucket_social = []
                if (ts != prev_ts and len(bucket_news) > 0) or len(bucket_news) >= max_buck_len:
                    log("bucket_news: " + str(len(bucket_news)))
                    th = processor_news(prev_ts, bucket_news, threads)
                    threads.append(th)
                    th.start()
                    bucket_news = []
            # Route the message into the bucket matching its 'type' field.
            if val['type'] == 'social':
                bucket_social.append(val)
            elif val['type'] == 'news':
                bucket_news.append(val)
            prev_ts = ts
        except Exception as ex:
            log("exception")
            logErr(str(ex), traceback.format_exc())
# {
# "url": "https://twit",
# "source": "twitter",
# "body": "body",
# "type": "social",
# "crypto": "BTC"
# }
class processor_social (threading.Thread):
    """Worker thread that processes one flushed bucket of social mentions:
    aggregates mention counts, then runs sentiment analysis and storage.
    """
    def __init__(self, timestamp, arr, threads):
        threading.Thread.__init__(self)
        self.timestamp = timestamp  # window start (unix seconds)
        self.arr = arr  # decoded 'social' messages belonging to this window
        self.threads = threads  # shared live-thread list owned by main()
    def run(self):
        try:
            print(str(datetime.fromtimestamp(self.timestamp)) + ": ("+str(len(self.threads))+") " + str(len(self.arr)))
            process_rdd_social(self.timestamp, self.arr)
            process_rdd_sentimentAnalysis_social(self.timestamp, self.arr)
        except Exception as ex:
            log("exception")
            logErr(str(ex), traceback.format_exc())
        # NOTE(review): pops the oldest entry, not necessarily this thread;
        # appears to only track the number of live workers — confirm.
        self.threads.pop(0)
class processor_news (threading.Thread):
    """Worker thread that processes one flushed bucket of news articles:
    aggregates mention counts, then runs sentiment analysis and storage.
    """
    def __init__(self, timestamp, arr, threads):
        threading.Thread.__init__(self)
        self.timestamp = timestamp  # window start (unix seconds)
        self.arr = arr  # decoded 'news' messages belonging to this window
        self.threads = threads  # shared live-thread list owned by main()
    def run(self):
        try:
            print(str(datetime.fromtimestamp(self.timestamp)) + ": ("+str(len(self.threads))+") " + str(len(self.arr)))
            process_rdd_news(self.timestamp, self.arr)
            process_rdd_sentimentAnalysis_news(self.timestamp, self.arr)
        except Exception as ex:
            log("exception")
            logErr(str(ex), traceback.format_exc())
        # NOTE(review): pops the oldest entry, not necessarily this thread;
        # appears to only track the number of live workers — confirm.
        self.threads.pop(0)
# sentiment analysis (aggregating mentions per crypto and per source (e.g. fb, tw, ...))
def process_rdd_social(time, part_iterator):
    """Count social mentions per (crypto, source) for one window and store
    the totals via DAL.

    Args (note: the `time` parameter shadows the stdlib `time` module here):
        time: window start as unix seconds.
        part_iterator: iterable of decoded social messages.
    """
    try:
        log("----------- %s : SOCIAL --" % str(datetime.fromtimestamp(time)))
        buckets = {}  # "crypto|source" -> {'crypto', 'source', 'nums' (count)}
        for part in part_iterator:
            crypto = part['crypto']
            source = part['source']
            key = crypto+"|"+source
            if not key in buckets:
                buckets[key] = {'crypto':crypto, 'source': source, 'nums': 0}
            buckets[key]['nums'] += 1
        # One DB write per (crypto, source) pair — batched, not per message.
        for key, b in buckets.items():
            crypto = b['crypto']
            source = b['source']
            nums = b['nums']
            log(crypto+" ("+source+"): "+str(nums))
            DAL.store_mentions_social(client, nums, datetime.fromtimestamp(time), crypto, source)
        DAL.liveness_IAmAlive(client, "worker: social")
    except Exception as ex:
        log("exception")
        logErr(str(ex), traceback.format_exc())
def process_rdd_news(time, part_iterator):
    """Count news mentions per (crypto, source) for one window and store
    the totals via DAL.  Mirrors process_rdd_social for the news stream.

    Args (note: the `time` parameter shadows the stdlib `time` module here):
        time: window start as unix seconds.
        part_iterator: iterable of decoded news messages.
    """
    try:
        log("----------- %s : NEWS --" % str(datetime.fromtimestamp(time)))
        buckets = {}  # "crypto|source" -> {'crypto', 'source', 'nums' (count)}
        for part in part_iterator:
            crypto = part['crypto']
            source = part['source']
            key = crypto+"|"+source
            if not key in buckets:
                buckets[key] = {'crypto':crypto, 'source': source, 'nums': 0}
            buckets[key]['nums'] += 1
        # One DB write per (crypto, source) pair — batched, not per message.
        for key, b in buckets.items():
            crypto = b['crypto']
            source = b['source']
            nums = b['nums']
            log(crypto+" ("+source+"): "+str(nums))
            DAL.store_mentions_news(client, nums, datetime.fromtimestamp(time), crypto, source)
        DAL.liveness_IAmAlive(client, "worker: news")
    except Exception as ex:
        log("exception")
        logErr(str(ex), traceback.format_exc())
# advanced sentiment analysis.
def process_rdd_sentimentAnalysis_social(time, part_iterator):
    """Run dictionary-based sentiment analysis over one window of social
    mentions, then bulk-store both the kept raw mentions and the aggregated
    per-crypto sentiment counts.

    Args (note: the `time` parameter shadows the stdlib `time` module here):
        time: window start as unix seconds.
        part_iterator: iterable of decoded social messages.
    """
    try:
        log("----------- %s : Social Sentiment --" % str(datetime.fromtimestamp(time)))
        keepers = [] # elements from 'out' which we should keep and display to users (batch insert)
        crypto_sentiments = {}  # crypto -> {sentiment category -> hit count}
        count = 0;
        for part in part_iterator:
            count+=1
            try:
                crypto = part["crypto"]
                body = part["body"]
                result = processAndValidateSocialMention(body, crypto)
                if not crypto in crypto_sentiments:
                    crypto_sentiments[crypto] = {}
                # Accumulate per-category dictionary hits for this crypto.
                for cat in result['cats']:
                    if not cat in crypto_sentiments[crypto]:
                        crypto_sentiments[crypto][cat] = 0
                    crypto_sentiments[crypto][cat] += result['cats'][cat]
                if result['keeper'] == 1:
                    part["body"] = result['mention'] # the text has changed due to NLP
                    part["social_score"] = result['score']
                    keepers.append(part)
            except Exception as ex:
                log("exception")
                logErr(str(ex), traceback.format_exc())
        if len(keepers) > 0:
            # Group keeper mentions by crypto symbol.
            buckets = {}
            for keeper in keepers:
                if not keeper['crypto'] in buckets:
                    buckets[keeper['crypto']] = []
                buckets[keeper['crypto']].append( keeper )
            # Storing the social mentions in raw format (tweets, mentions, posts,...):
            for key, val in buckets.items():
                log(key + " keepers: " + str(len(val)))
                # Keep only the highest-scoring mentions for this window.
                val = sorted(val, key=lambda k: k['social_score'], reverse=True)
                val = val[:MAX_MENTIONS_EXTENDED_PER_WINDOW] if len(val) > MAX_MENTIONS_EXTENDED_PER_WINDOW else val
                log(key + " keepers: " + str(len(val)))
                log("--")
                DAL.store_mentions_social_extended_bulk(client, val, datetime.fromtimestamp(time));
        # storing the results of sentiment analysis:
        if len(crypto_sentiments) > 0:
            for crypto, sentiments in crypto_sentiments.items():
                if len(sentiments) > 0:
                    log(crypto + str(sentiments))
                    DAL.store_sentiments_social(client, sentiments, datetime.fromtimestamp(time), crypto);
        DAL.liveness_IAmAlive(client, "worker: sentiments social")
    except Exception as ex:
        log("exception")
        logErr(str(ex), traceback.format_exc())
def processAndValidateSocialMention(mention, crypto):
    """Tokenize, categorize and score a single social mention.

    Returns a dict with:
        'cats': {sentiment category: hit count} from the word dictionary DICT
        'mention': the HTML-unescaped text
        'keeper': 1 if the mention should be stored (contains a tracked
                  keyword for `crypto` and is detected as English), else 0
        'score': quality score from socialMentionScore() (set for keepers)
    """
    retobj = { 'cats':{}, 'mention':mention, 'keeper':0, 'score':0 }
    try:
        editedMention=html.unescape(mention)
        # NOTE(review): tokenization runs on the raw `mention`, not on
        # `editedMention` — confirm whether that is intentional.
        words = nltk.wordpunct_tokenize(mention)
        # sentiment analysis:
        catMatrix = {}
        for cat in DICT.keys():
            count = len(DICT[cat].intersection([w.lower() for w in words]))
            if count > 0:
                catMatrix[cat] = count
        retobj['cats'] = catMatrix # categories matched (if any), eg.: {positivity: 1, uncertainty: 2}
        retobj['mention'] = editedMention # HTML entities unescaped
        if len(set(CRYPTO_socialKeywords[crypto]).intersection([w.lower() for w in words])) <= 0: # make sure the text contains any of the terms
            retobj['keeper']=0 # don't keep it
        elif detect(editedMention) != 'en':
            retobj['keeper']=0  # non-English mentions are dropped
        else:
            score = socialMentionScore(editedMention)
            # if score < 0.90:
            #     retobj['keeper']=0 # don't keep it
            # else:
            #     retobj['score'] = score
            #     retobj['keeper']=1
            retobj['score'] = score
            retobj['keeper']=1
    except Exception as ex:
        log("exception")
        logErr(str(ex), traceback.format_exc())
    return retobj
def socialMentionScore(cleantxt):
    """Heuristic text-quality score in [0.0, 1.0].

    Tokens, lowercase letters and alphanumerics count as favourable;
    digits, uppercase letters and unusual punctuation dilute the score.
    """
    digit_count = sum(1 for ch in cleantxt if ch.isdigit())
    lower_count = sum(1 for ch in cleantxt if ch.islower())
    upper_count = sum(1 for ch in cleantxt if ch.isupper())
    # Anything that is neither alphanumeric nor common punctuation.
    common_punct = (' ', '.', ',', '?')
    weird_count = sum(1 for ch in cleantxt if ch not in common_punct and not ch.isalnum())
    alnum_count = sum(1 for ch in cleantxt if ch.isalnum())
    token_count = len(nltk.wordpunct_tokenize(cleantxt))
    total = token_count + digit_count + upper_count + lower_count + weird_count + alnum_count
    favourable = token_count + lower_count + alnum_count
    # Guard against an empty denominator (e.g. empty input string).
    return favourable / (total if total != 0 else 1)
def process_rdd_sentimentAnalysis_news(time, part_iterator):
    """Run dictionary-based sentiment analysis over one window of news
    articles, then bulk-store the kept articles (without their body) and
    the aggregated per-crypto sentiment counts.

    Args (note: the `time` parameter shadows the stdlib `time` module here):
        time: window start as unix seconds.
        part_iterator: iterable of decoded news messages.
    """
    try:
        log("----------- %s : News Sentiment --" % str(datetime.fromtimestamp(time)))
        keepers = [] # elements from 'out' which we should keep and display to users (batch insert)
        crypto_sentiments = {}  # crypto -> {sentiment category -> hit count}
        count=0
        for part in part_iterator:
            count+=1
            try:
                crypto = part["crypto"]
                body = part["body"]
                title = part["title"]
                result = processAndValidateNewsMention(title, body, crypto) # sentiment analysis on title+body and decide whether to keep title or not
                if not crypto in crypto_sentiments:
                    crypto_sentiments[crypto] = {}
                # Accumulate per-category dictionary hits for this crypto.
                for cat in result['cats']:
                    if not cat in crypto_sentiments[crypto]:
                        crypto_sentiments[crypto][cat] = 0
                    crypto_sentiments[crypto][cat] += result['cats'][cat]
                if result['keeper'] == 1:
                    del part['body'] # we don't want body
                    keepers.append(part)
            except Exception as ex:
                log("exception")
                logErr(str(ex), traceback.format_exc())
        # storing raw news articles:
        if len(keepers) > 0:
            DAL.store_mentions_news_extended_bulk(client, keepers, datetime.fromtimestamp(time));
        # storing results of sentiment analysis:
        if len(crypto_sentiments) > 0:
            for crypto, sentiments in crypto_sentiments.items():
                if len(sentiments) > 0:
                    log(crypto + str(sentiments))
                    DAL.store_sentiments_news(client, sentiments, datetime.fromtimestamp(time), crypto);
        DAL.liveness_IAmAlive(client, "worker: sentiments news")
    except Exception as ex:
        log("exception")
        logErr(str(ex), traceback.format_exc())
def processAndValidateNewsMention(title, mention, crypto):
    """Run dictionary-based sentiment analysis on a news item and decide
    whether to keep it.

    Args:
        title: article title.
        mention: article body (already pre-processed with nltk in newsProducer).
        crypto: the crypto symbol the article was matched to (not used here).

    Returns:
        dict with:
            'cats': {category: hit count} for dictionary matches,
                    e.g. {'positivity': 1, 'uncertainty': 2}
            'keeper': 1 if the article should be kept (detected as
                      English), 0 otherwise.
    """
    retobj = { 'cats':{}, 'keeper':0, }
    try:
        # Prepend the title and analyze title + body in one shot.
        mention = title + "\n" + mention
        # FIX: the original also tokenized the title separately into an
        # unused local (wasted work), and re-lowered the token list once
        # per dictionary category; lower-case once, outside the loop.
        lowered_words = [w.lower() for w in nltk.wordpunct_tokenize(mention)]
        # sentiment analysis:
        catMatrix = {}
        for cat in DICT.keys():
            count = len(DICT[cat].intersection(lowered_words))
            if count > 0:
                catMatrix[cat] = count
        retobj['cats'] = catMatrix # categories matched (if any), eg.: {positivity: 1, uncertainty: 2}
        if detect(mention) != 'en': # english only
            retobj['keeper']=0 # don't keep it
        else:
            retobj['keeper']=1
    except Exception as ex:
        log("exception")
        logErr(str(ex), traceback.format_exc())
    return retobj
if __name__=="__main__":
main()
|
r"""Use TLV decoder and encoder to disassemble and assemble tag-length-value EMV data.
By default TLV data is broken down into bytes:
>>> import pyemv
>>> tlv_data = bytes.fromhex("9C0101E0055F2A0202089F0200")
>>> pyemv.tlv.decode(tlv_data)
{'9C': b'\x01', 'E0': {'5F2A': b'\x02\x08'}, '9F02': b''}
>>> pyemv.tlv.encode({'9C': b'\x01', 'E0': {'5F2A': b'\x02\x08'}, '9F02': b''}).hex().upper()
'9C0101E0055F2A0202089F0200'
It can also be converted to strings (among other things):
>>> import pyemv
>>> tlv_data = bytes.fromhex("9C0101E0055F2A0202089F0200")
>>> pyemv.tlv.decode(tlv_data, convert=lambda t, v: v.hex().upper())
{'9C': '01', 'E0': {'5F2A': '0208'}, '9F02': ''}
>>> pyemv.tlv.encode({'9C': '01', 'E0': {'5F2A': '0208'}, '9F02': ''}).hex().upper()
'9C0101E0055F2A0202089F0200'
"""
import typing as _t
__all__ = ["decode", "DecodeError", "encode", "EncodeError"]
class DecodeError(ValueError):
    r"""Subclass of ValueError that describes TLV decoding error.

    Attributes
    ----------
    msg : str
        The unformatted error message
    tag : str
        Tag where decoding stopped
    offset : int
        Offset in the input data where decoding stopped
    tlv : dict
        Dictionary with partially decoded data
    """

    def __init__(
        self,
        msg: str,
        tag: str,
        offset: int,
        tlv: _t.Dict[str, _t.Any],
    ):
        super().__init__(f"{msg}: tag '{tag}', offset {offset}.")
        self.msg = msg
        self.tag = tag
        self.offset = offset
        self.tlv = tlv
class EncodeError(ValueError):
    r"""Subclass of ValueError that describes TLV encoding error.

    Attributes
    ----------
    msg : str
        The unformatted error message
    tag : str
        Tag where encoding stopped
    """

    def __init__(
        self,
        msg: str,
        tag: str,
    ):
        super().__init__(f"{msg}: tag '{tag}'.")
        self.msg = msg
        self.tag = tag
# fmt: off
# _S is the value type produced by the user-supplied 'convert' callback.
_S = _t.TypeVar("_S")
# Typing-only overloads: decode() returns Dict[str, bytes] by default,
# Dict[str, _S] when 'convert' is supplied, and nested Dict[str, Any]
# unless 'flatten' is requested.  The runtime implementation follows.
@_t.overload
def decode(data: bytes) -> _t.Dict[str, _t.Any]: ...
@_t.overload
def decode(data: bytes, *, simple: _t.Optional[bool]) -> _t.Dict[str, _t.Any]: ...
@_t.overload
def decode(data: bytes, *, convert: _t.Optional[_t.Callable[[str, _t.Union[bytes, bytearray]], _t.Any]]) -> _t.Dict[str, _t.Any]: ...
@_t.overload
def decode(data: bytes, *, simple: _t.Optional[bool], convert: _t.Optional[_t.Callable[[str, _t.Union[bytes, bytearray]], _t.Any]]) -> _t.Dict[str, _t.Any]: ...
@_t.overload
def decode(data: bytes, *, flatten: _t.Optional[bool] = True) -> _t.Dict[str, bytes]: ...
@_t.overload
def decode(data: bytes, *, flatten: _t.Optional[bool] = True, convert: _t.Optional[_t.Callable[[str, _t.Union[bytes, bytearray]], _S]]) -> _t.Dict[str, _S]: ...
@_t.overload
def decode(data: bytes, *, flatten: _t.Optional[bool] = True, simple: _t.Optional[bool]) -> _t.Dict[str, bytes]: ...
@_t.overload
def decode(data: bytes, *, flatten: _t.Optional[bool] = True, simple: _t.Optional[bool], convert: _t.Optional[_t.Callable[[str, _t.Union[bytes, bytearray]], _S]]) -> _t.Dict[str, _S]: ...
# fmt: on
def decode(
    data: _t.Union[bytes, bytearray],
    *,
    flatten: _t.Optional[bool] = None,
    simple: _t.Optional[bool] = None,
    convert: _t.Optional[_t.Callable[[str, _t.Union[bytes, bytearray]], _t.Any]] = None,
) -> _t.Dict[str, _t.Any]:
    r"""Decode TLV data.

    Parameters
    ----------
    data : bytes
        Encoded TLV data
    flatten : bool, optional
        Flatten constructed tags and return one flat dictionary
        with all tags together. Defaults to False.
    simple : bool, optional
        Some specifications stipulate that TLV length is always
        1 byte long with a maximum length of 255.
        To enable this option set simple to True. Defaults to False.
    convert : callable, optional
        Apply this function to every primitive tag value and
        return tag value in desired format.
        Function must accept tag name as a first argument and
        tag value as a second argument.
        Defaults to 'lambda t, v: bytes(v)' to return bytes objects.

    Returns
    -------
    tlv : dict
        Dictionary with decoded data

    Raises
    ------
    DecodeError

    Notes
    -----
    This decoder adheres to Rules for BER-TLV Data Objects in Annex B of
    EMV 4.3 Book 3 Application Specification.

    Examples
    --------
    >>> from pyemv import tlv
    >>> tlv.decode(bytes.fromhex("9C0101E0055F2A0202089F0200"))
    {'9C': b'\x01', 'E0': {'5F2A': b'\x02\x08'}, '9F02': b''}
    >>> tlv.decode(bytes.fromhex("9C0101E0055F2A0202089F0200"), flatten=True)
    {'9C': b'\x01', '5F2A': b'\x02\x08', '9F02': b''}
    >>> tlv.decode(bytes.fromhex("9C0101E0055F2A0202089F0200"), convert=lambda t, v: v.hex().upper())
    {'9C': '01', 'E0': {'5F2A': '0208'}, '9F02': ''}
    >>> tlv.decode(bytes.fromhex("9C0101E0055F2A0202089F0200"), flatten=True, convert=lambda t, v: v.hex().upper())
    {'9C': '01', '5F2A': '0208', '9F02': ''}
    """
    if flatten is None:
        flatten = False
    if simple is None:
        simple = False
    if convert is None:
        # Default: return primitive tag values as immutable bytes.
        convert = lambda t, v: bytes(v)
    dec: _t.Dict[str, _t.Any] = {}
    try:
        _decode(data, 0, len(data), dec, flatten, simple, convert)
    except DecodeError as e:
        # Catch the error here to provide reference
        # to a partially decoded data.
        e.tlv = dec
        raise
    return dec
def _decode(
    data: _t.Union[bytes, bytearray],
    ofst: int,
    ofst_limit: int,
    dec: _t.Dict[str, _t.Any],
    flatten: bool,
    simple: bool,
    convert: _t.Callable[[str, _t.Union[bytes, bytearray]], _S],
) -> int:
    """Recursively decode BER-TLV objects from data[ofst:ofst_limit] into dec.

    Returns the offset at which decoding stopped (equals ofst_limit on
    success).  Raises DecodeError on truncated or malformed input.
    """
    while ofst < ofst_limit:
        # Determine tag name length.
        tag_name_len = 1
        try:
            # If b0-4 are on then a 2nd byte follows.
            constructed = bool(data[ofst] & 0b00100000)
            if (data[ofst] & 0b00011111) == 0b00011111:
                # If b7 is on then another byte follows
                while data[ofst + tag_name_len] & 0b10000000:
                    tag_name_len += 1
                tag_name_len += 1
        except IndexError:
            raise DecodeError(
                "Tag malformed, expecting more data",
                data[ofst : ofst + tag_name_len].hex().upper(),
                ofst,
                dec,
            ) from None
        # Check that tag name falls within parent tag
        if ofst + tag_name_len > ofst_limit:
            raise DecodeError(
                "Tag malformed, expecting more data",
                data[ofst : min(ofst + tag_name_len, ofst_limit)].hex().upper(),
                ofst,
                dec,
            )
        # Save tag name and move farther
        tag = data[ofst : ofst + tag_name_len].hex().upper()
        ofst += tag_name_len
        # Determine tag length
        tag_len_len = 1
        # Check that tag length falls within parent tag
        if ofst + tag_len_len > ofst_limit:
            raise DecodeError(
                f"Tag length malformed, expecting {str(tag_len_len)} byte(s)",
                tag,
                ofst,
                dec,
            )
        # Long form: b8 set means the low 7 bits give the number of
        # subsequent length bytes (only honoured when 'simple' is off).
        if data[ofst] & 0b10000000 and not simple:
            tag_len_len = data[ofst] & 0b01111111
            ofst += 1
            # Data does not have enough bytes to contain full
            # length as indicated by the previous byte.
            if ofst + tag_len_len > ofst_limit:
                raise DecodeError(
                    f"Tag length malformed, expecting {str(tag_len_len)} byte(s)",
                    tag,
                    ofst,
                    dec,
                )
            tag_len = int.from_bytes(data[ofst : ofst + tag_len_len], "big")
            ofst += tag_len_len
        else:
            tag_len = data[ofst]
            ofst += tag_len_len
        # Check that tag data falls within parent tag
        if ofst + tag_len > ofst_limit:
            raise DecodeError(
                f"Tag value malformed, expecting {str(tag_len)} byte(s)",
                tag,
                ofst,
                dec,
            )
        # Constructed data type (b5=on): recurse into the value.
        if constructed:
            if flatten:
                ofst = _decode(
                    data, ofst, ofst + tag_len, dec, flatten, simple, convert
                )
            else:
                dec[tag] = {}
                ofst = _decode(
                    data, ofst, ofst + tag_len, dec[tag], flatten, simple, convert
                )
        # Primitive data type
        else:
            dec[tag] = convert(tag, data[ofst : ofst + tag_len])
            ofst += tag_len
    return ofst
def encode(
    tlv: _t.Mapping[str, _t.Any],
    *,
    simple: _t.Optional[bool] = None,
) -> bytes:
    r"""Encode TLV data.

    Parameters
    ----------
    tlv : dict
        Dictionary with decoded TLV data.  Keys are hexchar tag names;
        values are bytes/hexchar strings for primitive tags or nested
        dictionaries for constructed tags.
    simple : bool, optional
        Some specifications stipulate that TLV length is always
        1 byte long with a maximum length of 255.
        To enable this option set simple to True. Defaults to False.

    Returns
    -------
    tlv : bytes
        Encoded TLV data

    Raises
    ------
    EncodeError

    Notes
    -----
    This encoder adheres to Rules for BER-TLV Data Objects in Annex B of
    EMV 4.3 Book 3 Application Specification.

    Examples
    --------
    >>> from pyemv import tlv
    >>> tlv_data = {'9C': b'\x01', 'E0': {'5F2A': b'\x02\x08'}, '9F02': b''}
    >>> tlv.encode(tlv_data).hex().upper()
    '9C0101E0055F2A0202089F0200'
    >>> tlv_data = {'9C': '01', 'E0': {'5F2A': '0208'}, '9F02': ''}
    >>> tlv.encode(tlv_data).hex().upper()
    '9C0101E0055F2A0202089F0200'
    """
    if simple is None:
        simple = False
    return bytes(_encode(tlv, simple))
def _encode(tlv: _t.Mapping[str, _t.Any], simple: bool) -> bytearray:
data = bytearray()
for tag_s, value in tlv.items():
# Tag
try:
tag = bytes.fromhex(tag_s)
data += tag
except ValueError:
raise EncodeError("Invalid tag format, expecting hexchar string", tag_s)
# Check tag format
try:
# If b0-4 are on then a 2nd byte follows.
tag_name_len = 1
if (tag[0] & 0b00011111) == 0b00011111:
# If b7 is on then another byte follows
while tag[tag_name_len] & 0b10000000:
tag_name_len += 1
tag_name_len += 1
except IndexError:
raise EncodeError(
"Invalid tag format, expecting more data", tag_s
) from None
if len(tag) != tag_name_len:
raise EncodeError("Invalid tag format, extra data", tag_s)
# Value
# Constructed
if bool(tag[0] & 0b00100000):
if not isinstance(value, _t.Mapping):
raise EncodeError(
f"Invalid value type ({value.__class__.__name__}) "
"for a constructed tag, expecting a dict",
tag_s,
)
value = _encode(value, simple)
# Primitive
elif isinstance(value, str):
try:
value = bytes.fromhex(value)
except ValueError:
raise EncodeError(
"Invalid value format, expecting hexchar string", tag_s
)
elif not isinstance(value, (bytes, bytearray)):
raise EncodeError(
f"Invalid value type ({value.__class__.__name__}) "
"for a primitive tag, expecting bytes or str",
tag_s,
)
# Length
tag_len_len = 1
if len(value) > 255 and simple:
raise EncodeError(
f"Value length ({str(len(value))}) "
"cannot exceed 255 bytes when 'simple' is enabled",
tag_s,
)
# Multi-byte length required
if len(value) > 127 and not simple:
while len(value) > 2 ** (8 * tag_len_len) - 1:
tag_len_len += 1
data += int.to_bytes(tag_len_len | 0b10000000, 1, "big")
data += int.to_bytes(len(value), tag_len_len, "big") + value
return data
|
from django.views.generic.base import TemplateView
from regulations.generator.api_reader import ApiReader
class SearchView(TemplateView):
    """Render search results for the query in the ``q`` GET parameter."""

    template_name = 'regulations/search.html'

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        context['results'] = get_data(self.request.GET.get("q"))
        # Expose every GET parameter to the template alongside the results.
        return {**context, **self.request.GET.dict()}
def get_data(query):
    """Run *query* through the regulations search API and return the response."""
    reader = ApiReader()
    return reader.search(query)
|
import os
from django.conf import settings
from django.core import management
from ievv_opensource.ievvtasks_common.base_command import BaseIevvTasksCommand
from ievv_opensource.ievvtasks_development.management.commands.ievvtasks_dump_db_as_sql import \
get_dumpdata_filepath
class Command(BaseIevvTasksCommand):
    help = ('Recreate the database using django_dbdev migrate the database '
            'and load the json data dump created with '
            'ievvtasks_dump_devdb_as_sql.')

    def handle(self, *args, **options):
        """Reinit the dev database, load the SQL dump if present, migrate,
        then run any configured post-commands."""
        management.call_command('dbdev_reinit')
        dumpdatafile = get_dumpdata_filepath()
        if os.path.exists(dumpdatafile):
            self.stdout.write('Loading data from {}.'.format(dumpdatafile))
            management.call_command('dbdev_loaddump', dumpdatafile)
        self.stdout.write('Running management command: migrate.')
        management.call_command('migrate')
        # Optional extra commands configured in settings.
        self.run_management_commands(
            getattr(settings, 'IEVVTASKS_RECREATE_DEVDB_POST_MANAGEMENT_COMMANDS', []))
|
Print Nth term in A.P
The first two terms (a1, a2) of a strictly increasing arithmetic progression (A.P) are passed as input. The program must print the Nth term (aN) in the arithmetic progression.
Input Format:
- The first line will contain the value of the first term a1
- The second line will contain the value of the second term a2
- The third line will contain the value of N.
Boundary Conditions:
-999999 <= a1 <= 999999
-999999 <= a2 <= 999999
3 <= N <= 1000
Output Format:
The Nth term (aN) in the arithmetic progression
Example Input/Output 1:
Input:
3
6
4
Output:
12
Explanation:
The arithmetic progression is 3, 6, 9, 12, 15, ...
So the fourth term is 12
Example Input/Output 2:
Input:
-200
-20
5
Output:
520
Explanation:
The arithmetic progression is -200, -20, 160, 340, 520...
So the 5th term is 520
def nth_term(a1, a2, n):
    """Return the n-th term of the arithmetic progression whose first two
    terms are a1 and a2 (common difference d = a2 - a1)."""
    d = a2 - a1
    return a1 + (n - 1) * d

if __name__ == "__main__":
    # Read a1, a2 and N, one per line, and print the N-th term.
    # (Guarded so importing this module has no side effects.)
    a1 = int(input())
    a2 = int(input())
    n = int(input())
    print(nth_term(a1, a2, n))
|
#!/usr/bin/env python3
"""
Functions for predicting time-integrated ionospheric Faraday rotation effects.
Uses RMextract package to get time-dependent ionospheric RMs for a given
observation, then performs the time-integration to work out the effective
change in polarization angle, and the effective depolarization (together,
called the ionospheric *modulation*, which is called Theta in the derivation).
The ionospheric prediction is currently derived from RMExtract
(https://github.com/lofar-astron/RMextract/).
Other ionosphere RM codes are available (ionFR, ALBUS) are available, but
RMextract was selected for its ease of install and use.
RMextract relies on external maps of Total Electron Content (TEC). Currently
the CODG TEC maps are used (this is default for RMextract), but other data
sources are available. Changing TEC sources would require changing the
RMExtract call in calculate_modulation().
"""
import RMextract.getRM as RME
from astropy.time import Time,TimeDelta
import numpy as np
from astropy.coordinates import EarthLocation,SkyCoord
import astropy.units as u
from FRion.correct import find_freq_axis
C = 2.99792458e8 # Speed of light [m/s]
def calculate_modulation(start_time, end_time, freq_array, telescope_location,
                         ra,dec, timestep=600.,ionexPath='./IONEXdata/'):
    """Calculate the ionospheric FR modulation, as a function of frequency,
    for a given observation (time, location, target direction).

    Args:
        start_time (string readable by astropy.time.Time):
            Starting time of observation.
            Example time string\: '2010-01-02T00:00:00'
        end_time (string readable by astropy.time.Time):
            ending time of observation.
        freq_array (array-like):
            vector of channel frequencies (in Hz)
        telescope_location (astropy.coordinates EarthLocation or string):
            location of telescope, or name of telescope known to
            get_telescope_coordinates() function.
        ra (float):
            right ascension of observation center (in deg, J2000)
        dec (float):
            declination of observation center (in deg, J2000)
        timestep (float): time in seconds between ionosphere FR estimates
        ionexPath (str): directory used to cache downloaded IONEX TEC maps

    Returns:
        tuple containing

        -times (array): vector of times (in MJD seconds) of each ionospheric RM calculation

        -RMs (array): vector of RM values computed for each time step

        -theta (array) vector containing the (complex) ionospheric polarization
        for each frequency channel.
    """
    #If necessary, convert telescope name into telescope location object:
    if type(telescope_location) != EarthLocation:
        telescope_location=get_telescope_coordinates(telescope_location)
    #RMextract wants time ranges in MJD seconds:
    timerange=[Time(start_time).mjd*86400.0,
               Time(end_time).mjd*86400.0]
    #Extract telescope coordinates into expected format (geodetic x,y,z):
    telescope_coordinates=[telescope_location.x.value,
                           telescope_location.y.value,
                           telescope_location.z.value]
    #Get RMExtract to generate its RM predictions
    predictions=RME.getRM(ionexPath=ionexPath, radec=np.deg2rad([ra,dec]),
                          timestep=timestep,
                          timerange = timerange,
                          stat_positions=[telescope_coordinates,])
    #predictions dictionary contains STEC, Bpar, BField, AirMass, elev, azimuth
    # RM, times, timestep, station_names, stat_pos, flags, reference_time
    times=predictions['times']
    # 'st1' is the single station supplied above in stat_positions.
    RMs=np.squeeze(predictions['RM']['st1'])
    #Compute the time-integrated change in polarization.
    theta=numeric_integration(times,RMs,freq_array)
    check_numeric_problems(RMs, freq_array,theta)
    return times,RMs, theta
def numeric_integration(times,RMs,freq_array):
    """Numerical integration of the time-varying ionospheric polarization
    modulation. Testing has shown that numerical integration is accurate
    to better than 1% accuracy except where depolarization is extreme (>99%).

    Args:
        times (array): ionosphere sampling times (in MJD seconds)
        RMs (array): ionospheric RMs at each time (in rad/m^2)
        freq_array (array): channel frequencies (in Hz)

    Returns: array: time-integrated ionospheric modulation per channel.
    """
    # COMPAT FIX: scipy.integrate.simps was renamed to simpson and the old
    # name was removed in SciPy >= 1.14; support both old and new SciPy.
    try:
        from scipy.integrate import simpson
    except ImportError:
        from scipy.integrate import simps as simpson
    l2_arr=(C/freq_array)**2
    # z[i, j] = exp(2i * lambda_i^2 * RM_j): instantaneous modulation.
    z=np.exp(2.j*np.outer(l2_arr,RMs))
    #Scipy's numerical integrators can't handle complex numbers, so the
    # integral needs to be broken into real and complex components.
    real=simpson(z.real,x=times,axis=1)
    imag=simpson(z.imag,x=times,axis=1)
    # Normalize by the total duration to get a time-averaged modulation.
    theta=(real+1.j*imag)/(times[-1]-times[0])
    return theta
def check_numeric_problems(RMs, freq_array,theta):
    """Checks for conditions that might cause numeric instability in the
    time-integration, and warns the user if there might be concerns.

    Specifically, checks for extreme jumps in polarization angle between
    timesteps (will cause integration errors),
    and for extreme depolarization (high likelihood of large errors).

    Args:
        RMs (array): ionospheric RMs per time step
        freq_array (array): channel frequencies (in Hz)
        theta (array): ionospheric modulation per channel.
    """
    import warnings
    #Check for large jumps in RM/polarization angle between steps.
    #These can cause the numeric integrator to not catch angle wraps.
    longest_l2=(C/np.min(freq_array))**2
    # BUG FIX: use the absolute difference — a large *negative* RM jump
    # wraps the polarization angle just as badly as a positive one, but
    # np.max(np.diff(RMs)) alone missed downward jumps.  Also guard
    # against a single-sample RM series (np.diff would be empty).
    if np.size(RMs) > 1:
        max_deltaRM = np.max(np.abs(np.diff(RMs)))
    else:
        max_deltaRM = 0.0
    max_delta_polangle=longest_l2*max_deltaRM #in radians
    if max_delta_polangle > 0.5:
        warnings.warn(("\nLarge variations in RM between points, which may "
                       "introduce numerical errors.\n"
                       "Consider trying a smaller timestep."))
    #Warn about very low values of theta (very strong depolarization)
    # as these can probably not be corrected reliably.
    if np.min(np.abs(theta)) < 0.02:
        warnings.warn(("\nExtreme depolarization predicted (>98%). "
                       "Corrected polarization will almost certainly not "
                       "be trustworthy in affected channels."))
    elif np.min(np.abs(theta)) < 0.1:
        warnings.warn(("\nSignificant depolarization predicted (>90%). "
                       "Errors in corrected polarization are likely to be "
                       "very large in some channels."))
def get_telescope_coordinates(telescope):
    """Return the astropy.coordinates EarthLocation object associated
    with the position of the telescope.

    The input must be either a string with the name of a pre-programmed
    telescope, a 3-component tuple/list with the latitude [deg],
    longitude [deg], and height [m], or an EarthLocation object
    (which is passed through unchanged).

    Known telescopes:

    * ASKAP

    Raises:
        Exception: if the telescope name is not recognized, or the input
            is of an unsupported type (previously this fell through and
            silently returned None).
    """
    if isinstance(telescope, EarthLocation): #Pass EarthLocations through without processing
        return telescope
    elif isinstance(telescope, str): #Hardcoded coordinates for some telescopes.
        if telescope == 'ASKAP':
            # ASKAP (MRO) is at 26d42m15s SOUTH, 116d39m32s east.
            # BUG FIX: the original wrote lat = -1*26+42/60+15/3600, which
            # negates only the degrees term and evaluates to -25.295 deg
            # instead of -(26 + 42/60 + 15/3600) = -26.704 deg.
            lat = -(26 + 42/60 + 15/3600) #degree
            long = 116 + 39/60 + 32/3600 # degree
            height = 381.0 # metres
        else:
            raise Exception("Telescope name not recognized.")
        return EarthLocation(lat=lat*u.deg, lon=long*u.deg, height=height*u.m)
    elif isinstance(telescope, (tuple, list)):
        return EarthLocation(lat=telescope[0]*u.deg, lon=telescope[1]*u.deg,
                             height=telescope[2]*u.m)
    else:
        raise Exception("Unsupported telescope specification; expected "
                        "EarthLocation, telescope name, or (lat, long, height).")
def write_modulation(freq_array,theta,filename):
    """Save the predicted ionospheric modulation to a plain-text table.

    Columns: frequency [Hz], real part of theta, imaginary part of theta.

    Args:
        freq_array (array): channel frequencies (in Hz)
        theta (array): ionospheric (complex) modulation at each frequency
        filename (str): file path to save data to.
    """
    table = np.column_stack((freq_array, theta.real, theta.imag))
    np.savetxt(filename, table)
def generate_plots(times,RMs,theta,freq_array,position=None,savename=None):
    """Makes a figure with two plots: the RM variation over time,
    and the (modulus of the) modulation as a function of frequency.

    If savename contains a string it will save the plots to that filename,
    otherwise the plots are not saved.
    If position ([ra,dec]) is given, it will be printed above the plots.

    Args:
        times (array): ionosphere sampling times (in MJD seconds)
        RMs (array): ionospheric RMs at each time (in rad/m^2)
        theta (array): ionospheric (complex) modulation at each frequency
        freq_array (array): channel frequencies (in Hz)
        position (list): [ra, dec] in degrees, left blank if not supplied.
        savename (str): File path to save plot to; if 'screen' will send to display.
    """
    from matplotlib import pyplot as plt
    from matplotlib import dates as mdates
    plot_times=Time(times/86400.0,format='mjd').plot_date
    fig,(ax1,ax2)=plt.subplots(2,1,figsize=(8,8))
    # NOTE(review): Axes.plot_date is deprecated in recent matplotlib —
    # consider ax1.plot() once the minimum matplotlib version is settled.
    ax1.plot_date(plot_times,RMs,fmt='k.')
    locator=mdates.AutoDateLocator(minticks=3,maxticks=7)
    formatter = mdates.ConciseDateFormatter(locator)
    ax1.xaxis.set_major_locator(locator)
    ax1.xaxis.set_major_formatter(formatter)
    # FIX: raw strings for LaTeX labels — '\p', '\T', '\l', '\m' are not
    # valid escapes and trigger SyntaxWarning on modern Python; the string
    # values are unchanged.
    ax1.set_ylabel(r'$\phi_\mathrm{ion}$ [rad m$^{-2}$]')
    ax2.plot(freq_array,np.abs(theta),'k.')
    ax2.set_xlabel('Frequency [Hz]')
    ax2.set_ylabel(r'|$\Theta(\lambda^2)$|')
    if position is not None:
        # FIX: the degree sign was mojibake ('ยฐ', UTF-8 bytes of '°'
        # decoded with the wrong codec); restore the intended character.
        ax1.set_title("RA: {:.2f}°, Dec: {:.2f}°".format(position[0],position[1]))
    if savename is not None:
        if savename == "screen":
            plt.show()
        else:
            plt.savefig(savename,bbox_inches='tight')
def predict():
    """Wrapper for command line interface. Gets command line arguments,
    calculates ionospheric effects, and saves modulation and/or figure if
    specified.

    Parameters may come from the command line, from a FITS cube header
    (via -F), or both; explicit command-line values override FITS values.
    Raises an Exception listing any parameters that are still missing.
    """
    import argparse
    descStr = """
Calculate ionospheric Faraday rotation and predict time-integrated effect
as a function of frequency.
Can determine the frequency and observation time parameters from a
supplied FITS cube, if it has the correct keywords,
otherwise from those parameters must be supplied on the command line.
"""
    parser = argparse.ArgumentParser(description=descStr,
                                     formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument("-F", dest="fits", default=None, metavar='FILENAME',
                        help="FITS cube relevant information in header.")
    parser.add_argument("-d", dest="times", nargs=2, type=str, default=None,
                        metavar=('START', 'END'),
                        help="start and end time strings.")
    parser.add_argument("-c", dest="freq_parms", nargs=3, default=None, type=float,
                        metavar=('MINFREQ', 'MAXFREQ', 'CHANNELWIDTH'),
                        help=("Generate channel frequencies (in Hz): \n"
                              "    minfreq, maxfreq, channel_width"))
    parser.add_argument("-t", dest='telescope_name', type=str, default=None,
                        help="Telescope name")
    # BUG FIX: metavar previously read ('LONG','LAT','ALT') while the values
    # are consumed as (lat, long, alt) by get_telescope_coordinates(); the
    # metavar now matches the documented and actual order.
    parser.add_argument("-T", dest='telescope_coords', nargs=3, type=float, default=None,
                        metavar=('LAT', 'LONG', 'ALT'),
                        help="Telescope coordinates:\n lat[deg],long[deg], altitude[m].")
    parser.add_argument('-p', dest='pointing', nargs=2, type=float, default=None,
                        metavar=('RA', 'DEC'),
                        help="Pointing center: RA[deg], Dec[deg]")
    parser.add_argument("-s", dest='savefile', type=str, default=None, metavar='POLFILE',
                        help="Filename to save ionosphere data to.")
    parser.add_argument("-S", dest='savefig', type=str, default=None, metavar='FIGFILE',
                        help="Filename to save the plots to. Entering 'screen' plots to the screen.")
    parser.add_argument("--timestep", dest='timestep', default=600., type=float,
                        help="Timestep for ionospheric prediction, in seconds. Default = 600")
    args = parser.parse_args()

    start_time = None
    end_time = None
    freq_arr = None
    telescope = None
    ra = None
    dec = None
    # If a FITS file is present, try to fill in any missing keywords.
    # But since FITS headers can be very different, it may be that not all
    # keywords can be found.
    if args.fits is not None:
        import astropy.io.fits as pf
        header = pf.getheader(args.fits)
        if 'DATE-OBS' in header.keys():
            start_time = header['DATE-OBS']
        if 'DATE-OBS' in header.keys() and 'DURATION' in header.keys():
            end_time = Time(header['DATE-OBS']) + TimeDelta(header['DURATION'], format="sec")
        # For coordinates, the code will always use the middle pixel to derive
        # the RA and Dec. Assumes position coordinates are in first 2 axes.
        if 'RA' in header['CTYPE1']:
            ra = header['CRVAL1'] + header['CDELT1'] * (header['NAXIS1'] / 2 - header['CRPIX1'])
        if 'DEC' in header['CTYPE2']:
            dec = header['CRVAL2'] + header['CDELT2'] * (header['NAXIS2'] / 2 - header['CRPIX2'])
        if 'GLON' in header['CTYPE1'] and 'GLAT' in header['CTYPE2']:
            # Support galactic coordinates, just in case:
            gl = header['CRVAL1'] + header['CDELT1'] * (header['NAXIS1'] / 2 - header['CRPIX1'])
            gb = header['CRVAL2'] + header['CDELT2'] * (header['NAXIS2'] / 2 - header['CRPIX2'])
            position = SkyCoord(gl, gb, frame='galactic', unit='deg')
            ra = position.fk5.ra.deg
            dec = position.fk5.dec.deg
        if 'TELESCOP' in header.keys():
            telescope = header['TELESCOP']
        freq_axis = str(find_freq_axis(header))
        if freq_axis != '0':
            # Reconstruct the channel frequencies from the WCS keywords.
            chan0 = header['CRVAL' + freq_axis] - header['CDELT' + freq_axis] * (header['CRPIX' + freq_axis] - 1)
            chan_final = chan0 + (header['NAXIS' + freq_axis] - 1) * header['CDELT' + freq_axis]
            freq_arr = np.linspace(chan0, chan_final, header['NAXIS' + freq_axis])

    # Any parameters taken from FITS header can be overridden by manual inputs:
    if args.times is not None:
        start_time = Time(args.times[0])
        end_time = Time(args.times[1])
    if args.freq_parms is not None:
        freq_arr = np.arange(args.freq_parms[0], args.freq_parms[1], args.freq_parms[2])
    if args.telescope_name is not None:
        telescope = get_telescope_coordinates(args.telescope_name)
    if args.telescope_coords is not None:
        telescope = get_telescope_coordinates(args.telescope_coords)
    if args.pointing is not None:
        ra = args.pointing[0]
        dec = args.pointing[1]

    # Check that all parameters are set:
    missing_parms = []
    if (start_time is None): missing_parms.append('Start time')
    if (end_time is None): missing_parms.append('End time')
    if (freq_arr is None): missing_parms.append('Frequency array')
    if (telescope is None): missing_parms.append('Telescope')
    if (ra is None): missing_parms.append('Pointing center')
    if len(missing_parms) > 0:
        print("Missing parameters:", missing_parms)
        raise Exception("Cannot continue without those parameters.")

    times, RMs, theta = calculate_modulation(start_time, end_time, freq_arr, telescope,
                                             ra, dec, timestep=args.timestep, ionexPath='./IONEXdata/')
    if args.savefile is not None:
        write_modulation(freq_arr, theta, args.savefile)
    if args.savefig is not None:
        generate_plots(times, RMs, theta, freq_arr, position=[ra, dec], savename=args.savefig)

if __name__ == "__main__":
    predict()
|
import unittest
import numpy as np
import gzip
import struct
class OpenNI2Test(unittest.TestCase):
    """Checks that a uint16 array can be round-tripped through a gzip file."""

    def test16bitArray(self):
        """Write three uint16 values to a gzip file and verify the byte count."""
        arr = np.array([1, 2, 3], dtype=np.uint16)
        # BUG FIX: struct.pack() returns bytes, so the chunks must be joined
        # with b"" -- the original "".join(...) raises TypeError on Python 3.
        # 'with' blocks also guarantee the gzip handles are closed.
        with gzip.open("tmp.bin.gz", "wb") as f:
            f.write(b"".join(struct.pack("H", x) for x in arr))
        with gzip.open("tmp.bin.gz", "rb") as f:
            # 3 values * 2 bytes per uint16 = 6 bytes after decompression.
            self.assertEqual(len(f.read()), 6)

if __name__ == "__main__":
    unittest.main()
# vim: expandtab sw=4 ts=4
|
from telebot import TeleBot as Bot
from helpers.conversation import ConversationsStorage, Conversation
from helpers.messaging import parse_manifest, send_until_question
from api import UsersAPI, OrdersAPI, RequisitesAPI, BillsAPI
from constants import BOT_TOKEN
# Module-level singletons shared by all message handlers below.
bot = Bot(BOT_TOKEN)  # Telegram bot client
welcome, manifest = parse_manifest()  # greeting text and conversation script
conversations = ConversationsStorage()  # per-user conversation state
users = UsersAPI()  # backend API clients
orders = OrdersAPI()
requisites = RequisitesAPI()
bills = BillsAPI()
@bot.message_handler(commands=["start"])
def on_start(command):
    """Handle /start: create the user record if missing, otherwise clear
    any stored phone so the next message is treated as a fresh phone entry,
    then send the welcome text."""
    telegram_id = command.from_user.id
    existing = users.get_by_messenger_id(telegram_id)
    if not existing:
        users.create(messenger_id=telegram_id, messenger="Telegram")
    elif existing.phone:
        users.partial_update(existing.id, phone=None)
    bot.send_message(command.chat.id, welcome)
@bot.message_handler(content_types=["text"])
def on_message(message):
    """Handle a free-form text message.

    Ensures the user exists and has a phone number (the first free-form
    answer of a new/phone-less user is consumed as the phone), then feeds
    the answer into the conversation engine and, when the conversation
    completes, performs the selected action (order / requisites / bill).
    """
    uid = message.from_user.id
    prev_answer = message.text
    user = users.get_by_messenger_id(uid)
    if not user:
        # Unknown user: treat this first message as the phone number.
        user = users.create(messenger_id=uid, phone=prev_answer, messenger="Telegram")
        prev_answer = None
    elif not user.phone:
        # Known user without a phone: this message supplies it.
        users.partial_update(user.id, phone=prev_answer)
        prev_answer = None
    send = lambda text: bot.send_message(message.chat.id, text)
    if not conversations.exists(uid):
        # Resume from previously stored answers, if any.
        conversations.add(uid, manifest, default_answers=user.convers_answers_data)
    conversation = conversations.get(uid)
    conversation, question = send_until_question(send, conversation, prev_answer)
    if conversation.answers.stopped:
        # User aborted: wipe stored answers and restart the conversation.
        users.partial_update(user.id, convers_answers_data={})
        conversation, _ = send_until_question(send, Conversation(manifest, default_answers={}), None)
    elif not question:
        # No next question -> the conversation finished; run its action.
        update_data = {"convers_answers_data": {}}
        action = conversation.answers.get("action")
        if action == "order":
            orders.create(
                books=conversation.answers.get("books"),
                user=user.id
            )
        elif action == "requisites":
            result = requisites.create(
                delivery_name=conversation.answers.get("delivery_name"),
                delivery_phone=conversation.answers.get("delivery_phone"),
                delivery_address=conversation.answers.get("delivery_address"),
                post_service=conversation.answers.get("post_service")
            )
            update_data["requisites"] = result.id
        elif action == "bill":
            bills.create(
                amount=conversation.answers.get("amount"),
                comment=conversation.answers.get("comment"),
                user=user.id
            )
        # Restart the conversation and persist the results of the action.
        conversation, _ = send_until_question(send, Conversation(manifest, default_answers={}), None)
        users.partial_update(user.id, **update_data)
    elif not question.skip:
        # Mid-conversation checkpoint: persist the answers gathered so far.
        users.partial_update(user.id, convers_answers_data=conversation.answers.data)
    conversations.set(uid, conversation)
if __name__ == "__main__":
    print("Bot started!")
    bot.polling()  # blocking long-poll loop; runs until interrupted
|
"""greensManager URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
# Brand the Django admin pages.
admin.site.site_header = 'GreensManager admin'
admin.site.site_title = 'GreensManager admin'

# One prefix per feature app; the empty-prefix 'welcome' catch-all goes last
# so it does not shadow the more specific routes.
urlpatterns = [
    url(r'^courses/', include('courses.urls', namespace="courses")),
    url(r'^irrigation/', include('irrigation.urls', namespace="irr")),
    url(r'^shop/', include('machines.urls', namespace="shop")),
    url(r'^mow/', include('mowing.urls', namespace="mow")),
    url(r'^roll/', include('rolling.urls', namespace="roll")),
    url(r'^parts/', include('parts.urls', namespace="parts")),
    url(r'^maint/', include('maintenance.urls', namespace="maint")),
    url(r'^turfs/', include('turfs.urls', namespace="turfs")),
    url(r'^fert/', include('fertilizing.urls', namespace="fert")),
    url(r'^aerate/', include('aerating.urls', namespace="aerate")),
    url(r'^notes/', include('notes.urls', namespace="notes")),
    url(r'^build/', include('building.urls', namespace="build")),
    url(r'^stimp/', include('stimping.urls', namespace="stimp")),
    url(r'^admin/', admin.site.urls),
    url(r'^blog/', include('blog.urls', namespace="blog")),
    url(r'^', include('welcome.urls', namespace="welcome"))
]
|
from django.urls import path
from .views import LoginProfile, UserProfiles
# Authentication/profile routes.
# NOTE(review): both login and logout are routed to LoginProfile -- presumably
# the view dispatches on HTTP method or the presence of <token>; confirm
# against the view implementation.
urlpatterns = [
    path('login/', LoginProfile.as_view(), name="loginuser"),
    path('logout/<str:token>/', LoginProfile.as_view(), name="logoutuser"),
    path('create/', UserProfiles.as_view(), name="createuser"),
]
|
import string
# word -> occurrence count, filled in by process_line() for each book.
word_usage = dict()
# Reference dictionary words, stored as dict keys for O(1) membership tests.
word_list = dict()
# BUG FIX: use a 'with' block so the word-list file handle is closed
# (the original left it open for the lifetime of the process).
with open('words.txt') as d:  # open a text file of dictionary words
    for i in d:
        word_list[i.strip()] = None
# Single-letter words that are valid English but absent from the list.
word_list['i'] = None
word_list['a'] = None
def process_line(line):
    """Processes lines of text from Project Gutenberg, stripping punctuation,
    converting to lower case and splitting into words. For each word found,
    increments the word count in the global dictionary word_usage"""
    # Treat hyphens and dashes as word separators before splitting.
    # NOTE(review): the second replace's literal looks like a mojibake'd
    # em dash -- confirm it matches the dash characters in the source texts.
    line = line.replace('-', ' ' )
    line = line.replace('โ', ' ' )
    for word in line.split():
        # Strip surrounding whitespace, ASCII punctuation and what appear to
        # be (mojibake'd) curly quotes/dashes -- verify the intended chars.
        word = (word.strip(string.whitespace + string.punctuation + 'โ' + 'โ' + 'โ' + 'โ'))
        word = word.lower()
        # Count occurrences in the module-level word_usage dictionary.
        if word not in word_usage:
            word_usage[word] = 1
        else:
            word_usage[word] += 1
def remove_header(gutenberg_file):
    """Consume lines from a Project Gutenberg file object until the
    start-of-book marker has been read, leaving the iterator positioned
    at the first line of the actual text."""
    for current_line in gutenberg_file:
        if '*** START OF' in current_line:
            return
def analyse_book(gutenberg_file):
    """Takes a text file from the Gutenberg Project, returns a list of words
    in book that are not in word_list, most frequent first.

    Resets the global word_usage counts, skips the Gutenberg header, and
    stops at the end-of-book marker.
    """
    word_usage.clear()
    # BUG FIX: 'with' guarantees the book file is closed (the original
    # leaked the handle).
    with open(gutenberg_file) as fin:
        remove_header(fin)
        for line in fin:
            if '*** END OF' in line:
                break
            process_line(line.strip())
    # Most frequent words first; keep only those absent from the dictionary.
    return [word
            for word in sorted(word_usage, key=word_usage.get, reverse=True)
            if word not in word_list]
# Report, for each book, the words that are missing from the dictionary.
books = [
    ('pride_and_prejudice.txt', 'Pride and Prejudice'),
    ('gatsby.txt', 'The Great Gatsby'),
    ('defoe.txt', 'Robinson Crusoe'),
    ('maugham.txt', 'Of Human Bondage'),
]
for path, title in books:
    d = analyse_book(path)
    print('{} words not in word list are:\n'.format(title))
    for word in d:
        print(word)
|
import json
import os
import sys
import mxnet as mx
import matplotlib.pyplot as plt
from mxnet import gluon, nd
from mxnet.gluon.model_zoo import vision
import numpy as np
import cv2
ctx = mx.cpu()  # run inference on CPU
# Pretrained ImageNet classifiers from the Gluon model zoo.
densenet121 = vision.densenet121(pretrained=True, ctx=ctx)
mobileNet = vision.mobilenet0_5(pretrained=True, ctx=ctx)
resnet18 = vision.resnet18_v1(pretrained=True, ctx=ctx)
# Inspect the MobileNet architecture, first-layer parameters and head.
print(mobileNet)
print(mobileNet.features[0].params)
print(mobileNet.output)
# Download the ImageNet class-label list and load it as an array of strings.
mx.test_utils.download('https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/doc/tutorials/onnx/image_net_labels.json')
categories = np.array(json.load(open('image_net_labels.json', 'r')))
print(categories[4])
# get a test image
filename = mx.test_utils.download('https://github.com/dmlc/web-data/blob/master/mxnet/doc/tutorials/onnx/images/dog.jpg?raw=true', fname='dog.jpg')
# load the image as a ndarray
image = mx.image.imread(filename)
# plt.imshow(image.asnumpy())
# Preview the image with OpenCV; cv2 expects BGR channel order, the loaded
# array is RGB, hence the conversion. waitKey(0) blocks until a key press.
cv2.imshow('preview', cv2.cvtColor(image.asnumpy(), cv2.COLOR_RGB2BGR))
cv2.waitKey(0)
def transform(image):
    """Prepare an image for a model-zoo classifier: resize the short side
    to 224, center-crop to 224x224, apply ImageNet mean/std normalisation,
    and return a (1, 3, 224, 224) batch."""
    short_resized = mx.image.resize_short(image, 224)
    square, _ = mx.image.center_crop(short_resized, (224, 224))
    normalized = mx.image.color_normalize(
        square.astype(np.float32) / 255,
        mean=mx.nd.array([0.485, 0.456, 0.406]),
        std=mx.nd.array([0.229, 0.224, 0.225]))
    # HWC -> CHW, then prepend the batch axis.
    return normalized.transpose((2, 0, 1)).expand_dims(axis=0)
# Classify the test image with ResNet-18 and print its top-3 classes.
predictions = resnet18(transform(image)).softmax()
print(predictions.shape)
top_pred = predictions.topk(k=3)[0].asnumpy()
for index in top_pred:
    probability = predictions[0][int(index)]
    category = categories[int(index)]
    print("{}: {:.2f}%".format(category, probability.asscalar()*100))
def predict(model, image, categories, k):
    """Run *model* on *image* and print its top-*k* categories with their
    softmax probabilities, followed by a blank line."""
    scores = model(transform(image)).softmax()
    for raw_index in scores.topk(k=k)[0].asnumpy():
        idx = int(raw_index)
        print("{}: {:.2f}%".format(categories[idx], scores[0][idx].asscalar() * 100))
    print('')
predict(densenet121, image, categories, 3)
predict(mobileNet, image, categories, 3)
predict(resnet18, image, categories, 3)
# fine tuning
NUM_CLASSES=10
# Replace the ImageNet 1000-way head with a fresh 10-class dense layer;
# name_scope keeps the new parameters under the network's prefix.
with resnet18.name_scope():
    resnet18.output = gluon.nn.Dense(NUM_CLASSES)
print(resnet18.output)
|
import pytest
from bs4 import BeautifulSoup
from django.contrib.auth import get_user_model
from django.forms import fields
from django.forms.models import ModelChoiceField, ModelMultipleChoiceField
from entangled.forms import EntangledModelForm
from .models import Product, Category
class ProductForm(EntangledModelForm):
    """Form whose non-'name' fields are persisted inside the Product's JSON
    'properties' column ("entangled"), with retangled_fields remapping some
    of them to nested JSON paths (e.g. color -> properties.extra.variants.color).
    """
    name = fields.CharField()
    tenant = ModelChoiceField(queryset=get_user_model().objects.all(), empty_label=None)
    active = fields.BooleanField()
    color = fields.CharField()
    size = fields.ChoiceField(
        choices=[
            ('s', "Small"),
            ('m', "Medium"),
            ('l', "Large"),
        ]
    )
    categories = ModelMultipleChoiceField(queryset=Category.objects.all(), required=False)

    class Meta:
        model = Product
        # 'name' is a real model field; everything else lives in 'properties'.
        untangled_fields = ['name']
        entangled_fields = {'properties': ['tenant', 'active', 'color', 'size', 'categories']}
        retangled_fields = {'color': 'extra.variants.color', 'size': 'extra.variants.size',
                            'tenant': 'ownership.tenant', 'categories': 'extra.categories'}
@pytest.mark.django_db
def test_unbound_form():
    """An unbound ProductForm renders all fields -- entangled ones included --
    exactly like a plain Django form.

    NOTE(review): assumes the test database already contains users John/Mary
    and categories Paraphernalia/Detergents (presumably created by a conftest
    fixture) -- confirm.
    """
    product_form = ProductForm()
    assert product_form.is_bound is False
    # Expected <ul> rendering, compared as parsed trees to ignore attribute order.
    expected = BeautifulSoup("""
<li><label for="id_name">Name:</label> <input type="text" name="name" required id="id_name"></li>
<li><label for="id_tenant">Tenant:</label> <select name="tenant" id="id_tenant">
<option value="1">John</option>
<option value="2">Mary</option>
</select></li>
<li><label for="id_active">Active:</label> <input type="checkbox" name="active" required id="id_active"></li>
<li><label for="id_color">Color:</label> <input type="text" name="color" required id="id_color"></li>
<li><label for="id_size">Size:</label> <select name="size" id="id_size">
<option value="s">Small</option>
<option value="m">Medium</option>
<option value="l">Large</option>
</select></li>
<li><label for="id_categories">Categories:</label> <select name="categories" id="id_categories" multiple>
<option value="1">Paraphernalia</option>
<option value="2">Detergents</option>
</select></li>""", features='lxml')
    print(product_form.as_ul())
    assert BeautifulSoup(product_form.as_ul(), features='lxml') == expected
@pytest.mark.django_db
def test_bound_form():
    """Saving a bound form nests the entangled values into the instance's
    'properties' JSON according to retangled_fields."""
    data = {'name': "Colander", 'tenant': 2, 'active': True, 'color': "red", 'size': "m", 'categories': [1, 2]}
    product_form = ProductForm(data=data)
    assert product_form.is_bound
    assert product_form.is_valid()
    instance = product_form.save()
    # NOTE: the data.pop(...) calls below mutate 'data' in a specific order --
    # 'extra' must be built before 'expected' consumes the remaining keys.
    extra = {
        'categories': {'model': 'tests.category', 'p_keys': data.pop('categories')},
        'variants': {k: data.pop(k) for k in ['color', 'size']},
    }
    expected = dict(data, extra=extra, ownership={'tenant': {'model': 'auth.user', 'pk': data.pop('tenant')}})
    assert instance.name == expected.pop('name')
    assert instance.properties == expected
@pytest.mark.django_db
def test_instance_form():
    """A form built from an existing instance pre-selects all widgets from
    the nested 'properties' JSON (untangling the stored values).

    NOTE(review): like test_unbound_form, relies on pre-existing users and
    categories with pks 1/2 -- confirm the fixture.
    """
    properties = {
        'active': True,
        'extra': {
            'variants': {
                'color': 'silver',
                'size': 's',
            },
            'categories': {'model': 'tests.category', 'p_keys': [1, 2]},
        },
        'ownership': {
            'tenant': {'model': 'auth.user', 'pk': 1},
        },
    }
    instance = Product.objects.create(name="Grater", properties=properties)
    product_form = ProductForm(instance=instance)
    # Built from an instance, not from POST data -> still unbound.
    assert product_form.is_bound is False
    expected = BeautifulSoup("""
<li><label for="id_name">Name:</label> <input type="text" name="name" value="Grater" required id="id_name"></li>
<li><label for="id_tenant">Tenant:</label> <select name="tenant" id="id_tenant">
<option value="1" selected>John</option>
<option value="2">Mary</option>
</select></li>
<li><label for="id_active">Active:</label> <input type="checkbox" name="active" required id="id_active" checked></li>
<li><label for="id_color">Color:</label> <input type="text" name="color" value="silver" required id="id_color"></li>
<li><label for="id_size">Size:</label> <select name="size" id="id_size">
<option value="s" selected>Small</option>
<option value="m">Medium</option>
<option value="l">Large</option>
</select></li>
<li><label for="id_categories">Categories:</label> <select name="categories" id="id_categories" multiple>
<option value="1" selected>Paraphernalia</option>
<option value="2" selected>Detergents</option>
</select></li>""", features='lxml')
    assert BeautifulSoup(product_form.as_ul(), features='lxml') == expected
|
from __future__ import absolute_import, division, print_function
from cctbx import translation_search
from cctbx import crystal
from cctbx import miller
from cctbx import xray
from cctbx import maptbx
from cctbx.development import random_structure
from cctbx.development import debug_utils
from cctbx.array_family import flex
from libtbx.test_utils import approx_equal
from scitbx import matrix
import random
import sys
from six.moves import range
from six.moves import zip
def run_fast_terms(structure_fixed, structure_p1,
                   f_obs, f_calc_fixed, f_calc_p1,
                   symmetry_flags, gridding, grid_tags,
                   n_sample_grid_points=10,
                   test_origin=False,
                   verbose=0):
  """Exercise translation_search.fast_terms and verify its FFT-accumulated
  map against direct structure-factor summation at sampled grid points.

  For both squared_flag settings, the map value at a random (or origin)
  grid point must equal sum(m * I) (or sum(m * I^2)) computed directly,
  up to the n_ltr()**p lattice-translation factor, to 6 significant digits.
  """
  if (f_calc_fixed is None):
    f_part = flex.complex_double()
  else:
    f_part = f_calc_fixed.data()
  # Random per-reflection weights used by the summation.
  m = flex.double()
  for i in range(f_obs.indices().size()):
    m.append(random.random())
  assert f_obs.anomalous_flag() == f_calc_p1.anomalous_flag()
  fast_terms = translation_search.fast_terms(
    gridding=gridding,
    anomalous_flag=f_obs.anomalous_flag(),
    miller_indices_p1_f_calc=f_calc_p1.indices(),
    p1_f_calc=f_calc_p1.data())
  for squared_flag in (False, True):
    map = fast_terms.summation(
      space_group=f_obs.space_group(),
      miller_indices_f_obs=f_obs.indices(),
      m=m,
      f_part=f_part,
      squared_flag=squared_flag).fft().accu_real_copy()
    assert map.all() == gridding
    map_stats = maptbx.statistics(map)
    if (0 or verbose):
      map_stats.show_summary()
    grid_tags.build(f_obs.space_group_info().type(), symmetry_flags)
    assert grid_tags.n_grid_misses() == 0
    assert grid_tags.verify(map)
    for i_sample in range(n_sample_grid_points):
      # Retry until the shifted structure has no special positions, so the
      # direct summation is well defined; guard against infinite loops.
      run_away_counter = 0
      while 1:
        run_away_counter += 1
        assert run_away_counter < 1000
        if (i_sample == 0 and test_origin):
          grid_point = [0,0,0]
        else:
          grid_point = [random.randrange(g) for g in gridding]
        grid_site = [float(x)/g for x,g in zip(grid_point,gridding)]
        structure_shifted = structure_fixed.deep_copy_scatterers()
        assert structure_shifted.special_position_indices().size() == 0
        structure_shifted.add_scatterers(
          scatterers=structure_p1.apply_shift(grid_site).scatterers())
        if (structure_shifted.special_position_indices().size() == 0):
          break
        if (test_origin):
          assert i_sample != 0
      # Direct intensities at the shifted positions.
      i_grid = flex.norm(f_obs.structure_factors_from_scatterers(
        xray_structure=structure_shifted, algorithm="direct").f_calc().data())
      # Lattice-translation normalisation: power 4 when squared, else 2.
      if (squared_flag): p = 4
      else: p = 2
      map_value = map[grid_point] * f_obs.space_group().n_ltr()**p
      if (not squared_flag):
        sum_m_i_grid = flex.sum(m * i_grid)
      else:
        sum_m_i_grid = flex.sum(m * flex.pow2(i_grid))
      # Compare to 6 significant digits.
      assert "%.6g" % sum_m_i_grid == "%.6g" % map_value, (
        sum_m_i_grid, map_value)
def run_fast_nv1995(f_obs, f_calc_fixed, f_calc_p1,
                    symmetry_flags, gridding, grid_tags, verbose):
  """Run the Navaza & Vernoslova (1995) fast translation function and
  return the peak list of its target map.

  Also verifies the grid tags against the map and that the highest peak
  equals the map maximum.
  """
  if (f_calc_fixed is None):
    f_part = flex.complex_double()
  else:
    f_part = f_calc_fixed.data()
  assert f_obs.anomalous_flag() == f_calc_p1.anomalous_flag()
  fast_nv1995 = translation_search.fast_nv1995(
    gridding=gridding,
    space_group=f_obs.space_group(),
    anomalous_flag=f_obs.anomalous_flag(),
    miller_indices_f_obs=f_obs.indices(),
    f_obs=f_obs.data(),
    f_part=f_part,
    miller_indices_p1_f_calc=f_calc_p1.indices(),
    p1_f_calc=f_calc_p1.data())
  assert fast_nv1995.target_map().all() == gridding
  map_stats = maptbx.statistics(fast_nv1995.target_map())
  if (0 or verbose):
    map_stats.show_summary()
  grid_tags.build(f_obs.space_group_info().type(), symmetry_flags)
  assert grid_tags.n_grid_misses() == 0
  assert grid_tags.verify(fast_nv1995.target_map())
  # Interpolated peak search over the symmetry-tagged map.
  peak_list = maptbx.peak_list(
    data=fast_nv1995.target_map(),
    tags=grid_tags.tag_array(),
    peak_search_level=1,
    max_peaks=10,
    interpolate=True)
  if (0 or verbose):
    print("gridding:", gridding)
    for i,site in enumerate(peak_list.sites()):
      print("(%.4f,%.4f,%.4f)" % site, "%.6g" % peak_list.heights()[i])
  # The top peak must coincide with the map maximum.
  assert approx_equal(map_stats.max(), flex.max(peak_list.grid_heights()))
  return peak_list
def test_atom(space_group_info, use_primitive_setting,
              n_elements=3, d_min=3.,
              grid_resolution_factor=0.48, max_prime=5, verbose=0):
  """Incrementally locate single atoms with the fast translation function.

  Atoms are added to structure_fixed one at a time; while the model is
  incomplete the top correlation peak must stay below 1, and once all
  n_elements atoms are placed it must reach (essentially) 1.
  """
  if (use_primitive_setting):
    space_group_info = space_group_info.primitive_setting()
  structure = random_structure.xray_structure(
    space_group_info,
    n_scatterers=n_elements,
    volume_per_atom=150,
    min_distance=1.,
    general_positions_only=True)
  miller_set_f_obs = miller.build_set(
    crystal_symmetry=structure,
    anomalous_flag=(random.random() < 0.5),
    d_min=d_min)
  symmetry_flags = translation_search.symmetry_flags(
    is_isotropic_search_model=True,
    have_f_part=(n_elements>=2))
  gridding = miller_set_f_obs.crystal_gridding(
    symmetry_flags=symmetry_flags,
    resolution_factor=grid_resolution_factor,
    max_prime=max_prime).n_real()
  # Scatterer sites are snapped to the search grid so peaks are exact.
  structure.build_scatterers(
    elements=["Se"]*n_elements,
    grid=gridding)
  if (0 or verbose):
    structure.show_summary().show_scatterers()
  # Synthetic "observed" amplitudes computed from the known structure.
  f_obs = abs(miller_set_f_obs.structure_factors_from_scatterers(
    xray_structure=structure,
    algorithm="direct").f_calc())
  if (0 or verbose):
    f_obs.show_summary()
  if (0 or verbose):
    f_obs.show_array()
  miller_set_p1 = miller.set.expand_to_p1(f_obs)
  special_position_settings_p1 = crystal.special_position_settings(
    crystal_symmetry=miller_set_p1)
  structure_fixed = xray.structure(special_position_settings=structure)
  for scatterer in structure.scatterers():
    # Search model: the current atom placed at the origin, in P1.
    structure_p1 = xray.structure(
      special_position_settings=special_position_settings_p1)
    scatterer_at_origin = scatterer.customized_copy(site=(0,0,0))
    structure_p1.add_scatterer(scatterer_at_origin)
    if (0 or verbose):
      structure_p1.show_summary().show_scatterers()
    f_calc_p1 = miller_set_p1.structure_factors_from_scatterers(
      xray_structure=structure_p1,
      algorithm="direct").f_calc()
    if (0 or verbose):
      f_calc_p1.show_array()
    f_calc_fixed = None
    if (structure_fixed.scatterers().size() > 0):
      f_calc_fixed = f_obs.structure_factors_from_scatterers(
        xray_structure=structure_fixed,
        algorithm="direct").f_calc()
    symmetry_flags = translation_search.symmetry_flags(
      is_isotropic_search_model=True,
      have_f_part=(f_calc_fixed is not None))
    if (structure_fixed.scatterers().size() <= 1):
      # The gridding can change once a fixed part exists; recompute it.
      gridding = miller_set_f_obs.crystal_gridding(
        symmetry_flags=symmetry_flags,
        resolution_factor=grid_resolution_factor,
        max_prime=max_prime).n_real()
      grid_tags = maptbx.grid_tags(gridding)
    run_fast_terms(
      structure_fixed, structure_p1,
      f_obs, f_calc_fixed, f_calc_p1,
      symmetry_flags, gridding, grid_tags,
      verbose=verbose)
    peak_list = run_fast_nv1995(
      f_obs, f_calc_fixed, f_calc_p1,
      symmetry_flags, gridding, grid_tags, verbose)
    structure_fixed.add_scatterer(scatterer)
    if (0 or verbose):
      structure_fixed.show_summary().show_scatterers()
    if (structure_fixed.scatterers().size() < n_elements):
      assert peak_list.heights()[0] < 1
    else:
      assert peak_list.heights()[0] > 0.99
  # After the loop the model is complete: the final peak must be ~1.
  assert peak_list.heights()[0] > 0.99
def test_molecule(space_group_info, use_primitive_setting, flag_f_part,
                  d_min=3., grid_resolution_factor=0.48, max_prime=5,
                  verbose=0):
  """Locate a small random molecule with the fast translation function.

  With flag_f_part=True roughly half of the atoms are held as a fixed
  partial structure; the rest form the P1 search fragment. The top
  correlation peak must reach (essentially) 1.
  """
  if (use_primitive_setting):
    space_group_info = space_group_info.primitive_setting()
  elements = ("N", "C", "C", "O", "N", "C", "C", "O")
  structure = random_structure.xray_structure(
    space_group_info,
    elements=elements,
    volume_per_atom=50,
    min_distance=1.,
    general_positions_only=True,
    random_u_iso=True,
    random_occupancy=True)
  if (0 or verbose):
    structure.show_summary().show_scatterers()
  miller_set_f_obs = miller.build_set(
    crystal_symmetry=structure,
    anomalous_flag=(random.random() < 0.5),
    d_min=d_min)
  # Synthetic "observed" amplitudes from the known structure.
  f_obs = abs(miller_set_f_obs.structure_factors_from_scatterers(
    xray_structure=structure,
    algorithm="direct").f_calc())
  if (0 or verbose):
    f_obs.show_summary()
  if (0 or verbose):
    f_obs.show_array()
  miller_set_p1 = miller.set.expand_to_p1(f_obs)
  special_position_settings_p1 = crystal.special_position_settings(
    crystal_symmetry=miller_set_p1)
  structure_p1 = xray.structure(
    special_position_settings=special_position_settings_p1)
  structure_fixed = xray.structure(special_position_settings=structure)
  # Split the atoms: first half fixed (if requested), remainder searched.
  for scatterer in structure.scatterers():
    if (flag_f_part and structure_fixed.scatterers().size()
          < structure.scatterers().size()//2):
      structure_fixed.add_scatterer(scatterer)
    else:
      structure_p1.add_scatterer(scatterer)
  if (0 or verbose):
    if (flag_f_part):
      structure_fixed.show_summary().show_scatterers()
    structure_p1.show_summary().show_scatterers()
  f_calc_fixed = None
  if (flag_f_part):
    f_calc_fixed = f_obs.structure_factors_from_scatterers(
      xray_structure=structure_fixed,
      algorithm="direct").f_calc()
  f_calc_p1 = miller_set_p1.structure_factors_from_scatterers(
    xray_structure=structure_p1,
    algorithm="direct").f_calc()
  symmetry_flags = translation_search.symmetry_flags(
    is_isotropic_search_model=False,
    have_f_part=flag_f_part)
  gridding = miller_set_f_obs.crystal_gridding(
    symmetry_flags=symmetry_flags,
    resolution_factor=grid_resolution_factor,
    max_prime=max_prime).n_real()
  grid_tags = maptbx.grid_tags(gridding)
  # The search fragment sits at its true position, so the origin must score.
  run_fast_terms(
    structure_fixed, structure_p1,
    f_obs, f_calc_fixed, f_calc_p1,
    symmetry_flags, gridding, grid_tags,
    test_origin=True,
    verbose=verbose)
  peak_list = run_fast_nv1995(
    f_obs, f_calc_fixed, f_calc_p1,
    symmetry_flags, gridding, grid_tags, verbose)
  assert peak_list.heights()[0] > 0.99
def test_shift(space_group_info,
               d_min=0.8, grid_resolution_factor=0.48, max_prime=5,
               verbose=0):
  """Recover a known translation applied to a structure expanded to P1.

  The top peak of the fast translation function, added to the applied
  reference shift, must be an allowed origin shift of the space group.
  """
  # Use fewer atoms for higher-order groups (at least one).
  n = 12 // len(space_group_info.group()) or 1
  target_structure = random_structure.xray_structure(
    space_group_info=space_group_info,
    elements=['C']*n,
    use_u_iso=False,
    use_u_aniso=False,
  )
  f_target = miller.build_set(
    crystal_symmetry=target_structure,
    anomalous_flag=False,
    d_min=d_min
  ).structure_factors_from_scatterers(
    xray_structure=target_structure,
    algorithm="direct").f_calc()
  f_obs = abs(f_target)
  indices_in_p1 = miller.set.expand_to_p1(f_target)
  target_structure_in_p1 = target_structure.expand_to_p1()
  # Known translation the search is expected to undo.
  reference_translation = matrix.col((0.1, 0.2, 0.7))
  structure_in_p1 = target_structure_in_p1.apply_shift(reference_translation)
  f_structure_in_p1 = indices_in_p1.structure_factors_from_scatterers(
    xray_structure=structure_in_p1,
    algorithm="direct").f_calc()
  symmetry_flags = translation_search.symmetry_flags(
    is_isotropic_search_model=False,
    have_f_part=False)
  gridding = f_target.crystal_gridding(
    symmetry_flags=symmetry_flags,
    resolution_factor=grid_resolution_factor,
    max_prime=max_prime).n_real()
  grid_tags = maptbx.grid_tags(gridding)
  for f_calc_in_p1 in (f_structure_in_p1,):
    peak_list = run_fast_nv1995(
      f_obs=f_obs, f_calc_fixed=None, f_calc_p1=f_calc_in_p1,
      symmetry_flags=symmetry_flags, gridding=gridding,
      grid_tags=grid_tags, verbose=verbose)
    assert peak_list.heights()[0] > 0.9
    shift = matrix.col(peak_list.sites()[0])
    # found shift + applied shift must be a permitted origin shift.
    assert f_target.space_group_info().is_allowed_origin_shift(
      shift + reference_translation, tolerance=0.04)
def run_call_back(flags, space_group_info):
  """Dispatch the requested test flavours for a single space group.

  Skips high-symmetry groups unless explicitly enabled; defaults to the
  Atom and Molecule tests when no flavour flag is given; repeats each
  test with the primitive setting for centred space groups.
  """
  if space_group_info.group().order_p() > 24 and not flags.HighSymmetry:
    print("High symmetry space group skipped.")
    return
  if not (flags.Atom or flags.Molecule or flags.Shift):
    flags.Atom = True
    flags.Molecule = True
  primitive_options = [False]
  if space_group_info.group().conventional_centring_type_symbol() != "P":
    primitive_options.append(True)
  if flags.Atom:
    for use_primitive in primitive_options:
      test_atom(space_group_info, use_primitive,
                verbose=flags.Verbose)
  if flags.Molecule:
    for with_f_part in (False, True)[:]:  # SWITCH
      for use_primitive in primitive_options:
        test_molecule(space_group_info, use_primitive, with_f_part,
                      verbose=flags.Verbose)
  if flags.Shift:
    for _ in range(1):
      test_shift(space_group_info, verbose=flags.Verbose)
def run():
  """Entry point: loop run_call_back over the space groups named on the
  command line, recognising the listed option flags."""
  debug_utils.parse_options_loop_space_groups(sys.argv[1:], run_call_back, (
    "HighSymmetry",
    "Atom",
    "Molecule",
    "Shift"))

if (__name__ == "__main__"):
  run()
|
###############################################################################
##
## Copyright (C) 2014-2016, New York University.
## Copyright (C) 2013-2014, NYU-Poly.
## All rights reserved.
## Contact: contact@vistrails.org
##
## This file is part of VisTrails.
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are met:
##
## - Redistributions of source code must retain the above copyright notice,
## this list of conditions and the following disclaimer.
## - Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in the
## documentation and/or other materials provided with the distribution.
## - Neither the name of the New York University nor the names of its
## contributors may be used to endorse or promote products derived from
## this software without specific prior written permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
## PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
##
###############################################################################
from __future__ import division
import os
from PyQt4 import QtCore, QtGui
from vistrails.packages.spreadsheet.basic_widgets import SpreadsheetCell, \
SpreadsheetMode
from vistrails.packages.spreadsheet.spreadsheet_cell import QCellWidget
class TableToSpreadsheetMode(SpreadsheetMode):
    """Output mode that renders a table value inside a spreadsheet cell."""
    def compute_output(self, output_module, configuration):
        # Pull the table off the module's 'value' port and hand it to the
        # spreadsheet machinery for display.
        value = output_module.get_input('value')
        self.display_and_wait(output_module, configuration,
                              TableCellWidget, (value,))
class TableCell(SpreadsheetCell):
    """Shows a table in a spreadsheet cell.
    """
    _input_ports = [('table', '(org.vistrails.vistrails.tabledata:Table)')]

    def compute(self):
        # Read the incoming table and display it via a TableCellWidget.
        incoming = self.get_input('table')
        self.displayAndWait(TableCellWidget, (incoming,))
class TableModel(QtCore.QAbstractTableModel):
    """Qt model exposing a vistrails table to a QTableView.

    Optionally supports provenance highlighting: when the wrapped table
    provides ``get_row_det`` / ``get_col_det`` hooks, rows flagged
    non-deterministic get a grey background and cells flagged
    non-deterministic get red text.
    """
    def __init__(self, parent=None, *args):
        super(TableModel, self).__init__()
        self.datatable = None
        # Foreground color for non-deterministic cells.
        self.detTextColor = QtGui.QColor(255, 96, 96)
        # Background color for non-deterministic rows.
        self.rowDetBGColor = QtGui.QColor(200, 200, 200)
        self.headerNames = None

    def update(self, dataIn):
        """Attach a new table and pick up its column names, if any."""
        self.datatable = dataIn
        if self.datatable.names is not None:
            self.headerNames = self.datatable.names

    def rowCount(self, parent=QtCore.QModelIndex()):
        return self.datatable.rows

    def columnCount(self, parent=QtCore.QModelIndex()):
        return self.datatable.columns

    def data(self, index, role=QtCore.Qt.DisplayRole):
        if role == QtCore.Qt.DisplayRole:
            row = index.row()
            col = index.column()
            return self.datatable.get_column(col)[row]
        # BUGFIX: this branch previously tested hasattr(..., 'get_col_det')
        # but then called get_row_det, raising AttributeError for tables
        # that provide only one of the two hooks.
        if role == QtCore.Qt.BackgroundColorRole and hasattr(self.datatable, 'get_row_det'):
            row = index.row()
            if not self.datatable.get_row_det(row):
                return self.rowDetBGColor
        if role == QtCore.Qt.ForegroundRole and hasattr(self.datatable, 'get_col_det'):
            row = index.row()
            col = index.column()
            if not self.datatable.get_col_det(row, col):
                return self.detTextColor
        # Fall through: returning None tells Qt to use its default.

    def flags(self, index):
        return QtCore.Qt.ItemIsEnabled

    def headerData(self, section, orientation, role=QtCore.Qt.DisplayRole):
        # BUGFIX: guard against headerNames being None (table without
        # column names) instead of raising TypeError on subscription.
        if (role == QtCore.Qt.DisplayRole and
                orientation == QtCore.Qt.Horizontal and
                self.headerNames is not None):
            return self.headerNames[section]
        return QtCore.QAbstractTableModel.headerData(self, section, orientation, role)
class TableCellWidget(QCellWidget):
    """Spreadsheet cell widget that shows a table in a QTableView.

    Also supports exporting the displayed table to HTML and PDF.
    """
    save_formats = QCellWidget.save_formats + ["HTML files (*.html)"]

    def __init__(self, parent=None):
        QCellWidget.__init__(self, parent)
        layout = QtGui.QVBoxLayout()
        layout.setContentsMargins(0, 0, 0, 0)
        self.table = QtGui.QTableView()
        scrollarea = QtGui.QScrollArea(self)
        scrollarea.setWidgetResizable(True)
        scrollarea.setWidget(self.table)
        layout.addWidget(scrollarea)
        self.setLayout(layout)

    def updateContents(self, inputPorts):
        """Refresh the view with a new table from the input ports."""
        table, = inputPorts
        self.orig_table = table
        self.datamodel = TableModel(self.table)
        self.table.setSortingEnabled(False)
        # Enable hover tracking only for tables that can explain cells.
        if hasattr(table, 'get_cell_reason') and not self.table.hasMouseTracking():
            self.table.setMouseTracking(True)
        if hasattr(table, 'explain_cell_clicked'):
            self.table.clicked.connect(table.explain_cell_clicked)
            self.table.verticalHeader().sectionClicked.connect(table.explain_row_clicked)
        try:
            self.datamodel.update(table)
            self.table.setModel(self.datamodel)
        except:
            raise
        #self.table.setSortingEnabled(True)
        #self.table.sortByColumn(0, QtCore.Qt.AscendingOrder)
        self.table.resizeColumnsToContents()

    def write_html(self):
        """Serialize the current table as a standalone HTML document.

        Returns the document as a (unicode) string.  Header and cell text
        is HTML-escaped (BUGFIX: the original interpolated raw values, so
        cells containing '<', '>' or '&' corrupted the markup).
        """
        from xml.sax.saxutils import escape
        document = ['<!DOCTYPE html>\n'
                    '<html>\n  <head>\n'
                    '    <meta http-equiv="Content-type" content="text/html; '
                    'charset=utf-8" />\n'
                    '    <title>Exported table</title>\n'
                    '    <style type="text/css">\n'
                    'table { border-collapse: collapse; }\n'
                    'td, th { border: 1px solid black; }\n'
                    '    </style>\n'
                    '  </head>\n  <body>\n    <table>\n']
        table = self.orig_table
        if table.names is not None:
            names = table.names
        else:
            names = ['col %d' % n for n in xrange(table.columns)]
        document.append('<tr>\n')
        document.extend('  <th>%s</th>\n' % escape('%s' % name)
                        for name in names)
        document.append('</tr>\n')
        columns = [table.get_column(col) for col in xrange(table.columns)]
        for row in xrange(table.rows):
            document.append('<tr>\n')
            for col in xrange(table.columns):
                elem = columns[col][row]
                if isinstance(elem, bytes):
                    elem = elem.decode('utf-8', 'replace')
                elif not isinstance(elem, unicode):
                    elem = unicode(elem)
                document.append('  <td>%s</td>\n' % escape(elem))
            document.append('</tr>\n')
        document.append('    </table>\n  </body>\n</html>\n')
        return ''.join(document)

    def dumpToFile(self, filename):
        """Export the table; HTML for .html/.htm, else defer to the base."""
        ext = os.path.splitext(filename)[1].lower()
        if ext in ('.html', '.htm'):
            with open(filename, 'wb') as fp:
                # BUGFIX: the file is opened in binary mode, so encode the
                # document explicitly; the declared charset is UTF-8.
                fp.write(self.write_html().encode('utf-8'))
        else:
            super(TableCellWidget, self).dumpToFile(filename)

    def saveToPDF(self, filename):
        """Render the HTML export of the table into a PDF file."""
        document = QtGui.QTextDocument()
        document.setHtml(self.write_html())
        printer = QtGui.QPrinter()
        printer.setOutputFormat(QtGui.QPrinter.PdfFormat)
        printer.setOutputFileName(filename)
        document.print_(printer)
_modules = [TableCell]
|
import sys
import heapq
def sol():
    """Read the maze (M columns, N rows) from stdin and print the
    minimum cost from the top-left to the bottom-right room."""
    # sys.stdin = open("./1261/input.txt")
    read_line = sys.stdin.readline
    M, N = map(int, read_line().split())
    # Each subsequent line is a string of digits; [:-1] drops the newline.
    rooms = [[int(ch) for ch in read_line()[:-1]] for _ in range(N)]
    print(dijkstra(rooms, N, M))
def dijkstra(rooms, N, M):
max_int = sys.maxsize
dist = [max_int] * (N * M)
dist[0] = rooms[0][0]
heap = []
for i in range(N * M):
heapq.heappush(heap, (dist[i], i))
dr = [0, 0, 1, -1]
dc = [1, -1, 0, 0]
while heap:
d, u = heapq.heappop(heap)
if d > dist[u]:
continue
cur_r, cur_c = u // M, u % M
for i in range(4):
nr = cur_r + dr[i]
nc = cur_c + dc[i]
if 0 <= nr < N and 0 <= nc < M:
v = nr * M + nc
if dist[v] > dist[u] + rooms[nr][nc]:
dist[v] = dist[u] + rooms[nr][nc]
heapq.heappush(heap, (dist[v], v))
return dist[-1]
# Entry point: run the solver only when executed as a script.
if __name__ == "__main__":
    sol()
|
#
# PySNMP MIB module JUNIPER-MOBILE-GATEWAY-SGW-GTP-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/JUNIPER-MOBILE-GATEWAY-SGW-GTP-MIB
# Produced by pysmi-0.3.4 at Wed May 1 14:00:25 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, OctetString, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "Integer", "OctetString", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsIntersection, ValueSizeConstraint, SingleValueConstraint, ValueRangeConstraint, ConstraintsUnion = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "ValueSizeConstraint", "SingleValueConstraint", "ValueRangeConstraint", "ConstraintsUnion")
InetAddressPrefixLength, InetPortNumber, InetAddress, InetAddressType = mibBuilder.importSymbols("INET-ADDRESS-MIB", "InetAddressPrefixLength", "InetPortNumber", "InetAddress", "InetAddressType")
Ipv6AddressIfIdentifier, Ipv6AddressPrefix, Ipv6Address = mibBuilder.importSymbols("IPV6-TC", "Ipv6AddressIfIdentifier", "Ipv6AddressPrefix", "Ipv6Address")
jnxMobileGatewaySgw, = mibBuilder.importSymbols("JUNIPER-MBG-SMI", "jnxMobileGatewaySgw")
EnabledStatus, = mibBuilder.importSymbols("JUNIPER-MIMSTP-MIB", "EnabledStatus")
jnxMbgGwName, jnxMbgGwIndex = mibBuilder.importSymbols("JUNIPER-MOBILE-GATEWAYS", "jnxMbgGwName", "jnxMbgGwIndex")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
ModuleIdentity, MibScalar, MibTable, MibTableRow, MibTableColumn, Counter32, Counter64, Bits, TimeTicks, Integer32, iso, ObjectIdentity, MibIdentifier, Unsigned32, Gauge32, IpAddress, NotificationType = mibBuilder.importSymbols("SNMPv2-SMI", "ModuleIdentity", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Counter32", "Counter64", "Bits", "TimeTicks", "Integer32", "iso", "ObjectIdentity", "MibIdentifier", "Unsigned32", "Gauge32", "IpAddress", "NotificationType")
TextualConvention, RowStatus, TruthValue, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "RowStatus", "TruthValue", "DisplayString")
jnxMbgSgwGtpMib = ModuleIdentity((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2))
jnxMbgSgwGtpMib.setRevisions(('2011-09-21 12:00',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
if mibBuilder.loadTexts: jnxMbgSgwGtpMib.setRevisionsDescriptions(('Initial version',))
if mibBuilder.loadTexts: jnxMbgSgwGtpMib.setLastUpdated('201109211200Z')
if mibBuilder.loadTexts: jnxMbgSgwGtpMib.setOrganization('Juniper Networks, Inc.')
if mibBuilder.loadTexts: jnxMbgSgwGtpMib.setContactInfo('Juniper Technical Assistance Center Juniper Networks, Inc. 1194 N. Mathilda Avenue Sunnyvale, CA 94089 E-mail: support@juniper.net')
if mibBuilder.loadTexts: jnxMbgSgwGtpMib.setDescription('This module defines some sample objects pertaining to GTP protocol.')
jnxMbgSgwGtpNotifications = MibIdentifier((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 0))
jnxMbgSgwGtpObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1))
jnxMbgSgwGtpCGlbStatsTable = MibTable((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2), )
if mibBuilder.loadTexts: jnxMbgSgwGtpCGlbStatsTable.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpCGlbStatsTable.setDescription('Each entry corresponds to a gateway level GTP Control statistic.')
jnxMbgSgwGtpGlbStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1), ).setIndexNames((0, "JUNIPER-MOBILE-GATEWAYS", "jnxMbgGwIndex"))
if mibBuilder.loadTexts: jnxMbgSgwGtpGlbStatsEntry.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpGlbStatsEntry.setDescription('A specification of the GTP gateway level control Statistics.')
jnxMbgSgwRxPacketsDropped = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 1), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwRxPacketsDropped.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwRxPacketsDropped.setDescription('Number of Received Packets Dropped.')
jnxMbgSgwPacketAllocFail = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 2), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPacketAllocFail.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPacketAllocFail.setDescription('Number of Packet allocation failures.')
jnxMbgSgwPacketSendFail = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 3), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPacketSendFail.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPacketSendFail.setDescription('Number of Packet Send failures.')
jnxMbgSgwIPVerErrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 4), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIPVerErrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIPVerErrRx.setDescription('Number of IP Version Error Packets Received.')
jnxMbgSgwIPProtoErrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 5), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIPProtoErrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIPProtoErrRx.setDescription('Number of IP protocol Error packets Received.')
jnxMbgSgwGTPPortErrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 6), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGTPPortErrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGTPPortErrRx.setDescription('Number of Port Error Packets Received.')
jnxMbgSgwGTPUnknVerRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 7), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGTPUnknVerRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGTPUnknVerRx.setDescription('Number of Unknown Version Packets Received.')
jnxMbgSgwPcktLenErrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 8), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPcktLenErrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPcktLenErrRx.setDescription('Number of Packet Length Error Packets Received.')
jnxMbgSgwUnknMsgRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 9), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwUnknMsgRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwUnknMsgRx.setDescription('Number of Unknown Messages Received.')
jnxMbgSgwProtocolErrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 10), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwProtocolErrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwProtocolErrRx.setDescription('Number of GTPv2 Protocol Errors Received.')
jnxMbgSgwUnSupportedMsgRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 11), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwUnSupportedMsgRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwUnSupportedMsgRx.setDescription('Number of GTPv2 Unsupported Messages received.')
jnxMbgSgwT3RespTmrExpRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 12), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwT3RespTmrExpRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwT3RespTmrExpRx.setDescription('Number of GTP V2 T3 timer expiries Received.')
jnxMbgSgwV2NumMsgRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 13), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwV2NumMsgRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwV2NumMsgRx.setDescription('Number of GTPv2 messages received.')
jnxMbgSgwV2NumMsgTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 14), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwV2NumMsgTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwV2NumMsgTx.setDescription('Number of V2 messages sent.')
jnxMbgSgwV2NumBytesRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 15), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwV2NumBytesRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwV2NumBytesRx.setDescription('Number of GTPv2 bytes received.')
jnxMbgSgwV2NumBytesTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 16), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwV2NumBytesTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwV2NumBytesTx.setDescription('Number of V2 bytes sent.')
jnxMbgSgwV2EchoReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 19), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwV2EchoReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwV2EchoReqRx.setDescription('Number of GTP V2 Echo Request received.')
jnxMbgSgwV2EchoReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 20), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwV2EchoReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwV2EchoReqTx.setDescription('Number of GTP V2 Echo Request Sent.')
jnxMbgSgwV2EchoRespRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 21), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwV2EchoRespRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwV2EchoRespRx.setDescription('Number of GTP V2 Echo Response received.')
jnxMbgSgwV2EchoRespTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 22), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwV2EchoRespTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwV2EchoRespTx.setDescription('Number of GTP V2 Echo Response Sent.')
jnxMbgSgwV2VerNotSupRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 23), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwV2VerNotSupRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwV2VerNotSupRx.setDescription('Number of GTP V2 Version Not supported messages received')
jnxMbgSgwV2VerNotSupTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 24), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwV2VerNotSupTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwV2VerNotSupTx.setDescription('Number of GTP V2 version not supported messages sent.')
jnxMbgSgwCreateSessReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 25), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwCreateSessReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwCreateSessReqRx.setDescription('Number of GTP V2 Create Session Requests received.')
jnxMbgSgwCreateSessReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 26), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwCreateSessReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwCreateSessReqTx.setDescription('Number of GTP V2 Create Session Requests Sent.')
jnxMbgSgwCreateSessRspRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 27), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwCreateSessRspRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwCreateSessRspRx.setDescription('Number of GTP V2 Create Session Responses received.')
jnxMbgSgwCreateSessRspTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 28), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwCreateSessRspTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwCreateSessRspTx.setDescription('Number of GTP V2 Create Session Responses Sent.')
jnxMbgSgwModBrReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 29), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwModBrReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwModBrReqRx.setDescription('Number of GTP V2 Modify Bearer Requests received.')
jnxMbgSgwModBrReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 30), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwModBrReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwModBrReqTx.setDescription('Number of GTP V2 Modify Bearer Requests Sent.')
jnxMbgSgwModBrRspRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 31), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwModBrRspRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwModBrRspRx.setDescription('Number of GTP V2 Modify Bearer Responses received.')
jnxMbgSgwModBrRspTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 32), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwModBrRspTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwModBrRspTx.setDescription('Number of GTP V2 Modify Bearer Responses Sent.')
jnxMbgSgwDelSessReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 33), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwDelSessReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwDelSessReqRx.setDescription('Number of GTP V2 Delete Session Requests received.')
jnxMbgSgwDelSessReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 34), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwDelSessReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwDelSessReqTx.setDescription('Number of GTP V2 Delete Session Requests Sent.')
jnxMbgSgwDelSessRspRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 35), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwDelSessRspRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwDelSessRspRx.setDescription('Number of GTP V2 Delete Session Responses received.')
jnxMbgSgwDelSessRspTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 36), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwDelSessRspTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwDelSessRspTx.setDescription('Number of GTP V2 Delete Session Responses Sent.')
jnxMbgSgwCrtBrReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 37), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwCrtBrReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwCrtBrReqRx.setDescription('Number of GTP V2 Create Bearer Requests received.')
jnxMbgSgwCrtBrReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 38), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwCrtBrReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwCrtBrReqTx.setDescription('Number of GTP V2 Create Bearer Requests Sent.')
jnxMbgSgwCrtBrRspRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 39), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwCrtBrRspRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwCrtBrRspRx.setDescription('Number of GTP V2 Create Bearer Response received.')
jnxMbgSgwCrtBrRspTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 40), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwCrtBrRspTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwCrtBrRspTx.setDescription('Number of GTP V2 Create Bearer Response Sent.')
jnxMbgSgwUpdBrReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 41), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwUpdBrReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwUpdBrReqRx.setDescription('Number of GTP V2 Update Bearer Request received.')
jnxMbgSgwUpdBrReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 42), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwUpdBrReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwUpdBrReqTx.setDescription('Number of GTP V2 Update Bearer Request Sent.')
jnxMbgSgwUpdBrRspRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 43), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwUpdBrRspRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwUpdBrRspRx.setDescription('Number of GTP V2 Update Bearer Response received.')
jnxMbgSgwUpdBrRspTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 44), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwUpdBrRspTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwUpdBrRspTx.setDescription('Number of GTP V2 Update Bearer Response Sent.')
jnxMbgSgwDelBrReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 45), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwDelBrReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwDelBrReqRx.setDescription('Number of GTP V2 Delete Bearer Request received.')
jnxMbgSgwDelBrReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 46), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwDelBrReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwDelBrReqTx.setDescription('Number of GTP V2 Delete Bearer Request Sent.')
jnxMbgSgwDelBrRspRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 47), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwDelBrRspRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwDelBrRspRx.setDescription('Number of GTP V2 Delete Bearer Response received.')
jnxMbgSgwDelBrRspTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 48), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwDelBrRspTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwDelBrRspTx.setDescription('Number of GTP V2 Delete Bearer Response Sent.')
jnxMbgSgwDelConnSetReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 49), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwDelConnSetReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwDelConnSetReqRx.setDescription('Number of GTP V2 Delete PDN connection set Request received.')
jnxMbgSgwDelConnSetReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 50), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwDelConnSetReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwDelConnSetReqTx.setDescription('Number of GTP V2 Delete PDN connection set Request Sent.')
jnxMbgSgwDelConnSetRspRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 51), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwDelConnSetRspRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwDelConnSetRspRx.setDescription('Number of GTP V2 Delete PDN connection set Response received.')
jnxMbgSgwDelConnSetRspTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 52), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwDelConnSetRspTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwDelConnSetRspTx.setDescription('Number of GTP V2 Delete PDN connection set Response Sent.')
jnxMbgSgwUpdConnSetReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 53), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwUpdConnSetReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwUpdConnSetReqRx.setDescription('Number of GTP V2 Update Connection set Request received.')
jnxMbgSgwUpdConnSetReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 54), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwUpdConnSetReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwUpdConnSetReqTx.setDescription('Number of GTP V2 Update Connection set Request Sent.')
jnxMbgSgwUpdConnSetRspRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 55), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwUpdConnSetRspRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwUpdConnSetRspRx.setDescription('Number of GTP V2 Update Connection set Response received.')
jnxMbgSgwUpdConnSetRspTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 56), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwUpdConnSetRspTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwUpdConnSetRspTx.setDescription('Number of GTP V2 Update Connection set Response Sent.')
jnxMbgSgwModBrCmdRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 57), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwModBrCmdRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwModBrCmdRx.setDescription('Number of GTP V2 Modify Bearer Command received.')
jnxMbgSgwModBrCmdTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 58), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwModBrCmdTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwModBrCmdTx.setDescription('Number of GTP V2 Modify Bearer Command Sent.')
jnxMbgSgwModBrFlrIndRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 59), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwModBrFlrIndRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwModBrFlrIndRx.setDescription('Number of GTP V2 Modify Bearer Failure received.')
jnxMbgSgwModBrFlrIndTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 60), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwModBrFlrIndTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwModBrFlrIndTx.setDescription('Number of GTP V2 Modify Bearer Failure Sent.')
jnxMbgSgwDelBrCmdRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 61), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwDelBrCmdRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwDelBrCmdRx.setDescription('Number of GTP V2 Delete Bearer Command received.')
jnxMbgSgwDelBrCmdTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 62), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwDelBrCmdTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwDelBrCmdTx.setDescription('Number of GTP V2 Delete Bearer Command Sent.')
jnxMbgSgwDelBrFlrIndRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 63), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwDelBrFlrIndRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwDelBrFlrIndRx.setDescription('Number of GTP V2 Delete Bearer Failure received.')
jnxMbgSgwDelBrFlrIndTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 64), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwDelBrFlrIndTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwDelBrFlrIndTx.setDescription('Number of GTP V2 Delete Bearer Failure Sent.')
jnxMbgSgwBrResCmdRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 65), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwBrResCmdRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwBrResCmdRx.setDescription('Number of GTP V2 Bearer Response Command received.')
jnxMbgSgwBrResCmdTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 66), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwBrResCmdTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwBrResCmdTx.setDescription('Number of GTP V2 Bearer Response Command Sent.')
jnxMbgSgwBrResFlrIndRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 67), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwBrResFlrIndRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwBrResFlrIndRx.setDescription('Number of GTP V2 Bearer Resource Failure received.')
jnxMbgSgwBrResFlrIndTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 68), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwBrResFlrIndTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwBrResFlrIndTx.setDescription('Number of GTP V2 Bearer Resource Failure Sent.')
jnxMbgSgwRelAcsBrReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 69), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwRelAcsBrReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwRelAcsBrReqRx.setDescription('Number of GTP V2 Release Access Bearer Requests received.')
# Juniper MBG SGW MIB: GTPv2 per-message and per-cause counter columns of the
# SGW statistics table (OID prefix .1.3.6.1.4.1.2636.3.66.2.2.1.2.1).
# Each column was originally an identical three-statement stanza (constructor,
# setStatus, setDescription) differing only in data.  The stanzas are collapsed
# into one data table plus a single loop that builds the same MibTableColumn
# objects and binds them to the same module-level names; the MIB export
# machinery later in this file looks symbols up by name, so binding through
# globals() is equivalent to the original direct assignments.
_jnxMbgSgwGtpV2StatsEntryOid = (1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1)

# (symbol name, column sub-identifier, SMI status, DESCRIPTION text).
# DESCRIPTION strings are kept byte-identical to the generated MIB text,
# including their original spelling and punctuation inconsistencies.
_jnxMbgSgwGtpV2Columns = (
    ('jnxMbgSgwRelAcsBrReqTx', 70, 'current', 'Number of GTP V2 Release Access Bearer Requests sent.'),
    ('jnxMbgSgwRelAcsBrRespRx', 71, 'current', 'Number of GTP V2 Release Access Bearer Response received.'),
    ('jnxMbgSgwRelAcsBrRespTx', 72, 'current', 'Number of GTP V2 Release Access Bearer Response sent.'),
    ('jnxMbgSgwCrIndTunReqRx', 73, 'current', 'Number of GTP V2 Create Indirect Tunnel Forward Request Received'),
    ('jnxMbgSgwCrIndTunReqTx', 74, 'current', 'Number of GTP V2 Create Indirect Tunnel Forward Request sent'),
    ('jnxMbgSgwCrIndTunRespRx', 75, 'current', 'Number of GTP V2 Create Indirect Tunnel Forward Response Received'),
    ('jnxMbgSgwCrIndTunRespTx', 76, 'current', 'Number of GTP V2 Create Indirect Tunnel Forward Response sent'),
    ('jnxMbgSgwDelIndTunReqRx', 77, 'current', 'Number of GTP V2 Delete Indirect Tunnel Forward Request Received'),
    ('jnxMbgSgwDelIndTunReqTx', 78, 'current', 'Number of GTP V2 Delete Indirect Tunnel Forward Request sent.'),
    ('jnxMbgSgwDelIndTunRespRx', 79, 'current', 'Number of GTP V2 Delete Indirect Tunnel Forward Response Received'),
    ('jnxMbgSgwDelIndTunRespTx', 80, 'current', 'Number of GTP V2 Delete Indirect Tunnel Forward Response sent.'),
    ('jnxMbgSgwDlDataNotifRx', 81, 'current', 'Number of GTP V2 Downlink Data Notify received.'),
    ('jnxMbgSgwDlDataNotifTx', 82, 'current', 'Number of GTP V2 Downlink Data Notify Sent.'),
    ('jnxMbgSgwDlDataAckRx', 83, 'current', 'Number of GTP V2 Downlink Data Notify Acknowledgement received.'),
    ('jnxMbgSgwDlDataAckTx', 84, 'current', 'Number of GTP V2 Downlink Data Notify Acknowledgement Sent.'),
    ('jnxMbgSgwDlDataNotiFlrIndRx', 85, 'current', 'Number of GTP V2 Downlink Data Notification fail received.'),
    ('jnxMbgSgwDlDataNotiFlrIndTx', 86, 'current', 'Number of GTP V2 Downlink Data Notification fail Sent.'),
    ('jnxMbgSgwStopPagingIndRx', 87, 'current', 'Number of GTP V2 Number of Stop Paging Indication Messages Received.'),
    ('jnxMbgSgwStopPagingIndTx', 88, 'current', 'Number of GTP V2 Number of Stop Paging Indicaton messages sent'),
    # Columns 89/90 are marked obsolete in the MIB source.
    ('jnxMbgSgwGtpV2ICsPageRx', 89, 'obsolete', 'Number of GTPV2 packets received with cause Page.'),
    ('jnxMbgSgwGtpV2ICsPageTx', 90, 'obsolete', 'Number of GTP packets sent with cause Page.'),
    ('jnxMbgSgwGtpV2ICsReqAcceptRx', 91, 'current', 'Number of GTPV2 packets received with cause Request Accept.'),
    ('jnxMbgSgwGtpV2ICsReqAcceptTx', 92, 'current', 'Number of GTP packets sent with cause Request Accept.'),
    ('jnxMbgSgwGtpV2ICsAcceptPartRx', 93, 'current', 'Number of GTPV2 packets received with cause Accept Partial.'),
    ('jnxMbgSgwGtpV2ICsAcceptPartTx', 94, 'current', 'Number of GTP packets sent with cause Accept Partial.'),
    ('jnxMbgSgwGtpV2ICsNewPTNPrefRx', 95, 'current', 'Number of GTPV2 packets received with cause New PDN type due to Network Preference.'),
    ('jnxMbgSgwGtpV2ICsNewPTNPrefTx', 96, 'current', 'Number of GTP packets sent with cause New PDN type due to Network Preference'),
    ('jnxMbgSgwGtpV2ICsNewPTSIAdbrRx', 97, 'current', 'Number of GTPV2 packets received with cause New PDN type due to Single Address Bearer.'),
    ('jnxMbgSgwGtpV2ICsNewPTSIAdbrTx', 98, 'current', 'Number of GTP packets sent with cause New PDN type due to Single Address Bearer.'),
    ('jnxMbgSgwGtpV2ICsCtxNotFndRx', 99, 'current', 'Number of GTPV2 packets received with cause Context not found.'),
    ('jnxMbgSgwGtpV2ICsCtxNotFndTx', 100, 'current', 'Number of GTP packets sent with cause Context not found.'),
    ('jnxMbgSgwGtpV2ICsInvMsgFmtRx', 101, 'current', 'Number of GTPV2 packets received with cause Invalid Message Format.'),
    ('jnxMbgSgwGtpV2ICsInvMsgFmtTx', 102, 'current', 'Number of GTP packets sent with cause Invalid Message Format.'),
    ('jnxMbgSgwGtpV2ICsVerNotSuppRx', 103, 'current', 'Number of GTPV2 packets received with cause Version not Supported.'),
    ('jnxMbgSgwGtpV2ICsVerNotSuppTx', 104, 'current', 'Number of GTP packets sent with cause Version not Supported.'),
    ('jnxMbgSgwGtpV2ICsInvLenRx', 105, 'current', 'Number of GTPV2 packets received with cause Invalid Length.'),
    ('jnxMbgSgwGtpV2ICsInvLenTx', 106, 'current', 'Number of GTP packets sent with cause Invalid Length.'),
    ('jnxMbgSgwGtpV2ICsServNotSuppRx', 107, 'current', 'Number of GTPV2 packets received with cause Service Not supported.'),
    ('jnxMbgSgwGtpV2ICsServNotSuppTx', 108, 'current', 'Number of GTP packets sent with cause Service Not supported.'),
    ('jnxMbgSgwGtpV2ICsManIEIncorrRx', 109, 'current', 'Number of GTPV2 packets received with cause Mandatory IE incorrect.'),
    ('jnxMbgSgwGtpV2ICsManIEIncorrTx', 110, 'current', 'Number of GTP packets sent with cause Mandatory IE incorrect.'),
    ('jnxMbgSgwGtpV2ICsManIEMissRx', 111, 'current', 'Number of GTPV2 packets received with cause Mandatory IE Missing.'),
    ('jnxMbgSgwGtpV2ICsManIEMissTx', 112, 'current', 'Number of GTP packets sent with cause Mandatory IE Missing.'),
    ('jnxMbgSgwGtpV2ICsOptIEIncorrRx', 113, 'current', 'Number of GTPV2 packets received with cause Optional IE Incorrect.'),
    ('jnxMbgSgwGtpV2ICsOptIEIncorrTx', 114, 'current', 'Number of GTP packets sent with cause Optional IE Incorrect.'),
    ('jnxMbgSgwGtpV2ICsSysFailRx', 115, 'current', 'Number of GTPV2 packets received with cause System Failure.'),
    ('jnxMbgSgwGtpV2ICsSysFailTx', 116, 'current', 'Number of GTP packets sent with cause System Failure.'),
    ('jnxMbgSgwGtpV2ICsNoResRx', 117, 'current', 'Number of GTPV2 packets received with cause No Resource.'),
    ('jnxMbgSgwGtpV2ICsNoResTx', 118, 'current', 'Number of GTP packets sent with cause No Resource.'),
    ('jnxMbgSgwGtpV2ICsTFTSMANTErRx', 119, 'current', 'Number of GTPV2 packets received with cause TFT Symantic Error.'),
    ('jnxMbgSgwGtpV2ICsTFTSMANTErTx', 120, 'current', 'Number of GTP packets sent with cause TFT Symantic Error.'),
    ('jnxMbgSgwGtpV2ICsTFTSysErrRx', 121, 'current', 'Number of GTPV2 packets received with cause TFT System Error.'),
    ('jnxMbgSgwGtpV2ICsTFTSysErrTx', 122, 'current', 'Number of GTP packets sent with cause TFT System Error.'),
    ('jnxMbgSgwGtpV2ICsPkFltManErrRx', 123, 'current', 'Number of GTPV2 packets received with cause Packet Filter Symantic Error.'),
    ('jnxMbgSgwGtpV2ICsPkFltManErrTx', 124, 'current', 'Number of GTP packets sent with cause Packet Filter Symantic Error.'),
    ('jnxMbgSgwGtpV2ICsPkFltSynErrRx', 125, 'current', 'Number of GTPV2 packets received with cause Packet Filter Syntax Error.'),
    ('jnxMbgSgwGtpV2ICsPkFltSynErrTx', 126, 'current', 'Number of GTP packets sent with cause Packet Filter Syntax Error.'),
    ('jnxMbgSgwGtpV2ICsMisUnknAPNRx', 127, 'current', 'Number of GTPV2 packets received with cause Unknown APN.'),
    ('jnxMbgSgwGtpV2ICsMisUnknAPNTx', 128, 'current', 'Number of GTP packets sent with cause Unknown APN.'),
    ('jnxMbgSgwGtpV2ICsUnexpRptIERx', 129, 'current', 'Number of GTPV2 packets received with cause Unexpected Repeated IE.'),
    ('jnxMbgSgwGtpV2ICsUnexpRptIETx', 130, 'current', 'Number of GTP packets sent with cause Unexpected Repeated IE.'),
    ('jnxMbgSgwGtpV2ICsGREKeyNtFdRx', 131, 'current', 'Number of GTPV2 packets received with cause GRE Key Not Found.'),
    ('jnxMbgSgwGtpV2ICsGREKeyNtFdTx', 132, 'current', 'Number of GTP packets sent with cause GRE Key Not Found.'),
    ('jnxMbgSgwGtpV2ICsRelocFailRx', 133, 'current', 'Number of GTPV2 packets received with cause Relocation Failed.'),
    ('jnxMbgSgwGtpV2ICsRelocFailTx', 134, 'current', 'Number of GTP packets sent with cause Relocation Failed.'),
    ('jnxMbgSgwGtpV2ICsDeniedINRatRx', 135, 'current', 'Number of GTPV2 packets received with cause Denied in RAT.'),
    ('jnxMbgSgwGtpV2ICsDeniedINRatTx', 136, 'current', 'Number of GTP packets sent with cause Denied in RAT.'),
    ('jnxMbgSgwGtpV2ICsPTNotSuppRx', 137, 'current', 'Number of GTPV2 packets received with cause PDN Type Not Supported.'),
    ('jnxMbgSgwGtpV2ICsPTNotSuppTx', 138, 'current', 'Number of GTP packets sent with cause PDN Type Not Supported.'),
    ('jnxMbgSgwGtpV2ICsAllDynAdOccRx', 139, 'current', 'Number of GTPV2 packets received with cause Allocated Dynamic Address Occupied.'),
    ('jnxMbgSgwGtpV2ICsAllDynAdOccTx', 140, 'current', 'Number of GTP packets sent with cause Allocated Dynamic Address Occupied.'),
    ('jnxMbgSgwGtpV2ICsNOTFTUECTXRx', 141, 'current', 'Number of GTPV2 packets received with cause UE Context Without TFT Exists.'),
    ('jnxMbgSgwGtpV2ICsNOTFTUECTXTx', 142, 'current', 'Number of GTP packets sent with cause UE Context Without TFT Exists.'),
    ('jnxMbgSgwGtpV2ICsProtoNtSupRx', 143, 'current', 'Number of GTPV2 packets received with cause Protocol Not Supported.'),
    ('jnxMbgSgwGtpV2ICsProtoNtSupTx', 144, 'current', 'Number of GTP packets sent with cause Protocol Not Supported.'),
    ('jnxMbgSgwGtpV2ICsUENotRespRx', 145, 'current', 'Number of GTPV2 packets received with cause UE Not Responding.'),
    ('jnxMbgSgwGtpV2ICsUENotRespTx', 146, 'current', 'Number of GTP packets sent with cause UE Not Responding.'),
    ('jnxMbgSgwGtpV2ICsUERefusesRx', 147, 'current', 'Number of GTPV2 packets received with cause UE Refuses.'),
    ('jnxMbgSgwGtpV2ICsUERefusesTx', 148, 'current', 'Number of GTP packets sent with cause UE Refuses.'),
    ('jnxMbgSgwGtpV2ICsServDeniedRx', 149, 'current', 'Number of GTPV2 packets received with cause Service Denied.'),
    ('jnxMbgSgwGtpV2ICsServDeniedTx', 150, 'current', 'Number of GTP packets sent with cause Service Denied.'),
    ('jnxMbgSgwGtpV2ICsUnabPageUERx', 151, 'current', 'Number of GTPV2 packets received with cause Unable to Page UE.'),
    ('jnxMbgSgwGtpV2ICsUnabPageUETx', 152, 'current', 'Number of GTP packets sent with cause Unable to Page UE.'),
    ('jnxMbgSgwGtpV2ICsNoMemRx', 153, 'current', 'Number of GTPV2 packets received with cause No Memory.'),
    ('jnxMbgSgwGtpV2ICsNoMemTx', 154, 'current', 'Number of GTP packets sent with cause No Memory.'),
    ('jnxMbgSgwGtpV2ICsUserAUTHFlRx', 155, 'current', 'Number of GTPV2 packets received with cause User AUTH Failed.'),
    ('jnxMbgSgwGtpV2ICsUserAUTHFlTx', 156, 'current', 'Number of GTP packets sent with cause User AUTH Failed.'),
    ('jnxMbgSgwGtpV2ICsAPNAcsDenRx', 157, 'current', 'Number of GTPV2 packets received with cause APN Access Denied.'),
    ('jnxMbgSgwGtpV2ICsAPNAcsDenTx', 158, 'current', 'Number of GTP packets sent with cause APN Access Denied.'),
    ('jnxMbgSgwGtpV2ICsReqRejRx', 159, 'current', 'Number of GTPV2 packets received with cause Request Rejected.'),
    ('jnxMbgSgwGtpV2ICsReqRejTx', 160, 'current', 'Number of GTP packets sent with cause Request Rejected.'),
    ('jnxMbgSgwGtpV2ICsPTMSISigMMRx', 161, 'current', 'Number of GTPV2 packets received with cause P-TMSI Signature Mismatch.'),
    ('jnxMbgSgwGtpV2ICsPTMSISigMMTx', 162, 'current', 'Number of GTP packets sent with cause P-TMSI Signature Mismatch'),
    ('jnxMbgSgwGtpV2ICsIMSINotKnRx', 163, 'current', 'Number of GTPV2 packets received with cause IMSI Not Known.'),
    ('jnxMbgSgwGtpV2ICsIMSINotKnTx', 164, 'current', 'Number of GTP packets sent with cause IMSI Not Known.'),
    ('jnxMbgSgwGtpV2ICsCondIEMsRx', 165, 'current', 'Number of GTPV2 packets received with cause Conditional IE Missing.'),
    ('jnxMbgSgwGtpV2ICsCondIEMsTx', 166, 'current', 'Number of GTP packets sent with cause Conditional IE Missing.'),
    ('jnxMbgSgwGtpV2ICsAPNResTIncRx', 167, 'current', 'Number of GTPV2 packets received with cause APN Restriction Type Incompatible.'),
    ('jnxMbgSgwGtpV2ICsAPNResTIncTx', 168, 'current', 'Number of GTP packets sent with cause APN Restriction Type Incompatible.'),
    ('jnxMbgSgwGtpV2ICsUnknownRx', 169, 'current', 'Number of GTPV2 packets received with cause Unknown.'),
    ('jnxMbgSgwGtpV2ICsUnknownTx', 170, 'current', 'Number of GTP packets sent with cause Unknown.'),
    ('jnxMbgSgwGtpV2ICsLclDetRx', 171, 'current', 'Number of GTP packets received with cause Local Detach.'),
    ('jnxMbgSgwGtpV2ICsLclDetTx', 172, 'current', 'Number of GTP packets sent with cause Local Detach.'),
    ('jnxMbgSgwGtpV2ICsCmpDetRx', 173, 'current', 'Number of GTP packets received with cause Complete Detach.'),
    ('jnxMbgSgwGtpV2ICsCmpDetTx', 174, 'current', 'Number of GTP packets sent with cause Complete Detach.'),
)

for _colName, _colSuffix, _colStatus, _colDescr in _jnxMbgSgwGtpV2Columns:
    # Same construction as the original per-symbol stanza: a read-only
    # Counter64 column at <entry OID>.<suffix>, bound to the module-level
    # symbol name.
    _col = MibTableColumn(_jnxMbgSgwGtpV2StatsEntryOid + (_colSuffix,), Counter64()).setMaxAccess("readonly")
    globals()[_colName] = _col
    # STATUS/DESCRIPTION are attached only when the builder loads MIB texts,
    # mirroring the original `if mibBuilder.loadTexts:` guards.
    if mibBuilder.loadTexts:
        _col.setStatus(_colStatus)
        _col.setDescription(_colDescr)
jnxMbgSgwGtpV2ICsRATChgRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 175), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsRATChgRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsRATChgRx.setDescription('Number of GTP packets received with cause RAT changed from 3GPP to non 3GPP.')
jnxMbgSgwGtpV2ICsRATChgTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 176), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsRATChgTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsRATChgTx.setDescription('Number of GTP packets sent with cause RAT changed from 3GPP to non 3GPP.')
jnxMbgSgwGtpV2ICsISRDeactRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 177), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsISRDeactRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsISRDeactRx.setDescription('Number of GTP packets received with cause ISR Deactivated.')
jnxMbgSgwGtpV2ICsISRDeactTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 178), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsISRDeactTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsISRDeactTx.setDescription('Number of GTP packets sent with cause ISR Deactivated.')
jnxMbgSgwGtpV2ICsEIFRNCEnRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 179), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsEIFRNCEnRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsEIFRNCEnRx.setDescription('Number of GTP packets received with cause Error Indication from RNC eNodeB.')
jnxMbgSgwGtpV2ICsEIFRNCEnTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 180), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsEIFRNCEnTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsEIFRNCEnTx.setDescription('Number of GTP packets sent with cause Error Indication from RNC eNodeB.')
jnxMbgSgwGtpV2ICsSemErTADRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 181), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsSemErTADRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsSemErTADRx.setDescription('Number of GTP packets received with cause Semantic Error in TAD Operation.')
jnxMbgSgwGtpV2ICsSemErTADTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 182), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsSemErTADTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsSemErTADTx.setDescription('Number of GTP packets sent with cause Semantic Error in TAD Operation.')
jnxMbgSgwGtpV2ICsSynErTADRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 183), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsSynErTADRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsSynErTADRx.setDescription('Number of GTP packets received with cause Syntactic Error in TAD Operation.')
jnxMbgSgwGtpV2ICsSynErTADTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 184), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsSynErTADTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsSynErTADTx.setDescription('Number of GTP packets sent with cause Syntactic Error in TAD Operation.')
jnxMbgSgwGtpV2ICsRMValRcvRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 185), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsRMValRcvRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsRMValRcvRx.setDescription('Number of GTP packets received with cause Reserved Message Value Received.')
jnxMbgSgwGtpV2ICsRMValRcvTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 186), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsRMValRcvTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsRMValRcvTx.setDescription('Number of GTP packets sent with cause Reserved Message Value Received.')
jnxMbgSgwGtpV2ICsRPrNtRspRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 187), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsRPrNtRspRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsRPrNtRspRx.setDescription('Number of GTP packets received with cause Remote peer not responding.')
jnxMbgSgwGtpV2ICsRPrNtRspTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 188), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsRPrNtRspTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsRPrNtRspTx.setDescription('Number of GTP packets sent with cause Remote peer not responding.')
jnxMbgSgwGtpV2ICsColNWReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 189), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsColNWReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsColNWReqRx.setDescription('Number of GTP packets received with cause Collision with network initiated request.')
jnxMbgSgwGtpV2ICsColNWReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 190), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsColNWReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsColNWReqTx.setDescription('Number of GTP packets sent with cause Collision with network initiated request.')
jnxMbgSgwGtpV2ICsUnPgUESusRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 191), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsUnPgUESusRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsUnPgUESusRx.setDescription('Number of GTP packets received with cause Unable to page UE due to suspension.')
jnxMbgSgwGtpV2ICsUnPgUESusTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 192), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsUnPgUESusTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsUnPgUESusTx.setDescription('Number of GTP packets sent with cause Unable to page UE due to suspension.')
jnxMbgSgwGtpV2ICsInvTotLenRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 193), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsInvTotLenRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsInvTotLenRx.setDescription('Number of GTP packets received with cause Invalid total len.')
jnxMbgSgwGtpV2ICsInvTotLenTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 194), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsInvTotLenTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsInvTotLenTx.setDescription('Number of GTP packets sent with cause Invalid total len.')
jnxMbgSgwGtpV2ICsDtForNtSupRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 195), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsDtForNtSupRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsDtForNtSupRx.setDescription('Number of GTP packets received with cause Data forwarding not supported.')
jnxMbgSgwGtpV2ICsDtForNtSupTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 196), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsDtForNtSupTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsDtForNtSupTx.setDescription('Number of GTP packets sent with cause Data forwarding not supported.')
jnxMbgSgwGtpV2ICsInReFRePrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 197), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsInReFRePrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsInReFRePrRx.setDescription('Number of GTP packets received with cause Invalid Reply from Remote peer.')
jnxMbgSgwGtpV2ICsInReFRePrTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 198), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsInReFRePrTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsInReFRePrTx.setDescription('Number of GTP packets sent with cause Invalid Reply from Remote peer.')
jnxMbgSgwGtpV2ICsInvPrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 199), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsInvPrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsInvPrRx.setDescription('Number of GTP packets received with cause Invalid peer.')
jnxMbgSgwGtpV2ICsInvPrTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 200), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsInvPrTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsInvPrTx.setDescription('Number of GTP packets sent with cause Invalid peer.')
# --- GTPv1 statistics (auto-generated PySNMP objects, sub-ids .201-.211) ---
# Read-only Counter64 columns for GTPv1-level events: protocol errors,
# unsupported messages, T3 response-timer expiries, end-marker packets,
# echo request/response packets, and error indications.  Same generated
# column / setStatus / setDescription triplet pattern as the rest of the
# file; the `mibBuilder.loadTexts` guards skip descriptive text when the
# MIB was compiled without texts.
jnxMbgSgwGtpV1ProtocolErrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 201), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV1ProtocolErrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV1ProtocolErrRx.setDescription('Number of GTPv1 Protocol Errors Received.')
jnxMbgSgwGtpV1UnSupMsgRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 202), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV1UnSupMsgRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV1UnSupMsgRx.setDescription('Number of GTPv1 Unsupported Messages received.')
jnxMbgSgwGtpV1T3RespTmrExpRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 203), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV1T3RespTmrExpRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV1T3RespTmrExpRx.setDescription('Number of GTP V1 T3 timer expiries Received.')
jnxMbgSgwGtpV1EndMarkerRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 204), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV1EndMarkerRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV1EndMarkerRx.setDescription('Number of GTP V1 end marker packets received.')
jnxMbgSgwGtpV1EndMarkerTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 205), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV1EndMarkerTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV1EndMarkerTx.setDescription('Number of GTP V1 end marker packets sent.')
jnxMbgSgwGtpV1EchoReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 206), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV1EchoReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV1EchoReqRx.setDescription('Number of GTP V1 echo request packets received.')
jnxMbgSgwGtpV1EchoReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 207), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV1EchoReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV1EchoReqTx.setDescription('Number of GTP V1 echo request packets sent.')
jnxMbgSgwGtpV1EchoRespRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 208), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV1EchoRespRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV1EchoRespRx.setDescription('Number of GTP V1 echo response packets received.')
jnxMbgSgwGtpV1EchoRespTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 209), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV1EchoRespTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV1EchoRespTx.setDescription('Number of GTP V1 echo response packets sent.')
jnxMbgSgwGtpV1ErrIndRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 210), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV1ErrIndRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV1ErrIndRx.setDescription('Number of GTP V1 Error Indication packets received.')
jnxMbgSgwGtpV1ErrIndTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 211), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV1ErrIndTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV1ErrIndTx.setDescription('Number of GTP V1 Error Indication packets sent.')
# --- GTPv2 Suspend/Resume and Piggyback message counters (.212-.225) ---
# Read-only Counter64 columns, generated in the same triplet pattern:
#   .212-.215  Suspend Notification / Suspend Acknowledgement Rx/Tx
#   .216-.219  Resume Notification / Resume Acknowledgement Rx/Tx
#   .220-.225  Piggyback messages Rx/Tx on the S11, S4 and S5 interfaces
jnxMbgSgwSuspNotifRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 212), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwSuspNotifRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwSuspNotifRx.setDescription('Number of GTPv2 Suspend Notification messages received.')
jnxMbgSgwSuspNotifTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 213), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwSuspNotifTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwSuspNotifTx.setDescription('Number of GTPv2 Suspend Notification messages sent.')
jnxMbgSgwSuspAckRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 214), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwSuspAckRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwSuspAckRx.setDescription('Number of GTPv2 Suspend Acknowledgement messages received.')
jnxMbgSgwSuspAckTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 215), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwSuspAckTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwSuspAckTx.setDescription('Number of GTPv2 Suspend Acknowledgement messages sent.')
jnxMbgSgwResumeNotifRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 216), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwResumeNotifRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwResumeNotifRx.setDescription('Number of GTPv2 Resume Notification messages received.')
jnxMbgSgwResumeNotifTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 217), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwResumeNotifTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwResumeNotifTx.setDescription('Number of GTPv2 Resume Notification messages sent.')
jnxMbgSgwResumeAckRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 218), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwResumeAckRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwResumeAckRx.setDescription('Number of GTPv2 Resume Acknowledgement messages received.')
jnxMbgSgwResumeAckTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 219), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwResumeAckTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwResumeAckTx.setDescription('Number of GTPv2 Resume Acknowledgement messages sent.')
jnxMbgSgwS11PiggybackMsgRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 220), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwS11PiggybackMsgRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwS11PiggybackMsgRx.setDescription('Number of GTPv2 S11 Piggyback messages received.')
jnxMbgSgwS11PiggybackMsgTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 221), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwS11PiggybackMsgTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwS11PiggybackMsgTx.setDescription('Number of GTPv2 S11 Piggyback messages sent.')
jnxMbgSgwS4PiggybackMsgRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 222), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwS4PiggybackMsgRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwS4PiggybackMsgRx.setDescription('Number of GTPv2 S4 Piggyback messages received.')
jnxMbgSgwS4PiggybackMsgTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 223), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwS4PiggybackMsgTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwS4PiggybackMsgTx.setDescription('Number of GTPv2 S4 Piggyback messages sent.')
jnxMbgSgwS5PiggybackMsgRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 224), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwS5PiggybackMsgRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwS5PiggybackMsgRx.setDescription('Number of GTPv2 S5 Piggyback messages received.')
jnxMbgSgwS5PiggybackMsgTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 225), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwS5PiggybackMsgTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwS5PiggybackMsgTx.setDescription('Number of GTPv2 S5 Piggyback messages sent.')
# --- Per-peer GTP-C statistics table definition ---
# Table (...2.2.1.1) of per-peer control-plane statistics.  Each row is
# indexed by four objects (see setIndexNames below): the gateway index
# imported from JUNIPER-MOBILE-GATEWAYS, plus the peer's remote address,
# local address, and routing instance defined here.
# NOTE(review): the table is named jnxMbgSgwGtpCPerPeerStatsTable but its
# row is jnxMbgSgwGtpPerPeerStatsEntry (no "C") -- inconsistent, but these
# names come from the compiled MIB and must not be changed here.
jnxMbgSgwGtpCPerPeerStatsTable = MibTable((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1), )
if mibBuilder.loadTexts: jnxMbgSgwGtpCPerPeerStatsTable.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpCPerPeerStatsTable.setDescription('Each entry corresponds to a GTP per peer level control statistic.')
jnxMbgSgwGtpPerPeerStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1), ).setIndexNames((0, "JUNIPER-MOBILE-GATEWAYS", "jnxMbgGwIndex"), (0, "JUNIPER-MOBILE-GATEWAY-SGW-GTP-MIB", "jnxMbgSgwPPGtpRmtAddr"), (0, "JUNIPER-MOBILE-GATEWAY-SGW-GTP-MIB", "jnxMbgSgwPPGtpLclAddr"), (0, "JUNIPER-MOBILE-GATEWAY-SGW-GTP-MIB", "jnxMbgSgwPPGtpRtgInst"))
if mibBuilder.loadTexts: jnxMbgSgwGtpPerPeerStatsEntry.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpPerPeerStatsEntry.setDescription('A specification of the GTPC peer level Statistics.')
# Index columns: no setMaxAccess() call, so unlike the readonly data
# columns below they carry pysnmp's default access; they identify the row
# per the setIndexNames tuple above.
jnxMbgSgwPPGtpRmtAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 1), IpAddress())
if mibBuilder.loadTexts: jnxMbgSgwPPGtpRmtAddr.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpRmtAddr.setDescription('The Remote IP address of this GTP peer entry.')
jnxMbgSgwPPGtpLclAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 2), IpAddress())
if mibBuilder.loadTexts: jnxMbgSgwPPGtpLclAddr.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpLclAddr.setDescription('The Local IP address of this GTP peer entry.')
jnxMbgSgwPPGtpRtgInst = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 3), Unsigned32())
if mibBuilder.loadTexts: jnxMbgSgwPPGtpRtgInst.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpRtgInst.setDescription('The Routing Instance for this Peer.')
# --- Per-peer data columns (read-only Counter64, sub-ids .4-.45) ---
# Columns of the per-peer stats row defined above.  Grouped as:
#   .4-.15   packet drop / allocation / parse-error counters
#   .16-.19  aggregate GTPv2 message and byte counters
#   .20-.25  echo request/response and version-not-supported counters
#   .26-.45  per-message-type Rx/Tx counters (Create/Modify/Delete Session,
#            Create/Update/Delete Bearer requests and responses)
jnxMbgSgwPPRxPacketsDropped = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 4), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPRxPacketsDropped.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPRxPacketsDropped.setDescription('Number of Received Packets Dropped.')
jnxMbgSgwPPPacketAllocFail = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 5), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPPacketAllocFail.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPPacketAllocFail.setDescription('Number of Packet allocation failures.')
jnxMbgSgwPPPacketSendFail = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 6), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPPacketSendFail.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPPacketSendFail.setDescription('Number of Packet Send failures.')
jnxMbgSgwPPIPVerErrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 7), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPIPVerErrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPIPVerErrRx.setDescription('Number of IP Version Error Packets Received.')
jnxMbgSgwPPIPProtoErrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 8), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPIPProtoErrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPIPProtoErrRx.setDescription('Number of IP Protocol Error packets Received.')
jnxMbgSgwPPGTPPortErrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 9), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGTPPortErrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGTPPortErrRx.setDescription('Number of Port Error Packets Received.')
jnxMbgSgwPPGTPUnknVerRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 10), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGTPUnknVerRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGTPUnknVerRx.setDescription('Number of Unknown Version Packets Received.')
jnxMbgSgwPPPcktLenErrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 11), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPPcktLenErrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPPcktLenErrRx.setDescription('Number of Packet Length Error Packets Received.')
jnxMbgSgwPPUnknMsgRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 12), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPUnknMsgRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPUnknMsgRx.setDescription('Number of Unknown Messages Received.')
jnxMbgSgwPPProtocolErrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 13), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPProtocolErrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPProtocolErrRx.setDescription('Number of GTPv2 Protocol Errors Received.')
jnxMbgSgwPPUnSupportedMsgRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 14), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPUnSupportedMsgRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPUnSupportedMsgRx.setDescription('Number of GTPv2 Unsupported Messages received.')
jnxMbgSgwPPT3RespTmrExpRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 15), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPT3RespTmrExpRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPT3RespTmrExpRx.setDescription('Number of GTP V2 T3 timer expiries Received.')
jnxMbgSgwPPV2NumMsgRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 16), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPV2NumMsgRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPV2NumMsgRx.setDescription('Number of GTPv2 messages received.')
jnxMbgSgwPPV2NumMsgTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 17), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPV2NumMsgTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPV2NumMsgTx.setDescription('Number of GTPV2 messages sent.')
jnxMbgSgwPPV2NumBytesRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 18), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPV2NumBytesRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPV2NumBytesRx.setDescription('Number of GTPv2 bytes received.')
jnxMbgSgwPPV2NumBytesTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 19), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPV2NumBytesTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPV2NumBytesTx.setDescription('Number of GTPV2 bytes sent.')
jnxMbgSgwPPV2EchoReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 20), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPV2EchoReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPV2EchoReqRx.setDescription('Number of GTP V2 Echo Request received.')
jnxMbgSgwPPV2EchoReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 21), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPV2EchoReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPV2EchoReqTx.setDescription('Number of GTP V2 Echo Request Sent.')
jnxMbgSgwPPV2EchoRespRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 22), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPV2EchoRespRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPV2EchoRespRx.setDescription('Number of GTP V2 Echo Response received.')
jnxMbgSgwPPV2EchoRespTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 23), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPV2EchoRespTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPV2EchoRespTx.setDescription('Number of GTP V2 Echo Response Sent.')
jnxMbgSgwPPV2VerNotSupRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 24), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPV2VerNotSupRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPV2VerNotSupRx.setDescription('Number of GTP V2 Version Not supported messages received')
jnxMbgSgwPPV2VerNotSupTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 25), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPV2VerNotSupTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPV2VerNotSupTx.setDescription('Number of GTP V2 Number of version not supported messages sent.')
# Per-message-type Rx/Tx counters (.26-.45).
jnxMbgSgwPPCreateSessReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 26), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPCreateSessReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPCreateSessReqRx.setDescription('Number of GTP V2 Create Session Requests received.')
jnxMbgSgwPPCreateSessReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 27), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPCreateSessReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPCreateSessReqTx.setDescription('Number of GTP V2 Create Session Requests Sent.')
jnxMbgSgwPPCreateSessRspRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 28), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPCreateSessRspRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPCreateSessRspRx.setDescription('Number of GTP V2 Create Session Responses received.')
jnxMbgSgwPPCreateSessRspTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 29), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPCreateSessRspTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPCreateSessRspTx.setDescription('Number of GTP V2 Create Session Responses Sent.')
jnxMbgSgwPPModBrReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 30), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPModBrReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPModBrReqRx.setDescription('Number of GTP V2 Modify Bearer Requests received.')
jnxMbgSgwPPModBrReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 31), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPModBrReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPModBrReqTx.setDescription('Number of GTP V2 Modify Bearer Requests Sent.')
jnxMbgSgwPPModBrRspRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 32), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPModBrRspRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPModBrRspRx.setDescription('Number of GTP V2 Modify Bearer Responses received.')
jnxMbgSgwPPModBrRspTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 33), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPModBrRspTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPModBrRspTx.setDescription('Number of GTP V2 Modify Bearer Responses Sent.')
jnxMbgSgwPPDelSessReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 34), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPDelSessReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPDelSessReqRx.setDescription('Number of GTP V2 Delete Session Requests received.')
jnxMbgSgwPPDelSessReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 35), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPDelSessReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPDelSessReqTx.setDescription('Number of GTP V2 Delete Session Requests Sent.')
jnxMbgSgwPPDelSessRspRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 36), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPDelSessRspRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPDelSessRspRx.setDescription('Number of GTP V2 Delete Session Responses received.')
jnxMbgSgwPPDelSessRspTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 37), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPDelSessRspTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPDelSessRspTx.setDescription('Number of GTP V2 Delete Session Responses Sent.')
jnxMbgSgwPPCrtBrReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 38), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPCrtBrReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPCrtBrReqRx.setDescription('Number of GTP V2 Create Bearer Requests received.')
jnxMbgSgwPPCrtBrReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 39), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPCrtBrReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPCrtBrReqTx.setDescription('Number of GTP V2 Create Bearer Requests Sent.')
jnxMbgSgwPPCrtBrRspRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 40), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPCrtBrRspRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPCrtBrRspRx.setDescription('Number of GTP V2 Create Bearer Response received.')
jnxMbgSgwPPCrtBrRspTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 41), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPCrtBrRspTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPCrtBrRspTx.setDescription('Number of GTP V2 Create Bearer Response Sent.')
jnxMbgSgwPPUpdBrReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 42), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPUpdBrReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPUpdBrReqRx.setDescription('Number of GTP V2 Update Bearer Request received.')
jnxMbgSgwPPUpdBrReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 43), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPUpdBrReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPUpdBrReqTx.setDescription('Number of GTP V2 Update Bearer Request Sent.')
jnxMbgSgwPPUpdBrRspRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 44), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPUpdBrRspRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPUpdBrRspRx.setDescription('Number of GTP V2 Update Bearer Response received.')
jnxMbgSgwPPUpdBrRspTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 45), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPUpdBrRspTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPUpdBrRspTx.setDescription('Number of GTP V2 Update Bearer Response Sent.')
jnxMbgSgwPPDelBrReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 46), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPDelBrReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPDelBrReqRx.setDescription('Number of GTP V2 Delete Bearer Request received.')
jnxMbgSgwPPDelBrReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 47), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPDelBrReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPDelBrReqTx.setDescription('Number of GTP V2 Delete Bearer Request Sent.')
jnxMbgSgwPPDelBrRspRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 48), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPDelBrRspRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPDelBrRspRx.setDescription('Number of GTP V2 Delete Bearer Response received.')
jnxMbgSgwPPDelBrRspTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 49), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPDelBrRspTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPDelBrRspTx.setDescription('Number of GTP V2 Delete Bearer Response Sent.')
jnxMbgSgwPPDelConnSetReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 50), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPDelConnSetReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPDelConnSetReqRx.setDescription('Number of GTP V2 Delete PDN connection set Request received.')
jnxMbgSgwPPDelConnSetReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 51), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPDelConnSetReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPDelConnSetReqTx.setDescription('Number of GTP V2 Delete PDN connection set Request Sent.')
jnxMbgSgwPPDelConnSetRspRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 52), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPDelConnSetRspRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPDelConnSetRspRx.setDescription('Number of GTP V2 Delete PDN connection set Response received.')
jnxMbgSgwPPDelConnSetRspTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 53), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPDelConnSetRspTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPDelConnSetRspTx.setDescription('Number of GTP V2 Delete PDN connection set Response Sent.')
jnxMbgSgwPPUpdConnSetReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 54), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPUpdConnSetReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPUpdConnSetReqRx.setDescription('Number of GTP V2 Update Connection set Request received.')
jnxMbgSgwPPUpdConnSetReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 55), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPUpdConnSetReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPUpdConnSetReqTx.setDescription('Number of GTP V2 Update Connection set Request Sent.')
jnxMbgSgwPPUpdConnSetRspRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 56), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPUpdConnSetRspRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPUpdConnSetRspRx.setDescription('Number of GTP V2 Update Connection set Response received.')
jnxMbgSgwPPUpdConnSetRspTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 57), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPUpdConnSetRspTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPUpdConnSetRspTx.setDescription('Number of GTP V2 Update Connection set Response Sent.')
jnxMbgSgwPPModBrCmdRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 58), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPModBrCmdRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPModBrCmdRx.setDescription('Number of GTP V2 Modify Bearer Command received.')
jnxMbgSgwPPModBrCmdTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 59), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPModBrCmdTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPModBrCmdTx.setDescription('Number of GTP V2 Modify Bearer Command Sent.')
jnxMbgSgwPPModBrFlrIndRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 60), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPModBrFlrIndRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPModBrFlrIndRx.setDescription('Number of GTP V2 Modify Bearer Failure received.')
jnxMbgSgwPPModBrFlrIndTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 61), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPModBrFlrIndTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPModBrFlrIndTx.setDescription('Number of GTP V2 Modify Bearer Failure Sent.')
jnxMbgSgwPPDelBrCmdRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 62), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPDelBrCmdRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPDelBrCmdRx.setDescription('Number of GTP V2 Delete Bearer Command received.')
jnxMbgSgwPPDelBrCmdTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 63), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPDelBrCmdTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPDelBrCmdTx.setDescription('Number of GTP V2 Delete Bearer Command Sent.')
jnxMbgSgwPPDelBrFlrIndRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 64), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPDelBrFlrIndRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPDelBrFlrIndRx.setDescription('Number of GTP V2 Delete Bearer Failure received.')
jnxMbgSgwPPDelBrFlrIndTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 65), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPDelBrFlrIndTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPDelBrFlrIndTx.setDescription('Number of GTP V2 Delete Bearer Failure Sent.')
jnxMbgSgwPPBrResCmdRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 66), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPBrResCmdRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPBrResCmdRx.setDescription('Number of GTP V2 Bearer Response Command received.')
jnxMbgSgwPPBrResCmdTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 67), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPBrResCmdTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPBrResCmdTx.setDescription('Number of GTP V2 Bearer Response Command Sent.')
jnxMbgSgwPPBrResFlrIndRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 68), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPBrResFlrIndRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPBrResFlrIndRx.setDescription('Number of GTP V2 Bearer Resource Failure received.')
jnxMbgSgwPPBrResFlrIndTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 69), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPBrResFlrIndTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPBrResFlrIndTx.setDescription('Number of GTP V2 Bearer Resource Failure Sent.')
jnxMbgSgwPPRelAcsBrReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 70), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPRelAcsBrReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPRelAcsBrReqRx.setDescription('Number of GTP V2 Release Access Bearer Requests received.')
jnxMbgSgwPPRelAcsBrReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 71), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPRelAcsBrReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPRelAcsBrReqTx.setDescription('Number of GTP V2 Release Access Bearer Requests sent.')
jnxMbgSgwPPRelAcsBrRespRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 72), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPRelAcsBrRespRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPRelAcsBrRespRx.setDescription('Number of GTP V2 Release Access Bearer Response received.')
jnxMbgSgwPPRelAcsBrRespTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 73), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPRelAcsBrRespTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPRelAcsBrRespTx.setDescription('Number of GTP V2 Release Access Bearer Response sent.')
jnxMbgSgwPPCrIndTunReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 74), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPCrIndTunReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPCrIndTunReqRx.setDescription('Number of GTP V2 Create Indirect Tunnel Forward Request Received')
jnxMbgSgwPPCrIndTunReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 75), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPCrIndTunReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPCrIndTunReqTx.setDescription('Number of GTP V2 Create Indirect Tunnel Forward Request sent')
jnxMbgSgwPPCrIndTunRespRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 76), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPCrIndTunRespRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPCrIndTunRespRx.setDescription('Number of GTP V2 Create Indirect Tunnel Forward Response Received')
jnxMbgSgwPPCrIndTunRespTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 77), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPCrIndTunRespTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPCrIndTunRespTx.setDescription('Number of GTP V2 Create Indirect Tunnel Forward Response sent')
jnxMbgSgwPPDelIndTunReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 78), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPDelIndTunReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPDelIndTunReqRx.setDescription('Number of GTP V2 Delete Indirect Tunnel Forward Request Received')
jnxMbgSgwPPDelIndTunReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 79), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPDelIndTunReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPDelIndTunReqTx.setDescription('Number of GTP V2 Delete Indirect Tunnel Forward Request sent.')
jnxMbgSgwPPDelIndTunRespRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 80), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPDelIndTunRespRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPDelIndTunRespRx.setDescription('Number of GTP V2 Delete Indirect Tunnel Forward Response Received')
jnxMbgSgwPPDelIndTunRespTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 81), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPDelIndTunRespTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPDelIndTunRespTx.setDescription('Number of GTP V2 Delete Indirect Tunnel Forward Response sent.')
jnxMbgSgwPPDlDataNotifRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 82), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPDlDataNotifRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPDlDataNotifRx.setDescription('Number of GTP V2 Downlink Data Notify received.')
jnxMbgSgwPPDlDataNotifTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 83), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPDlDataNotifTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPDlDataNotifTx.setDescription('Number of GTP V2 Downlink Data Notify Sent.')
jnxMbgSgwPPDlDataAckRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 84), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPDlDataAckRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPDlDataAckRx.setDescription('Number of GTP V2 Downlink Data Notify Acknowledgement received.')
jnxMbgSgwPPDlDataAckTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 85), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPDlDataAckTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPDlDataAckTx.setDescription('Number of GTP V2 Downlink Data Notify Acknowledgement Sent.')
jnxMbgSgwPPDlDataNotiFlrIndRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 86), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPDlDataNotiFlrIndRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPDlDataNotiFlrIndRx.setDescription('Number of GTP V2 Downlink Data Notification fail received.')
jnxMbgSgwPPDlDataNotiFlrIndTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 87), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPDlDataNotiFlrIndTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPDlDataNotiFlrIndTx.setDescription('Number of GTP V2 Downlink Data Notification fail Sent.')
jnxMbgSgwPPStopPagingIndRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 88), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPStopPagingIndRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPStopPagingIndRx.setDescription('Number of GTP V2 Number of Stop Paging Indication Messages Received.')
jnxMbgSgwPPStopPagingIndTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 89), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPStopPagingIndTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPStopPagingIndTx.setDescription('Number of GTP V2 Number of Stop Paging Indicaton messages sent')
jnxMbgSgwPPGtpV2ICsPageRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 90), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsPageRx.setStatus('obsolete')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsPageRx.setDescription('Number of GTPV2 packets received with cause Page.')
jnxMbgSgwPPGtpV2ICsPageTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 91), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsPageTx.setStatus('obsolete')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsPageTx.setDescription('Number of GTP packets sent with cause Page.')
jnxMbgSgwPPGtpV2ICsReqAcceptRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 92), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsReqAcceptRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsReqAcceptRx.setDescription('Number of GTPV2 packets received with cause Request Accept.')
jnxMbgSgwPPGtpV2ICsReqAcceptTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 93), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsReqAcceptTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsReqAcceptTx.setDescription('Number of GTP packets sent with cause Request Accept.')
jnxMbgSgwPPGtpV2ICsAcceptPartRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 94), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsAcceptPartRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsAcceptPartRx.setDescription('Number of GTPV2 packets received with cause Accept Partial.')
jnxMbgSgwPPGtpV2ICsAcceptPartTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 95), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsAcceptPartTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsAcceptPartTx.setDescription('Number of GTP packets sent with cause Accept Partial.')
jnxMbgSgwPPGtpV2ICsNewPTNPrefRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 96), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsNewPTNPrefRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsNewPTNPrefRx.setDescription('Number of GTPV2 packets received with cause New PDN type due to Network Preference.')
jnxMbgSgwPPGtpV2ICsNewPTNPrefTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 97), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsNewPTNPrefTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsNewPTNPrefTx.setDescription('Number of GTP packets sent with cause New PDN type due to Network Preference.')
jnxMbgSgwPPGtpV2ICsNPTSIAdbrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 98), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsNPTSIAdbrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsNPTSIAdbrRx.setDescription('Number of GTPV2 packets received with cause New PDN type due to Single Address Bearer.')
jnxMbgSgwPPGtpV2ICsNPTSIAdbrTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 99), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsNPTSIAdbrTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsNPTSIAdbrTx.setDescription('Number of GTP packets sent with cause New PDN type due to Single Address Bearer.')
jnxMbgSgwPPGtpV2ICsCtxNotFndRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 100), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsCtxNotFndRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsCtxNotFndRx.setDescription('Number of GTPV2 packets received with cause Context not found.')
jnxMbgSgwPPGtpV2ICsCtxNotFndTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 101), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsCtxNotFndTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsCtxNotFndTx.setDescription('Number of GTP packets sent with cause Context not found.')
jnxMbgSgwPPGtpV2ICsInvMsgFmtRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 102), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsInvMsgFmtRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsInvMsgFmtRx.setDescription('Number of GTPV2 packets received with cause Invalid Message Format.')
jnxMbgSgwPPGtpV2ICsInvMsgFmtTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 103), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsInvMsgFmtTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsInvMsgFmtTx.setDescription('Number of GTP packets sent with cause Invalid Message Format.')
jnxMbgSgwPPGtpV2ICsVerNotSuppRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 104), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsVerNotSuppRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsVerNotSuppRx.setDescription('Number of GTPV2 packets received with cause Version not Supported.')
jnxMbgSgwPPGtpV2ICsVerNotSuppTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 105), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsVerNotSuppTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsVerNotSuppTx.setDescription('Number of GTP packets sent with cause Version not Supported.')
jnxMbgSgwPPGtpV2ICsInvLenRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 106), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsInvLenRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsInvLenRx.setDescription('Number of GTPV2 packets received with cause Invalid Length.')
jnxMbgSgwPPGtpV2ICsInvLenTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 107), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsInvLenTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsInvLenTx.setDescription('Number of GTP packets sent with cause Invalid Length.')
jnxMbgSgwPPGtpV2ICsServNotSupRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 108), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsServNotSupRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsServNotSupRx.setDescription('Number of GTPV2 packets received with cause Service Not supported.')
jnxMbgSgwPPGtpV2ICsServNotSupTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 109), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsServNotSupTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsServNotSupTx.setDescription('Number of GTP packets sent with cause Service Not supported.')
jnxMbgSgwPPGtpV2ICsManIEIncorRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 110), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsManIEIncorRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsManIEIncorRx.setDescription('Number of GTPV2 packets received with cause Mandatory IE incorrect.')
jnxMbgSgwPPGtpV2ICsManIEIncorTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 111), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsManIEIncorTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsManIEIncorTx.setDescription('Number of GTP packets sent with cause Mandatory IE incorrect.')
jnxMbgSgwPPGtpV2ICsManIEMissRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 112), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsManIEMissRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsManIEMissRx.setDescription('Number of GTPV2 packets received with cause Mandatory IE Missing.')
jnxMbgSgwPPGtpV2ICsManIEMissTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 113), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsManIEMissTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsManIEMissTx.setDescription('Number of GTP packets sent with cause Mandatory IE Missing.')
jnxMbgSgwPPGtpV2ICsOptIEIncorRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 114), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsOptIEIncorRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsOptIEIncorRx.setDescription('Number of GTPV2 packets received with cause Optional IE Incorrect.')
jnxMbgSgwPPGtpV2ICsOptIEIncorTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 115), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsOptIEIncorTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsOptIEIncorTx.setDescription('Number of GTP packets sent with cause Optional IE Incorrect.')
jnxMbgSgwPPGtpV2ICsSysFailRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 116), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsSysFailRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsSysFailRx.setDescription('Number of GTPV2 packets received with cause System Failure.')
jnxMbgSgwPPGtpV2ICsSysFailTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 117), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsSysFailTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsSysFailTx.setDescription('Number of GTP packets sent with cause System Failure.')
jnxMbgSgwPPGtpV2ICsNoResRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 118), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsNoResRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsNoResRx.setDescription('Number of GTPV2 packets received with cause No Resource.')
jnxMbgSgwPPGtpV2ICsNoResTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 119), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsNoResTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsNoResTx.setDescription('Number of GTP packets sent with cause No Resource.')
jnxMbgSgwPPGtpV2ICsTFTSMANTErRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 120), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsTFTSMANTErRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsTFTSMANTErRx.setDescription('Number of GTPV2 packets received with cause TFT Symantic Error.')
jnxMbgSgwPPGtpV2ICsTFTSMANTErTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 121), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsTFTSMANTErTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsTFTSMANTErTx.setDescription('Number of GTP packets sent with cause TFT Symantic Error.')
jnxMbgSgwPPGtpV2ICsTFTSysErrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 122), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsTFTSysErrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsTFTSysErrRx.setDescription('Number of GTPV2 packets received with cause TFT System Error.')
jnxMbgSgwPPGtpV2ICsTFTSysErrTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 123), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsTFTSysErrTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsTFTSysErrTx.setDescription('Number of GTP packets sent with cause TFT System Error.')
jnxMbgSgwPPGtpV2ICsPkFltManErRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 124), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsPkFltManErRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsPkFltManErRx.setDescription('Number of GTPV2 packets received with cause Packet Filter Symantic Error.')
jnxMbgSgwPPGtpV2ICsPkFltManErTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 125), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsPkFltManErTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsPkFltManErTx.setDescription('Number of GTP packets sent with cause Packet Filter Symantic Error.')
jnxMbgSgwPPGtpV2ICsPkFltSynErRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 126), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsPkFltSynErRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsPkFltSynErRx.setDescription('Number of GTPV2 packets received with cause Packet Filter Syntax Error.')
jnxMbgSgwPPGtpV2ICsPkFltSynErTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 127), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsPkFltSynErTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsPkFltSynErTx.setDescription('Number of GTP packets sent with cause Packet Filter Syntax Error.')
jnxMbgSgwPPGtpV2ICsMisUnknAPNRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 128), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsMisUnknAPNRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsMisUnknAPNRx.setDescription('Number of GTPV2 packets received with cause Unknown APN.')
# --- Auto-generated pysnmp MIB objects (JUNIPER-MOBILE-GATEWAY-SGW-GTP-MIB) ---
# Each jnxMbgSgwPPGtpV2ICs*Rx/Tx pair below is a read-only Counter64 column in
# the per-packet GTP statistics table (OID prefix .1.3.6.1.4.1.2636.3.66.2.2.1.1.1),
# counting GTPv2 control packets received (Rx) / sent (Tx) with a specific cause
# code. The `if mibBuilder.loadTexts:` guards attach the human-readable
# status/description only when the MIB is compiled with texts enabled.
# NOTE(review): generated code — do not hand-edit OIDs; regenerate from the MIB.
jnxMbgSgwPPGtpV2ICsMisUnknAPNTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 129), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsMisUnknAPNTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsMisUnknAPNTx.setDescription('Number of GTP packets sent with cause Unknown APN.')
jnxMbgSgwPPGtpV2ICsUnexpRptIERx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 130), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsUnexpRptIERx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsUnexpRptIERx.setDescription('Number of GTPV2 packets received with cause Unexpected Repeated IE.')
jnxMbgSgwPPGtpV2ICsUnexpRptIETx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 131), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsUnexpRptIETx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsUnexpRptIETx.setDescription('Number of GTP packets sent with cause Unexpected Repeated IE.')
jnxMbgSgwPPGtpV2ICsGREKeyNtFdRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 132), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsGREKeyNtFdRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsGREKeyNtFdRx.setDescription('Number of GTPV2 packets received with cause GRE Key Not Found.')
jnxMbgSgwPPGtpV2ICsGREKeyNtFdTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 133), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsGREKeyNtFdTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsGREKeyNtFdTx.setDescription('Number of GTP packets sent with cause GRE Key Not Found.')
jnxMbgSgwPPGtpV2ICsRelocFailRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 134), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsRelocFailRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsRelocFailRx.setDescription('Number of GTPV2 packets received with cause Relocation Failed.')
jnxMbgSgwPPGtpV2ICsRelocFailTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 135), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsRelocFailTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsRelocFailTx.setDescription('Number of GTP packets sent with cause Relocation Failed.')
jnxMbgSgwPPGtpV2ICsDenINRatRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 136), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsDenINRatRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsDenINRatRx.setDescription('Number of GTPV2 packets received with cause Denied in RAT.')
jnxMbgSgwPPGtpV2ICsDenINRatTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 137), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsDenINRatTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsDenINRatTx.setDescription('Number of GTP packets sent with cause Denied in RAT.')
jnxMbgSgwPPGtpV2ICsPTNotSuppRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 138), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsPTNotSuppRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsPTNotSuppRx.setDescription('Number of GTPV2 packets received with cause PDN Type Not Supported.')
jnxMbgSgwPPGtpV2ICsPTNotSuppTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 139), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsPTNotSuppTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsPTNotSuppTx.setDescription('Number of GTP packets sent with cause PDN Type Not Supported.')
jnxMbgSgwPPGtpV2ICsAllDynAdOcRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 140), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsAllDynAdOcRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsAllDynAdOcRx.setDescription('Number of GTPV2 packets received with cause Allocated Dynamic Address Occupied.')
jnxMbgSgwPPGtpV2ICsAllDynAdOcTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 141), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsAllDynAdOcTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsAllDynAdOcTx.setDescription('Number of GTP packets sent with cause Allocated Dynamic Address Occupied.')
jnxMbgSgwPPGtpV2ICsNOTFTUECTXRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 142), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsNOTFTUECTXRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsNOTFTUECTXRx.setDescription('Number of GTPV2 packets received with cause UE Context Without TFT Exists.')
jnxMbgSgwPPGtpV2ICsNOTFTUECTXTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 143), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsNOTFTUECTXTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsNOTFTUECTXTx.setDescription('Number of GTP packets sent with cause UE Context Without TFT Exists.')
jnxMbgSgwPPGtpV2ICsProtoNtSupRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 144), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsProtoNtSupRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsProtoNtSupRx.setDescription('Number of GTPV2 packets received with cause Protocol Not Supported.')
jnxMbgSgwPPGtpV2ICsProtoNtSupTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 145), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsProtoNtSupTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsProtoNtSupTx.setDescription('Number of GTP packets sent with cause Protocol Not Supported.')
jnxMbgSgwPPGtpV2ICsUENotRespRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 146), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsUENotRespRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsUENotRespRx.setDescription('Number of GTPV2 packets received with cause UE Not Responding.')
jnxMbgSgwPPGtpV2ICsUENotRespTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 147), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsUENotRespTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsUENotRespTx.setDescription('Number of GTP packets sent with cause UE Not Responding.')
jnxMbgSgwPPGtpV2ICsUERefusesRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 148), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsUERefusesRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsUERefusesRx.setDescription('Number of GTPV2 packets received with cause UE Refuses.')
jnxMbgSgwPPGtpV2ICsUERefusesTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 149), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsUERefusesTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsUERefusesTx.setDescription('Number of GTP packets sent with cause UE Refuses.')
jnxMbgSgwPPGtpV2ICsServDeniedRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 150), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsServDeniedRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsServDeniedRx.setDescription('Number of GTPV2 packets received with cause Service Denied.')
jnxMbgSgwPPGtpV2ICsServDeniedTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 151), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsServDeniedTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsServDeniedTx.setDescription('Number of GTP packets sent with cause Service Denied.')
jnxMbgSgwPPGtpV2ICsUnabPageUERx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 152), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsUnabPageUERx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsUnabPageUERx.setDescription('Number of GTPV2 packets received with cause Unable to Page UE.')
jnxMbgSgwPPGtpV2ICsUnabPageUETx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 153), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsUnabPageUETx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsUnabPageUETx.setDescription('Number of GTP packets sent with cause Unable to Page UE.')
jnxMbgSgwPPGtpV2ICsNoMemRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 154), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsNoMemRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsNoMemRx.setDescription('Number of GTPV2 packets received with cause No Memory.')
jnxMbgSgwPPGtpV2ICsNoMemTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 155), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsNoMemTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsNoMemTx.setDescription('Number of GTP packets sent with cause No Memory.')
jnxMbgSgwPPGtpV2ICsUserAUTHFlRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 156), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsUserAUTHFlRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsUserAUTHFlRx.setDescription('Number of GTPV2 packets received with cause User AUTH Failed.')
jnxMbgSgwPPGtpV2ICsUserAUTHFlTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 157), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsUserAUTHFlTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsUserAUTHFlTx.setDescription('Number of GTP packets sent with cause User AUTH Failed.')
jnxMbgSgwPPGtpV2ICsAPNAcsDenRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 158), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsAPNAcsDenRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsAPNAcsDenRx.setDescription('Number of GTPV2 packets received with cause APN Access Denied.')
jnxMbgSgwPPGtpV2ICsAPNAcsDenTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 159), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsAPNAcsDenTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsAPNAcsDenTx.setDescription('Number of GTP packets sent with cause APN Access Denied.')
jnxMbgSgwPPGtpV2ICsReqRejRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 160), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsReqRejRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsReqRejRx.setDescription('Number of GTPV2 packets received with cause Request Rejected.')
jnxMbgSgwPPGtpV2ICsReqRejTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 161), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsReqRejTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsReqRejTx.setDescription('Number of GTP packets sent with cause Request Rejected.')
jnxMbgSgwPPGtpV2ICsPTMSISigMMRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 162), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsPTMSISigMMRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsPTMSISigMMRx.setDescription('Number of GTPV2 packets received with cause P-TMSI Signature Mismatch.')
jnxMbgSgwPPGtpV2ICsPTMSISigMMTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 163), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsPTMSISigMMTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsPTMSISigMMTx.setDescription('Number of GTP packets sent with cause P-TMSI Signature Mismatch.')
jnxMbgSgwPPGtpV2ICsIMSINotKnRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 164), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsIMSINotKnRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsIMSINotKnRx.setDescription('Number of GTPV2 packets received with cause IMSI Not Known.')
jnxMbgSgwPPGtpV2ICsIMSINotKnTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 165), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsIMSINotKnTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsIMSINotKnTx.setDescription('Number of GTP packets sent with cause IMSI Not Known.')
jnxMbgSgwPPGtpV2ICsCondIEMsRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 166), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsCondIEMsRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsCondIEMsRx.setDescription('Number of GTPV2 packets received with cause Conditional IE Missing.')
jnxMbgSgwPPGtpV2ICsCondIEMsTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 167), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsCondIEMsTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsCondIEMsTx.setDescription('Number of GTP packets sent with cause Conditional IE Missing.')
jnxMbgSgwPPGtpV2ICsAPNResTIncRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 168), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsAPNResTIncRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsAPNResTIncRx.setDescription('Number of GTPV2 packets received with cause APN Restriction Type Incompatible.')
jnxMbgSgwPPGtpV2ICsAPNResTIncTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 169), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsAPNResTIncTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsAPNResTIncTx.setDescription('Number of GTP packets sent with cause APN Restriction Type Incompatible.')
jnxMbgSgwPPGtpV2ICsUnknownRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 170), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsUnknownRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsUnknownRx.setDescription('Number of GTPV2 packets received with cause Unknown.')
jnxMbgSgwPPGtpV2ICsUnknownTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 171), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsUnknownTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsUnknownTx.setDescription('Number of GTP packets sent with cause Unknown.')
# From column 172 onward the generated descriptions say "GTP packets" rather
# than "GTPV2 packets"; this wording comes from the source MIB itself.
jnxMbgSgwPPGtpV2ICsLclDetRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 172), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsLclDetRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsLclDetRx.setDescription('Number of GTP packets received with cause Local Detach.')
jnxMbgSgwPPGtpV2ICsLclDetTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 173), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsLclDetTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsLclDetTx.setDescription('Number of GTP packets sent with cause Local Detach.')
jnxMbgSgwPPGtpV2ICsCmpDetRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 174), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsCmpDetRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsCmpDetRx.setDescription('Number of GTP packets received with cause Complete Detach.')
jnxMbgSgwPPGtpV2ICsCmpDetTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 175), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsCmpDetTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsCmpDetTx.setDescription('Number of GTP packets sent with cause Complete Detach.')
jnxMbgSgwPPGtpV2ICsRATChgRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 176), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsRATChgRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsRATChgRx.setDescription('Number of GTP packets received with cause RAT changed from 3GPP to non 3GPP.')
jnxMbgSgwPPGtpV2ICsRATChgTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 177), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsRATChgTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsRATChgTx.setDescription('Number of GTP packets sent with cause RAT changed from 3GPP to non 3GPP.')
jnxMbgSgwPPGtpV2ICsISRDeactRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 178), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsISRDeactRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsISRDeactRx.setDescription('Number of GTP packets received with cause ISR Deactivated.')
jnxMbgSgwPPGtpV2ICsISRDeactTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 179), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsISRDeactTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsISRDeactTx.setDescription('Number of GTP packets sent with cause ISR Deactivated.')
jnxMbgSgwPPGtpV2ICsEIFRNCEnRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 180), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsEIFRNCEnRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsEIFRNCEnRx.setDescription('Number of GTP packets received with cause Error Indication from RNC eNodeB.')
jnxMbgSgwPPGtpV2ICsEIFRNCEnTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 181), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsEIFRNCEnTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsEIFRNCEnTx.setDescription('Number of GTP packets sent with cause Error Indication from RNC eNodeB.')
jnxMbgSgwPPGtpV2ICsSemErTADRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 182), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsSemErTADRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsSemErTADRx.setDescription('Number of GTP packets received with cause Semantic Error in TAD Operation.')
jnxMbgSgwPPGtpV2ICsSemErTADTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 183), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsSemErTADTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsSemErTADTx.setDescription('Number of GTP packets sent with cause Semantic Error in TAD Operation.')
jnxMbgSgwPPGtpV2ICsSynErTADRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 184), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsSynErTADRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsSynErTADRx.setDescription('Number of GTP packets received with cause Syntactic Error in TAD Operation.')
jnxMbgSgwPPGtpV2ICsSynErTADTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 185), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsSynErTADTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsSynErTADTx.setDescription('Number of GTP packets sent with cause Syntactic Error in TAD Operation.')
jnxMbgSgwPPGtpV2ICsRMValRcvRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 186), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsRMValRcvRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsRMValRcvRx.setDescription('Number of GTP packets received with cause Reserved Message Value Received.')
jnxMbgSgwPPGtpV2ICsRMValRcvTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 187), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsRMValRcvTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsRMValRcvTx.setDescription('Number of GTP packets sent with cause Reserved Message Value Received.')
jnxMbgSgwPPGtpV2ICsRPrNtRspRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 188), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsRPrNtRspRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsRPrNtRspRx.setDescription('Number of GTP packets received with cause Remote peer not responding.')
jnxMbgSgwPPGtpV2ICsRPrNtRspTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 189), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsRPrNtRspTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsRPrNtRspTx.setDescription('Number of GTP packets sent with cause Remote peer not responding.')
jnxMbgSgwPPGtpV2ICsColNWReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 190), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsColNWReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsColNWReqRx.setDescription('Number of GTP packets received with cause Collision with network initiated request.')
jnxMbgSgwPPGtpV2ICsColNWReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 191), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsColNWReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsColNWReqTx.setDescription('Number of GTP packets sent with cause Collision with network initiated request.')
jnxMbgSgwPPGtpV2ICsUnPgUESusRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 192), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsUnPgUESusRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsUnPgUESusRx.setDescription('Number of GTP packets received with cause Unable to page UE due to suspension.')
jnxMbgSgwPPGtpV2ICsUnPgUESusTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 193), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsUnPgUESusTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsUnPgUESusTx.setDescription('Number of GTP packets sent with cause Unable to page UE due to suspension.')
jnxMbgSgwPPGtpV2ICsInvTotLenRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 194), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsInvTotLenRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsInvTotLenRx.setDescription('Number of GTP packets received with cause Invalid total len.')
jnxMbgSgwPPGtpV2ICsInvTotLenTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 195), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsInvTotLenTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsInvTotLenTx.setDescription('Number of GTP packets sent with cause Invalid total len.')
jnxMbgSgwPPGtpV2ICsDtForNtSupRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 196), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsDtForNtSupRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsDtForNtSupRx.setDescription('Number of GTP packets received with cause Data forwarding not supported.')
jnxMbgSgwPPGtpV2ICsDtForNtSupTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 197), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsDtForNtSupTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsDtForNtSupTx.setDescription('Number of GTP packets sent with cause Data forwarding not supported.')
jnxMbgSgwPPGtpV2ICsInReFRePrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 198), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsInReFRePrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsInReFRePrRx.setDescription('Number of GTP packets received with cause Invalid Reply from Remote peer.')
jnxMbgSgwPPGtpV2ICsInReFRePrTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 199), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsInReFRePrTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsInReFRePrTx.setDescription('Number of GTP packets sent with cause Invalid Reply from Remote peer.')
jnxMbgSgwPPGtpV2ICsInvPrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 200), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsInvPrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsInvPrRx.setDescription('Number of GTP packets received with cause Invalid peer.')
jnxMbgSgwPPGtpV2ICsInvPrTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 201), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsInvPrTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsInvPrTx.setDescription('Number of GTP packets sent with cause Invalid peer.')
# --- GTPv1 protocol-level counters (columns 202+) ---
jnxMbgSgwPPGtpV1ProtocolErrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 202), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV1ProtocolErrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV1ProtocolErrRx.setDescription('Number of GTPv1 Protocol Errors Received.')
jnxMbgSgwPPGtpV1UnSupMsgRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 203), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV1UnSupMsgRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV1UnSupMsgRx.setDescription('Number of GTPv1 Unsupported Messages received.')
jnxMbgSgwPPGtpV1T3RespTmrExpRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 204), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV1T3RespTmrExpRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV1T3RespTmrExpRx.setDescription('Number of GTP V1 T3 timer expiries Received.')
jnxMbgSgwPPGtpV1EndMarkerRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 205), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV1EndMarkerRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV1EndMarkerRx.setDescription('Number of GTP V1 end marker packets received.')
jnxMbgSgwPPGtpV1EndMarkerTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 206), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV1EndMarkerTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV1EndMarkerTx.setDescription('Number of GTP V1 end marker packets sent.')
jnxMbgSgwPPGtpV1EchoReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 207), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV1EchoReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV1EchoReqRx.setDescription('Number of GTP V1 echo request packets received.')
# GTPv1 Echo Request transmit counter: read-only Counter64 column (index 208)
# in the per-packet GTP statistics table; Rx twin is jnxMbgSgwPPGtpV1EchoReqTx's
# sibling defined just above at column index 207.
jnxMbgSgwPPGtpV1EchoReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 208), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV1EchoReqTx.setStatus('current')
# Bug fix: description read 'GTP iV1' — a typo for 'GTP V1'; corrected to match
# every sibling GTPv1 counter description (e.g. the echo response pair below).
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV1EchoReqTx.setDescription('Number of GTP V1 echo request packets sent.')
# --- Remaining per-packet counters: GTPv1 echo response / error indication,
# GTPv2 suspend/resume notification+ack, and piggyback message counters.
# All are read-only Counter64 columns under .1.3.6.1.4.1.2636.3.66.2.2.1.1.1.
# NOTE(review): generated code — do not hand-edit OIDs; regenerate from the MIB.
jnxMbgSgwPPGtpV1EchoRespRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 209), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV1EchoRespRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV1EchoRespRx.setDescription('Number of GTP V1 echo response packets received.')
jnxMbgSgwPPGtpV1EchoRespTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 210), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV1EchoRespTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV1EchoRespTx.setDescription('Number of GTP V1 echo response packets sent.')
jnxMbgSgwPPGtpV1ErrIndRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 211), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV1ErrIndRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV1ErrIndRx.setDescription('Number of GTP V1 Error Indication packets received.')
jnxMbgSgwPPGtpV1ErrIndTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 212), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV1ErrIndTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV1ErrIndTx.setDescription('Number of GTP V1 Error Indication packets sent.')
jnxMbgSgwPPSuspNotifRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 213), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPSuspNotifRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPSuspNotifRx.setDescription('Number of GTPv2 Suspend Notification messages received.')
jnxMbgSgwPPSuspNotifTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 214), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPSuspNotifTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPSuspNotifTx.setDescription('Number of GTPv2 Suspend Notification messages sent.')
jnxMbgSgwPPSuspAckRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 215), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPSuspAckRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPSuspAckRx.setDescription('Number of GTPv2 Suspend Acknowledgement messages received.')
jnxMbgSgwPPSuspAckTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 216), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPSuspAckTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPSuspAckTx.setDescription('Number of GTPv2 Suspend Acknowledgement messages sent.')
jnxMbgSgwPPResumeNotifRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 217), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPResumeNotifRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPResumeNotifRx.setDescription('Number of GTPv2 Resume Notification messages received.')
jnxMbgSgwPPResumeNotifTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 218), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPResumeNotifTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPResumeNotifTx.setDescription('Number of GTPv2 Resume Notification messages sent.')
jnxMbgSgwPPResumeAckRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 219), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPResumeAckRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPResumeAckRx.setDescription('Number of GTPv2 Resume Acknowledgement messages received.')
jnxMbgSgwPPResumeAckTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 220), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPResumeAckTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPResumeAckTx.setDescription('Number of GTPv2 Resume Acknowledgement messages sent.')
jnxMbgSgwPPPiggybackMsgRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 221), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPPiggybackMsgRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPPiggybackMsgRx.setDescription('Number of GTPv2 Piggyback messages received.')
jnxMbgSgwPPPiggybackMsgTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 222), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPPiggybackMsgTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPPiggybackMsgTx.setDescription('Number of GTPv2 S11 Piggyback messages sent.')
# --- Per-interface GTP statistics table (.1.3.6.1.4.1.2636.3.66.2.2.1.4) ---
# Rows are indexed by (jnxMbgGwIndex, jnxMbgSgwIfIndex); columns below count
# drops, allocation/send failures, and malformed-packet errors per interface.
jnxMbgSgwGtpIfStatsTable = MibTable((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4), )
if mibBuilder.loadTexts: jnxMbgSgwGtpIfStatsTable.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpIfStatsTable.setDescription('Each entry corresponds to an interface level GTP statistic.')
jnxMbgSgwGtpIfStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1), ).setIndexNames((0, "JUNIPER-MOBILE-GATEWAYS", "jnxMbgGwIndex"), (0, "JUNIPER-MOBILE-GATEWAY-SGW-GTP-MIB", "jnxMbgSgwIfIndex"))
if mibBuilder.loadTexts: jnxMbgSgwGtpIfStatsEntry.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpIfStatsEntry.setDescription('A specification of the GTP interface level control Statistics.')
# Index column: no MaxAccess set (not-accessible index, per SMIv2 convention).
jnxMbgSgwIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 1), Unsigned32())
if mibBuilder.loadTexts: jnxMbgSgwIfIndex.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfIndex.setDescription('GTP Interface Index')
jnxMbgSgwIfType = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfType.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfType.setDescription('Interface Name.')
jnxMbgSgwIfRxPacketsDropped = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 3), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfRxPacketsDropped.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfRxPacketsDropped.setDescription('Number of Received GTP Packets Dropped by the Gateway.')
jnxMbgSgwIfPacketAllocFail = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 4), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfPacketAllocFail.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfPacketAllocFail.setDescription('Number of Packet allocation failures in the Gateway.')
jnxMbgSgwIfPacketSendFail = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 5), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfPacketSendFail.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfPacketSendFail.setDescription('Number of GTP Packet Send failures in the Gateway.')
jnxMbgSgwIfIPVerErrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 6), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfIPVerErrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfIPVerErrRx.setDescription('Number of IP Version Error Packets Received.')
jnxMbgSgwIfIPProtoErrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 7), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfIPProtoErrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfIPProtoErrRx.setDescription('Number of IP Protocol Error packets Received.')
jnxMbgSgwIfGTPPortErrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 8), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGTPPortErrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGTPPortErrRx.setDescription('Number of Port Error Packets Received.')
jnxMbgSgwIfGTPUnknVerRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 9), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGTPUnknVerRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGTPUnknVerRx.setDescription('Number of Unknown Version Packets Received.')
jnxMbgSgwIfPcktLenErrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 10), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfPcktLenErrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfPcktLenErrRx.setDescription('Number of Packet Length Error Packets Received.')
jnxMbgSgwIfUnknMsgRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 11), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfUnknMsgRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfUnknMsgRx.setDescription('Number of Unknown Messages Received.')
jnxMbgSgwIfProtocolErrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 12), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfProtocolErrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfProtocolErrRx.setDescription('Number of GTPv2 Protocol Errors Received.')
jnxMbgSgwIfUnSupportedMsgRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 13), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfUnSupportedMsgRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfUnSupportedMsgRx.setDescription('Number of GTPv2 Unsupported Messages received.')
jnxMbgSgwIfT3RespTmrExpRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 14), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfT3RespTmrExpRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfT3RespTmrExpRx.setDescription('Number of GTP V2 T3 timer expiries Received.')
jnxMbgSgwIfV2NumMsgRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 15), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfV2NumMsgRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfV2NumMsgRx.setDescription('Number of GTPv2 messages received.')
jnxMbgSgwIfV2NumMsgTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 16), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfV2NumMsgTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfV2NumMsgTx.setDescription('Number of V2 messages sent.')
jnxMbgSgwIfV2NumBytesRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 17), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfV2NumBytesRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfV2NumBytesRx.setDescription('Number of GTPv2 bytes received.')
jnxMbgSgwIfV2NumBytesTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 18), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfV2NumBytesTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfV2NumBytesTx.setDescription('Number of V2 bytes sent.')
jnxMbgSgwIfV2EchoReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 19), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfV2EchoReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfV2EchoReqRx.setDescription('Number of GTP V2 Echo Request received.')
jnxMbgSgwIfV2EchoReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 20), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfV2EchoReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfV2EchoReqTx.setDescription('Number of GTP V2 Echo Request Sent.')
jnxMbgSgwIfV2EchoRespRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 21), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfV2EchoRespRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfV2EchoRespRx.setDescription('Number of GTP V2 Echo Response received.')
jnxMbgSgwIfV2EchoRespTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 22), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfV2EchoRespTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfV2EchoRespTx.setDescription('Number of GTP V2 Echo Response Sent.')
jnxMbgSgwIfV2VerNotSupRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 23), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfV2VerNotSupRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfV2VerNotSupRx.setDescription('Number of GTP V2 Version Not supported messages received')
jnxMbgSgwIfV2VerNotSupTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 24), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfV2VerNotSupTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfV2VerNotSupTx.setDescription('Number of GTP V2 version not supported messages sent.')
jnxMbgSgwIfCreateSessReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 25), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfCreateSessReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfCreateSessReqRx.setDescription('Number of GTP V2 Create Session Requests received.')
jnxMbgSgwIfCreateSessReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 26), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfCreateSessReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfCreateSessReqTx.setDescription('Number of GTP V2 Create Session Requests Sent.')
jnxMbgSgwIfCreateSessRspRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 27), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfCreateSessRspRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfCreateSessRspRx.setDescription('Number of GTP V2 Create Session Responses received.')
jnxMbgSgwIfCreateSessRspTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 28), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfCreateSessRspTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfCreateSessRspTx.setDescription('Number of GTP V2 Create Session Responses Sent.')
jnxMbgSgwIfModBrReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 29), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfModBrReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfModBrReqRx.setDescription('Number of GTP V2 Modify Bearer Requests received.')
jnxMbgSgwIfModBrReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 30), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfModBrReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfModBrReqTx.setDescription('Number of GTP V2 Modify Bearer Requests Sent.')
jnxMbgSgwIfModBrRspRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 31), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfModBrRspRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfModBrRspRx.setDescription('Number of GTP V2 Modify Bearer Responses received.')
jnxMbgSgwIfModBrRspTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 32), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfModBrRspTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfModBrRspTx.setDescription('Number of GTP V2 Modify Bearer Responses Sent.')
jnxMbgSgwIfDelSessReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 33), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfDelSessReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfDelSessReqRx.setDescription('Number of GTP V2 Delete Session Requests received.')
jnxMbgSgwIfDelSessReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 34), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfDelSessReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfDelSessReqTx.setDescription('Number of GTP V2 Delete Session Requests Sent.')
jnxMbgSgwIfDelSessRspRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 35), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfDelSessRspRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfDelSessRspRx.setDescription('Number of GTP V2 Delete Session Responses received.')
jnxMbgSgwIfDelSessRspTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 36), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfDelSessRspTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfDelSessRspTx.setDescription('Number of GTP V2 Delete Session Responses Sent.')
jnxMbgSgwIfCrtBrReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 37), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfCrtBrReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfCrtBrReqRx.setDescription('Number of GTP V2 Create Bearer Requests received.')
jnxMbgSgwIfCrtBrReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 38), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfCrtBrReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfCrtBrReqTx.setDescription('Number of GTP V2 Create Bearer Requests Sent.')
jnxMbgSgwIfCrtBrRspRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 39), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfCrtBrRspRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfCrtBrRspRx.setDescription('Number of GTP V2 Create Bearer Response received.')
jnxMbgSgwIfCrtBrRspTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 40), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfCrtBrRspTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfCrtBrRspTx.setDescription('Number of GTP V2 Create Bearer Response Sent.')
jnxMbgSgwIfUpdBrReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 41), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfUpdBrReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfUpdBrReqRx.setDescription('Number of GTP V2 Update Bearer Request received.')
jnxMbgSgwIfUpdBrReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 42), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfUpdBrReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfUpdBrReqTx.setDescription('Number of GTP V2 Update Bearer Request Sent.')
jnxMbgSgwIfUpdBrRspRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 43), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfUpdBrRspRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfUpdBrRspRx.setDescription('Number of GTP V2 Update Bearer Response received.')
jnxMbgSgwIfUpdBrRspTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 44), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfUpdBrRspTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfUpdBrRspTx.setDescription('Number of GTP V2 Update Bearer Response Sent.')
jnxMbgSgwIfDelBrReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 45), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfDelBrReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfDelBrReqRx.setDescription('Number of GTP V2 Delete Bearer Request received.')
jnxMbgSgwIfDelBrReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 46), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfDelBrReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfDelBrReqTx.setDescription('Number of GTP V2 Delete Bearer Request Sent.')
jnxMbgSgwIfDelBrRspRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 47), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfDelBrRspRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfDelBrRspRx.setDescription('Number of GTP V2 Delete Bearer Response received.')
jnxMbgSgwIfDelBrRspTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 48), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfDelBrRspTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfDelBrRspTx.setDescription('Number of GTP V2 Delete Bearer Response Sent.')
jnxMbgSgwIfDelConnSetReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 49), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfDelConnSetReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfDelConnSetReqRx.setDescription('Number of GTP V2 Delete PDN connection set Request received.')
jnxMbgSgwIfDelConnSetReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 50), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfDelConnSetReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfDelConnSetReqTx.setDescription('Number of GTP V2 Delete PDN connection set Request Sent.')
jnxMbgSgwIfDelConnSetRspRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 51), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfDelConnSetRspRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfDelConnSetRspRx.setDescription('Number of GTP V2 Delete PDN connection set Response received.')
jnxMbgSgwIfDelConnSetRspTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 52), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfDelConnSetRspTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfDelConnSetRspTx.setDescription('Number of GTP V2 Delete PDN connection set Response Sent.')
jnxMbgSgwIfUpdConnSetReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 53), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfUpdConnSetReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfUpdConnSetReqRx.setDescription('Number of GTP V2 Update Connection set Request received.')
jnxMbgSgwIfUpdConnSetReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 54), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfUpdConnSetReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfUpdConnSetReqTx.setDescription('Number of GTP V2 Update Connection set Request Sent.')
jnxMbgSgwIfUpdConnSetRspRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 55), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfUpdConnSetRspRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfUpdConnSetRspRx.setDescription('Number of GTP V2 Update Connection set Response received.')
jnxMbgSgwIfUpdConnSetRspTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 56), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfUpdConnSetRspTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfUpdConnSetRspTx.setDescription('Number of GTP V2 Update Connection set Response Sent.')
jnxMbgSgwIfModBrCmdRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 57), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfModBrCmdRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfModBrCmdRx.setDescription('Number of GTP V2 Modify Bearer Command received.')
jnxMbgSgwIfModBrCmdTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 58), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfModBrCmdTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfModBrCmdTx.setDescription('Number of GTP V2 Modify Bearer Command Sent.')
jnxMbgSgwIfModBrFlrIndRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 59), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfModBrFlrIndRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfModBrFlrIndRx.setDescription('Number of GTP V2 Modify Bearer Failure received.')
jnxMbgSgwIfModBrFlrIndTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 60), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfModBrFlrIndTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfModBrFlrIndTx.setDescription('Number of GTP V2 Modify Bearer Failure Sent.')
jnxMbgSgwIfDelBrCmdRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 61), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfDelBrCmdRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfDelBrCmdRx.setDescription('Number of GTP V2 Delete Bearer Command received.')
jnxMbgSgwIfDelBrCmdTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 62), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfDelBrCmdTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfDelBrCmdTx.setDescription('Number of GTP V2 Delete Bearer Command Sent.')
jnxMbgSgwIfDelBrFlrIndRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 63), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfDelBrFlrIndRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfDelBrFlrIndRx.setDescription('Number of GTP V2 Delete Bearer Failure received.')
jnxMbgSgwIfDelBrFlrIndTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 64), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfDelBrFlrIndTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfDelBrFlrIndTx.setDescription('Number of GTP V2 Delete Bearer Failure Sent.')
jnxMbgSgwIfBrResCmdRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 65), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfBrResCmdRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfBrResCmdRx.setDescription('Number of GTP V2 Bearer Response Command received.')
jnxMbgSgwIfBrResCmdTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 66), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfBrResCmdTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfBrResCmdTx.setDescription('Number of GTP V2 Bearer Response Command Sent.')
jnxMbgSgwIfBrResFlrIndRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 67), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfBrResFlrIndRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfBrResFlrIndRx.setDescription('Number of GTP V2 Bearer Resource Failure received.')
jnxMbgSgwIfBrResFlrIndTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 68), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfBrResFlrIndTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfBrResFlrIndTx.setDescription('Number of GTP V2 Bearer Resource Failure Sent.')
jnxMbgSgwIfRelAcsBrReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 69), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfRelAcsBrReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfRelAcsBrReqRx.setDescription('Number of GTP V2 Release Access Bearer Requests received.')
jnxMbgSgwIfRelAcsBrReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 70), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfRelAcsBrReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfRelAcsBrReqTx.setDescription('Number of GTP V2 Release Access Bearer Requests sent.')
jnxMbgSgwIfRelAcsBrRespRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 71), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfRelAcsBrRespRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfRelAcsBrRespRx.setDescription('Number of GTP V2 Release Access Bearer Response received.')
jnxMbgSgwIfRelAcsBrRespTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 72), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfRelAcsBrRespTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfRelAcsBrRespTx.setDescription('Number of GTP V2 Release Access Bearer Response sent.')
jnxMbgSgwIfCrIndTunReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 73), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfCrIndTunReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfCrIndTunReqRx.setDescription('Number of GTP V2 Create Indirect Tunnel Forward Request Received')
jnxMbgSgwIfCrIndTunReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 74), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfCrIndTunReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfCrIndTunReqTx.setDescription('Number of GTP V2 Create Indirect Tunnel Forward Request sent')
jnxMbgSgwIfCrIndTunRespRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 75), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfCrIndTunRespRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfCrIndTunRespRx.setDescription('Number of GTP V2 Create Indirect Tunnel Forward Response Received')
jnxMbgSgwIfCrIndTunRespTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 76), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfCrIndTunRespTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfCrIndTunRespTx.setDescription('Number of GTP V2 Create Indirect Tunnel Forward Response sent')
jnxMbgSgwIfDelIndTunReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 77), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfDelIndTunReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfDelIndTunReqRx.setDescription('Number of GTP V2 Delete Indirect Tunnel Forward Request Received')
jnxMbgSgwIfDelIndTunReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 78), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfDelIndTunReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfDelIndTunReqTx.setDescription('Number of GTP V2 Delete Indirect Tunnel Forward Request sent.')
jnxMbgSgwIfDelIndTunRespRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 79), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfDelIndTunRespRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfDelIndTunRespRx.setDescription('Number of GTP V2 Delete Indirect Tunnel Forward Response Received')
jnxMbgSgwIfDelIndTunRespTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 80), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfDelIndTunRespTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfDelIndTunRespTx.setDescription('Number of GTP V2 Delete Indirect Tunnel Forward Response sent.')
jnxMbgSgwIfDlDataNotifRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 81), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfDlDataNotifRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfDlDataNotifRx.setDescription('Number of GTP V2 Downlink Data Notify received.')
jnxMbgSgwIfDlDataNotifTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 82), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfDlDataNotifTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfDlDataNotifTx.setDescription('Number of GTP V2 Downlink Data Notify Sent.')
jnxMbgSgwIfDlDataAckRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 83), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfDlDataAckRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfDlDataAckRx.setDescription('Number of GTP V2 Downlink Data Notify Acknowledgement received.')
jnxMbgSgwIfDlDataAckTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 84), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfDlDataAckTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfDlDataAckTx.setDescription('Number of GTP V2 Downlink Data Notify Acknowledgement Sent.')
jnxMbgSgwIfDlDataNotiFlrIndRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 85), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfDlDataNotiFlrIndRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfDlDataNotiFlrIndRx.setDescription('Number of GTP V2 Downlink Data Notification fail received.')
jnxMbgSgwIfDlDataNotiFlrIndTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 86), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfDlDataNotiFlrIndTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfDlDataNotiFlrIndTx.setDescription('Number of GTP V2 Downlink Data Notification fail Sent.')
jnxMbgSgwIfStopPagingIndRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 87), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfStopPagingIndRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfStopPagingIndRx.setDescription('Number of GTP V2 Number of Stop Paging Indication Messages Received.')
jnxMbgSgwIfStopPagingIndTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 88), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfStopPagingIndTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfStopPagingIndTx.setDescription('Number of GTP V2 Number of Stop Paging Indicaton messages sent')
jnxMbgSgwIfGtpV2ICsReqAcceptRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 89), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsReqAcceptRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsReqAcceptRx.setDescription('Number of GTPV2 packets received with cause Request Accept.')
jnxMbgSgwIfGtpV2ICsReqAcceptTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 90), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsReqAcceptTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsReqAcceptTx.setDescription('Number of GTP packets sent with cause Request Accept.')
jnxMbgSgwIfGtpV2ICsAcceptPartRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 91), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsAcceptPartRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsAcceptPartRx.setDescription('Number of GTPV2 packets received with cause Accept Partial.')
jnxMbgSgwIfGtpV2ICsAcceptPartTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 92), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsAcceptPartTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsAcceptPartTx.setDescription('Number of GTP packets sent with cause Accept Partial.')
jnxMbgSgwIfGtpV2ICsNewPTNPrefRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 93), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsNewPTNPrefRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsNewPTNPrefRx.setDescription('Number of GTPV2 packets received with cause New PDN type due to Network Preference.')
jnxMbgSgwIfGtpV2ICsNewPTNPrefTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 94), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsNewPTNPrefTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsNewPTNPrefTx.setDescription('Number of GTP packets sent with cause New PDN type due to Network Preference')
jnxMbgSgwIfGtpV2ICsNPTSIAdbrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 95), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsNPTSIAdbrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsNPTSIAdbrRx.setDescription('Number of GTPV2 packets received with cause New PDN type due to Single Address Bearer.')
jnxMbgSgwIfGtpV2ICsNPTSIAdbrTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 96), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsNPTSIAdbrTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsNPTSIAdbrTx.setDescription('Number of GTP packets sent with cause New PDN type due to Single Address Bearer.')
# --- GTPv2 interface cause-code counters (columns 97-124) -------------------
# Auto-generated pysnmp objects (do not hand-edit the statement bodies): each
# MIB object is a read-only Counter64 column under the table entry OID
# 1.3.6.1.4.1.2636.3.66.2.2.1.4.1 (Juniper mobile-gateway SGW interface
# GTPv2 stats). Every object follows the same three-statement pattern:
#   1. bind the column object at its OID with a Counter64 syntax,
#   2. mark it 'current' (only when loadTexts is enabled, to save memory),
#   3. attach the DESCRIPTION text copied verbatim from the vendor MIB.
# Rx/Tx pairs count packets received/sent with the named GTPv2 cause value.
# NOTE(review): "Symantic" below is a typo carried over from the vendor
# MIB's DESCRIPTION clause; kept verbatim so generated text matches the MIB.
jnxMbgSgwIfGtpV2ICsCtxNotFndRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 97), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsCtxNotFndRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsCtxNotFndRx.setDescription('Number of GTPV2 packets received with cause Context not found.')
jnxMbgSgwIfGtpV2ICsCtxNotFndTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 98), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsCtxNotFndTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsCtxNotFndTx.setDescription('Number of GTP packets sent with cause Context not found.')
jnxMbgSgwIfGtpV2ICsInvMsgFmtRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 99), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsInvMsgFmtRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsInvMsgFmtRx.setDescription('Number of GTPV2 packets received with cause Invalid Message Format.')
jnxMbgSgwIfGtpV2ICsInvMsgFmtTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 100), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsInvMsgFmtTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsInvMsgFmtTx.setDescription('Number of GTP packets sent with cause Invalid Message Format.')
jnxMbgSgwIfGtpV2ICsVerNotSuppRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 101), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsVerNotSuppRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsVerNotSuppRx.setDescription('Number of GTPV2 packets received with cause Version not Supported.')
jnxMbgSgwIfGtpV2ICsVerNotSuppTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 102), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsVerNotSuppTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsVerNotSuppTx.setDescription('Number of GTP packets sent with cause Version not Supported.')
jnxMbgSgwIfGtpV2ICsInvLenRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 103), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsInvLenRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsInvLenRx.setDescription('Number of GTPV2 packets received with cause Invalid Length.')
jnxMbgSgwIfGtpV2ICsInvLenTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 104), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsInvLenTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsInvLenTx.setDescription('Number of GTP packets sent with cause Invalid Length.')
jnxMbgSgwIfGtpV2ICsSrvNotSuppRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 105), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsSrvNotSuppRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsSrvNotSuppRx.setDescription('Number of GTPV2 packets received with cause Service Not supported.')
jnxMbgSgwIfGtpV2ICsSrvNotSuppTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 106), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsSrvNotSuppTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsSrvNotSuppTx.setDescription('Number of GTP packets sent with cause Service Not supported.')
jnxMbgSgwIfGtpV2ICsManIEIncorRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 107), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsManIEIncorRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsManIEIncorRx.setDescription('Number of GTPV2 packets received with cause Mandatory IE incorrect.')
jnxMbgSgwIfGtpV2ICsManIEIncorTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 108), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsManIEIncorTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsManIEIncorTx.setDescription('Number of GTP packets sent with cause Mandatory IE incorrect.')
jnxMbgSgwIfGtpV2ICsManIEMissRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 109), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsManIEMissRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsManIEMissRx.setDescription('Number of GTPV2 packets received with cause Mandatory IE Missing.')
jnxMbgSgwIfGtpV2ICsManIEMissTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 110), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsManIEMissTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsManIEMissTx.setDescription('Number of GTP packets sent with cause Mandatory IE Missing.')
jnxMbgSgwIfGtpV2ICsOptIEIncorRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 111), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsOptIEIncorRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsOptIEIncorRx.setDescription('Number of GTPV2 packets received with cause Optional IE Incorrect.')
jnxMbgSgwIfGtpV2ICsOptIEIncorTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 112), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsOptIEIncorTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsOptIEIncorTx.setDescription('Number of GTP packets sent with cause Optional IE Incorrect.')
jnxMbgSgwIfGtpV2ICsSysFailRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 113), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsSysFailRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsSysFailRx.setDescription('Number of GTPV2 packets received with cause System Failure.')
jnxMbgSgwIfGtpV2ICsSysFailTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 114), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsSysFailTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsSysFailTx.setDescription('Number of GTP packets sent with cause System Failure.')
jnxMbgSgwIfGtpV2ICsNoResRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 115), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsNoResRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsNoResRx.setDescription('Number of GTPV2 packets received with cause No Resource.')
jnxMbgSgwIfGtpV2ICsNoResTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 116), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsNoResTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsNoResTx.setDescription('Number of GTP packets sent with cause No Resource.')
jnxMbgSgwIfGtpV2ICsTFTSMANTErRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 117), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsTFTSMANTErRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsTFTSMANTErRx.setDescription('Number of GTPV2 packets received with cause TFT Symantic Error.')
jnxMbgSgwIfGtpV2ICsTFTSMANTErTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 118), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsTFTSMANTErTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsTFTSMANTErTx.setDescription('Number of GTP packets sent with cause TFT Symantic Error.')
jnxMbgSgwIfGtpV2ICsTFTSysErrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 119), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsTFTSysErrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsTFTSysErrRx.setDescription('Number of GTPV2 packets received with cause TFT System Error.')
jnxMbgSgwIfGtpV2ICsTFTSysErrTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 120), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsTFTSysErrTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsTFTSysErrTx.setDescription('Number of GTP packets sent with cause TFT System Error.')
jnxMbgSgwIfGtpV2ICsPkFltManErRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 121), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsPkFltManErRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsPkFltManErRx.setDescription('Number of GTPV2 packets received with cause Packet Filter Symantic Error.')
jnxMbgSgwIfGtpV2ICsPkFltManErTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 122), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsPkFltManErTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsPkFltManErTx.setDescription('Number of GTP packets sent with cause Packet Filter Symantic Error.')
jnxMbgSgwIfGtpV2ICsPkFltSynErRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 123), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsPkFltSynErRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsPkFltSynErRx.setDescription('Number of GTPV2 packets received with cause Packet Filter Syntax Error.')
jnxMbgSgwIfGtpV2ICsPkFltSynErTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 124), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsPkFltSynErTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsPkFltSynErTx.setDescription('Number of GTP packets sent with cause Packet Filter Syntax Error.')
# --- GTPv2 interface cause-code counters (columns 125-152) ------------------
# Continuation of the generated Counter64 Rx/Tx column pairs under table
# entry OID 1.3.6.1.4.1.2636.3.66.2.2.1.4.1; same three-statement pattern
# as above (bind column, set status 'current', attach vendor-MIB text).
jnxMbgSgwIfGtpV2ICsMisUnknAPNRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 125), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsMisUnknAPNRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsMisUnknAPNRx.setDescription('Number of GTPV2 packets received with cause Unknown APN.')
jnxMbgSgwIfGtpV2ICsMisUnknAPNTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 126), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsMisUnknAPNTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsMisUnknAPNTx.setDescription('Number of GTP packets sent with cause Unknown APN.')
jnxMbgSgwIfGtpV2ICsUnexpRptIERx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 127), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsUnexpRptIERx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsUnexpRptIERx.setDescription('Number of GTPV2 packets received with cause Unexpected Repeated IE.')
jnxMbgSgwIfGtpV2ICsUnexpRptIETx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 128), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsUnexpRptIETx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsUnexpRptIETx.setDescription('Number of GTP packets sent with cause Unexpected Repeated IE.')
jnxMbgSgwIfGtpV2ICsGREKeyNtFdRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 129), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsGREKeyNtFdRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsGREKeyNtFdRx.setDescription('Number of GTPV2 packets received with cause GRE Key Not Found.')
jnxMbgSgwIfGtpV2ICsGREKeyNtFdTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 130), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsGREKeyNtFdTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsGREKeyNtFdTx.setDescription('Number of GTP packets sent with cause GRE Key Not Found.')
jnxMbgSgwIfGtpV2ICsRelocFailRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 131), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsRelocFailRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsRelocFailRx.setDescription('Number of GTPV2 packets received with cause Relocation Failed.')
jnxMbgSgwIfGtpV2ICsRelocFailTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 132), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsRelocFailTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsRelocFailTx.setDescription('Number of GTP packets sent with cause Relocation Failed.')
jnxMbgSgwIfGtpV2ICsDenINRatRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 133), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsDenINRatRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsDenINRatRx.setDescription('Number of GTPV2 packets received with cause Denied in RAT.')
jnxMbgSgwIfGtpV2ICsDenINRatTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 134), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsDenINRatTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsDenINRatTx.setDescription('Number of GTP packets sent with cause Denied in RAT.')
jnxMbgSgwIfGtpV2ICsPTNotSuppRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 135), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsPTNotSuppRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsPTNotSuppRx.setDescription('Number of GTPV2 packets received with cause PDN Type Not Supported.')
jnxMbgSgwIfGtpV2ICsPTNotSuppTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 136), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsPTNotSuppTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsPTNotSuppTx.setDescription('Number of GTP packets sent with cause PDN Type Not Supported.')
jnxMbgSgwIfGtpV2ICsAlDynAdOccRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 137), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsAlDynAdOccRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsAlDynAdOccRx.setDescription('Number of GTPV2 packets received with cause Allocated Dynamic Address Occupied.')
jnxMbgSgwIfGtpV2ICsAlDynAdOccTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 138), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsAlDynAdOccTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsAlDynAdOccTx.setDescription('Number of GTP packets sent with cause Allocated Dynamic Address Occupied.')
jnxMbgSgwIfGtpV2ICsNOTFTUECTXRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 139), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsNOTFTUECTXRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsNOTFTUECTXRx.setDescription('Number of GTPV2 packets received with cause UE Context Without TFT Exists.')
jnxMbgSgwIfGtpV2ICsNOTFTUECTXTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 140), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsNOTFTUECTXTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsNOTFTUECTXTx.setDescription('Number of GTP packets sent with cause UE Context Without TFT Exists.')
jnxMbgSgwIfGtpV2ICsProtoNtSupRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 141), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsProtoNtSupRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsProtoNtSupRx.setDescription('Number of GTPV2 packets received with cause Protocol Not Supported.')
jnxMbgSgwIfGtpV2ICsProtoNtSupTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 142), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsProtoNtSupTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsProtoNtSupTx.setDescription('Number of GTP packets sent with cause Protocol Not Supported.')
jnxMbgSgwIfGtpV2ICsUENotRespRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 143), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsUENotRespRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsUENotRespRx.setDescription('Number of GTPV2 packets received with cause UE Not Responding.')
jnxMbgSgwIfGtpV2ICsUENotRespTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 144), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsUENotRespTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsUENotRespTx.setDescription('Number of GTP packets sent with cause UE Not Responding.')
jnxMbgSgwIfGtpV2ICsUERefusesRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 145), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsUERefusesRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsUERefusesRx.setDescription('Number of GTPV2 packets received with cause UE Refuses.')
jnxMbgSgwIfGtpV2ICsUERefusesTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 146), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsUERefusesTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsUERefusesTx.setDescription('Number of GTP packets sent with cause UE Refuses.')
jnxMbgSgwIfGtpV2ICsServDeniedRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 147), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsServDeniedRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsServDeniedRx.setDescription('Number of GTPV2 packets received with cause Service Denied.')
jnxMbgSgwIfGtpV2ICsServDeniedTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 148), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsServDeniedTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsServDeniedTx.setDescription('Number of GTP packets sent with cause Service Denied.')
jnxMbgSgwIfGtpV2ICsUnabPageUERx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 149), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsUnabPageUERx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsUnabPageUERx.setDescription('Number of GTPV2 packets received with cause Unable to Page UE.')
jnxMbgSgwIfGtpV2ICsUnabPageUETx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 150), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsUnabPageUETx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsUnabPageUETx.setDescription('Number of GTP packets sent with cause Unable to Page UE.')
jnxMbgSgwIfGtpV2ICsNoMemRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 151), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsNoMemRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsNoMemRx.setDescription('Number of GTPV2 packets received with cause No Memory.')
jnxMbgSgwIfGtpV2ICsNoMemTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 152), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsNoMemTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsNoMemTx.setDescription('Number of GTP packets sent with cause No Memory.')
# --- GTPv2 interface cause-code counters (columns 153-168) ------------------
# Continuation of the generated Counter64 Rx/Tx column pairs; same
# three-statement pattern. NOTE(review): column 160's description lacks the
# trailing period present on every other entry — carried over verbatim from
# the vendor MIB, intentionally left as-is.
jnxMbgSgwIfGtpV2ICsUserAUTHFlRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 153), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsUserAUTHFlRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsUserAUTHFlRx.setDescription('Number of GTPV2 packets received with cause User AUTH Failed.')
jnxMbgSgwIfGtpV2ICsUserAUTHFlTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 154), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsUserAUTHFlTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsUserAUTHFlTx.setDescription('Number of GTP packets sent with cause User AUTH Failed.')
jnxMbgSgwIfGtpV2ICsAPNAcsDenRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 155), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsAPNAcsDenRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsAPNAcsDenRx.setDescription('Number of GTPV2 packets received with cause APN Access Denied.')
jnxMbgSgwIfGtpV2ICsAPNAcsDenTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 156), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsAPNAcsDenTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsAPNAcsDenTx.setDescription('Number of GTP packets sent with cause APN Access Denied.')
jnxMbgSgwIfGtpV2ICsReqRejRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 157), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsReqRejRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsReqRejRx.setDescription('Number of GTPV2 packets received with cause Request Rejected.')
jnxMbgSgwIfGtpV2ICsReqRejTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 158), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsReqRejTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsReqRejTx.setDescription('Number of GTP packets sent with cause Request Rejected.')
jnxMbgSgwIfGtpV2ICsPTMSISigMMRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 159), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsPTMSISigMMRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsPTMSISigMMRx.setDescription('Number of GTPV2 packets received with cause P-TMSI Signature Mismatch.')
jnxMbgSgwIfGtpV2ICsPTMSISigMMTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 160), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsPTMSISigMMTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsPTMSISigMMTx.setDescription('Number of GTP packets sent with cause P-TMSI Signature Mismatch')
jnxMbgSgwIfGtpV2ICsIMSINotKnRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 161), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsIMSINotKnRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsIMSINotKnRx.setDescription('Number of GTPV2 packets received with cause IMSI Not Known.')
jnxMbgSgwIfGtpV2ICsIMSINotKnTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 162), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsIMSINotKnTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsIMSINotKnTx.setDescription('Number of GTP packets sent with cause IMSI Not Known.')
jnxMbgSgwIfGtpV2ICsCondIEMsRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 163), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsCondIEMsRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsCondIEMsRx.setDescription('Number of GTPV2 packets received with cause Conditional IE Missing.')
jnxMbgSgwIfGtpV2ICsCondIEMsTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 164), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsCondIEMsTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsCondIEMsTx.setDescription('Number of GTP packets sent with cause Conditional IE Missing.')
jnxMbgSgwIfGtpV2ICsAPNResTIncRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 165), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsAPNResTIncRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsAPNResTIncRx.setDescription('Number of GTPV2 packets received with cause APN Restriction Type Incompatible.')
jnxMbgSgwIfGtpV2ICsAPNResTIncTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 166), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsAPNResTIncTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsAPNResTIncTx.setDescription('Number of GTP packets sent with cause APN Restriction Type Incompatible.')
jnxMbgSgwIfGtpV2ICsUnknownRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 167), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsUnknownRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsUnknownRx.setDescription('Number of GTPV2 packets received with cause Unknown.')
jnxMbgSgwIfGtpV2ICsUnknownTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 168), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsUnknownTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsUnknownTx.setDescription('Number of GTP packets sent with cause Unknown.')
jnxMbgSgwIfGtpV2ICsLclDetRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 169), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsLclDetRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsLclDetRx.setDescription('Number of GTP packets received with cause Local Detach.')
jnxMbgSgwIfGtpV2ICsLclDetTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 170), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsLclDetTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsLclDetTx.setDescription('Number of GTP packets sent with cause Local Detach.')
jnxMbgSgwIfGtpV2ICsCmpDetRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 171), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsCmpDetRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsCmpDetRx.setDescription('Number of GTP packets received with cause Complete Detach.')
jnxMbgSgwIfGtpV2ICsCmpDetTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 172), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsCmpDetTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsCmpDetTx.setDescription('Number of GTP packets sent with cause Complete Detach.')
jnxMbgSgwIfGtpV2ICsRATChgRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 173), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsRATChgRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsRATChgRx.setDescription('Number of GTP packets received with cause RAT changed from 3GPP to non 3GPP.')
jnxMbgSgwIfGtpV2ICsRATChgTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 174), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsRATChgTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsRATChgTx.setDescription('Number of GTP packets sent with cause RAT changed from 3GPP to non 3GPP.')
jnxMbgSgwIfGtpV2ICsISRDeactRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 175), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsISRDeactRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsISRDeactRx.setDescription('Number of GTP packets received with cause ISR Deactivated.')
jnxMbgSgwIfGtpV2ICsISRDeactTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 176), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsISRDeactTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsISRDeactTx.setDescription('Number of GTP packets sent with cause ISR Deactivated.')
jnxMbgSgwIfGtpV2ICsEIFRNCEnRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 177), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsEIFRNCEnRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsEIFRNCEnRx.setDescription('Number of GTP packets received with cause Error Indication from RNC eNodeB.')
jnxMbgSgwIfGtpV2ICsEIFRNCEnTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 178), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsEIFRNCEnTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsEIFRNCEnTx.setDescription('Number of GTP packets sent with cause Error Indication from RNC eNodeB.')
jnxMbgSgwIfGtpV2ICsSemErTADRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 179), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsSemErTADRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsSemErTADRx.setDescription('Number of GTP packets received with cause Semantic Error in TAD Operation.')
jnxMbgSgwIfGtpV2ICsSemErTADTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 180), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsSemErTADTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsSemErTADTx.setDescription('Number of GTP packets sent with cause Semantic Error in TAD Operation.')
jnxMbgSgwIfGtpV2ICsSynErTADRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 181), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsSynErTADRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsSynErTADRx.setDescription('Number of GTP packets received with cause Syntactic Error in TAD Operation.')
jnxMbgSgwIfGtpV2ICsSynErTADTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 182), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsSynErTADTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsSynErTADTx.setDescription('Number of GTP packets sent with cause Syntactic Error in TAD Operation.')
jnxMbgSgwIfGtpV2ICsRMValRcvRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 183), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsRMValRcvRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsRMValRcvRx.setDescription('Number of GTP packets received with cause Reserved Message Value Received.')
jnxMbgSgwIfGtpV2ICsRMValRcvTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 184), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsRMValRcvTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsRMValRcvTx.setDescription('Number of GTP packets sent with cause Reserved Message Value Received.')
jnxMbgSgwIfGtpV2ICsRPrNtRspRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 185), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsRPrNtRspRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsRPrNtRspRx.setDescription('Number of GTP packets received with cause Remote peer not responding.')
jnxMbgSgwIfGtpV2ICsRPrNtRspTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 186), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsRPrNtRspTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsRPrNtRspTx.setDescription('Number of GTP packets sent with cause Remote peer not responding.')
jnxMbgSgwIfGtpV2ICsColNWReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 187), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsColNWReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsColNWReqRx.setDescription('Number of GTP packets received with cause Collision with network initiated request.')
jnxMbgSgwIfGtpV2ICsColNWReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 188), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsColNWReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsColNWReqTx.setDescription('Number of GTP packets sent with cause Collision with network initiated request.')
# --- pysnmp-generated MIB objects (JUNIPER-MOBILE-GATEWAY-SGW-GTP-MIB) ---
# Each triplet below registers one read-only Counter64 column of the per-GTP-
# interface statistics table entry (OID prefix 1.3.6.1.4.1.2636.3.66.2.2.1.4.1,
# final arc = column number). The `if mibBuilder.loadTexts:` guards attach the
# STATUS/DESCRIPTION strings only when the MIB builder was configured to load
# texts (keeps memory down otherwise).
# NOTE(review): generated code — do not hand-edit OIDs or names; regenerate
# from the source MIB instead.

# GTPv2 cause counters, columns 189-198 (Rx/Tx pairs per cause value).
jnxMbgSgwIfGtpV2ICsUnPgUESusRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 189), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsUnPgUESusRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsUnPgUESusRx.setDescription('Number of GTP packets received with cause Unable to page UE due to suspension.')
jnxMbgSgwIfGtpV2ICsUnPgUESusTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 190), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsUnPgUESusTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsUnPgUESusTx.setDescription('Number of GTP packets sent with cause Unable to page UE due to suspension.')
jnxMbgSgwIfGtpV2ICsInvTotLenRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 191), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsInvTotLenRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsInvTotLenRx.setDescription('Number of GTP packets received with cause Invalid total len.')
jnxMbgSgwIfGtpV2ICsInvTotLenTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 192), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsInvTotLenTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsInvTotLenTx.setDescription('Number of GTP packets sent with cause Invalid total len.')
jnxMbgSgwIfGtpV2ICsDtForNtSupRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 193), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsDtForNtSupRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsDtForNtSupRx.setDescription('Number of GTP packets received with cause Data forwarding not supported.')
jnxMbgSgwIfGtpV2ICsDtForNtSupTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 194), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsDtForNtSupTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsDtForNtSupTx.setDescription('Number of GTP packets sent with cause Data forwarding not supported.')
jnxMbgSgwIfGtpV2ICsInReFRePrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 195), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsInReFRePrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsInReFRePrRx.setDescription('Number of GTP packets received with cause Invalid Reply from Remote peer.')
jnxMbgSgwIfGtpV2ICsInReFRePrTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 196), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsInReFRePrTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsInReFRePrTx.setDescription('Number of GTP packets sent with cause Invalid Reply from Remote peer.')
jnxMbgSgwIfGtpV2ICsInvPrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 197), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsInvPrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsInvPrRx.setDescription('Number of GTP packets received with cause Invalid peer.')
jnxMbgSgwIfGtpV2ICsInvPrTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 198), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsInvPrTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsInvPrTx.setDescription('Number of GTP packets sent with cause Invalid peer.')

# GTPv1 protocol-error / timer / end-marker / echo / error-indication
# counters, columns 199-209.
jnxMbgSgwIfGtpV1ProtocolErrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 199), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV1ProtocolErrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV1ProtocolErrRx.setDescription('Number of GTPv1 Protocol Errors Received.')
jnxMbgSgwIfGtpV1UnSupMsgRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 200), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV1UnSupMsgRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV1UnSupMsgRx.setDescription('Number of GTPv1 Unsupported Messages received.')
jnxMbgSgwIfGtpV1T3RespTmrExpRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 201), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV1T3RespTmrExpRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV1T3RespTmrExpRx.setDescription('Number of GTP V1 T3 timer expiries Received.')
jnxMbgSgwIfGtpV1EndMarkerRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 202), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV1EndMarkerRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV1EndMarkerRx.setDescription('Number of GTP V1 end marker packets received.')
jnxMbgSgwIfGtpV1EndMarkerTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 203), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV1EndMarkerTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV1EndMarkerTx.setDescription('Number of GTP V1 end marker packets sent.')
jnxMbgSgwIfGtpV1EchoReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 204), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV1EchoReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV1EchoReqRx.setDescription('Number of GTP V1 echo request packets received.')
jnxMbgSgwIfGtpV1EchoReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 205), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV1EchoReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV1EchoReqTx.setDescription('Number of GTP V1 echo request packets sent.')
jnxMbgSgwIfGtpV1EchoRespRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 206), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV1EchoRespRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV1EchoRespRx.setDescription('Number of GTP V1 echo response packets received.')
jnxMbgSgwIfGtpV1EchoRespTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 207), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV1EchoRespTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV1EchoRespTx.setDescription('Number of GTP V1 echo response packets sent.')
jnxMbgSgwIfGtpV1ErrIndRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 208), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV1ErrIndRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV1ErrIndRx.setDescription('Number of GTP packets V1 Error Indication packets received.')
jnxMbgSgwIfGtpV1ErrIndTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 209), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV1ErrIndTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV1ErrIndTx.setDescription('Number of GTP packets V1 Error Indication packets sent.')

# GTPv2 Suspend/Resume Notification and Acknowledgement message counters,
# columns 210-217.
jnxMbgSgwIfSuspNotifRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 210), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfSuspNotifRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfSuspNotifRx.setDescription('Number of GTPv2 Suspend Notification messages received.')
jnxMbgSgwIfSuspNotifTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 211), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfSuspNotifTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfSuspNotifTx.setDescription('Number of GTPv2 Suspend Notification messages sent.')
jnxMbgSgwIfSuspAckRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 212), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfSuspAckRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfSuspAckRx.setDescription('Number of GTPv2 Suspend Acknowledgement messages received.')
jnxMbgSgwIfSuspAckTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 213), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfSuspAckTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfSuspAckTx.setDescription('Number of GTPv2 Suspend Acknowledgement messages sent.')
jnxMbgSgwIfResumeNotifRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 214), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfResumeNotifRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfResumeNotifRx.setDescription('Number of GTPv2 Resume Notification messages received.')
jnxMbgSgwIfResumeNotifTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 215), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfResumeNotifTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfResumeNotifTx.setDescription('Number of GTPv2 Resume Notification messages sent.')
jnxMbgSgwIfResumeAckRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 216), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfResumeAckRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfResumeAckRx.setDescription('Number of GTPv2 Resume Acknowledgement messages received.')
jnxMbgSgwIfResumeAckTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 217), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfResumeAckTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfResumeAckTx.setDescription('Number of GTPv2 Resume Acknowledgement messages sent.')

# GTPv2 piggyback message counters, columns 218-219.
jnxMbgSgwIfPiggybackMsgRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 218), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfPiggybackMsgRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfPiggybackMsgRx.setDescription('Number of GTPv2 Piggyback messages received.')
jnxMbgSgwIfPiggybackMsgTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 219), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfPiggybackMsgTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfPiggybackMsgTx.setDescription('Number of GTPv2 S11 Piggyback messages sent.')
# Notification variable-bindings subtree (OID ...2.2.1.3). These scalars are
# "accessiblefornotify": they cannot be polled directly and exist only to be
# carried as varbinds inside the NotificationTypes defined further below.
jnxMbgSgwGtpNotificationVars = MibIdentifier((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 3))
jnxMbgSgwGtpPeerName = MibScalar((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 3, 1), DisplayString()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: jnxMbgSgwGtpPeerName.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpPeerName.setDescription('GTP Peer Name/IP')
jnxMbgSgwGtpAlarmStatCounter = MibScalar((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 3, 2), Unsigned32()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: jnxMbgSgwGtpAlarmStatCounter.setStatus('current')
# NOTE(review): "spefies" below is a typo carried over from the upstream MIB
# DESCRIPTION text; it is a runtime string and is left byte-identical here.
if mibBuilder.loadTexts: jnxMbgSgwGtpAlarmStatCounter.setDescription('Current Value of (Alarm) Statistics Counter eg: in jnxMbgSgwGtpPrDNTPerPrAlrmActv it spefies the number of times peer is down with in the monitoring interval')
jnxMbgSgwGtpInterfaceType = MibScalar((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 3, 3), DisplayString()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: jnxMbgSgwGtpInterfaceType.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpInterfaceType.setDescription('GTP Interface Type which can be one of S5/S8/S11/S1U/S12/S4')
jnxMbgSgwGtpGwName = MibScalar((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 3, 4), DisplayString()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: jnxMbgSgwGtpGwName.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpGwName.setDescription('A string that indicates the gateway name')
jnxMbgSgwGtpGwIndex = MibScalar((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 3, 5), Unsigned32()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: jnxMbgSgwGtpGwIndex.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpGwIndex.setDescription('Current Gateway ID value')
# Trap (NotificationType) definitions under OID ...2.2.0. setObjects() lists
# the varbinds (defined in the accessible-for-notify subtree above) that are
# included when the trap is emitted; the threshold alarms additionally carry
# jnxMbgSgwGtpAlarmStatCounter.
jnxMbgSgwGtpPeerGwUpNotif = NotificationType((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 0, 1)).setObjects(("JUNIPER-MOBILE-GATEWAY-SGW-GTP-MIB", "jnxMbgSgwGtpGwIndex"), ("JUNIPER-MOBILE-GATEWAY-SGW-GTP-MIB", "jnxMbgSgwGtpGwName"), ("JUNIPER-MOBILE-GATEWAY-SGW-GTP-MIB", "jnxMbgSgwGtpInterfaceType"), ("JUNIPER-MOBILE-GATEWAY-SGW-GTP-MIB", "jnxMbgSgwGtpPeerName"))
if mibBuilder.loadTexts: jnxMbgSgwGtpPeerGwUpNotif.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpPeerGwUpNotif.setDescription('GTPC Peer UP Notification. This trap is sent when a new peer is added or an existing peer goes down and comes back up.')
jnxMbgSgwGtpPeerGwDnNotif = NotificationType((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 0, 2)).setObjects(("JUNIPER-MOBILE-GATEWAY-SGW-GTP-MIB", "jnxMbgSgwGtpGwIndex"), ("JUNIPER-MOBILE-GATEWAY-SGW-GTP-MIB", "jnxMbgSgwGtpGwName"), ("JUNIPER-MOBILE-GATEWAY-SGW-GTP-MIB", "jnxMbgSgwGtpInterfaceType"), ("JUNIPER-MOBILE-GATEWAY-SGW-GTP-MIB", "jnxMbgSgwGtpPeerName"))
if mibBuilder.loadTexts: jnxMbgSgwGtpPeerGwDnNotif.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpPeerGwDnNotif.setDescription('GTPC Peer Down Notification. This trap is sent when a peer connection goes down.')
# Peer-flap threshold alarm (raise/clear pair); also carries the alarm counter.
jnxMbgSgwGtpPrDnTPerPrAlrmActv = NotificationType((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 0, 3)).setObjects(("JUNIPER-MOBILE-GATEWAY-SGW-GTP-MIB", "jnxMbgSgwGtpGwIndex"), ("JUNIPER-MOBILE-GATEWAY-SGW-GTP-MIB", "jnxMbgSgwGtpGwName"), ("JUNIPER-MOBILE-GATEWAY-SGW-GTP-MIB", "jnxMbgSgwGtpInterfaceType"), ("JUNIPER-MOBILE-GATEWAY-SGW-GTP-MIB", "jnxMbgSgwGtpPeerName"), ("JUNIPER-MOBILE-GATEWAY-SGW-GTP-MIB", "jnxMbgSgwGtpAlarmStatCounter"))
if mibBuilder.loadTexts: jnxMbgSgwGtpPrDnTPerPrAlrmActv.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpPrDnTPerPrAlrmActv.setDescription('Peer down Threshold trap Active. This is sent when a peer connection flaps for more than a higher threshold number of times with in a monitor interval.')
jnxMbgSgwGtpPrDnTPerPrAlrmClr = NotificationType((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 0, 4)).setObjects(("JUNIPER-MOBILE-GATEWAY-SGW-GTP-MIB", "jnxMbgSgwGtpGwIndex"), ("JUNIPER-MOBILE-GATEWAY-SGW-GTP-MIB", "jnxMbgSgwGtpGwName"), ("JUNIPER-MOBILE-GATEWAY-SGW-GTP-MIB", "jnxMbgSgwGtpInterfaceType"), ("JUNIPER-MOBILE-GATEWAY-SGW-GTP-MIB", "jnxMbgSgwGtpPeerName"), ("JUNIPER-MOBILE-GATEWAY-SGW-GTP-MIB", "jnxMbgSgwGtpAlarmStatCounter"))
if mibBuilder.loadTexts: jnxMbgSgwGtpPrDnTPerPrAlrmClr.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpPrDnTPerPrAlrmClr.setDescription('Peer down Threshold trap Cleared. This is sent when the number of times a peer connection flaps in a monitor interval come down below the lower threshold.')
# Generated export: registers every MIB symbol with the builder so other
# compiled pysnmp modules can resolve them via importSymbols. PYSNMP_MODULE_ID
# identifies this module's MODULE-IDENTITY object (jnxMbgSgwGtpMib). The
# generator splits the export across multiple calls; this is the first.
mibBuilder.exportSymbols("JUNIPER-MOBILE-GATEWAY-SGW-GTP-MIB", jnxMbgSgwIfGtpV2ICsPkFltManErRx=jnxMbgSgwIfGtpV2ICsPkFltManErRx, jnxMbgSgwGtpV2ICsGREKeyNtFdTx=jnxMbgSgwGtpV2ICsGREKeyNtFdTx, jnxMbgSgwPPIPProtoErrRx=jnxMbgSgwPPIPProtoErrRx, jnxMbgSgwCrtBrReqRx=jnxMbgSgwCrtBrReqRx, jnxMbgSgwPPV2NumMsgRx=jnxMbgSgwPPV2NumMsgRx, jnxMbgSgwPPGtpV2ICsTFTSMANTErRx=jnxMbgSgwPPGtpV2ICsTFTSMANTErRx, jnxMbgSgwIPProtoErrRx=jnxMbgSgwIPProtoErrRx, jnxMbgSgwPPModBrCmdRx=jnxMbgSgwPPModBrCmdRx, jnxMbgSgwGtpPeerGwUpNotif=jnxMbgSgwGtpPeerGwUpNotif, jnxMbgSgwPPGtpV2ICsCtxNotFndTx=jnxMbgSgwPPGtpV2ICsCtxNotFndTx, jnxMbgSgwGtpMib=jnxMbgSgwGtpMib, jnxMbgSgwIfUnSupportedMsgRx=jnxMbgSgwIfUnSupportedMsgRx, jnxMbgSgwPPGtpV2ICsOptIEIncorTx=jnxMbgSgwPPGtpV2ICsOptIEIncorTx, jnxMbgSgwIfGtpV1ErrIndTx=jnxMbgSgwIfGtpV1ErrIndTx, jnxMbgSgwGtpV2ICsUERefusesRx=jnxMbgSgwGtpV2ICsUERefusesRx, jnxMbgSgwPPCreateSessReqRx=jnxMbgSgwPPCreateSessReqRx, jnxMbgSgwPcktLenErrRx=jnxMbgSgwPcktLenErrRx, jnxMbgSgwGtpV2ICsOptIEIncorrTx=jnxMbgSgwGtpV2ICsOptIEIncorrTx, jnxMbgSgwPPGtpV2ICsSysFailTx=jnxMbgSgwPPGtpV2ICsSysFailTx, jnxMbgSgwGtpV2ICsUENotRespTx=jnxMbgSgwGtpV2ICsUENotRespTx, jnxMbgSgwIfGtpV2ICsManIEMissTx=jnxMbgSgwIfGtpV2ICsManIEMissTx, jnxMbgSgwPPGtpV2ICsAPNAcsDenTx=jnxMbgSgwPPGtpV2ICsAPNAcsDenTx, jnxMbgSgwPPGtpV2ICsEIFRNCEnTx=jnxMbgSgwPPGtpV2ICsEIFRNCEnTx, jnxMbgSgwPPGtpV2ICsNoResRx=jnxMbgSgwPPGtpV2ICsNoResRx, jnxMbgSgwGtpV2ICsOptIEIncorrRx=jnxMbgSgwGtpV2ICsOptIEIncorrRx, jnxMbgSgwPPUpdConnSetReqTx=jnxMbgSgwPPUpdConnSetReqTx, jnxMbgSgwUnSupportedMsgRx=jnxMbgSgwUnSupportedMsgRx, jnxMbgSgwRelAcsBrReqTx=jnxMbgSgwRelAcsBrReqTx, jnxMbgSgwPPDelBrCmdRx=jnxMbgSgwPPDelBrCmdRx, jnxMbgSgwIfCrIndTunRespTx=jnxMbgSgwIfCrIndTunRespTx, jnxMbgSgwPPCrIndTunRespRx=jnxMbgSgwPPCrIndTunRespRx, jnxMbgSgwIfGtpV2ICsTFTSMANTErRx=jnxMbgSgwIfGtpV2ICsTFTSMANTErRx, jnxMbgSgwPPModBrCmdTx=jnxMbgSgwPPModBrCmdTx, jnxMbgSgwPPGtpV2ICsNOTFTUECTXRx=jnxMbgSgwPPGtpV2ICsNOTFTUECTXRx, jnxMbgSgwIfGtpV2ICsAcceptPartRx=jnxMbgSgwIfGtpV2ICsAcceptPartRx, 
jnxMbgSgwPPGtpV2ICsInvPrRx=jnxMbgSgwPPGtpV2ICsInvPrRx, PYSNMP_MODULE_ID=jnxMbgSgwGtpMib, jnxMbgSgwGtpV2ICsAPNAcsDenTx=jnxMbgSgwGtpV2ICsAPNAcsDenTx, jnxMbgSgwPPGTPUnknVerRx=jnxMbgSgwPPGTPUnknVerRx, jnxMbgSgwPPGtpV2ICsRATChgRx=jnxMbgSgwPPGtpV2ICsRATChgRx, jnxMbgSgwPPGtpV2ICsAcceptPartTx=jnxMbgSgwPPGtpV2ICsAcceptPartTx, jnxMbgSgwPPGtpV2ICsRATChgTx=jnxMbgSgwPPGtpV2ICsRATChgTx, jnxMbgSgwGtpV2ICsServDeniedTx=jnxMbgSgwGtpV2ICsServDeniedTx, jnxMbgSgwIfGtpV2ICsInvPrRx=jnxMbgSgwIfGtpV2ICsInvPrRx, jnxMbgSgwIfCrIndTunReqTx=jnxMbgSgwIfCrIndTunReqTx, jnxMbgSgwRelAcsBrRespRx=jnxMbgSgwRelAcsBrRespRx, jnxMbgSgwCreateSessRspTx=jnxMbgSgwCreateSessRspTx, jnxMbgSgwUpdBrRspTx=jnxMbgSgwUpdBrRspTx, jnxMbgSgwPPRxPacketsDropped=jnxMbgSgwPPRxPacketsDropped, jnxMbgSgwDelBrCmdRx=jnxMbgSgwDelBrCmdRx, jnxMbgSgwGtpV2ICsLclDetRx=jnxMbgSgwGtpV2ICsLclDetRx, jnxMbgSgwPPStopPagingIndTx=jnxMbgSgwPPStopPagingIndTx, jnxMbgSgwGtpPrDnTPerPrAlrmClr=jnxMbgSgwGtpPrDnTPerPrAlrmClr, jnxMbgSgwPPModBrRspRx=jnxMbgSgwPPModBrRspRx, jnxMbgSgwPPUpdBrReqRx=jnxMbgSgwPPUpdBrReqRx, jnxMbgSgwGtpV2ICsPTMSISigMMTx=jnxMbgSgwGtpV2ICsPTMSISigMMTx, jnxMbgSgwPPGtpV2ICsMisUnknAPNTx=jnxMbgSgwPPGtpV2ICsMisUnknAPNTx, jnxMbgSgwGtpV2ICsReqRejTx=jnxMbgSgwGtpV2ICsReqRejTx, jnxMbgSgwGtpV2ICsCtxNotFndRx=jnxMbgSgwGtpV2ICsCtxNotFndRx, jnxMbgSgwIfCrtBrReqTx=jnxMbgSgwIfCrtBrReqTx, jnxMbgSgwIfResumeAckTx=jnxMbgSgwIfResumeAckTx, jnxMbgSgwV2NumMsgTx=jnxMbgSgwV2NumMsgTx, jnxMbgSgwPPGtpV2ICsGREKeyNtFdTx=jnxMbgSgwPPGtpV2ICsGREKeyNtFdTx, jnxMbgSgwPPGtpV2ICsInvMsgFmtRx=jnxMbgSgwPPGtpV2ICsInvMsgFmtRx, jnxMbgSgwIfGtpV2ICsInvTotLenTx=jnxMbgSgwIfGtpV2ICsInvTotLenTx, jnxMbgSgwIfGtpV2ICsGREKeyNtFdTx=jnxMbgSgwIfGtpV2ICsGREKeyNtFdTx, jnxMbgSgwPPGtpV2ICsIMSINotKnTx=jnxMbgSgwPPGtpV2ICsIMSINotKnTx, jnxMbgSgwGtpV2ICsUnknownRx=jnxMbgSgwGtpV2ICsUnknownRx, jnxMbgSgwUpdConnSetRspTx=jnxMbgSgwUpdConnSetRspTx, jnxMbgSgwCreateSessReqRx=jnxMbgSgwCreateSessReqRx, jnxMbgSgwGtpV2ICsInvTotLenRx=jnxMbgSgwGtpV2ICsInvTotLenRx, jnxMbgSgwDelBrReqRx=jnxMbgSgwDelBrReqRx, 
jnxMbgSgwIfV2EchoRespTx=jnxMbgSgwIfV2EchoRespTx, jnxMbgSgwIfGtpV2ICsVerNotSuppTx=jnxMbgSgwIfGtpV2ICsVerNotSuppTx, jnxMbgSgwIfGtpV2ICsRPrNtRspRx=jnxMbgSgwIfGtpV2ICsRPrNtRspRx, jnxMbgSgwIfGtpV2ICsUnexpRptIETx=jnxMbgSgwIfGtpV2ICsUnexpRptIETx, jnxMbgSgwGtpV2ICsRelocFailTx=jnxMbgSgwGtpV2ICsRelocFailTx, jnxMbgSgwGtpV2ICsAllDynAdOccRx=jnxMbgSgwGtpV2ICsAllDynAdOccRx, jnxMbgSgwPPDelSessReqTx=jnxMbgSgwPPDelSessReqTx, jnxMbgSgwDelSessReqRx=jnxMbgSgwDelSessReqRx, jnxMbgSgwPPResumeAckTx=jnxMbgSgwPPResumeAckTx, jnxMbgSgwPPGtpV2ICsSynErTADTx=jnxMbgSgwPPGtpV2ICsSynErTADTx, jnxMbgSgwGtpV2ICsUserAUTHFlRx=jnxMbgSgwGtpV2ICsUserAUTHFlRx, jnxMbgSgwIfGtpV2ICsCmpDetTx=jnxMbgSgwIfGtpV2ICsCmpDetTx, jnxMbgSgwGtpV2ICsDtForNtSupRx=jnxMbgSgwGtpV2ICsDtForNtSupRx, jnxMbgSgwIfGtpV2ICsUnabPageUERx=jnxMbgSgwIfGtpV2ICsUnabPageUERx, jnxMbgSgwPPGtpV2ICsProtoNtSupRx=jnxMbgSgwPPGtpV2ICsProtoNtSupRx, jnxMbgSgwV2VerNotSupTx=jnxMbgSgwV2VerNotSupTx, jnxMbgSgwGtpV2ICsInReFRePrRx=jnxMbgSgwGtpV2ICsInReFRePrRx, jnxMbgSgwIfGtpV2ICsSysFailRx=jnxMbgSgwIfGtpV2ICsSysFailRx, jnxMbgSgwIfGtpV2ICsInReFRePrTx=jnxMbgSgwIfGtpV2ICsInReFRePrTx, jnxMbgSgwPPDlDataNotifTx=jnxMbgSgwPPDlDataNotifTx, jnxMbgSgwIfGtpV2ICsRelocFailRx=jnxMbgSgwIfGtpV2ICsRelocFailRx, jnxMbgSgwDelConnSetRspTx=jnxMbgSgwDelConnSetRspTx, jnxMbgSgwT3RespTmrExpRx=jnxMbgSgwT3RespTmrExpRx, jnxMbgSgwStopPagingIndRx=jnxMbgSgwStopPagingIndRx, jnxMbgSgwDelSessReqTx=jnxMbgSgwDelSessReqTx, jnxMbgSgwGtpV2ICsTFTSysErrRx=jnxMbgSgwGtpV2ICsTFTSysErrRx, jnxMbgSgwPPRelAcsBrRespRx=jnxMbgSgwPPRelAcsBrRespRx, jnxMbgSgwIfDelSessReqRx=jnxMbgSgwIfDelSessReqRx, jnxMbgSgwGtpV2ICsReqAcceptTx=jnxMbgSgwGtpV2ICsReqAcceptTx, jnxMbgSgwPPRelAcsBrRespTx=jnxMbgSgwPPRelAcsBrRespTx, jnxMbgSgwIfGtpV1EchoRespTx=jnxMbgSgwIfGtpV1EchoRespTx, jnxMbgSgwGtpV2ICsAcceptPartTx=jnxMbgSgwGtpV2ICsAcceptPartTx, jnxMbgSgwIfModBrReqRx=jnxMbgSgwIfModBrReqRx, jnxMbgSgwGtpCGlbStatsTable=jnxMbgSgwGtpCGlbStatsTable, jnxMbgSgwGtpV2ICsPkFltSynErrTx=jnxMbgSgwGtpV2ICsPkFltSynErrTx, 
jnxMbgSgwIfDelBrRspTx=jnxMbgSgwIfDelBrRspTx, jnxMbgSgwV2EchoReqRx=jnxMbgSgwV2EchoReqRx, jnxMbgSgwPPGtpV1EchoReqTx=jnxMbgSgwPPGtpV1EchoReqTx, jnxMbgSgwPPGtpV2ICsUERefusesRx=jnxMbgSgwPPGtpV2ICsUERefusesRx, jnxMbgSgwGtpV2ICsInReFRePrTx=jnxMbgSgwGtpV2ICsInReFRePrTx, jnxMbgSgwIfGtpV2ICsSrvNotSuppRx=jnxMbgSgwIfGtpV2ICsSrvNotSuppRx, jnxMbgSgwPPDelBrRspTx=jnxMbgSgwPPDelBrRspTx, jnxMbgSgwResumeAckRx=jnxMbgSgwResumeAckRx, jnxMbgSgwPPPacketAllocFail=jnxMbgSgwPPPacketAllocFail, jnxMbgSgwPPGtpV2ICsDenINRatTx=jnxMbgSgwPPGtpV2ICsDenINRatTx, jnxMbgSgwUnknMsgRx=jnxMbgSgwUnknMsgRx, jnxMbgSgwPPDelIndTunRespTx=jnxMbgSgwPPDelIndTunRespTx, jnxMbgSgwIfGtpV2ICsUnexpRptIERx=jnxMbgSgwIfGtpV2ICsUnexpRptIERx, jnxMbgSgwIfCrtBrRspTx=jnxMbgSgwIfCrtBrRspTx, jnxMbgSgwGtpV2ICsNewPTNPrefTx=jnxMbgSgwGtpV2ICsNewPTNPrefTx, jnxMbgSgwIfGtpV2ICsUnknownRx=jnxMbgSgwIfGtpV2ICsUnknownRx, jnxMbgSgwGtpPrDnTPerPrAlrmActv=jnxMbgSgwGtpPrDnTPerPrAlrmActv, jnxMbgSgwPPGtpV2ICsPkFltManErRx=jnxMbgSgwPPGtpV2ICsPkFltManErRx, jnxMbgSgwPPGtpV2ICsNPTSIAdbrTx=jnxMbgSgwPPGtpV2ICsNPTSIAdbrTx, jnxMbgSgwIfDelBrFlrIndTx=jnxMbgSgwIfDelBrFlrIndTx, jnxMbgSgwIfGtpV2ICsPkFltManErTx=jnxMbgSgwIfGtpV2ICsPkFltManErTx, jnxMbgSgwStopPagingIndTx=jnxMbgSgwStopPagingIndTx, jnxMbgSgwGtpV2ICsProtoNtSupTx=jnxMbgSgwGtpV2ICsProtoNtSupTx, jnxMbgSgwIfRelAcsBrReqRx=jnxMbgSgwIfRelAcsBrReqRx, jnxMbgSgwGtpV2ICsCmpDetRx=jnxMbgSgwGtpV2ICsCmpDetRx, jnxMbgSgwIfDelBrRspRx=jnxMbgSgwIfDelBrRspRx, jnxMbgSgwPPDelIndTunRespRx=jnxMbgSgwPPDelIndTunRespRx, jnxMbgSgwPPGtpV1UnSupMsgRx=jnxMbgSgwPPGtpV1UnSupMsgRx, jnxMbgSgwPPGtpV1EchoReqRx=jnxMbgSgwPPGtpV1EchoReqRx, jnxMbgSgwIfGtpV2ICsPkFltSynErTx=jnxMbgSgwIfGtpV2ICsPkFltSynErTx, jnxMbgSgwIfCrIndTunReqRx=jnxMbgSgwIfCrIndTunReqRx, jnxMbgSgwGtpV2ICsManIEIncorrTx=jnxMbgSgwGtpV2ICsManIEIncorrTx, jnxMbgSgwIfGtpV2ICsManIEMissRx=jnxMbgSgwIfGtpV2ICsManIEMissRx, jnxMbgSgwPPCreateSessReqTx=jnxMbgSgwPPCreateSessReqTx, jnxMbgSgwIfDelConnSetReqTx=jnxMbgSgwIfDelConnSetReqTx, 
jnxMbgSgwPPGtpV2ICsPTNotSuppTx=jnxMbgSgwPPGtpV2ICsPTNotSuppTx, jnxMbgSgwPPDlDataAckTx=jnxMbgSgwPPDlDataAckTx, jnxMbgSgwIfDelConnSetRspTx=jnxMbgSgwIfDelConnSetRspTx, jnxMbgSgwPPModBrReqRx=jnxMbgSgwPPModBrReqRx, jnxMbgSgwRxPacketsDropped=jnxMbgSgwRxPacketsDropped, jnxMbgSgwIfGtpV2ICsColNWReqTx=jnxMbgSgwIfGtpV2ICsColNWReqTx, jnxMbgSgwPPGtpV2ICsUnPgUESusRx=jnxMbgSgwPPGtpV2ICsUnPgUESusRx, jnxMbgSgwDelConnSetReqTx=jnxMbgSgwDelConnSetReqTx, jnxMbgSgwGtpV1EchoReqTx=jnxMbgSgwGtpV1EchoReqTx, jnxMbgSgwPPCrtBrRspRx=jnxMbgSgwPPCrtBrRspRx, jnxMbgSgwIfStopPagingIndRx=jnxMbgSgwIfStopPagingIndRx, jnxMbgSgwIfGtpV2ICsLclDetRx=jnxMbgSgwIfGtpV2ICsLclDetRx, jnxMbgSgwIfModBrRspRx=jnxMbgSgwIfModBrRspRx, jnxMbgSgwS11PiggybackMsgTx=jnxMbgSgwS11PiggybackMsgTx, jnxMbgSgwGtpV2ICsSynErTADRx=jnxMbgSgwGtpV2ICsSynErTADRx, jnxMbgSgwPPGtpV2ICsISRDeactTx=jnxMbgSgwPPGtpV2ICsISRDeactTx, jnxMbgSgwIfGtpV2ICsUnPgUESusTx=jnxMbgSgwIfGtpV2ICsUnPgUESusTx, jnxMbgSgwGtpV2ICsUnexpRptIERx=jnxMbgSgwGtpV2ICsUnexpRptIERx, jnxMbgSgwGtpV2ICsUnabPageUETx=jnxMbgSgwGtpV2ICsUnabPageUETx, jnxMbgSgwGtpV2ICsRATChgRx=jnxMbgSgwGtpV2ICsRATChgRx, jnxMbgSgwGtpV2ICsColNWReqTx=jnxMbgSgwGtpV2ICsColNWReqTx, jnxMbgSgwPPGtpV2ICsDtForNtSupRx=jnxMbgSgwPPGtpV2ICsDtForNtSupRx, jnxMbgSgwPPDelConnSetRspRx=jnxMbgSgwPPDelConnSetRspRx, jnxMbgSgwPPResumeNotifRx=jnxMbgSgwPPResumeNotifRx, jnxMbgSgwIfCrtBrRspRx=jnxMbgSgwIfCrtBrRspRx, jnxMbgSgwIfGtpV2ICsInvLenRx=jnxMbgSgwIfGtpV2ICsInvLenRx, jnxMbgSgwPPGtpV2ICsPTNotSuppRx=jnxMbgSgwPPGtpV2ICsPTNotSuppRx, jnxMbgSgwPPGtpV2ICsRMValRcvTx=jnxMbgSgwPPGtpV2ICsRMValRcvTx, jnxMbgSgwPPGtpV2ICsMisUnknAPNRx=jnxMbgSgwPPGtpV2ICsMisUnknAPNRx, jnxMbgSgwGtpV2ICsDeniedINRatRx=jnxMbgSgwGtpV2ICsDeniedINRatRx, jnxMbgSgwGtpV2ICsManIEIncorrRx=jnxMbgSgwGtpV2ICsManIEIncorrRx, jnxMbgSgwGtpV2ICsReqRejRx=jnxMbgSgwGtpV2ICsReqRejRx, jnxMbgSgwPPPiggybackMsgTx=jnxMbgSgwPPPiggybackMsgTx, jnxMbgSgwPPGtpV2ICsInvLenRx=jnxMbgSgwPPGtpV2ICsInvLenRx, jnxMbgSgwIfGtpV2ICsEIFRNCEnRx=jnxMbgSgwIfGtpV2ICsEIFRNCEnRx, 
jnxMbgSgwPPBrResFlrIndTx=jnxMbgSgwPPBrResFlrIndTx, jnxMbgSgwIfModBrCmdRx=jnxMbgSgwIfModBrCmdRx, jnxMbgSgwIfDelIndTunRespRx=jnxMbgSgwIfDelIndTunRespRx, jnxMbgSgwGtpV1EchoReqRx=jnxMbgSgwGtpV1EchoReqRx, jnxMbgSgwIfDelBrReqTx=jnxMbgSgwIfDelBrReqTx, jnxMbgSgwIfGtpV2ICsUserAUTHFlTx=jnxMbgSgwIfGtpV2ICsUserAUTHFlTx, jnxMbgSgwIfGtpV2ICsCmpDetRx=jnxMbgSgwIfGtpV2ICsCmpDetRx, jnxMbgSgwIfSuspAckTx=jnxMbgSgwIfSuspAckTx, jnxMbgSgwGtpV2ICsCtxNotFndTx=jnxMbgSgwGtpV2ICsCtxNotFndTx, jnxMbgSgwPPGtpV1ErrIndTx=jnxMbgSgwPPGtpV1ErrIndTx, jnxMbgSgwGtpV2ICsUERefusesTx=jnxMbgSgwGtpV2ICsUERefusesTx, jnxMbgSgwPPCrIndTunRespTx=jnxMbgSgwPPCrIndTunRespTx, jnxMbgSgwPPIPVerErrRx=jnxMbgSgwPPIPVerErrRx, jnxMbgSgwIfUpdBrReqTx=jnxMbgSgwIfUpdBrReqTx, jnxMbgSgwIfGtpV2ICsCtxNotFndRx=jnxMbgSgwIfGtpV2ICsCtxNotFndRx, jnxMbgSgwIfGtpV2ICsTFTSysErrTx=jnxMbgSgwIfGtpV2ICsTFTSysErrTx, jnxMbgSgwGtpV2ICsLclDetTx=jnxMbgSgwGtpV2ICsLclDetTx, jnxMbgSgwPPGtpV2ICsPkFltManErTx=jnxMbgSgwPPGtpV2ICsPkFltManErTx, jnxMbgSgwPPGtpV2ICsUENotRespTx=jnxMbgSgwPPGtpV2ICsUENotRespTx, jnxMbgSgwGtpV1ErrIndRx=jnxMbgSgwGtpV1ErrIndRx, jnxMbgSgwIfModBrFlrIndTx=jnxMbgSgwIfModBrFlrIndTx, jnxMbgSgwPPGtpV2ICsPTMSISigMMTx=jnxMbgSgwPPGtpV2ICsPTMSISigMMTx, jnxMbgSgwIfGtpV2ICsSemErTADTx=jnxMbgSgwIfGtpV2ICsSemErTADTx, jnxMbgSgwPPUnSupportedMsgRx=jnxMbgSgwPPUnSupportedMsgRx, jnxMbgSgwIfDelSessRspTx=jnxMbgSgwIfDelSessRspTx, jnxMbgSgwIfGTPUnknVerRx=jnxMbgSgwIfGTPUnknVerRx, jnxMbgSgwPPGTPPortErrRx=jnxMbgSgwPPGTPPortErrRx, jnxMbgSgwIfGtpV2ICsUERefusesTx=jnxMbgSgwIfGtpV2ICsUERefusesTx, jnxMbgSgwDelBrFlrIndRx=jnxMbgSgwDelBrFlrIndRx, jnxMbgSgwS4PiggybackMsgTx=jnxMbgSgwS4PiggybackMsgTx, jnxMbgSgwIfGtpV1EndMarkerRx=jnxMbgSgwIfGtpV1EndMarkerRx, jnxMbgSgwGtpV2ICsGREKeyNtFdRx=jnxMbgSgwGtpV2ICsGREKeyNtFdRx, jnxMbgSgwGtpV2ICsTFTSMANTErTx=jnxMbgSgwGtpV2ICsTFTSMANTErTx, jnxMbgSgwIfBrResCmdTx=jnxMbgSgwIfBrResCmdTx, jnxMbgSgwPPResumeNotifTx=jnxMbgSgwPPResumeNotifTx, jnxMbgSgwIfGtpV2ICsPTNotSuppRx=jnxMbgSgwIfGtpV2ICsPTNotSuppRx, 
jnxMbgSgwIfGtpV2ICsSrvNotSuppTx=jnxMbgSgwIfGtpV2ICsSrvNotSuppTx, jnxMbgSgwPPGtpV2ICsInReFRePrTx=jnxMbgSgwPPGtpV2ICsInReFRePrTx, jnxMbgSgwPPDelBrFlrIndTx=jnxMbgSgwPPDelBrFlrIndTx, jnxMbgSgwIfGtpV2ICsTFTSysErrRx=jnxMbgSgwIfGtpV2ICsTFTSysErrRx, jnxMbgSgwIfGTPPortErrRx=jnxMbgSgwIfGTPPortErrRx, jnxMbgSgwPPGtpV2ICsSynErTADRx=jnxMbgSgwPPGtpV2ICsSynErTADRx, jnxMbgSgwPPGtpV2ICsPTMSISigMMRx=jnxMbgSgwPPGtpV2ICsPTMSISigMMRx, jnxMbgSgwGtpV2ICsInvMsgFmtTx=jnxMbgSgwGtpV2ICsInvMsgFmtTx, jnxMbgSgwPPGtpV2ICsInvPrTx=jnxMbgSgwPPGtpV2ICsInvPrTx, jnxMbgSgwIfV2NumBytesTx=jnxMbgSgwIfV2NumBytesTx, jnxMbgSgwIfGtpV2ICsUnPgUESusRx=jnxMbgSgwIfGtpV2ICsUnPgUESusRx, jnxMbgSgwPPGtpV1EchoRespRx=jnxMbgSgwPPGtpV1EchoRespRx, jnxMbgSgwGtpV2ICsPageTx=jnxMbgSgwGtpV2ICsPageTx, jnxMbgSgwGtpV2ICsNewPTSIAdbrRx=jnxMbgSgwGtpV2ICsNewPTSIAdbrRx, jnxMbgSgwIfGtpV2ICsAPNAcsDenRx=jnxMbgSgwIfGtpV2ICsAPNAcsDenRx, jnxMbgSgwIfModBrFlrIndRx=jnxMbgSgwIfModBrFlrIndRx, jnxMbgSgwS5PiggybackMsgRx=jnxMbgSgwS5PiggybackMsgRx, jnxMbgSgwIfGtpV2ICsInvMsgFmtRx=jnxMbgSgwIfGtpV2ICsInvMsgFmtRx, jnxMbgSgwGtpV2ICsPTMSISigMMRx=jnxMbgSgwGtpV2ICsPTMSISigMMRx, jnxMbgSgwIfGtpV2ICsRATChgTx=jnxMbgSgwIfGtpV2ICsRATChgTx, jnxMbgSgwPPGtpV2ICsUnknownRx=jnxMbgSgwPPGtpV2ICsUnknownRx, jnxMbgSgwPPDelSessReqRx=jnxMbgSgwPPDelSessReqRx, jnxMbgSgwPPT3RespTmrExpRx=jnxMbgSgwPPT3RespTmrExpRx, jnxMbgSgwIfGtpV2ICsLclDetTx=jnxMbgSgwIfGtpV2ICsLclDetTx, jnxMbgSgwGtpV2ICsServNotSuppRx=jnxMbgSgwGtpV2ICsServNotSuppRx, jnxMbgSgwPPDelConnSetRspTx=jnxMbgSgwPPDelConnSetRspTx, jnxMbgSgwPacketSendFail=jnxMbgSgwPacketSendFail, jnxMbgSgwIfModBrCmdTx=jnxMbgSgwIfModBrCmdTx, jnxMbgSgwGtpV2ICsNoMemTx=jnxMbgSgwGtpV2ICsNoMemTx, jnxMbgSgwIfGtpV2ICsIMSINotKnTx=jnxMbgSgwIfGtpV2ICsIMSINotKnTx, jnxMbgSgwPPGtpV2ICsVerNotSuppRx=jnxMbgSgwPPGtpV2ICsVerNotSuppRx, jnxMbgSgwGtpV2ICsRMValRcvTx=jnxMbgSgwGtpV2ICsRMValRcvTx, jnxMbgSgwIfGtpV2ICsDtForNtSupRx=jnxMbgSgwIfGtpV2ICsDtForNtSupRx, jnxMbgSgwIfUpdBrReqRx=jnxMbgSgwIfUpdBrReqRx, jnxMbgSgwModBrFlrIndTx=jnxMbgSgwModBrFlrIndTx, 
jnxMbgSgwDlDataNotifTx=jnxMbgSgwDlDataNotifTx, jnxMbgSgwGtpV2ICsPkFltSynErrRx=jnxMbgSgwGtpV2ICsPkFltSynErrRx, jnxMbgSgwPPGtpV2ICsManIEMissRx=jnxMbgSgwPPGtpV2ICsManIEMissRx, jnxMbgSgwCrIndTunRespTx=jnxMbgSgwCrIndTunRespTx, jnxMbgSgwPPCrtBrReqTx=jnxMbgSgwPPCrtBrReqTx, jnxMbgSgwPPGtpV2ICsServDeniedRx=jnxMbgSgwPPGtpV2ICsServDeniedRx)
mibBuilder.exportSymbols("JUNIPER-MOBILE-GATEWAY-SGW-GTP-MIB", jnxMbgSgwBrResFlrIndRx=jnxMbgSgwBrResFlrIndRx, jnxMbgSgwPPModBrReqTx=jnxMbgSgwPPModBrReqTx, jnxMbgSgwIfDlDataAckRx=jnxMbgSgwIfDlDataAckRx, jnxMbgSgwIfGtpV2ICsUENotRespTx=jnxMbgSgwIfGtpV2ICsUENotRespTx, jnxMbgSgwGtpV2ICsRelocFailRx=jnxMbgSgwGtpV2ICsRelocFailRx, jnxMbgSgwPPGtpV2ICsNOTFTUECTXTx=jnxMbgSgwPPGtpV2ICsNOTFTUECTXTx, jnxMbgSgwDelConnSetRspRx=jnxMbgSgwDelConnSetRspRx, jnxMbgSgwGtpV1EndMarkerTx=jnxMbgSgwGtpV1EndMarkerTx, jnxMbgSgwPPSuspNotifRx=jnxMbgSgwPPSuspNotifRx, jnxMbgSgwGtpV2ICsUnexpRptIETx=jnxMbgSgwGtpV2ICsUnexpRptIETx, jnxMbgSgwIfGtpV2ICsReqRejRx=jnxMbgSgwIfGtpV2ICsReqRejRx, jnxMbgSgwModBrRspTx=jnxMbgSgwModBrRspTx, jnxMbgSgwGtpV2ICsAcceptPartRx=jnxMbgSgwGtpV2ICsAcceptPartRx, jnxMbgSgwIfSuspNotifRx=jnxMbgSgwIfSuspNotifRx, jnxMbgSgwPPV2NumMsgTx=jnxMbgSgwPPV2NumMsgTx, jnxMbgSgwPPGtpV2ICsNoMemTx=jnxMbgSgwPPGtpV2ICsNoMemTx, jnxMbgSgwIfGtpV2ICsReqAcceptRx=jnxMbgSgwIfGtpV2ICsReqAcceptRx, jnxMbgSgwPPPcktLenErrRx=jnxMbgSgwPPPcktLenErrRx, jnxMbgSgwBrResCmdTx=jnxMbgSgwBrResCmdTx, jnxMbgSgwIfGtpV2ICsCondIEMsTx=jnxMbgSgwIfGtpV2ICsCondIEMsTx, jnxMbgSgwGtpV2ICsInvLenRx=jnxMbgSgwGtpV2ICsInvLenRx, jnxMbgSgwPPGtpV1T3RespTmrExpRx=jnxMbgSgwPPGtpV1T3RespTmrExpRx, jnxMbgSgwPPGtpV2ICsRMValRcvRx=jnxMbgSgwPPGtpV2ICsRMValRcvRx, jnxMbgSgwS5PiggybackMsgTx=jnxMbgSgwS5PiggybackMsgTx, jnxMbgSgwIfDelIndTunReqTx=jnxMbgSgwIfDelIndTunReqTx, jnxMbgSgwIfDelBrCmdTx=jnxMbgSgwIfDelBrCmdTx, jnxMbgSgwIfGtpV2ICsMisUnknAPNRx=jnxMbgSgwIfGtpV2ICsMisUnknAPNRx, jnxMbgSgwPPGtpV2ICsNoMemRx=jnxMbgSgwPPGtpV2ICsNoMemRx, jnxMbgSgwPPUpdConnSetReqRx=jnxMbgSgwPPUpdConnSetReqRx, jnxMbgSgwDlDataNotiFlrIndRx=jnxMbgSgwDlDataNotiFlrIndRx, jnxMbgSgwPPDlDataNotiFlrIndRx=jnxMbgSgwPPDlDataNotiFlrIndRx, jnxMbgSgwIfV2VerNotSupTx=jnxMbgSgwIfV2VerNotSupTx, jnxMbgSgwPPGtpV2ICsIMSINotKnRx=jnxMbgSgwPPGtpV2ICsIMSINotKnRx, jnxMbgSgwPPRelAcsBrReqRx=jnxMbgSgwPPRelAcsBrReqRx, jnxMbgSgwPPGtpV2ICsInReFRePrRx=jnxMbgSgwPPGtpV2ICsInReFRePrRx, 
jnxMbgSgwGtpV2ICsPageRx=jnxMbgSgwGtpV2ICsPageRx, jnxMbgSgwIfResumeNotifTx=jnxMbgSgwIfResumeNotifTx, jnxMbgSgwPPGtpV2ICsUnabPageUERx=jnxMbgSgwPPGtpV2ICsUnabPageUERx, jnxMbgSgwIfRxPacketsDropped=jnxMbgSgwIfRxPacketsDropped, jnxMbgSgwGtpV1EchoRespTx=jnxMbgSgwGtpV1EchoRespTx, jnxMbgSgwPPGtpV2ICsUserAUTHFlRx=jnxMbgSgwPPGtpV2ICsUserAUTHFlRx, jnxMbgSgwIfCreateSessReqTx=jnxMbgSgwIfCreateSessReqTx, jnxMbgSgwPPDelIndTunReqTx=jnxMbgSgwPPDelIndTunReqTx, jnxMbgSgwIfDelIndTunReqRx=jnxMbgSgwIfDelIndTunReqRx, jnxMbgSgwPPGtpV2ICsAcceptPartRx=jnxMbgSgwPPGtpV2ICsAcceptPartRx, jnxMbgSgwPPV2EchoRespTx=jnxMbgSgwPPV2EchoRespTx, jnxMbgSgwPPGtpV2ICsColNWReqTx=jnxMbgSgwPPGtpV2ICsColNWReqTx, jnxMbgSgwGtpV2ICsCondIEMsRx=jnxMbgSgwGtpV2ICsCondIEMsRx, jnxMbgSgwGTPPortErrRx=jnxMbgSgwGTPPortErrRx, jnxMbgSgwIfGtpV2ICsInReFRePrRx=jnxMbgSgwIfGtpV2ICsInReFRePrRx, jnxMbgSgwPPGtpV2ICsPkFltSynErTx=jnxMbgSgwPPGtpV2ICsPkFltSynErTx, jnxMbgSgwIfGtpV2ICsUENotRespRx=jnxMbgSgwIfGtpV2ICsUENotRespRx, jnxMbgSgwDelBrRspRx=jnxMbgSgwDelBrRspRx, jnxMbgSgwIfType=jnxMbgSgwIfType, jnxMbgSgwIfGtpV2ICsServDeniedTx=jnxMbgSgwIfGtpV2ICsServDeniedTx, jnxMbgSgwSuspAckRx=jnxMbgSgwSuspAckRx, jnxMbgSgwPPGtpV2ICsManIEIncorTx=jnxMbgSgwPPGtpV2ICsManIEIncorTx, jnxMbgSgwIfGtpV2ICsISRDeactRx=jnxMbgSgwIfGtpV2ICsISRDeactRx, jnxMbgSgwGtpIfStatsEntry=jnxMbgSgwGtpIfStatsEntry, jnxMbgSgwIfGtpV2ICsServDeniedRx=jnxMbgSgwIfGtpV2ICsServDeniedRx, jnxMbgSgwGtpV2ICsNoResTx=jnxMbgSgwGtpV2ICsNoResTx, jnxMbgSgwPPGtpV2ICsReqAcceptRx=jnxMbgSgwPPGtpV2ICsReqAcceptRx, jnxMbgSgwDelBrCmdTx=jnxMbgSgwDelBrCmdTx, jnxMbgSgwGtpV2ICsNoMemRx=jnxMbgSgwGtpV2ICsNoMemRx, jnxMbgSgwGtpV2ICsUnPgUESusTx=jnxMbgSgwGtpV2ICsUnPgUESusTx, jnxMbgSgwIfDelConnSetRspRx=jnxMbgSgwIfDelConnSetRspRx, jnxMbgSgwIfGtpV2ICsNOTFTUECTXTx=jnxMbgSgwIfGtpV2ICsNOTFTUECTXTx, jnxMbgSgwPPDelIndTunReqRx=jnxMbgSgwPPDelIndTunReqRx, jnxMbgSgwIfGtpV2ICsPTNotSuppTx=jnxMbgSgwIfGtpV2ICsPTNotSuppTx, jnxMbgSgwCrtBrRspRx=jnxMbgSgwCrtBrRspRx, jnxMbgSgwGtpV2ICsInvLenTx=jnxMbgSgwGtpV2ICsInvLenTx, 
jnxMbgSgwGtpV2ICsNewPTNPrefRx=jnxMbgSgwGtpV2ICsNewPTNPrefRx, jnxMbgSgwGtpV2ICsIMSINotKnRx=jnxMbgSgwGtpV2ICsIMSINotKnRx, jnxMbgSgwIfCreateSessRspRx=jnxMbgSgwIfCreateSessRspRx, jnxMbgSgwIfDelSessRspRx=jnxMbgSgwIfDelSessRspRx, jnxMbgSgwGtpV2ICsEIFRNCEnTx=jnxMbgSgwGtpV2ICsEIFRNCEnTx, jnxMbgSgwIfBrResFlrIndRx=jnxMbgSgwIfBrResFlrIndRx, jnxMbgSgwIfGtpV2ICsColNWReqRx=jnxMbgSgwIfGtpV2ICsColNWReqRx, jnxMbgSgwIfProtocolErrRx=jnxMbgSgwIfProtocolErrRx, jnxMbgSgwGtpV2ICsInvPrTx=jnxMbgSgwGtpV2ICsInvPrTx, jnxMbgSgwGtpNotifications=jnxMbgSgwGtpNotifications, jnxMbgSgwGtpV2ICsPkFltManErrTx=jnxMbgSgwGtpV2ICsPkFltManErrTx, jnxMbgSgwPPGtpV2ICsNewPTNPrefRx=jnxMbgSgwPPGtpV2ICsNewPTNPrefRx, jnxMbgSgwGtpV2ICsRPrNtRspTx=jnxMbgSgwGtpV2ICsRPrNtRspTx, jnxMbgSgwRelAcsBrRespTx=jnxMbgSgwRelAcsBrRespTx, jnxMbgSgwGtpV2ICsAPNResTIncTx=jnxMbgSgwGtpV2ICsAPNResTIncTx, jnxMbgSgwPPGtpV2ICsCondIEMsTx=jnxMbgSgwPPGtpV2ICsCondIEMsTx, jnxMbgSgwUpdConnSetReqTx=jnxMbgSgwUpdConnSetReqTx, jnxMbgSgwPPGtpV2ICsAPNResTIncRx=jnxMbgSgwPPGtpV2ICsAPNResTIncRx, jnxMbgSgwGtpV2ICsServNotSuppTx=jnxMbgSgwGtpV2ICsServNotSuppTx, jnxMbgSgwUpdBrRspRx=jnxMbgSgwUpdBrRspRx, jnxMbgSgwDelIndTunReqTx=jnxMbgSgwDelIndTunReqTx, jnxMbgSgwIfGtpV2ICsNewPTNPrefRx=jnxMbgSgwIfGtpV2ICsNewPTNPrefRx, jnxMbgSgwDelConnSetReqRx=jnxMbgSgwDelConnSetReqRx, jnxMbgSgwGtpV2ICsInvPrRx=jnxMbgSgwGtpV2ICsInvPrRx, jnxMbgSgwIfGtpV1EndMarkerTx=jnxMbgSgwIfGtpV1EndMarkerTx, jnxMbgSgwBrResFlrIndTx=jnxMbgSgwBrResFlrIndTx, jnxMbgSgwIPVerErrRx=jnxMbgSgwIPVerErrRx, jnxMbgSgwIfGtpV1T3RespTmrExpRx=jnxMbgSgwIfGtpV1T3RespTmrExpRx, jnxMbgSgwIfModBrRspTx=jnxMbgSgwIfModBrRspTx, jnxMbgSgwPPGtpV2ICsProtoNtSupTx=jnxMbgSgwPPGtpV2ICsProtoNtSupTx, jnxMbgSgwIfGtpV2ICsInvMsgFmtTx=jnxMbgSgwIfGtpV2ICsInvMsgFmtTx, jnxMbgSgwPPSuspAckTx=jnxMbgSgwPPSuspAckTx, jnxMbgSgwGtpV2ICsNOTFTUECTXRx=jnxMbgSgwGtpV2ICsNOTFTUECTXRx, jnxMbgSgwPPGtpV2ICsOptIEIncorRx=jnxMbgSgwPPGtpV2ICsOptIEIncorRx, jnxMbgSgwUpdConnSetRspRx=jnxMbgSgwUpdConnSetRspRx, jnxMbgSgwGtpV2ICsPTNotSuppRx=jnxMbgSgwGtpV2ICsPTNotSuppRx, 
jnxMbgSgwPPGtpV2ICsAPNAcsDenRx=jnxMbgSgwPPGtpV2ICsAPNAcsDenRx, jnxMbgSgwPPGtpV2ICsUnexpRptIERx=jnxMbgSgwPPGtpV2ICsUnexpRptIERx, jnxMbgSgwCreateSessRspRx=jnxMbgSgwCreateSessRspRx, jnxMbgSgwIfGtpV1UnSupMsgRx=jnxMbgSgwIfGtpV1UnSupMsgRx, jnxMbgSgwUpdBrReqTx=jnxMbgSgwUpdBrReqTx, jnxMbgSgwPPGtpV2ICsServNotSupRx=jnxMbgSgwPPGtpV2ICsServNotSupRx, jnxMbgSgwIfGtpV2ICsUERefusesRx=jnxMbgSgwIfGtpV2ICsUERefusesRx, jnxMbgSgwPPGtpV2ICsRPrNtRspTx=jnxMbgSgwPPGtpV2ICsRPrNtRspTx, jnxMbgSgwIfGtpV1EchoReqTx=jnxMbgSgwIfGtpV1EchoReqTx, jnxMbgSgwPPGtpV2ICsCmpDetRx=jnxMbgSgwPPGtpV2ICsCmpDetRx, jnxMbgSgwCrIndTunRespRx=jnxMbgSgwCrIndTunRespRx, jnxMbgSgwIfGtpV2ICsOptIEIncorRx=jnxMbgSgwIfGtpV2ICsOptIEIncorRx, jnxMbgSgwPPGtpV2ICsCmpDetTx=jnxMbgSgwPPGtpV2ICsCmpDetTx, jnxMbgSgwGtpAlarmStatCounter=jnxMbgSgwGtpAlarmStatCounter, jnxMbgSgwPPV2EchoReqRx=jnxMbgSgwPPV2EchoReqRx, jnxMbgSgwDlDataAckRx=jnxMbgSgwDlDataAckRx, jnxMbgSgwIfIPVerErrRx=jnxMbgSgwIfIPVerErrRx, jnxMbgSgwPPStopPagingIndRx=jnxMbgSgwPPStopPagingIndRx, jnxMbgSgwGtpV2ICsInvTotLenTx=jnxMbgSgwGtpV2ICsInvTotLenTx, jnxMbgSgwIfModBrReqTx=jnxMbgSgwIfModBrReqTx, jnxMbgSgwGtpV1ProtocolErrRx=jnxMbgSgwGtpV1ProtocolErrRx, jnxMbgSgwGtpV2ICsUENotRespRx=jnxMbgSgwGtpV2ICsUENotRespRx, jnxMbgSgwIfGtpV2ICsManIEIncorRx=jnxMbgSgwIfGtpV2ICsManIEIncorRx, jnxMbgSgwPPGtpV2ICsUnexpRptIETx=jnxMbgSgwPPGtpV2ICsUnexpRptIETx, jnxMbgSgwIfGtpV2ICsPkFltSynErRx=jnxMbgSgwIfGtpV2ICsPkFltSynErRx, jnxMbgSgwPPDlDataNotifRx=jnxMbgSgwPPDlDataNotifRx, jnxMbgSgwGtpV2ICsCondIEMsTx=jnxMbgSgwGtpV2ICsCondIEMsTx, jnxMbgSgwDelIndTunRespTx=jnxMbgSgwDelIndTunRespTx, jnxMbgSgwPPGtpV2ICsDenINRatRx=jnxMbgSgwPPGtpV2ICsDenINRatRx, jnxMbgSgwGtpPeerGwDnNotif=jnxMbgSgwGtpPeerGwDnNotif, jnxMbgSgwGtpV2ICsPkFltManErrRx=jnxMbgSgwGtpV2ICsPkFltManErrRx, jnxMbgSgwV2NumMsgRx=jnxMbgSgwV2NumMsgRx, jnxMbgSgwPPModBrFlrIndTx=jnxMbgSgwPPModBrFlrIndTx, jnxMbgSgwIfUpdConnSetReqRx=jnxMbgSgwIfUpdConnSetReqRx, jnxMbgSgwGtpObjects=jnxMbgSgwGtpObjects, jnxMbgSgwGtpV2ICsSynErTADTx=jnxMbgSgwGtpV2ICsSynErTADTx, 
jnxMbgSgwIfPacketAllocFail=jnxMbgSgwIfPacketAllocFail, jnxMbgSgwIfCreateSessReqRx=jnxMbgSgwIfCreateSessReqRx, jnxMbgSgwPPCrIndTunReqRx=jnxMbgSgwPPCrIndTunReqRx, jnxMbgSgwCrtBrRspTx=jnxMbgSgwCrtBrRspTx, jnxMbgSgwPPV2NumBytesTx=jnxMbgSgwPPV2NumBytesTx, jnxMbgSgwGtpIfStatsTable=jnxMbgSgwGtpIfStatsTable, jnxMbgSgwGtpV2ICsColNWReqRx=jnxMbgSgwGtpV2ICsColNWReqRx, jnxMbgSgwPPGtpV2ICsUserAUTHFlTx=jnxMbgSgwPPGtpV2ICsUserAUTHFlTx, jnxMbgSgwIfGtpV2ICsEIFRNCEnTx=jnxMbgSgwIfGtpV2ICsEIFRNCEnTx, jnxMbgSgwPPV2EchoReqTx=jnxMbgSgwPPV2EchoReqTx, jnxMbgSgwDlDataNotifRx=jnxMbgSgwDlDataNotifRx, jnxMbgSgwPPGtpV2ICsRPrNtRspRx=jnxMbgSgwPPGtpV2ICsRPrNtRspRx, jnxMbgSgwS11PiggybackMsgRx=jnxMbgSgwS11PiggybackMsgRx, jnxMbgSgwPPGtpV2ICsSysFailRx=jnxMbgSgwPPGtpV2ICsSysFailRx, jnxMbgSgwGtpV2ICsVerNotSuppRx=jnxMbgSgwGtpV2ICsVerNotSuppRx, jnxMbgSgwDlDataAckTx=jnxMbgSgwDlDataAckTx, jnxMbgSgwPPGtpV2ICsCtxNotFndRx=jnxMbgSgwPPGtpV2ICsCtxNotFndRx, jnxMbgSgwIfGtpV2ICsAlDynAdOccTx=jnxMbgSgwIfGtpV2ICsAlDynAdOccTx, jnxMbgSgwPPGtpV1ProtocolErrRx=jnxMbgSgwPPGtpV1ProtocolErrRx, jnxMbgSgwModBrCmdTx=jnxMbgSgwModBrCmdTx, jnxMbgSgwPPProtocolErrRx=jnxMbgSgwPPProtocolErrRx, jnxMbgSgwPPRelAcsBrReqTx=jnxMbgSgwPPRelAcsBrReqTx, jnxMbgSgwGtpV1UnSupMsgRx=jnxMbgSgwGtpV1UnSupMsgRx, jnxMbgSgwResumeAckTx=jnxMbgSgwResumeAckTx, jnxMbgSgwIfGtpV2ICsSysFailTx=jnxMbgSgwIfGtpV2ICsSysFailTx, jnxMbgSgwIfV2EchoReqTx=jnxMbgSgwIfV2EchoReqTx, jnxMbgSgwIfGtpV2ICsRMValRcvRx=jnxMbgSgwIfGtpV2ICsRMValRcvRx, jnxMbgSgwPPUpdBrRspTx=jnxMbgSgwPPUpdBrRspTx, jnxMbgSgwIfGtpV2ICsSynErTADRx=jnxMbgSgwIfGtpV2ICsSynErTADRx, jnxMbgSgwPPSuspAckRx=jnxMbgSgwPPSuspAckRx, jnxMbgSgwProtocolErrRx=jnxMbgSgwProtocolErrRx, jnxMbgSgwCrtBrReqTx=jnxMbgSgwCrtBrReqTx, jnxMbgSgwIfCrtBrReqRx=jnxMbgSgwIfCrtBrReqRx, jnxMbgSgwIfGtpV2ICsProtoNtSupTx=jnxMbgSgwIfGtpV2ICsProtoNtSupTx, jnxMbgSgwIfGtpV2ICsUserAUTHFlRx=jnxMbgSgwIfGtpV2ICsUserAUTHFlRx, jnxMbgSgwGtpV2ICsSemErTADTx=jnxMbgSgwGtpV2ICsSemErTADTx, jnxMbgSgwGtpV2ICsSysFailTx=jnxMbgSgwGtpV2ICsSysFailTx, 
jnxMbgSgwGtpV2ICsDtForNtSupTx=jnxMbgSgwGtpV2ICsDtForNtSupTx, jnxMbgSgwPPDelBrFlrIndRx=jnxMbgSgwPPDelBrFlrIndRx, jnxMbgSgwPPGtpV2ICsUnPgUESusTx=jnxMbgSgwPPGtpV2ICsUnPgUESusTx, jnxMbgSgwGtpV2ICsManIEMissTx=jnxMbgSgwGtpV2ICsManIEMissTx, jnxMbgSgwCreateSessReqTx=jnxMbgSgwCreateSessReqTx, jnxMbgSgwPPUpdBrReqTx=jnxMbgSgwPPUpdBrReqTx, jnxMbgSgwIfPiggybackMsgTx=jnxMbgSgwIfPiggybackMsgTx, jnxMbgSgwIfResumeAckRx=jnxMbgSgwIfResumeAckRx, jnxMbgSgwPPGtpV2ICsPageRx=jnxMbgSgwPPGtpV2ICsPageRx, jnxMbgSgwIfGtpV2ICsNOTFTUECTXRx=jnxMbgSgwIfGtpV2ICsNOTFTUECTXRx, jnxMbgSgwIfGtpV2ICsInvTotLenRx=jnxMbgSgwIfGtpV2ICsInvTotLenRx, jnxMbgSgwGTPUnknVerRx=jnxMbgSgwGTPUnknVerRx, jnxMbgSgwV2EchoReqTx=jnxMbgSgwV2EchoReqTx, jnxMbgSgwPPGtpV2ICsManIEMissTx=jnxMbgSgwPPGtpV2ICsManIEMissTx, jnxMbgSgwIfGtpV2ICsAPNResTIncRx=jnxMbgSgwIfGtpV2ICsAPNResTIncRx, jnxMbgSgwIfDlDataNotiFlrIndRx=jnxMbgSgwIfDlDataNotiFlrIndRx, jnxMbgSgwPPGtpV2ICsInvTotLenTx=jnxMbgSgwPPGtpV2ICsInvTotLenTx, jnxMbgSgwGtpV2ICsMisUnknAPNRx=jnxMbgSgwGtpV2ICsMisUnknAPNRx, jnxMbgSgwModBrReqRx=jnxMbgSgwModBrReqRx, jnxMbgSgwModBrReqTx=jnxMbgSgwModBrReqTx, jnxMbgSgwIfGtpV2ICsReqRejTx=jnxMbgSgwIfGtpV2ICsReqRejTx, jnxMbgSgwIfRelAcsBrRespTx=jnxMbgSgwIfRelAcsBrRespTx, jnxMbgSgwCrIndTunReqRx=jnxMbgSgwCrIndTunReqRx, jnxMbgSgwIfGtpV2ICsProtoNtSupRx=jnxMbgSgwIfGtpV2ICsProtoNtSupRx, jnxMbgSgwPPDelBrRspRx=jnxMbgSgwPPDelBrRspRx, jnxMbgSgwPPGtpV2ICsLclDetTx=jnxMbgSgwPPGtpV2ICsLclDetTx, jnxMbgSgwIfGtpV2ICsIMSINotKnRx=jnxMbgSgwIfGtpV2ICsIMSINotKnRx, jnxMbgSgwIfRelAcsBrRespRx=jnxMbgSgwIfRelAcsBrRespRx, jnxMbgSgwPPV2VerNotSupTx=jnxMbgSgwPPV2VerNotSupTx, jnxMbgSgwIfGtpV2ICsPTMSISigMMTx=jnxMbgSgwIfGtpV2ICsPTMSISigMMTx, jnxMbgSgwIfGtpV2ICsRATChgRx=jnxMbgSgwIfGtpV2ICsRATChgRx, jnxMbgSgwGtpGwName=jnxMbgSgwGtpGwName, jnxMbgSgwPPGtpV2ICsUnabPageUETx=jnxMbgSgwPPGtpV2ICsUnabPageUETx, jnxMbgSgwGtpV2ICsNewPTSIAdbrTx=jnxMbgSgwGtpV2ICsNewPTSIAdbrTx, jnxMbgSgwSuspNotifRx=jnxMbgSgwSuspNotifRx, jnxMbgSgwPPV2VerNotSupRx=jnxMbgSgwPPV2VerNotSupRx, 
jnxMbgSgwIfGtpV2ICsNoMemRx=jnxMbgSgwIfGtpV2ICsNoMemRx, jnxMbgSgwDelBrRspTx=jnxMbgSgwDelBrRspTx, jnxMbgSgwIfGtpV2ICsSynErTADTx=jnxMbgSgwIfGtpV2ICsSynErTADTx, jnxMbgSgwIfIPProtoErrRx=jnxMbgSgwIfIPProtoErrRx, jnxMbgSgwIfGtpV1EchoRespRx=jnxMbgSgwIfGtpV1EchoRespRx, jnxMbgSgwPPGtpV2ICsInvLenTx=jnxMbgSgwPPGtpV2ICsInvLenTx, jnxMbgSgwSuspNotifTx=jnxMbgSgwSuspNotifTx, jnxMbgSgwPPGtpV2ICsColNWReqRx=jnxMbgSgwPPGtpV2ICsColNWReqRx, jnxMbgSgwIfGtpV2ICsCtxNotFndTx=jnxMbgSgwIfGtpV2ICsCtxNotFndTx, jnxMbgSgwIfGtpV1ErrIndRx=jnxMbgSgwIfGtpV1ErrIndRx, jnxMbgSgwPPDelConnSetReqTx=jnxMbgSgwPPDelConnSetReqTx, jnxMbgSgwPPGtpV1EchoRespTx=jnxMbgSgwPPGtpV1EchoRespTx, jnxMbgSgwPPGtpV2ICsServDeniedTx=jnxMbgSgwPPGtpV2ICsServDeniedTx, jnxMbgSgwGtpV2ICsUnknownTx=jnxMbgSgwGtpV2ICsUnknownTx, jnxMbgSgwPPDelConnSetReqRx=jnxMbgSgwPPDelConnSetReqRx, jnxMbgSgwIfDelIndTunRespTx=jnxMbgSgwIfDelIndTunRespTx, jnxMbgSgwIfPiggybackMsgRx=jnxMbgSgwIfPiggybackMsgRx, jnxMbgSgwIfDelBrCmdRx=jnxMbgSgwIfDelBrCmdRx, jnxMbgSgwGtpV1EchoRespRx=jnxMbgSgwGtpV1EchoRespRx, jnxMbgSgwPPGtpLclAddr=jnxMbgSgwPPGtpLclAddr, jnxMbgSgwPPGtpV2ICsPkFltSynErRx=jnxMbgSgwPPGtpV2ICsPkFltSynErRx, jnxMbgSgwPPGtpV2ICsRelocFailTx=jnxMbgSgwPPGtpV2ICsRelocFailTx, jnxMbgSgwGtpV2ICsAPNAcsDenRx=jnxMbgSgwGtpV2ICsAPNAcsDenRx, jnxMbgSgwIfGtpV2ICsUnknownTx=jnxMbgSgwIfGtpV2ICsUnknownTx, jnxMbgSgwPPDelSessRspRx=jnxMbgSgwPPDelSessRspRx, jnxMbgSgwPPGtpV2ICsNewPTNPrefTx=jnxMbgSgwPPGtpV2ICsNewPTNPrefTx, jnxMbgSgwV2EchoRespTx=jnxMbgSgwV2EchoRespTx, jnxMbgSgwPPGtpV2ICsVerNotSuppTx=jnxMbgSgwPPGtpV2ICsVerNotSuppTx, jnxMbgSgwResumeNotifTx=jnxMbgSgwResumeNotifTx, jnxMbgSgwPPCrtBrRspTx=jnxMbgSgwPPCrtBrRspTx, jnxMbgSgwPPGtpV2ICsGREKeyNtFdRx=jnxMbgSgwPPGtpV2ICsGREKeyNtFdRx, jnxMbgSgwPPGtpV2ICsManIEIncorRx=jnxMbgSgwPPGtpV2ICsManIEIncorRx, jnxMbgSgwIfDlDataNotifRx=jnxMbgSgwIfDlDataNotifRx, jnxMbgSgwIfDelConnSetReqRx=jnxMbgSgwIfDelConnSetReqRx, jnxMbgSgwGtpV2ICsAPNResTIncRx=jnxMbgSgwGtpV2ICsAPNResTIncRx, jnxMbgSgwIfGtpV1ProtocolErrRx=jnxMbgSgwIfGtpV1ProtocolErrRx, 
jnxMbgSgwIfUpdBrRspTx=jnxMbgSgwIfUpdBrRspTx, jnxMbgSgwPPGtpV2ICsTFTSysErrRx=jnxMbgSgwPPGtpV2ICsTFTSysErrRx)
mibBuilder.exportSymbols("JUNIPER-MOBILE-GATEWAY-SGW-GTP-MIB", jnxMbgSgwResumeNotifRx=jnxMbgSgwResumeNotifRx, jnxMbgSgwIfV2EchoReqRx=jnxMbgSgwIfV2EchoReqRx, jnxMbgSgwSuspAckTx=jnxMbgSgwSuspAckTx, jnxMbgSgwGtpV1EndMarkerRx=jnxMbgSgwGtpV1EndMarkerRx, jnxMbgSgwIfGtpV2ICsNoMemTx=jnxMbgSgwIfGtpV2ICsNoMemTx, jnxMbgSgwPPGtpV2ICsNPTSIAdbrRx=jnxMbgSgwPPGtpV2ICsNPTSIAdbrRx, jnxMbgSgwPPGtpV1EndMarkerTx=jnxMbgSgwPPGtpV1EndMarkerTx, jnxMbgSgwPPGtpV2ICsReqRejRx=jnxMbgSgwPPGtpV2ICsReqRejRx, jnxMbgSgwIfSuspAckRx=jnxMbgSgwIfSuspAckRx, jnxMbgSgwIfGtpV2ICsNoResRx=jnxMbgSgwIfGtpV2ICsNoResRx, jnxMbgSgwIfGtpV2ICsInvPrTx=jnxMbgSgwIfGtpV2ICsInvPrTx, jnxMbgSgwGtpV2ICsMisUnknAPNTx=jnxMbgSgwGtpV2ICsMisUnknAPNTx, jnxMbgSgwIfGtpV2ICsNPTSIAdbrTx=jnxMbgSgwIfGtpV2ICsNPTSIAdbrTx, jnxMbgSgwPPGtpV2ICsEIFRNCEnRx=jnxMbgSgwPPGtpV2ICsEIFRNCEnRx, jnxMbgSgwUpdConnSetReqRx=jnxMbgSgwUpdConnSetReqRx, jnxMbgSgwGtpV2ICsAllDynAdOccTx=jnxMbgSgwGtpV2ICsAllDynAdOccTx, jnxMbgSgwIfResumeNotifRx=jnxMbgSgwIfResumeNotifRx, jnxMbgSgwGtpV2ICsRATChgTx=jnxMbgSgwGtpV2ICsRATChgTx, jnxMbgSgwGtpV1ErrIndTx=jnxMbgSgwGtpV1ErrIndTx, jnxMbgSgwIfSuspNotifTx=jnxMbgSgwIfSuspNotifTx, jnxMbgSgwIfGtpV2ICsAlDynAdOccRx=jnxMbgSgwIfGtpV2ICsAlDynAdOccRx, jnxMbgSgwIfUnknMsgRx=jnxMbgSgwIfUnknMsgRx, jnxMbgSgwPPDlDataAckRx=jnxMbgSgwPPDlDataAckRx, jnxMbgSgwIfGtpV2ICsNoResTx=jnxMbgSgwIfGtpV2ICsNoResTx, jnxMbgSgwPPGtpV2ICsTFTSMANTErTx=jnxMbgSgwPPGtpV2ICsTFTSMANTErTx, jnxMbgSgwIfGtpV2ICsTFTSMANTErTx=jnxMbgSgwIfGtpV2ICsTFTSMANTErTx, jnxMbgSgwIfV2NumBytesRx=jnxMbgSgwIfV2NumBytesRx, jnxMbgSgwPPPacketSendFail=jnxMbgSgwPPPacketSendFail, jnxMbgSgwModBrRspRx=jnxMbgSgwModBrRspRx, jnxMbgSgwPPCrIndTunReqTx=jnxMbgSgwPPCrIndTunReqTx, jnxMbgSgwRelAcsBrReqRx=jnxMbgSgwRelAcsBrReqRx, jnxMbgSgwPPGtpV2ICsCondIEMsRx=jnxMbgSgwPPGtpV2ICsCondIEMsRx, jnxMbgSgwIfDelBrFlrIndRx=jnxMbgSgwIfDelBrFlrIndRx, jnxMbgSgwV2NumBytesRx=jnxMbgSgwV2NumBytesRx, jnxMbgSgwPPGtpV2ICsUENotRespRx=jnxMbgSgwPPGtpV2ICsUENotRespRx, jnxMbgSgwPPGtpV2ICsDtForNtSupTx=jnxMbgSgwPPGtpV2ICsDtForNtSupTx, 
jnxMbgSgwGtpNotificationVars=jnxMbgSgwGtpNotificationVars, jnxMbgSgwPPV2EchoRespRx=jnxMbgSgwPPV2EchoRespRx, jnxMbgSgwPPGtpV2ICsTFTSysErrTx=jnxMbgSgwPPGtpV2ICsTFTSysErrTx, jnxMbgSgwGtpV2ICsVerNotSuppTx=jnxMbgSgwGtpV2ICsVerNotSuppTx, jnxMbgSgwIfGtpV1EchoReqRx=jnxMbgSgwIfGtpV1EchoReqRx, jnxMbgSgwIfCreateSessRspTx=jnxMbgSgwIfCreateSessRspTx, jnxMbgSgwGtpPerPeerStatsEntry=jnxMbgSgwGtpPerPeerStatsEntry, jnxMbgSgwPPGtpV2ICsUnknownTx=jnxMbgSgwPPGtpV2ICsUnknownTx, jnxMbgSgwIfGtpV2ICsPTMSISigMMRx=jnxMbgSgwIfGtpV2ICsPTMSISigMMRx, jnxMbgSgwGtpGwIndex=jnxMbgSgwGtpGwIndex, jnxMbgSgwModBrCmdRx=jnxMbgSgwModBrCmdRx, jnxMbgSgwPPDelSessRspTx=jnxMbgSgwPPDelSessRspTx, jnxMbgSgwIfV2NumMsgRx=jnxMbgSgwIfV2NumMsgRx, jnxMbgSgwIfBrResCmdRx=jnxMbgSgwIfBrResCmdRx, jnxMbgSgwPPGtpV2ICsAllDynAdOcTx=jnxMbgSgwPPGtpV2ICsAllDynAdOcTx, jnxMbgSgwIfV2VerNotSupRx=jnxMbgSgwIfV2VerNotSupRx, jnxMbgSgwPPGtpV2ICsPageTx=jnxMbgSgwPPGtpV2ICsPageTx, jnxMbgSgwPPGtpV2ICsSemErTADTx=jnxMbgSgwPPGtpV2ICsSemErTADTx, jnxMbgSgwPPResumeAckRx=jnxMbgSgwPPResumeAckRx, jnxMbgSgwGtpV2ICsPTNotSuppTx=jnxMbgSgwGtpV2ICsPTNotSuppTx, jnxMbgSgwGtpV2ICsSysFailRx=jnxMbgSgwGtpV2ICsSysFailRx, jnxMbgSgwPPUnknMsgRx=jnxMbgSgwPPUnknMsgRx, jnxMbgSgwGtpV2ICsNOTFTUECTXTx=jnxMbgSgwGtpV2ICsNOTFTUECTXTx, jnxMbgSgwPPDelBrReqRx=jnxMbgSgwPPDelBrReqRx, jnxMbgSgwGtpV2ICsManIEMissRx=jnxMbgSgwGtpV2ICsManIEMissRx, jnxMbgSgwDelIndTunReqRx=jnxMbgSgwDelIndTunReqRx, jnxMbgSgwGtpV2ICsReqAcceptRx=jnxMbgSgwGtpV2ICsReqAcceptRx, jnxMbgSgwPPUpdBrRspRx=jnxMbgSgwPPUpdBrRspRx, jnxMbgSgwIfGtpV2ICsMisUnknAPNTx=jnxMbgSgwIfGtpV2ICsMisUnknAPNTx, jnxMbgSgwPPGtpV1ErrIndRx=jnxMbgSgwPPGtpV1ErrIndRx, jnxMbgSgwGtpPeerName=jnxMbgSgwGtpPeerName, jnxMbgSgwDelBrReqTx=jnxMbgSgwDelBrReqTx, jnxMbgSgwIfGtpV2ICsManIEIncorTx=jnxMbgSgwIfGtpV2ICsManIEIncorTx, jnxMbgSgwIfGtpV2ICsSemErTADRx=jnxMbgSgwIfGtpV2ICsSemErTADRx, jnxMbgSgwIfGtpV2ICsDenINRatRx=jnxMbgSgwIfGtpV2ICsDenINRatRx, jnxMbgSgwIfUpdBrRspRx=jnxMbgSgwIfUpdBrRspRx, jnxMbgSgwPPCrtBrReqRx=jnxMbgSgwPPCrtBrReqRx, 
jnxMbgSgwGtpV2ICsTFTSysErrTx=jnxMbgSgwGtpV2ICsTFTSysErrTx, jnxMbgSgwIfPcktLenErrRx=jnxMbgSgwIfPcktLenErrRx, jnxMbgSgwIfGtpV2ICsRelocFailTx=jnxMbgSgwIfGtpV2ICsRelocFailTx, jnxMbgSgwIfGtpV2ICsReqAcceptTx=jnxMbgSgwIfGtpV2ICsReqAcceptTx, jnxMbgSgwPPPiggybackMsgRx=jnxMbgSgwPPPiggybackMsgRx, jnxMbgSgwDelSessRspTx=jnxMbgSgwDelSessRspTx, jnxMbgSgwPPDelBrReqTx=jnxMbgSgwPPDelBrReqTx, jnxMbgSgwGtpV2ICsTFTSMANTErRx=jnxMbgSgwGtpV2ICsTFTSMANTErRx, jnxMbgSgwPPGtpV2ICsUERefusesTx=jnxMbgSgwPPGtpV2ICsUERefusesTx, jnxMbgSgwGtpV2ICsNoResRx=jnxMbgSgwGtpV2ICsNoResRx, jnxMbgSgwIfDelBrReqRx=jnxMbgSgwIfDelBrReqRx, jnxMbgSgwGtpV2ICsRMValRcvRx=jnxMbgSgwGtpV2ICsRMValRcvRx, jnxMbgSgwGtpV2ICsUnabPageUERx=jnxMbgSgwGtpV2ICsUnabPageUERx, jnxMbgSgwIfGtpV2ICsNewPTNPrefTx=jnxMbgSgwIfGtpV2ICsNewPTNPrefTx, jnxMbgSgwPPBrResCmdTx=jnxMbgSgwPPBrResCmdTx, jnxMbgSgwIfDlDataNotifTx=jnxMbgSgwIfDlDataNotifTx, jnxMbgSgwPacketAllocFail=jnxMbgSgwPacketAllocFail, jnxMbgSgwPPGtpV2ICsSemErTADRx=jnxMbgSgwPPGtpV2ICsSemErTADRx, jnxMbgSgwPPDlDataNotiFlrIndTx=jnxMbgSgwPPDlDataNotiFlrIndTx, jnxMbgSgwIfUpdConnSetRspTx=jnxMbgSgwIfUpdConnSetRspTx, jnxMbgSgwGtpV2ICsIMSINotKnTx=jnxMbgSgwGtpV2ICsIMSINotKnTx, jnxMbgSgwGtpV2ICsSemErTADRx=jnxMbgSgwGtpV2ICsSemErTADRx, jnxMbgSgwPPGtpV2ICsLclDetRx=jnxMbgSgwPPGtpV2ICsLclDetRx, jnxMbgSgwIfUpdConnSetReqTx=jnxMbgSgwIfUpdConnSetReqTx, jnxMbgSgwIfPacketSendFail=jnxMbgSgwIfPacketSendFail, jnxMbgSgwIfGtpV2ICsNPTSIAdbrRx=jnxMbgSgwIfGtpV2ICsNPTSIAdbrRx, jnxMbgSgwIfGtpV2ICsAcceptPartTx=jnxMbgSgwIfGtpV2ICsAcceptPartTx, jnxMbgSgwPPUpdConnSetRspRx=jnxMbgSgwPPUpdConnSetRspRx, jnxMbgSgwDelIndTunRespRx=jnxMbgSgwDelIndTunRespRx, jnxMbgSgwPPSuspNotifTx=jnxMbgSgwPPSuspNotifTx, jnxMbgSgwIfUpdConnSetRspRx=jnxMbgSgwIfUpdConnSetRspRx, jnxMbgSgwGtpV2ICsInvMsgFmtRx=jnxMbgSgwGtpV2ICsInvMsgFmtRx, jnxMbgSgwIfGtpV2ICsAPNAcsDenTx=jnxMbgSgwIfGtpV2ICsAPNAcsDenTx, jnxMbgSgwDelBrFlrIndTx=jnxMbgSgwDelBrFlrIndTx, jnxMbgSgwIfIndex=jnxMbgSgwIfIndex, jnxMbgSgwGtpV1T3RespTmrExpRx=jnxMbgSgwGtpV1T3RespTmrExpRx, 
jnxMbgSgwPPCreateSessRspTx=jnxMbgSgwPPCreateSessRspTx, jnxMbgSgwPPDelBrCmdTx=jnxMbgSgwPPDelBrCmdTx, jnxMbgSgwIfStopPagingIndTx=jnxMbgSgwIfStopPagingIndTx, jnxMbgSgwPPGtpV2ICsRelocFailRx=jnxMbgSgwPPGtpV2ICsRelocFailRx, jnxMbgSgwPPGtpV2ICsInvMsgFmtTx=jnxMbgSgwPPGtpV2ICsInvMsgFmtTx, jnxMbgSgwIfGtpV2ICsDenINRatTx=jnxMbgSgwIfGtpV2ICsDenINRatTx, jnxMbgSgwPPGtpV2ICsISRDeactRx=jnxMbgSgwPPGtpV2ICsISRDeactRx, jnxMbgSgwPPBrResCmdRx=jnxMbgSgwPPBrResCmdRx, jnxMbgSgwPPGtpV2ICsNoResTx=jnxMbgSgwPPGtpV2ICsNoResTx, jnxMbgSgwPPGtpV2ICsAllDynAdOcRx=jnxMbgSgwPPGtpV2ICsAllDynAdOcRx, jnxMbgSgwIfGtpV2ICsDtForNtSupTx=jnxMbgSgwIfGtpV2ICsDtForNtSupTx, jnxMbgSgwIfV2NumMsgTx=jnxMbgSgwIfV2NumMsgTx, jnxMbgSgwIfGtpV2ICsInvLenTx=jnxMbgSgwIfGtpV2ICsInvLenTx, jnxMbgSgwDlDataNotiFlrIndTx=jnxMbgSgwDlDataNotiFlrIndTx, jnxMbgSgwIfGtpV2ICsRPrNtRspTx=jnxMbgSgwIfGtpV2ICsRPrNtRspTx, jnxMbgSgwGtpV2ICsDeniedINRatTx=jnxMbgSgwGtpV2ICsDeniedINRatTx, jnxMbgSgwGtpV2ICsProtoNtSupRx=jnxMbgSgwGtpV2ICsProtoNtSupRx, jnxMbgSgwPPModBrFlrIndRx=jnxMbgSgwPPModBrFlrIndRx, jnxMbgSgwV2VerNotSupRx=jnxMbgSgwV2VerNotSupRx, jnxMbgSgwPPGtpV1EndMarkerRx=jnxMbgSgwPPGtpV1EndMarkerRx, jnxMbgSgwGtpV2ICsISRDeactTx=jnxMbgSgwGtpV2ICsISRDeactTx, jnxMbgSgwGtpV2ICsServDeniedRx=jnxMbgSgwGtpV2ICsServDeniedRx, jnxMbgSgwIfDlDataNotiFlrIndTx=jnxMbgSgwIfDlDataNotiFlrIndTx, jnxMbgSgwCrIndTunReqTx=jnxMbgSgwCrIndTunReqTx, jnxMbgSgwGtpV2ICsUserAUTHFlTx=jnxMbgSgwGtpV2ICsUserAUTHFlTx, jnxMbgSgwPPV2NumBytesRx=jnxMbgSgwPPV2NumBytesRx, jnxMbgSgwS4PiggybackMsgRx=jnxMbgSgwS4PiggybackMsgRx, jnxMbgSgwIfDlDataAckTx=jnxMbgSgwIfDlDataAckTx, jnxMbgSgwIfGtpV2ICsGREKeyNtFdRx=jnxMbgSgwIfGtpV2ICsGREKeyNtFdRx, jnxMbgSgwGtpV2ICsISRDeactRx=jnxMbgSgwGtpV2ICsISRDeactRx, jnxMbgSgwIfDelSessReqTx=jnxMbgSgwIfDelSessReqTx, jnxMbgSgwIfRelAcsBrReqTx=jnxMbgSgwIfRelAcsBrReqTx, jnxMbgSgwDelSessRspRx=jnxMbgSgwDelSessRspRx, jnxMbgSgwPPModBrRspTx=jnxMbgSgwPPModBrRspTx, jnxMbgSgwIfV2EchoRespRx=jnxMbgSgwIfV2EchoRespRx, jnxMbgSgwBrResCmdRx=jnxMbgSgwBrResCmdRx, 
jnxMbgSgwPPGtpV2ICsAPNResTIncTx=jnxMbgSgwPPGtpV2ICsAPNResTIncTx, jnxMbgSgwPPGtpV2ICsInvTotLenRx=jnxMbgSgwPPGtpV2ICsInvTotLenRx, jnxMbgSgwGtpV2ICsUnPgUESusRx=jnxMbgSgwGtpV2ICsUnPgUESusRx, jnxMbgSgwIfGtpV2ICsOptIEIncorTx=jnxMbgSgwIfGtpV2ICsOptIEIncorTx, jnxMbgSgwGtpV2ICsCmpDetTx=jnxMbgSgwGtpV2ICsCmpDetTx, jnxMbgSgwModBrFlrIndRx=jnxMbgSgwModBrFlrIndRx, jnxMbgSgwIfGtpV2ICsRMValRcvTx=jnxMbgSgwIfGtpV2ICsRMValRcvTx, jnxMbgSgwGtpCPerPeerStatsTable=jnxMbgSgwGtpCPerPeerStatsTable, jnxMbgSgwGtpInterfaceType=jnxMbgSgwGtpInterfaceType, jnxMbgSgwPPGtpV2ICsServNotSupTx=jnxMbgSgwPPGtpV2ICsServNotSupTx, jnxMbgSgwPPBrResFlrIndRx=jnxMbgSgwPPBrResFlrIndRx, jnxMbgSgwIfGtpV2ICsCondIEMsRx=jnxMbgSgwIfGtpV2ICsCondIEMsRx, jnxMbgSgwIfGtpV2ICsAPNResTIncTx=jnxMbgSgwIfGtpV2ICsAPNResTIncTx, jnxMbgSgwIfGtpV2ICsISRDeactTx=jnxMbgSgwIfGtpV2ICsISRDeactTx, jnxMbgSgwUpdBrReqRx=jnxMbgSgwUpdBrReqRx, jnxMbgSgwV2EchoRespRx=jnxMbgSgwV2EchoRespRx, jnxMbgSgwIfGtpV2ICsUnabPageUETx=jnxMbgSgwIfGtpV2ICsUnabPageUETx, jnxMbgSgwV2NumBytesTx=jnxMbgSgwV2NumBytesTx, jnxMbgSgwIfBrResFlrIndTx=jnxMbgSgwIfBrResFlrIndTx, jnxMbgSgwPPCreateSessRspRx=jnxMbgSgwPPCreateSessRspRx, jnxMbgSgwPPGtpV2ICsReqAcceptTx=jnxMbgSgwPPGtpV2ICsReqAcceptTx, jnxMbgSgwIfGtpV2ICsVerNotSuppRx=jnxMbgSgwIfGtpV2ICsVerNotSuppRx, jnxMbgSgwPPGtpV2ICsReqRejTx=jnxMbgSgwPPGtpV2ICsReqRejTx, jnxMbgSgwIfT3RespTmrExpRx=jnxMbgSgwIfT3RespTmrExpRx, jnxMbgSgwPPUpdConnSetRspTx=jnxMbgSgwPPUpdConnSetRspTx, jnxMbgSgwPPGtpRmtAddr=jnxMbgSgwPPGtpRmtAddr, jnxMbgSgwGtpGlbStatsEntry=jnxMbgSgwGtpGlbStatsEntry, jnxMbgSgwGtpV2ICsEIFRNCEnRx=jnxMbgSgwGtpV2ICsEIFRNCEnRx, jnxMbgSgwIfCrIndTunRespRx=jnxMbgSgwIfCrIndTunRespRx, jnxMbgSgwGtpV2ICsRPrNtRspRx=jnxMbgSgwGtpV2ICsRPrNtRspRx, jnxMbgSgwPPGtpRtgInst=jnxMbgSgwPPGtpRtgInst)
|
"""
Geospatial queries.
"""
from collections import defaultdict
from shapely.geometry import mapping
# Public API of this module.
__all__ = [
    'tweets_in_region',
    'hashtag_counts_in',
    'user_counts_in',
]
# Example region used in the docstrings below: a single closed LinearRing of
# (longitude, latitude) pairs roughly bounding Seattle.  The first and last
# points are identical, as GeoJSON requires.
seattle = [
    [[-122.4596959,47.4810022],
     [-122.4596959,47.7341388],
     [-122.2244329,47.7341388],
     [-122.2244329,47.4810022],
     [-122.4596959,47.4810022]],
]
def tweets_in_region(collection, geometry):
    """Return an iterator (MongoDB cursor) over tweets in a region.

    Parameters
    ----------
    collection : MongoDB collection
        The collection of tweets.
    geometry : GeoJSON-like geometry or list
        The GeoJSON-like object representing the desired geometry.
        This can be a Polygon or MultiPolygon. For a list, the [polygon]_
        should be a list of LinearRing coordinate arrays. A LinearRing
        coordinate array is a list of (longitude, latitude) pairs that is
        closed---the first and last point must be the same.

    Returns
    -------
    cursor : pymongo cursor
        Iterator over the tweet documents that fall inside ``geometry``.

    Examples
    --------
    >>> seattle = [
    ...     [[-122.4596959,47.4810022],
    ...     [-122.4596959,47.7341388],
    ...     [-122.2244329,47.7341388],
    ...     [-122.2244329,47.4810022],
    ...     [-122.4596959,47.4810022]],
    ... ]
    ...
    >>> tweets = tweets_in_region(collection, seattle)

    Notes
    -----
    This relies on MongoDB's geospatial queries.
    A '2dsphere' index on the collection will speed this up.
    For more information on geojson polygons:
    .. [polygon] http://geojson.org/geojson-spec.html#polygon
    """
    try:
        # Shapely Polygon or MultiPolygon to a GeoJSON-like mapping.
        geometry = mapping(geometry)
    except AttributeError:
        # Not a Shapely geometry: assume it is already GeoJSON-like or a
        # plain list of LinearRings.
        pass
    if 'coordinates' not in geometry:
        # A bare list of LinearRings: wrap it into a GeoJSON Polygon.
        geometry = {'type': 'Polygon', 'coordinates': geometry}
    return collection.find({
        'coordinates': {
            '$geoWithin': {
                '$geometry': geometry
            }
        }
    })
def hashtag_counts_in(collection, geometry, skip_users=None):
    """
    Returns hashtag counts for all tweets in a geometry.

    Parameters
    ----------
    collection : MongoDB collection
        The collection of tweets.
    geometry : GeoJSON-like geometry or list
        The GeoJSON-like object representing the desired geometry.
        This can be a Polygon or MultiPolygon. For a list, the [polygon]_
        should be a list of LinearRing coordinate arrays. A LinearRing
        coordinate array is a list of (longitude, latitude) pairs that is
        closed---the first and last point must be the same.
    skip_users : list of int
        A list of Twitter user ids. Any tweet from these user ids will be
        skipped and not included in the counts.

    Returns
    -------
    counts : defaultdict
        The counts for each hashtag. Keys are hashtags, values are counts.
    skipped : int
        The number of tweets that were not counted, due to `skip_users`.

    Examples
    --------
    >>> seattle = [
    ...     [[-122.4596959,47.4810022],
    ...     [-122.4596959,47.7341388],
    ...     [-122.2244329,47.7341388],
    ...     [-122.2244329,47.4810022],
    ...     [-122.4596959,47.4810022]],
    ... ]
    ...
    >>> counts, skipped = hashtag_counts_in(collection, seattle)

    Notes
    -----
    This relies on MongoDB's geospatial queries.
    A '2dsphere' index on the collection will speed this up.
    For more information on geojson polygons:
    .. [polygon] http://geojson.org/geojson-spec.html#polygon
    """
    # Normalize skip_users to a set for O(1) membership tests.
    skip_users = set() if skip_users is None else set(skip_users)
    counts = defaultdict(int)
    skipped = 0
    for tweet in tweets_in_region(collection, geometry):
        if tweet['user']['id'] in skip_users:
            skipped += 1
            continue
        for hashtag in tweet['hashtags']:
            counts[hashtag] += 1
    return counts, skipped
def user_counts_in(collection, geometry, skip_users=None):
    """
    Returns user tweet counts for all tweets in a geometry.

    Parameters
    ----------
    collection : MongoDB collection
        The collection of tweets.
    geometry : GeoJSON-like geometry or list
        The GeoJSON-like object representing the desired geometry.
        This can be a Polygon or MultiPolygon. For a list, the [polygon]_
        should be a list of LinearRing coordinate arrays. A LinearRing
        coordinate array is a list of (longitude, latitude) pairs that is
        closed---the first and last point must be the same.
    skip_users : list of int
        A list of Twitter user ids. Any tweet from these user ids will be
        skipped and not included in the counts.

    Returns
    -------
    counts : defaultdict
        The counts for each user. Keys are user ids, values are counts.
    skipped : int
        The number of tweets that were not counted, due to `skip_users`.

    Examples
    --------
    >>> seattle = [
    ...     [[-122.4596959,47.4810022],
    ...     [-122.4596959,47.7341388],
    ...     [-122.2244329,47.7341388],
    ...     [-122.2244329,47.4810022],
    ...     [-122.4596959,47.4810022]],
    ... ]
    ...
    >>> counts, skipped = user_counts_in(collection, seattle)

    Notes
    -----
    This relies on MongoDB's geospatial queries.
    A '2dsphere' index on the collection will speed this up.
    For more information on geojson polygons:
    .. [polygon] http://geojson.org/geojson-spec.html#polygon
    """
    # Normalize skip_users to a set for O(1) membership tests.
    skip_users = set() if skip_users is None else set(skip_users)
    counts = defaultdict(int)
    skipped = 0
    for tweet in tweets_in_region(collection, geometry):
        # Hoist the nested lookup: it is needed by both branches.
        user_id = tweet['user']['id']
        if user_id in skip_users:
            skipped += 1
        else:
            counts[user_id] += 1
    return counts, skipped
|
"""
---OK---
"""
from collections import OrderedDict
import copy
import numpy as np
from crystalpy.examples.Values import Interval
class PlotData1D(object):
    """
    Represents a 1D plot: the graph data (x, y) together with related
    information (titles, axis ranges, free-form plot info entries).
    """
    def __init__(self, title, title_x_axis, title_y_axis):
        """
        Constructor.
        :param title: Plot title.
        :param title_x_axis: X axis' title.
        :param title_y_axis: Y axis' title.
        """
        # Set titles.
        self.title = title
        self.title_x_axis = title_x_axis
        self.title_y_axis = title_y_axis
        # Initialize X and Y ranges.
        self.x_min = None
        self.x_max = None
        self.y_min = None
        self.y_max = None
        # Initialize X and Y data (set later via set_x/set_y/add_xy_point).
        self.x = None
        self.y = None
        # Initialize plot information to empty ordered dictionary
        # (insertion order of add_plot_info calls is preserved).
        self._plot_info = OrderedDict()
    def set_x_min(self, x_min):
        """
        Sets x range minimum.
        :param x_min: X range minimum.
        """
        self.x_min = x_min
    def set_x_max(self, x_max):
        """
        Sets X range maximum.
        :param x_max: X range maximum.
        """
        self.x_max = x_max
    def set_y_min(self, y_min):
        """
        Sets Y range minimum.
        :param y_min: Y range minimum.
        """
        self.y_min = y_min
    def set_y_max(self, y_max):
        """
        Sets Y range maximum.
        :param y_max: Y range maximum.
        """
        self.y_max = y_max
    def set_x(self, x):
        """
        Sets X data.
        :param x: x data.
        """
        self.x = x
    def set_y(self, y):
        """
        Sets Y data.
        :param y: y data.
        """
        self.y = y
    def _set_interval_to_zero(self, indices, lower=True, upper=True):
        """
        Sets the y's to zero in a certain interval of x's (extrema included).
        :param indices: pair with the two extrema of the x interval
                        (an object exposing .inf and .sup integer indices).
        :param lower: if True include the lower end of the interval.
        :param upper: if True include the upper end of the interval.
        """
        try:
            inf_index = indices.inf
            sup_index = indices.sup
            # adjust the indices according to the lower and upper parameters.
            if not lower:
                inf_index += 1
            if not upper:
                sup_index -= 1
            # in the index range defined by inf_index and sup_index, set the y's to zero.
            for i in range(inf_index, sup_index + 1):
                self.y[i] = 0
        except TypeError:
            # inf/sup were not usable as integers (e.g. None) -> nothing to zero out.
            print("\nERROR: could not set the values to zero in the specified intervals.\n")
    def _unwrap_interval(self, indices, deg, lower=True, upper=True):
        """
        Unwraps the y data vector in a certain interval.
        :param indices: indices determining the interval to unwrap.
        :param deg: True if values are in degrees. False if radians.
        :param lower: if True include the lower end of the interval.
        :param upper: if True include the upper end of the interval.
        """
        inf_index = indices.inf
        sup_index = indices.sup
        # adjust the indices according to the lower and upper parameters.
        if not lower:
            inf_index += 1
        if not upper:
            sup_index -= 1
        # numpy.unwrap works on data in radians, so if the data is in degrees,
        # it needs to be converted first (and converted back afterwards).
        if deg:
            self.y = np.deg2rad(self.y)
            # cut out the part to unwrap and then stitch it back on.
            temp = self.y[inf_index:sup_index + 1]
            self.y[inf_index:sup_index + 1] = np.unwrap(temp)
            # convert back to degrees.
            self.y = np.rad2deg(self.y)
            return
        # radians case: cut out the part to unwrap and then stitch it back on.
        temp = self.y[inf_index:sup_index + 1]
        self.y[inf_index:sup_index + 1] = np.unwrap(temp)
    def _optimize_interval(self, indices, phase_limits):
        """
        Takes an interval and restricts it so that the extrema match the points where the phase
        becomes bigger(smaller) than some upper(lower) limit.
        :param indices: indices corresponding to the interval to be optimized.
        :param phase_limits: the limits of the phase to be used for the optimization, [min, max].
        :return: indices of the optimized interval (or the original ``indices``
                 unchanged when optimization is not possible).
        """
        inf = indices.inf
        sup = indices.sup
        # check the intervals: both endpoints must already lie within the
        # phase limits, otherwise there is nothing to shrink towards.
        if (self.y[inf] > phase_limits[1] or
                self.y[inf] < phase_limits[0]):
            print("\nERROR in PlotData1D._optimize_interval: First value in the interval exceeds limitations.")
            return indices
        if (self.y[sup] > phase_limits[1] or
                self.y[sup] < phase_limits[0]):
            print("\nERROR in PlotData1D._optimize_interval: Last value in the interval exceeds limitations.")
            return indices
        # starting from the lower end, walk right while y stays in limits.
        i = inf  # counter initialization.
        while phase_limits[0] < self.y[i] < phase_limits[1]:
            i += 1
        # if the conditions are not satisfied for index i:
        new_inf = i - 1
        # starting from the upper end, walk left while y stays in limits.
        i = sup  # counter initialization.
        while phase_limits[0] < self.y[i] < phase_limits[1]:
            i -= 1
        # if the conditions are not satisfied for index i:
        new_sup = i + 1
        new_indices = Interval(new_inf, new_sup)
        # check that the inf is smaller than (or equal to) the sup.
        if not new_indices.check_extrema():
            print("\nERROR in PlotData1D._optimize_interval: The phase might be undersampled.")
            return indices
        return new_indices
    def smart_unwrap(self, intervals, intervals_number, phase_limits, deg):
        """
        Unwraps data correctly by avoiding discontinuities: the given
        intervals are zeroed out, and the data before/between/after them
        is unwrapped piecewise.
        :param intervals: list of pairs. Each element is a pair with the two extrema of the x interval.
        :param intervals_number: number of intervals to set to zero.
        :param phase_limits: min and max tolerable values for the phase plot, [min, max].
        :param deg: True if values are in degrees. False if radians.
        """
        if intervals_number == 0:
            # No intervals to zero: plain global unwrap.
            if deg:
                self.y = np.deg2rad(self.y)  # unwrap works with radians.
                self.y = np.unwrap(self.y)
                self.y = np.rad2deg(self.y)  # convert back to degrees.
                return
            self.y = np.unwrap(self.y)
            return
        # transform self.x into a numpy.ndarray object.
        x = np.asarray(self.x)
        # careful! only works with monotonic sequences.
        temp_index = x.argmin()
        for interval in intervals:
            inf = interval.inf
            sup = interval.sup
            # find the indices of the y array corresponding to inf and sup.
            inf_index = abs(x - inf).argmin()
            sup_index = abs(x - sup).argmin()
            # optimize the interval.
            indices = Interval(inf_index, sup_index)
            new_indices = self._optimize_interval(indices, phase_limits)
            # unwrap the data before the interval.
            indices_to_unwrap = Interval(temp_index, new_indices.inf)
            self._unwrap_interval(indices_to_unwrap, deg, lower=True, upper=False)
            # set the interval to zero.
            indices_to_set = new_indices
            self._set_interval_to_zero(indices_to_set, lower=True, upper=False)
            temp_index = new_indices.sup
        # unwrap the tail after the last interval.
        # careful! only works with monotonic sequences.
        indices_to_unwrap = Interval(temp_index, x.argmax())
        self._unwrap_interval(indices_to_unwrap, deg, lower=True, upper=True)
    def add_xy_point(self, x_point, y_point):
        """
        Adds an x-y point (requires self.x and self.y to be lists).
        :param x_point: x coordinate.
        :param y_point: y coordinate.
        """
        self.x.append(x_point)
        self.y.append(y_point)
    def add_plot_info(self, name, info):
        """
        Adds a plot info.
        :param name: Name of the info.
        :param info: The info.
        """
        self._plot_info[name] = info
    def plot_info(self):
        """
        Returns a deep copy of the plot info, so callers cannot mutate
        the internal dictionary.
        :return: The plot info.
        """
        return copy.deepcopy(self._plot_info)
|
import sys
# Make the CTF infrastructure API importable from the Vagrant share.
sys.path.append("/vagrant/ctf-infrastructure/api")
import api
from datetime import datetime, timedelta
# Open the competition window: it starts now and runs for exactly one week.
settings = api.config.get_settings()
settings["start_time"] = datetime.now()  # NOTE(review): naive local time -- confirm the API does not expect UTC/aware datetimes
settings["end_time"] = settings["start_time"] + timedelta(weeks=1)
api.config.change_settings(settings)
print("Started the competition")
|
import gzip
import io
import os
from datetime import datetime
from azure.storage.blob import BlockBlobService
class BlobHelper:
    """Small convenience wrapper around Azure Block Blob storage.

    Credentials are read from the ``AzureStorageAccountName`` and
    ``AzureStorageAccountKey`` environment variables.
    """

    def __init__(self, blob=None):
        """Create the blob service client.

        :param blob: optional source blob (an object with a ``name``
            attribute) used by :meth:`create_output_blob` and
            :meth:`get_destination_blob_name`.
        :raises KeyError: if the credential environment variables are unset.
        """
        account_name = os.environ["AzureStorageAccountName"]
        account_key = os.environ["AzureStorageAccountKey"]
        self.blob_service = BlockBlobService(
            account_name=account_name, account_key=account_key
        )
        self.blob = blob

    def create_output_blob(self, destination_container_name):
        """Server-side copy of ``self.blob`` into the destination container
        under a timestamped name.

        :param destination_container_name: target container name.
        """
        source_url = os.environ["StorageUrl"] + self.blob.name
        destination_blob_name = self.get_destination_blob_name()
        self.blob_service.copy_blob(
            container_name=destination_container_name,
            blob_name=destination_blob_name,
            copy_source=source_url,
        )

    def get_destination_blob_name(self):
        """Return ``<YYYYmmdd-HHMMSS>-<filename>`` for ``self.blob``.

        :return: timestamped blob name string.
        """
        # Assumes blob names look like '<prefix>/<filename>' with exactly one
        # '/' -- a deeper path would select the middle segment, not the file
        # name. TODO(review): confirm the upstream naming convention.
        blob_filename = self.blob.name.split("/")[1]
        datetime_str = datetime.today().strftime("%Y%m%d-%H%M%S")
        return f"{datetime_str}-{blob_filename}"

    def get_str_file(self, storage_container_name, storage_blob_name):
        """Download a gzip-compressed blob and return its decoded text.

        :param storage_container_name: container holding the blob.
        :param storage_blob_name: name of the gzip-compressed blob.
        :return: the decompressed file contents as ``str``.
        """
        compressed_file = io.BytesIO()
        self.blob_service.get_blob_to_stream(
            storage_container_name,
            storage_blob_name,
            compressed_file,
            max_connections=1
        )
        compressed_file.seek(0)
        # Context managers close both streams even if decompression fails
        # (the previous version leaked them on error).
        with compressed_file, gzip.GzipFile(fileobj=compressed_file) as gz:
            decompressed_file = gz.read()
        # 'utf-8-sig' strips a leading BOM if one is present.
        return decompressed_file.decode("utf-8-sig")

    def write_stream_file(self, storage_container_name, storage_blob_name, encoded_file):
        """Upload raw bytes as a blob.

        :param storage_container_name: target container.
        :param storage_blob_name: name for the new blob.
        :param encoded_file: the ``bytes`` payload to upload.
        """
        self.blob_service.create_blob_from_bytes(
            storage_container_name,
            storage_blob_name,
            encoded_file,
            max_connections=1
        )
|
# Prints the terms of an arithmetic progression (PA), 10 at a time.
# After each batch the user chooses how many more terms to show;
# entering 0 ends the program.
primeiro = int(input('Digite o primeiro Termo: '))
razao = int(input('Digite a razão: '))  # fixed mojibake: 'razรฃo' -> 'razão'
c = 0
termos = 10
while c < termos:
    print(primeiro)
    primeiro += razao  # advance to the next term of the progression
    c += 1
    if c == termos:
        # batch finished: ask for the size of the next batch (0 quits).
        # NOTE(review): 'tempos' in the prompt is likely a typo for 'termos'
        # -- left unchanged since it is user-facing text.
        termos = int(input('Quantos tempos deseja mostrar Mais: '))
        c = 0
print('PROGRAMA ENCERRADO')
|
from markdown import markdown
import os
def render_markdown(file_name, dir_path='app/templates'):
    """Read a Markdown file and return it rendered as HTML.

    :param file_name: name of the file inside ``dir_path``.
    :param dir_path: directory containing the template files.
    :return: HTML string produced by ``markdown``.
    """
    # os.path.join creates an OS-valid path.
    path = os.path.join(dir_path, file_name)
    # Explicit utf-8 so rendering does not depend on the platform default.
    with open(path, encoding='utf-8') as md_file:
        text = md_file.read()
    return markdown(text)
def render_my_html(file_name, dir_path='app/templates'):
    """Read an HTML file from ``dir_path`` and return its raw contents.

    :param file_name: name of the file inside ``dir_path``.
    :param dir_path: directory containing the template files.
    :return: the file contents as a string.
    """
    path = os.path.join(dir_path, file_name)
    # Explicit utf-8 so the result does not depend on the platform default.
    with open(path, encoding='utf-8') as html_file:
        return html_file.read()
import os
import time
import datetime
import math
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
from collections import namedtuple
from more_itertools import windowed
import dill as pickle
import json
from tqdm import tqdm
import torch
import torch.nn.functional as F
from utils.parameters import *
# Transition object
from utils.torch_utils import ExpertTransition
plt.style.use('ggplot')
class Logger(object):
    '''
    Logger for train/test runs.

    Tracks per-step/per-episode rewards, losses and TD errors, writes plots
    and .npy dumps into a timestamped session directory, and can save/restore
    full training checkpoints (agent + replay buffer + RNG states).

    Args:
      - log_dir: Directory to write log
      - num_envs: Number of environments running concurrently
    '''
    def __init__(self, log_dir, env, mode, num_envs, max_train_step, gamma, log_dir_sub=None):
        # Logging variables
        self.env = env
        self.mode = mode
        self.max_train_step = max_train_step
        self.num_envs = num_envs
        self.gamma = gamma
        # Create directory in the logging directory
        timestamp = time.time()
        timestamp = datetime.datetime.fromtimestamp(timestamp)
        if not log_dir_sub:
            self.base_dir = os.path.join(log_dir, '{}_{}_{}'.format(self.mode, self.env, timestamp.strftime('%Y-%m-%d.%H:%M:%S')))
        else:
            self.base_dir = os.path.join(log_dir, log_dir_sub)
        print('Creating logging session at: {}'.format(self.base_dir))
        # Create subdirs to save important run info
        self.info_dir = os.path.join(self.base_dir, 'info')
        self.models_dir = os.path.join(self.base_dir, 'models')
        self.checkpoint_dir = os.path.join(self.base_dir, 'checkpoint')
        os.makedirs(self.info_dir)
        os.makedirs(self.models_dir)
        os.makedirs(self.checkpoint_dir)
        # Variables to hold episode information
        # self.episode_rewards = np.zeros(self.num_envs)
        # Per-env reward history for the episode currently in progress.
        self.episode_rewards = [[] for _ in range(self.num_envs)]
        self.num_steps = 0
        self.num_training_steps = 0
        self.num_episodes = 0
        # Discounted return of every finished episode.
        self.rewards = list()
        self.losses = list()
        self.steps_left = list()
        self.td_errors = list()
        self.expert_samples = list()
        self.eval_rewards = list()
        # Buffer of transitions
        self.transitions = list()
    def stepBookkeeping(self, rewards, step_lefts, done_masks):
        '''Record per-step rewards; on episode end, append the discounted return.'''
        for i, r in enumerate(rewards.reshape(-1)):
            self.episode_rewards[i].append(r)
        # self.episode_rewards += rewards.squeeze()
        self.num_episodes += int(np.sum(done_masks))
        for i, d in enumerate(done_masks.astype(bool)):
            if d:
                # Compute the gamma-discounted return for env i, then reset it.
                R = 0
                for r in reversed(self.episode_rewards[i]):
                    R = r + self.gamma * R
                self.rewards.append(R)
                self.episode_rewards[i] = []
        # self.rewards.extend(self.episode_rewards[done_masks.astype(bool)])
        self.steps_left.extend(step_lefts[done_masks.astype(bool)])
        # self.episode_rewards[done_masks.astype(bool)] = 0.
    def trainingBookkeeping(self, loss, td_error):
        '''Record the loss and TD error of one training step.'''
        self.losses.append(loss)
        self.td_errors.append(td_error)
    def tdErrorBookkeeping(self, td_error):
        '''Record a TD error without an associated loss value.'''
        self.td_errors.append(td_error)
    def close(self):
        ''' Close the logger and save the logging information '''
        # self.saveLearningCurve()
        # self.saveLossCurve()
        self.saveRewards()
        self.saveLosses()
        self.saveTdErrors()
    def getCurrentAvgReward(self, n=100, starting=0):
        ''' Get the average reward for the last n episodes '''
        if not self.rewards:
            return 0.0
        # Average over at most the last n episodes, but never before `starting`.
        starting = max(starting, len(self.rewards)-n)
        return np.mean(self.rewards[starting:])
        # return np.mean(self.rewards[-n:]) if self.rewards else 0.0
    def getCurrentLoss(self):
        ''' Get the most recent loss. '''
        if not self.losses:
            return 0.0
        current_loss = self.losses[-1]
        # A loss entry may be a plain float or a sequence of per-head losses.
        if type(current_loss) is float:
            return current_loss
        else:
            return np.mean(current_loss)
    def saveLearningCurve(self, n=100):
        ''' Plot the rewards over timesteps and save to logging dir '''
        n = min(n, len(self.rewards))
        if n > 0:
            # Smooth the curve with a sliding window of size n.
            avg_reward = np.mean(list(windowed(self.rewards, n)), axis=1)
            xs = np.arange(n, (len(avg_reward))+n)
            plt.plot(xs, np.mean(list(windowed(self.rewards, n)), axis=1))
            plt.savefig(os.path.join(self.info_dir, 'learning_curve.pdf'))
            plt.close()
    def saveStepLeftCurve(self, n=100):
        '''Plot the window-averaged steps-left curve and save it as a PDF.'''
        n = min(n, len(self.steps_left))
        if n > 0:
            plt.plot(np.mean(list(windowed(self.steps_left, n)), axis=1))
            plt.savefig(os.path.join(self.info_dir, 'steps_left_curve.pdf'))
            plt.close()
    def saveLossCurve(self, n=100):
        '''Plot window-averaged loss curves (linear and log scale) as PDFs.'''
        losses = np.array(self.losses)
        if len(losses) < n:
            return
        # 1-D losses become a single curve; 2-D (multi-head) losses are
        # transposed so each head is plotted separately.
        if len(losses.shape) == 1:
            losses = np.expand_dims(losses, 0)
        else:
            losses = np.moveaxis(losses, 1, 0)
        for loss in losses:
            plt.plot(np.mean(list(windowed(loss, n)), axis=1))
        plt.savefig(os.path.join(self.info_dir, 'loss_curve.pdf'))
        plt.yscale('log')
        plt.savefig(os.path.join(self.info_dir, 'loss_curve_log.pdf'))
        plt.close()
    def saveTdErrorCurve(self, n=100):
        '''Plot the window-averaged TD-error curve (log scale) as a PDF.'''
        n = min(n, len(self.td_errors))
        if n > 0:
            plt.plot(np.mean(list(windowed(self.td_errors, n)), axis=1))
            plt.yscale('log')
            plt.savefig(os.path.join(self.info_dir, 'td_error_curve.pdf'))
            plt.close()
    def saveEvalCurve(self):
        '''Plot evaluation rewards against training steps and save as a PDF.'''
        if len(self.eval_rewards) > 0:
            # NOTE(review): eval_freq comes from the star import of
            # utils.parameters — confirm it matches the evaluation cadence.
            xs = np.arange(eval_freq, (len(self.eval_rewards)+1) * eval_freq, eval_freq)
            plt.plot(xs, self.eval_rewards)
            plt.savefig(os.path.join(self.info_dir, 'eval_curve.pdf'))
            plt.close()
    def saveModel(self, iteration, name, agent):
        '''
        Save PyTorch model to log directory

        Args:
          - iteration: Interation of the current run
          - name: Name to save model as
          - agent: Agent containing model to save
        '''
        agent.saveModel(os.path.join(self.models_dir, 'snapshot_{}'.format(name)))
    def saveRewards(self):
        '''Dump episode returns to info/rewards.npy.'''
        np.save(os.path.join(self.info_dir, 'rewards.npy'), self.rewards)
    def saveLosses(self):
        '''Dump losses to info/losses.npy.'''
        np.save(os.path.join(self.info_dir, 'losses.npy'), self.losses)
    def saveTdErrors(self):
        '''Dump TD errors to info/td_errors.npy.'''
        np.save(os.path.join(self.info_dir, 'td_errors.npy'), self.td_errors)
    def saveCandidateSchedule(self, schedule):
        '''Dump a candidate schedule to info/schedule.npy.'''
        np.save(os.path.join(self.info_dir, 'schedule.npy'), schedule)
    def saveEvalRewards(self):
        '''Dump evaluation rewards to info/eval_rewards.npy.'''
        np.save(os.path.join(self.info_dir, 'eval_rewards.npy'), self.eval_rewards)
    def saveParameters(self, parameters):
        '''Serialize run parameters to info/parameters.json (ndarrays -> lists).'''
        class NumpyEncoder(json.JSONEncoder):
            def default(self, obj):
                if isinstance(obj, np.ndarray):
                    return obj.tolist()
                return json.JSONEncoder.default(self, obj)
        with open(os.path.join(self.info_dir, "parameters.json"), 'w') as f:
            json.dump(parameters, f, cls=NumpyEncoder)
    def saveBuffer(self, buffer):
        '''Save the replay buffer state to checkpoint/buffer.pt.'''
        print('saving buffer')
        torch.save(buffer.getSaveState(), os.path.join(self.checkpoint_dir, 'buffer.pt'))
    def loadBuffer(self, buffer, path, max_n=1000000):
        '''Refill *buffer* with up to max_n transitions saved at *path*.'''
        print('loading buffer: '+path)
        load = torch.load(path)
        # NOTE(review): no_bar comes from the utils.parameters star import.
        if not no_bar:
            loop = tqdm(range(len(load['storage'])))
        else:
            loop = range(len(load['storage']))
        for i in loop:
            if i == max_n:
                break
            t = load['storage'][i]
            buffer.add(t)
    def saveCheckPoint(self, args, envs, agent, buffer):
        '''Save a full training checkpoint: envs, agent, buffer, logger stats and RNG states.'''
        envs_save_path = os.path.join(self.checkpoint_dir, 'envs')
        envs.saveToFile(envs_save_path)
        checkpoint = {
            'args': args.__dict__,
            'agent': agent.getSaveState(),
            'buffer_state': buffer.getSaveState(),
            'logger':{
                'env': self.env,
                'num_envs': self.num_envs,
                'max_train_step': self.max_train_step,
                'episode_rewards': self.episode_rewards,
                'num_steps': self.num_steps,
                'num_training_steps': self.num_training_steps,
                'num_episodes': self.num_episodes,
                'rewards': self.rewards,
                'losses': self.losses,
                'steps_left': self.steps_left,
                'td_errors': self.td_errors,
                'expert_samples': self.expert_samples,
                'eval_rewards': self.eval_rewards,
            },
            'torch_rng_state': torch.get_rng_state(),
            'torch_cuda_rng_state': torch.cuda.get_rng_state(),
            'np_rng_state': np.random.get_state()
        }
        if hasattr(agent, 'his'):
            checkpoint.update({'agent_his': agent.his})
        torch.save(checkpoint, os.path.join(self.checkpoint_dir, 'checkpoint.pt'))
    def loadCheckPoint(self, checkpoint_dir, envs, agent, buffer):
        '''Restore agent, buffer, logger stats and RNG states from a checkpoint directory.'''
        print('loading checkpoint')
        checkpoint = torch.load(os.path.join(checkpoint_dir, 'checkpoint.pt'))
        print('agent loading')
        agent.loadFromState(checkpoint['agent'])
        print('buffer loading')
        buffer.loadFromState(checkpoint['buffer_state'])
        print('logger loading')
        self.env = checkpoint['logger']['env']
        self.num_envs = checkpoint['logger']['num_envs']
        # self.max_episode = checkpoint['logger']['max_episode']
        self.episode_rewards = checkpoint['logger']['episode_rewards']
        self.num_steps = checkpoint['logger']['num_steps']
        self.num_training_steps = checkpoint['logger']['num_training_steps']
        self.num_episodes = checkpoint['logger']['num_episodes']
        self.rewards = checkpoint['logger']['rewards']
        self.losses = checkpoint['logger']['losses']
        self.steps_left = checkpoint['logger']['steps_left']
        self.td_errors =checkpoint['logger']['td_errors']
        self.expert_samples = checkpoint['logger']['expert_samples']
        self.eval_rewards = checkpoint['logger']['eval_rewards']
        torch.set_rng_state(checkpoint['torch_rng_state'])
        torch.cuda.set_rng_state(checkpoint['torch_cuda_rng_state'])
        np.random.set_state(checkpoint['np_rng_state'])
        if hasattr(agent, 'his'):
            agent.his = checkpoint['agent_his']
        # envs_save_path = os.path.join(checkpoint_dir, 'envs')
        # success = envs.loadFromFile(envs_save_path)
        # if not success:
        #     raise EnvironmentError
        print('loaded checkpoint')
    def expertSampleBookkeeping(self, expert_ratio):
        '''Record the fraction of expert samples used in a training batch.'''
        self.expert_samples.append(expert_ratio)
    def saveExpertSampleCurve(self, n=100):
        '''Plot the window-averaged expert-sample ratio and save it as a PDF.'''
        n = min(n, len(self.expert_samples))
        if n > 0:
            plt.plot(np.mean(list(windowed(self.expert_samples, n)), axis=1))
            plt.savefig(os.path.join(self.info_dir, 'expert_sample_curve.pdf'))
            plt.close()
    def saveResult(self):
        '''Dump final rewards/losses/td_errors/eval_rewards into a result/ subdir.'''
        result_dir = os.path.join(self.base_dir, 'result')
        os.makedirs(result_dir)
        np.save(os.path.join(result_dir, 'rewards.npy'), self.rewards)
        np.save(os.path.join(result_dir, 'losses.npy'), self.losses)
        np.save(os.path.join(result_dir, 'td_errors.npy'), self.td_errors)
        np.save(os.path.join(result_dir, 'eval_rewards.npy'), self.eval_rewards)
|
from transformer import cli
# Script entry point: dispatch to the transformer CLI command group.
if __name__ == '__main__':
    cli.main_group()
|
# Copyright 2014 hm authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
from hm import config
# Registry mapping manager name -> manager class, populated via register().
_managers = {}
# Methods every load-balancer manager class is required to provide.
_expected_methods = ['create_load_balancer', 'destroy_load_balancer', 'attach_real', 'detach_real']
def register(name, cls):
    """Register *cls* under *name* in the manager registry.

    Raises InvalidLBManager if the class is missing any expected method.
    """
    missing = [m for m in _expected_methods if not getattr(cls, m, None)]
    if missing:
        raise InvalidLBManager("Expected method '{}' not found in {}".format(missing[0], cls))
    _managers[name] = cls
def by_name(name, conf=None):
    """Instantiate the registered manager for *name* with configuration *conf*."""
    manager_cls = _managers[name]
    return manager_cls(conf)
class BaseLBManager(object):
    """Common base class for load-balancer managers; wraps config lookup."""
    def __init__(self, conf):
        # conf may be None; fall back to an empty configuration dict.
        self.config = conf or {}
    def get_conf(self, name, default=config.undefined):
        """Look up *name* in this manager's config, falling back to *default*."""
        return config.get_config(name, default, self.config)
class LBConfig(object):
    """Plain value object describing a load-balancer configuration.

    Any keyword argument passed to the constructor overrides the matching
    class-level default (which is None for every field).
    """
    environment_p44 = None
    client = None
    finality = None
    healthcheck = None
    healthcheck_expect = None
    cache = None
    lb_method = None
    persistence = None
    maxconn = None
    def __init__(self, **kwargs):
        # Copy every provided keyword onto the instance, shadowing the defaults.
        for attr, value in kwargs.items():
            setattr(self, attr, value)
class InvalidLBManager(Exception):
    """Raised when a manager class is missing one of the expected methods."""
    pass
|
from .db import db
class User_Group(db.Model):
    """Association table linking users to groups (many-to-many join rows)."""
    __tablename__ = 'users_groups'
    id = db.Column(db.Integer, primary_key=True)
    user_id = db.Column(db.Integer, db.ForeignKey("users.id"), nullable=False)
    groups_id = db.Column(db.Integer, db.ForeignKey(
        "groups.id"), nullable=False)
    def to_dict(self):
        """Serialize this association row into a plain dict."""
        return{
            "id": self.id,
            "user_id": self.user_id,
            "groups_id": self.groups_id
        }
|
print(input("Enter string to be reversed: ")[::-1])
|
import unittest
from monocliche.src.Board import Board
from monocliche.src.Game import Game
from monocliche.src.enum.GameStatus import GameStatus
class GameTest(unittest.TestCase):
    """Unit tests for Game: player management, turns and game lifecycle."""
    def test_add_player(self):
        """Players can be added while the game is NEW; fails once RUNNING/COMPLETED."""
        game = Game()
        player = game.add_player("Name")
        self.assertEqual("Name", player.name)
        self.assertEqual(1, game.players.size)
        # add_player must recreate the player collection if it was cleared.
        game.players = None
        player = game.add_player("Name")
        self.assertIsNotNone(game.players)
        self.assertEqual("Name", player.name)
        self.assertEqual(1, game.players.size)
        player2 = game.add_player("Two")
        self.assertEqual("Two", player2.name)
        self.assertEqual(2, game.players.size)
        game.status = GameStatus.RUNNING
        self.assertRaises(Exception, game.add_player, "Test")
        game.status = GameStatus.COMPLETED
        self.assertRaises(Exception, game.add_player, "Test")
    def test_remove_player(self):
        """Players can only be removed while the game status is NEW."""
        game = Game()
        player = game.add_player("test")
        game.status = GameStatus.RUNNING
        self.assertRaises(Exception, game.remove_player, player)
        game.status = GameStatus.NEW
        game.remove_player(player)
        self.assertEqual(0, game.players.size)
        self.assertRaises(Exception, game.remove_player)
    def test_passes_turn(self):
        """passes_turn hands the turn to the next player and flips my_turn flags."""
        game = Game()
        player1 = game.add_player("One")
        player2 = game.add_player("Two")
        # Necessary to set the starting player, this would be done when the game was started
        game.players.next_player()
        player_turn = game.passes_turn()
        self.assertEqual(player_turn, player2)
        self.assertIs(True, player2.my_turn)
        player_turn = game.passes_turn()
        self.assertEqual(player_turn, player1)
        self.assertIs(False, player2.my_turn)
        self.assertIs(True, player1.my_turn)
        pass
    def test_start_game(self):
        """start_game selects the current player and moves the status to RUNNING."""
        game = Game()
        player = game.add_player("One")
        self.assertIsNone(game.players.current_player)
        game.start_game()
        self.assertIsNotNone(game.players.current_player)
        self.assertEqual(player, game.players.current_player)
        self.assertEqual(GameStatus.RUNNING, game.status)
    def test_update_game_status(self):
        """update_game_status stores the given status on the game."""
        game = Game()
        game.update_game_status(GameStatus.RUNNING)
        self.assertEqual(GameStatus.RUNNING, game.status)
        game.update_game_status(GameStatus.COMPLETED)
        self.assertEqual(GameStatus.COMPLETED, game.status)
    def test_end_game(self):
        """end_game marks the game COMPLETED."""
        game = Game()
        game.end_game()
        self.assertEqual(GameStatus.COMPLETED, game.status)
    def test_check_game_is_over(self):
        """check_game_is_over reflects whether the status is COMPLETED."""
        game = Game()
        self.assertFalse(game.check_game_is_over())
        game.status = GameStatus.COMPLETED
        self.assertTrue(game.check_game_is_over())
    def test_extract_non_bankrupt_player(self):
        """The remaining solvent player is returned when the other goes bankrupt."""
        game = Game()
        player1 = game.add_player("One")
        player2 = game.add_player("Two")
        player1.bankrupt = True
        self.assertEqual(player2, game.players.extract_non_bankrupt_player())
    def test_complete_match(self):
        """complete_match declares the richest player the winner and ends the game."""
        game = Game()
        player1 = game.add_player("One")
        player2 = game.add_player("Two")
        player1.budget = 100
        player2.budget = 50
        self.assertEqual(player1, game.complete_match())
        self.assertEqual(GameStatus.COMPLETED, game.status)
    def test_roll_dice(self):
        """roll_dice advances the player; three doubles in a row sends them to jail."""
        game = Game()
        game.board = Board()
        game.board.initialize_board()
        player1 = game.add_player("One")
        # Initialize the first player.
        game.players.next_player()
        game.roll_dice()
        self.assertEqual(player1.position, game.dice_roll_result.dice_result)
        player1.position = 0
        # Forces the inside counter to have 3 so that on the next roll the player has to go to jail.
        game.dice_roll_result.update_dice_result(1, 1)
        game.dice_roll_result.update_dice_result(1, 1)
        game.dice_roll_result.update_dice_result(1, 1)
        game.roll_dice()
        self.assertTrue(player1.in_jail)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
|
"""
Dask.DataFrame IO.
"""
from typing import Any, Callable, Dict, Iterable, Optional, Union
import dask
import dask.bag as db
import dask.dataframe as dd
import simplekv
from dask.delayed import Delayed
from kartothek.api.discover import discover_datasets_unchecked
from kartothek.core.cube.cube import Cube
from kartothek.core.docs import default_docs
from kartothek.io.dask.common_cube import (
append_to_cube_from_bag_internal,
extend_cube_from_bag_internal,
query_cube_bag_internal,
)
from kartothek.io.dask.dataframe import store_dataset_from_ddf
from kartothek.io_components.cube.common import check_store_factory
from kartothek.io_components.cube.write import (
apply_postwrite_checks,
assert_dimesion_index_cols_notnull,
check_datasets_prebuild,
check_provided_metadata_dict,
check_user_df,
prepare_ktk_metadata,
prepare_ktk_partition_on,
)
# Public API of this module.
__all__ = (
    "append_to_cube_from_dataframe",
    "build_cube_from_dataframe",
    "extend_cube_from_dataframe",
    "query_cube_dataframe",
)
@default_docs
def build_cube_from_dataframe(
    data: Union[dd.DataFrame, Dict[str, dd.DataFrame]],
    cube: Cube,
    store: Callable[[], simplekv.KeyValueStore],
    metadata: Optional[Dict[str, Dict[str, Any]]] = None,
    overwrite: bool = False,
    partition_on: Optional[Dict[str, Iterable[str]]] = None,
    shuffle: bool = False,
    num_buckets: int = 1,
    bucket_by: Optional[Iterable[str]] = None,
) -> Delayed:
    """
    Create dask computation graph that builds a cube with the data supplied from a dask dataframe.

    Parameters
    ----------
    data:
        Data that should be written to the cube. If only a single dataframe is given, it is assumed to be the seed
        dataset.
    cube:
        Cube specification.
    store:
        Store to which the data should be written to.
    metadata:
        Metadata for every dataset.
    overwrite:
        If possibly existing datasets should be overwritten.
    partition_on:
        Optional parition-on attributes for datasets (dictionary mapping :term:`Dataset ID` -> columns).
        See :ref:`Dimensionality and Partitioning Details` for details.
    shuffle:
        Forwarded to :func:`~kartothek.io.dask.dataframe.store_dataset_from_ddf`.
    num_buckets:
        Forwarded to :func:`~kartothek.io.dask.dataframe.store_dataset_from_ddf`.
    bucket_by:
        Forwarded to :func:`~kartothek.io.dask.dataframe.store_dataset_from_ddf`.

    Returns
    -------
    metadata_dict:
        A dask delayed object containing the compute graph to build a cube returning the dict of dataset metadata
        objects.
    """
    check_store_factory(store)
    # A bare dataframe is treated as the seed dataset of the cube.
    if not isinstance(data, dict):
        data = {cube.seed_dataset: data}
    ktk_cube_dataset_ids = sorted(data.keys())
    metadata = check_provided_metadata_dict(metadata, ktk_cube_dataset_ids)
    existing_datasets = discover_datasets_unchecked(cube.uuid_prefix, store)
    check_datasets_prebuild(ktk_cube_dataset_ids, cube, existing_datasets)
    # Normalize per-dataset partition-on settings; only the checked version is used below.
    partition_on_checked = prepare_ktk_partition_on(
        cube, ktk_cube_dataset_ids, partition_on
    )
    del partition_on
    dct = {}
    for table_name, ddf in data.items():
        check_user_df(table_name, ddf, cube, set(), partition_on_checked[table_name])
        # Secondary indices: cube index columns present in this dataframe; the
        # seed dataset additionally indexes its dimension columns (minus any
        # suppressed ones); partition columns never get a secondary index.
        indices_to_build = set(cube.index_columns) & set(ddf.columns)
        if table_name == cube.seed_dataset:
            indices_to_build |= set(cube.dimension_columns) - cube.suppress_index_on
        indices_to_build -= set(partition_on_checked[table_name])
        ddf = ddf.map_partitions(
            assert_dimesion_index_cols_notnull,
            ktk_cube_dataset_id=table_name,
            cube=cube,
            partition_on=partition_on_checked[table_name],
            meta=ddf._meta,
        )
        graph = store_dataset_from_ddf(
            ddf,
            dataset_uuid=cube.ktk_dataset_uuid(table_name),
            store=store,
            metadata=prepare_ktk_metadata(cube, table_name, metadata),
            partition_on=partition_on_checked[table_name],
            secondary_indices=sorted(indices_to_build),
            sort_partitions_by=sorted(
                (set(cube.dimension_columns) - set(cube.partition_columns))
                & set(ddf.columns)
            ),
            overwrite=overwrite,
            shuffle=shuffle,
            num_buckets=num_buckets,
            bucket_by=bucket_by,
        )
        dct[table_name] = graph
    # Run post-write consistency checks once all datasets have been stored.
    return dask.delayed(apply_postwrite_checks)(
        dct, cube=cube, store=store, existing_datasets=existing_datasets
    )
def extend_cube_from_dataframe(
    data, cube, store, metadata=None, overwrite=False, partition_on=None
):
    """
    Create dask computation graph that extends a cube by the data supplied from a dask dataframe.

    For details on ``data`` and ``metadata``, see :meth:`build_cube`.

    Parameters
    ----------
    data: Union[dask.DataFrame, Dict[str, dask.DataFrame]
        Data that should be written to the cube. If only a single dataframe is given, it is assumed to be the seed
        dataset.
    cube: kartothek.core.cube.cube.Cube
        Cube specification.
    store: simplekv.KeyValueStore
        Store to which the data should be written to.
    metadata: Optional[Dict[str, Dict[str, Any]]]
        Metadata for every dataset.
    overwrite: bool
        If possibly existing datasets should be overwritten.
    partition_on: Optional[Dict[str, Iterable[str]]]
        Optional parition-on attributes for datasets (dictionary mapping :term:`Dataset ID` -> columns).
        See :ref:`Dimensionality and Partitioning Details` for details.

    Returns
    -------
    metadata_dict: dask.bag.Bag
        A dask bag object containing the compute graph to extend a cube returning the dict of dataset metadata objects.
        The bag has a single partition with a single element.
    """
    # Convert the dataframe(s) into the bag layout the internal writer expects.
    data, ktk_cube_dataset_ids = _ddfs_to_bag(data, cube)
    return (
        extend_cube_from_bag_internal(
            data=data,
            cube=cube,
            store=store,
            ktk_cube_dataset_ids=ktk_cube_dataset_ids,
            metadata=metadata,
            overwrite=overwrite,
            partition_on=partition_on,
        )
        .map_partitions(_unpack_list, default=None)
        .to_delayed()[0]
    )
def query_cube_dataframe(
    cube,
    store,
    conditions=None,
    datasets=None,
    dimension_columns=None,
    partition_by=None,
    payload_columns=None,
):
    """
    Query cube.

    For detailed documentation, see :meth:`query_cube`.

    .. important::
        In contrast to other backends, the Dask DataFrame may contain partitions with empty DataFrames!

    Parameters
    ----------
    cube: Cube
        Cube specification.
    store: simplekv.KeyValueStore
        KV store that preserves the cube.
    conditions: Union[None, Condition, Iterable[Condition], Conjunction]
        Conditions that should be applied, optional.
    datasets: Union[None, Iterable[str], Dict[str, kartothek.core.dataset.DatasetMetadata]]
        Datasets to query, must all be part of the cube. May be either the result of :meth:`discover_datasets`, a list
        of Ktk_cube dataset ID or ``None`` (in which case auto-discovery will be used).
    dimension_columns: Union[None, str, Iterable[str]]
        Dimension columns of the query, may result in projection. If not provided, dimension columns from cube
        specification will be used.
    partition_by: Union[None, str, Iterable[str]]
        By which column logical partitions should be formed. If not provided, a single partition will be generated.
    payload_columns: Union[None, str, Iterable[str]]
        Which columns apart from ``dimension_columns`` and ``partition_by`` should be returned.

    Returns
    -------
    ddf: dask.dataframe.DataFrame
        Dask DataFrame, partitioned and order by ``partition_by``. Column of DataFrames is alphabetically ordered. Data
        types are provided on best effort (they are restored based on the preserved data, but may be different due to
        Pandas NULL-handling, e.g. integer columns may be floats).
    """
    # blocksize=1 produces one bag partition per logical query partition, so
    # each bag partition maps 1:1 onto a dask dataframe partition below.
    empty, b = query_cube_bag_internal(
        cube=cube,
        store=store,
        conditions=conditions,
        datasets=datasets,
        dimension_columns=dimension_columns,
        partition_by=partition_by,
        payload_columns=payload_columns,
        blocksize=1,
    )
    # Each bag partition holds at most one dataframe; fall back to the empty
    # frame (which also serves as the dask meta) when a partition has none.
    dfs = b.map_partitions(_unpack_list, default=empty).to_delayed()
    return dd.from_delayed(
        dfs=dfs, meta=empty, divisions=None  # TODO: figure out an API to support this
    )
def append_to_cube_from_dataframe(data, cube, store, metadata=None):
    """
    Append data to existing cube.

    For details on ``data`` and ``metadata``, see :meth:`build_cube`.

    .. important::
        Physical partitions must be updated as a whole. If only single rows within a physical partition are updated, the
        old data is treated as "removed".

    .. hint::
        To have better control over the overwrite "mask" (i.e. which partitions are overwritten), you should use
        :meth:`remove_partitions` beforehand.

    Parameters
    ----------
    data: dask.Bag
        Bag containing dataframes
    cube: kartothek.core.cube.cube.Cube
        Cube specification.
    store: simplekv.KeyValueStore
        Store to which the data should be written to.
    metadata: Optional[Dict[str, Dict[str, Any]]]
        Metadata for every dataset, optional. For every dataset, only given keys are updated/replaced. Deletion of
        metadata keys is not possible.

    Returns
    -------
    metadata_dict: dask.bag.Bag
        A dask bag object containing the compute graph to append to the cube returning the dict of dataset metadata
        objects. The bag has a single partition with a single element.
    """
    # Convert the dataframe(s) into the bag layout the internal writer expects.
    data, ktk_cube_dataset_ids = _ddfs_to_bag(data, cube)
    return (
        append_to_cube_from_bag_internal(
            data=data,
            cube=cube,
            store=store,
            ktk_cube_dataset_ids=ktk_cube_dataset_ids,
            metadata=metadata,
        )
        .map_partitions(_unpack_list, default=None)
        .to_delayed()[0]
    )
def _ddfs_to_bag(data, cube):
    """Convert dataframe input into (bag, sorted dataset IDs) for the bag writers.

    A bare dataframe is treated as the cube's seed dataset; each dataframe's
    partitions are wrapped via _convert_write_bag and all bags are concatenated.
    """
    if not isinstance(data, dict):
        data = {cube.seed_dataset: data}
    ktk_cube_dataset_ids = sorted(data.keys())
    bags = [
        db.from_delayed(data[dataset_id].to_delayed()).map_partitions(
            _convert_write_bag, ktk_cube_dataset_id=dataset_id
        )
        for dataset_id in ktk_cube_dataset_ids
    ]
    return (db.concat(bags), ktk_cube_dataset_ids)
def _unpack_list(l, default): # noqa
l = list(l) # noqa
if l:
return l[0]
else:
return default
def _convert_write_bag(df, ktk_cube_dataset_id):
return [{ktk_cube_dataset_id: df}]
|
# Demo: constituency parsing with benepar plugged in as a spaCy pipeline component.
import spacy
from benepar.spacy_plugin import BeneparComponent
# NOTE(review): spacy.load('en') is the legacy shortcut-link form; newer spaCy
# releases require a full model name such as 'en_core_web_sm' — confirm the
# installed spaCy version still supports it.
nlp = spacy.load('en')
nlp.add_pipe(BeneparComponent("benepar_en2"))
doc = nlp(u"The time for action is now. It's never too late to do something.")
# Show the bracketed parse of the first sentence and its first child constituent.
sent = list(doc.sents)[0]
print(sent._.parse_string)
print(list(sent._.children)[0])
|
# Generated by Django 2.2.4 on 2019-10-11 14:27
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration adding the Group model (do not edit by hand)."""
    dependencies = [("iaso", "0008_auto_20191010_1147")]
    operations = [
        migrations.CreateModel(
            name="Group",
            fields=[
                ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
                ("name", models.TextField()),
                ("source_ref", models.TextField(blank=True, null=True)),
                ("created_at", models.DateTimeField(auto_now_add=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
                ("org_units", models.ManyToManyField(blank=True, to="iaso.OrgUnit")),
                (
                    "sourceVersion",
                    models.ForeignKey(
                        blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to="iaso.SourceVersion"
                    ),
                ),
            ],
        )
    ]
|
#!/usr/bin/python3.6
import pandas as pd
import matplotlib.pyplot as plt
from collections import Counter
from debug import dprint
# Count how many training samples carry each class id (attribute_ids is a
# space-separated list of ids per row).
df = pd.read_csv('../input/train.csv')
cls_counts = Counter(cls for classes in df['attribute_ids'].str.split() for cls in classes)
# Map numeric class ids to human-readable attribute names.
labels = pd.read_csv('../input/labels.csv')
labels = labels.to_dict()['attribute_name']
# NOTE(review): `id` shadows the builtin inside this and the comprehensions below.
counts = {labels[int(id)]:count for (id, count) in cls_counts.items()}
# dprint(counts)
# plt.plot(sorted(counts.values()))
# plt.show()
# Report how many classes fall at or below each rarity threshold.
dprint(len([cnt for id,cnt in counts.items() if cnt <= 100]))
dprint(len([cnt for id,cnt in counts.items() if cnt <= 50]))
dprint(len([cnt for id,cnt in counts.items() if cnt <= 30]))
dprint(len([cnt for id,cnt in counts.items() if cnt <= 20]))
dprint(len([cnt for id,cnt in counts.items() if cnt <= 10]))
dprint(len([cnt for id,cnt in counts.items() if cnt <= 5]))
dprint(len([cnt for id,cnt in counts.items() if cnt <= 3]))
# plt.plot(sorted([cnt for id,cnt in counts.items() if cnt < 100])); plt.show()
|
import json
import os
from os.path import dirname
class SubdivisionParser:
    """Resolves textual US state/country references to subdivision codes.

    Two lookup tables are built from resources/parser/subdivisions_US.json:
    - divisions_dict: single-token lookups (e.g. "us-ny", "usa") -> code
    - multi_words_dict: first word -> list of (word-sequence, code) pairs
      for multi-word names (e.g. "new york").
    """
    def __init__(self):
        # NOTE(review): this local deliberately shadows the imported json
        # module; it holds the parsed mapping, e.g. {"US-NY": "New York"}.
        json = self._get_json()
        temp_multi_words_dict = {
            "united states": "US",
            "us of america": "USA",
            "u s a": "USA",
            "u . s . a": "USA",
        }
        # self.sorted_multi_keys = sorted(self.multi_words_dict.keys())
        # self.sorted_single_keys = sorted(self.divisions_dict.keys())
        self.divisions_dict = {
            "usa": "USA",
            "u_s_a": "USA",
            "u-s-a": "USA"
        }
        for key in json:
            # Keys look like "US-NY": country code, dash, state code.
            country = key.split('-')[0]
            state = key.split('-')[1]
            lower_state = state.lower()
            full_name = json[key].lower()
            self.divisions_dict[key.lower()] = state
            self.divisions_dict[country.lower() + '_' + lower_state] = state
            if ' ' in full_name:
                # Multi-word names go into the first-word index; also register
                # the dash-joined single-token variant.
                temp_multi_words_dict[full_name] = state
                self.divisions_dict[full_name.replace(' ', '-')] = state
            else:
                self.divisions_dict[full_name] = state
        # Index multi-word names by their first word for fast candidate lookup.
        self.multi_words_dict = {}
        for k in temp_multi_words_dict.keys():
            value = temp_multi_words_dict[k]
            splitted = k.split(' ')
            first_word = splitted[0]
            if first_word not in self.multi_words_dict:
                self.multi_words_dict[first_word] = []
            self.multi_words_dict[splitted[0]].append((splitted, value))
    def _get_json(self):
        """Load the subdivision mapping from the resources directory."""
        # Path is relative to the working directory — TODO confirm callers run from repo root.
        path = r'resources/parser/subdivisions_US.json'
        with open(path) as f:
            return json.load(f)
    def parse_word(self, lower_word, current_index, words):
        """Try to resolve the (lowercased) word at current_index in *words*.

        Returns a tuple (matched, last_index_consumed, code_or_None); for a
        multi-word match, last_index_consumed points at the final word used.
        """
        if lower_word in self.divisions_dict:
            return True, current_index, self.divisions_dict[lower_word]
        if lower_word in self.multi_words_dict:
            words = [word.lower() for word in words]
            # NOTE(review): `tuple` shadows the builtin; each entry is (word_list, code).
            for tuple in self.multi_words_dict[lower_word]:
                if tuple[0] == words[current_index:current_index + len(tuple[0])]:
                    return True, current_index + len(tuple[0]) - 1, tuple[1]
        return False, current_index, None
|
#------------------------------------------------------------------------------
# Copyright (c) 2011, Enthought, Inc.
# All rights reserved.
#------------------------------------------------------------------------------
from .wx_test_assistant import WXTestAssistant, skip_nonwindows
from .. import html
@skip_nonwindows
class TestWxHtml(WXTestAssistant, html.TestHtml):
    """ WXHtml tests. """
    def get_source(self, widget):
        """ Get the source of an Html widget.

        Returns the widget's plain-text contents via wx's ToText().
        """
        return widget.ToText()
|
import json
import re
import uuid
from typing import Optional
from django.apps import apps
from django.contrib.sites.shortcuts import get_current_site
from django.db import models, transaction
from django.db.models import Count, QuerySet
from django.template import Context, Template
from django.urls import reverse
from django.utils import timezone
from django.utils.crypto import get_random_string
from django.utils.translation import gettext, gettext_lazy as _
from bs4 import BeautifulSoup
from colossus.apps.lists.models import MailingList
from colossus.apps.subscribers.constants import ActivityTypes
from colossus.apps.templates.models import EmailTemplate
from colossus.apps.templates.utils import get_template_blocks
from .constants import CampaignStatus, CampaignTypes
from .tasks import send_campaign_task, update_rates_after_campaign_deletion
class Campaign(models.Model):
    """An email campaign sent to (optionally a tagged subset of) a mailing list."""
    uuid = models.UUIDField(default=uuid.uuid4, unique=True, editable=False)
    name = models.CharField(_('name'), max_length=100)
    campaign_type = models.PositiveSmallIntegerField(
        _('type'),
        choices=CampaignTypes.CHOICES,
        default=CampaignTypes.REGULAR
    )
    mailing_list = models.ForeignKey(
        MailingList,
        on_delete=models.CASCADE,
        verbose_name=_('mailing list'),
        related_name='campaigns',
        null=True,
        blank=True
    )
    # Optional tag restricting the recipients (see get_recipients()).
    tag = models.ForeignKey(
        'subscribers.Tag',
        on_delete=models.SET_NULL,
        verbose_name=_('tag'),
        related_name='campaigns',
        null=True,
        blank=True
    )
    status = models.PositiveSmallIntegerField(
        _('status'),
        choices=CampaignStatus.CHOICES,
        default=CampaignStatus.DRAFT,
        db_index=True
    )
    send_date = models.DateTimeField(_('send date'), null=True, blank=True, db_index=True)
    create_date = models.DateTimeField(_('create date'), auto_now_add=True)
    update_date = models.DateTimeField(_('update date'), default=timezone.now)
    recipients_count = models.PositiveIntegerField(default=0)
    track_opens = models.BooleanField(_('track opens'), default=True)
    track_clicks = models.BooleanField(_('track clicks'), default=True)
    # Denormalized engagement counters/rates, maintained by
    # update_opens_count_and_rate / update_clicks_count_and_rate.
    unique_opens_count = models.PositiveIntegerField(_('unique opens'), default=0, editable=False)
    total_opens_count = models.PositiveIntegerField(_('total opens'), default=0, editable=False)
    unique_clicks_count = models.PositiveIntegerField(_('unique clicks'), default=0, editable=False)
    total_clicks_count = models.PositiveIntegerField(_('total clicks'), default=0, editable=False)
    open_rate = models.FloatField(_('opens'), default=0.0, editable=False)
    click_rate = models.FloatField(_('clicks'), default=0.0, editable=False)
    # Per-instance cache backing the `email` property (regular campaigns only).
    __cached_email = None
    class Meta:
        verbose_name = _('campaign')
        verbose_name_plural = _('campaigns')
        db_table = 'colossus_campaigns'
    def __str__(self):
        """Use the campaign name as its string representation."""
        return self.name
    def get_absolute_url(self) -> str:
        """Route drafts to the edit page, scheduled campaigns to the scheduled
        page, and everything else to the detail page."""
        if self.can_edit:
            return reverse('campaigns:campaign_edit', kwargs={'pk': self.pk})
        elif self.is_scheduled:
            return reverse('campaigns:campaign_scheduled', kwargs={'pk': self.pk})
        return reverse('campaigns:campaign_detail', kwargs={'pk': self.pk})
    def delete(self, using=None, keep_parents=False):
        """Delete the campaign, then queue a task to refresh the mailing
        list's aggregate rates."""
        super().delete(using, keep_parents)
        # mailing_list_id is still readable on the in-memory instance after deletion.
        update_rates_after_campaign_deletion.delay(self.mailing_list_id)
    @property
    def is_scheduled(self) -> bool:
        """True when the campaign is waiting for its scheduled send date."""
        return self.status == CampaignStatus.SCHEDULED
    @property
    def can_edit(self) -> bool:
        """Only draft campaigns may still be edited."""
        return self.status == CampaignStatus.DRAFT
@property
def can_send(self) -> bool:
for email in self.emails.all():
if not email.can_send:
return False
else:
return True
    @property
    def email(self) -> Optional['Email']:
        """For regular campaigns, lazily fetch/create the single Email and cache it.

        Returns None for non-regular campaign types (cache stays unset).
        """
        if not self.__cached_email and self.campaign_type == CampaignTypes.REGULAR:
            try:
                self.__cached_email, created = Email.objects.get_or_create(campaign=self)
            except Email.MultipleObjectsReturned:
                # More than one email exists; fall back to the oldest one.
                self.__cached_email = self.emails.order_by('id').first()
        return self.__cached_email
    def get_recipients(self):
        """Return the mailing list's active subscribers, filtered by tag if one is set."""
        queryset = self.mailing_list.get_active_subscribers()
        if self.tag is not None:
            queryset = queryset.filter(tags=self.tag)
        return queryset
    def send(self):
        """Queue the campaign for delivery.

        Snapshots the recipient count, stamps the send date, marks the status
        QUEUED, records template usage, then dispatches the async send task.
        """
        with transaction.atomic():
            self.recipients_count = self.get_recipients().count()
            self.send_date = timezone.now()
            self.status = CampaignStatus.QUEUED
            for email in self.emails.select_related('template').all():
                if email.template is not None:
                    email.template.last_used_date = timezone.now()
                    email.template.last_used_campaign_id = self.pk
                    email.template.save()
            self.save()
        # Dispatch after the transaction so the worker sees the QUEUED state.
        send_campaign_task.delay(self.pk)
    @transaction.atomic
    def replicate(self):
        """Create a draft copy of this campaign (name suffixed with ' (copy)')
        together with copies of all of its emails."""
        copy = gettext(' (copy)')
        # Keep the new name within the 100-char limit of the name field.
        slice_at = 100 - len(copy)
        name = '%s%s' % (self.name[:slice_at], copy)
        replicated_campaign = Campaign.objects.create(
            name=name,
            campaign_type=self.campaign_type,
            mailing_list=self.mailing_list,
            status=CampaignStatus.DRAFT,
        )
        replicated_emails = list()
        for email in self.emails.all():
            replicated_email = Email(
                campaign=replicated_campaign,
                template=email.template,
                template_content=email.template_content,
                from_email=email.from_email,
                from_name=email.from_name,
                subject=email.subject,
                preview=email.preview,
                content=email.content,
                content_html=email.content_html,
                content_text=email.content_text
            )
            replicated_emails.append(replicated_email)
        Email.objects.bulk_create(replicated_emails)
        return replicated_campaign
def update_clicks_count_and_rate(self) -> tuple:
    """Recompute click statistics from subscriber activities and persist them.

    `unique` counts at most one click per subscriber (distinct); `total`
    counts every click. The click rate is unique clicks divided by the
    recipient count, 0.0 when the campaign has no recipients yet.
    :return: (unique_clicks_count, total_clicks_count, click_rate)
    """
    # Late model lookup avoids a circular import with the subscribers app.
    Activity = apps.get_model('subscribers', 'Activity')
    qs = Activity.objects.filter(email__campaign=self, activity_type=ActivityTypes.CLICKED) \
        .values('subscriber_id') \
        .order_by('subscriber_id') \
        .aggregate(unique_count=Count('subscriber_id', distinct=True), total_count=Count('subscriber_id'))
    self.unique_clicks_count = qs['unique_count']
    self.total_clicks_count = qs['total_count']
    try:
        self.click_rate = self.unique_clicks_count / self.recipients_count
    except ZeroDivisionError:
        self.click_rate = 0.0
    self.save(update_fields=['unique_clicks_count', 'total_clicks_count', 'click_rate'])
    return (self.unique_clicks_count, self.total_clicks_count, self.click_rate)
def update_opens_count_and_rate(self) -> tuple:
    """Recompute open statistics from subscriber activities and persist them.

    Mirrors update_clicks_count_and_rate, but for OPENED activities.
    :return: (unique_opens_count, total_opens_count, open_rate)
    """
    # Late model lookup avoids a circular import with the subscribers app.
    Activity = apps.get_model('subscribers', 'Activity')
    qs = Activity.objects.filter(email__campaign=self, activity_type=ActivityTypes.OPENED) \
        .values('subscriber_id') \
        .order_by('subscriber_id') \
        .aggregate(unique_count=Count('subscriber_id', distinct=True), total_count=Count('subscriber_id'))
    self.unique_opens_count = qs['unique_count']
    self.total_opens_count = qs['total_count']
    try:
        self.open_rate = self.unique_opens_count / self.recipients_count
    except ZeroDivisionError:
        self.open_rate = 0.0
    self.save(update_fields=['unique_opens_count', 'total_opens_count', 'open_rate'])
    return (self.unique_opens_count, self.total_opens_count, self.open_rate)
def get_links(self) -> QuerySet:
    """
    A method to list campaign's links
    :return: All links associated with the campaign, ordered by the total number of clicks
    """
    return Link.objects.filter(email__campaign=self).order_by('-total_clicks_count')
class Email(models.Model):
    """A single email message belonging to a campaign.

    Holds the message metadata (sender, subject, preview), the base Django
    template used to render it, the editable content blocks serialized as
    JSON in ``content``, denormalized open/click statistics, and the
    machinery that injects open/click tracking into the rendered HTML.
    """
    uuid = models.UUIDField(default=uuid.uuid4, unique=True, editable=False)
    campaign = models.ForeignKey(Campaign, on_delete=models.CASCADE, verbose_name=_('campaign'), related_name='emails')
    template = models.ForeignKey(
        EmailTemplate,
        on_delete=models.SET_NULL,
        verbose_name=_('email template'),
        related_name='emails',
        null=True,
        blank=True
    )
    # Snapshot of the template's source so later template edits do not
    # change already-composed emails.
    template_content = models.TextField(_('email template content'), blank=True)
    from_email = models.EmailField(_('email address'))
    from_name = models.CharField(_('name'), max_length=100, blank=True)
    subject = models.CharField(_('subject'), max_length=150)
    preview = models.CharField(_('preview'), max_length=150, blank=True)
    # JSON-serialized mapping of template block name -> block content.
    content = models.TextField(_('content'), blank=True)
    content_html = models.TextField(_('content HTML'), blank=True)
    content_text = models.TextField(_('content plain text'), blank=True)
    # Denormalized statistics; maintained by update_*_count below.
    unique_opens_count = models.PositiveIntegerField(_('unique opens'), default=0, editable=False)
    total_opens_count = models.PositiveIntegerField(_('total opens'), default=0, editable=False)
    unique_clicks_count = models.PositiveIntegerField(_('unique clicks'), default=0, editable=False)
    total_clicks_count = models.PositiveIntegerField(_('total clicks'), default=0, editable=False)

    # Per-instance caches (name-mangled class attributes).
    __blocks = None
    __base_template = None
    __child_template_string = None

    # Context variable name the virtual child template extends.
    BASE_TEMPLATE_VAR = 'base_template'

    class Meta:
        verbose_name = _('email')
        verbose_name_plural = _('emails')
        db_table = 'colossus_emails'

    def __str__(self):
        return self.subject

    @property
    def base_template(self) -> Template:
        """Compiled Template of ``template_content`` (cached per instance)."""
        if self.__base_template is None:
            self.__base_template = Template(self.template_content)
        return self.__base_template

    @property
    def child_template_string(self) -> str:
        """Virtual child-template source built from the blocks (cached)."""
        if self.__child_template_string is None:
            self.__child_template_string = self.build_child_template_string()
        return self.__child_template_string

    def set_template_content(self):
        """Copy the selected template's source, or the default fallback."""
        if self.template is None:
            self.template_content = EmailTemplate.objects.default_content()
        else:
            self.template_content = self.template.content

    def get_from(self) -> str:
        """Format the From header as "Name <address>" or just the address."""
        if self.from_name:
            return '%s <%s>' % (self.from_name, self.from_email)
        return self.from_email

    def get_base_template(self) -> Template:
        """
        Returns a Django template using `template_content` field.
        Fallback to default basic template defined by EmailTemplate.
        """
        if self.template_content:
            template = Template(self.template_content)
        else:
            template_string = EmailTemplate.objects.default_content()
            template = Template(template_string)
        return template

    def set_blocks(self, blocks=None):
        """Store the editable content blocks as JSON in ``content``.

        When no blocks are given, rebuild the mapping from the base
        template's block tags, carrying over any non-empty content from
        the previously saved blocks.
        """
        if blocks is None:
            old_blocks = self.get_blocks()
            blocks = dict()
            template = self.get_base_template()
            template_blocks = get_template_blocks(template)
            for block_name, block_content in template_blocks.items():
                inherited_content = block_content
                if block_name in old_blocks.keys():
                    old_block_content = old_blocks.get(block_name, '').strip()
                    if old_block_content:
                        inherited_content = old_blocks[block_name]
                blocks[block_name] = inherited_content
        self.content = json.dumps(blocks)
        self.__blocks = blocks

    def load_blocks(self) -> dict:
        """Deserialize ``content``; fall back to a single empty block."""
        try:
            blocks = json.loads(self.content)
        except (TypeError, json.JSONDecodeError):
            blocks = {'content': ''}
        return blocks

    def get_blocks(self) -> dict:
        """Cached accessor for the content blocks."""
        if self.__blocks is None:
            self.__blocks = self.load_blocks()
        return self.__blocks

    def checklist(self) -> dict:
        """Pre-send sanity checks; each key maps to True when satisfied."""
        _checklist = {
            'recipients': False,
            'from': False,
            'subject': False,
            'content': False,
            'unsub': False
        }
        if self.campaign.mailing_list is not None and self.campaign.mailing_list.get_active_subscribers().exists():
            _checklist['recipients'] = True
        if self.from_email:
            _checklist['from'] = True
        if self.subject:
            _checklist['subject'] = True
        if self.content:
            _checklist['content'] = True

        # Generate a random string and pass it to the render function
        # as if it was the unsubscribe url. If we can find this token in the
        # rendered template, we can say the unsubscribe url will be rendered.
        # Not 100% guaranteed, as the end user can still bypass it (e.g.
        # changing visibility with html).
        token = get_random_string(50)
        rendered_template = self.render({'unsub': token})
        _checklist['unsub'] = token in rendered_template

        return _checklist

    @property
    def can_send(self) -> bool:
        """True only when every checklist item passes."""
        checklist = self.checklist()
        for value in checklist.values():
            if not value:
                return False
        else:
            return True

    def build_child_template_string(self) -> str:
        """
        Build a valid Django template string with `extends` block tag
        on top and representation of each content blocks, constructed
        from the JSON object.
        """
        virtual_template = ['{%% extends %s %%}' % self.BASE_TEMPLATE_VAR, ]
        blocks = self.get_blocks()
        for block_key, block_content in blocks.items():
            # Empty blocks are skipped so they inherit from the base template.
            if block_content:
                virtual_template.append('{%% block %s %%}\n%s\n{%% endblock %%}' % (block_key, block_content))
        return '\n\n'.join(virtual_template)

    def _render(self, template_string, context_dict) -> str:
        """Render an arbitrary template string with the given context."""
        template = Template(template_string)
        context = Context(context_dict)
        return template.render(context)

    def render(self, context_dict) -> str:
        """Render the email: child blocks extending the base template."""
        context_dict.update({self.BASE_TEMPLATE_VAR: self.base_template})
        return self._render(self.child_template_string, context_dict)

    def _enable_click_tracking(self, html, index=0):
        """Rewrite every absolute http(s) href in ``html`` to a tracking URL.

        A Link row is created (or reused) per URL/index pair. Returns the
        rewritten html and the next free link index.
        NOTE(review): the parameter name shadows the stdlib ``html`` module
        if that is imported at module level — harmless here, but worth
        renaming in a follow-up.
        """
        urls = re.findall(r'(?i)(href=["\']?)(https?://[^"\' >]+)', html)
        for data in urls:
            href = data[0]
            url = data[1]
            link, created = Link.objects.get_or_create(email=self, url=url, index=index)
            current_site = get_current_site(request=None)
            protocol = 'http'
            domain = current_site.domain
            # We cannot use django.urls.reverse here because part of the kwargs
            # will be processed during the sending campaign (including the `subscriber_uuid`)
            # With the `{{ uuid }}` we are introducing an extra django template variable
            # which will be later used to replace with the subscriber's uuid.
            track_url = '%s://%s/track/click/%s/{{uuid}}/' % (protocol, domain, link.uuid)
            html = html.replace('%s%s' % (href, url), '%s%s' % (href, track_url), 1)
            index += 1
        return html, index

    def enable_click_tracking(self):
        """Apply click tracking to the base template and every content block."""
        self.template_content, index = self._enable_click_tracking(self.template_content)
        blocks = self.get_blocks()
        for key, html in blocks.items():
            blocks[key], index = self._enable_click_tracking(html, index)
        self.set_blocks(blocks)

    def enable_open_tracking(self):
        """Append an invisible 1x1 tracking pixel to the template body."""
        current_site = get_current_site(request=None)
        protocol = 'http'
        domain = current_site.domain
        track_url = '%s://%s/track/open/%s/{{uuid}}/' % (protocol, domain, self.uuid)
        soup = BeautifulSoup(self.template_content, 'html.parser')
        img_tag = soup.new_tag('img', src=track_url, height='1', width='1')
        body = soup.find('body')
        if body is not None:
            body.append(img_tag)
            self.template_content = str(soup)
        else:
            # No <body> tag: fall back to plain concatenation.
            self.template_content = '%s %s' % (self.template_content, img_tag)

    def update_clicks_count(self) -> tuple:
        """Recompute unique/total click counts from activities and persist.

        :return: (unique_clicks_count, total_clicks_count)
        """
        qs = self.activities.filter(activity_type=ActivityTypes.CLICKED) \
            .values('subscriber_id') \
            .order_by('subscriber_id') \
            .aggregate(unique_count=Count('subscriber_id', distinct=True), total_count=Count('subscriber_id'))
        self.unique_clicks_count = qs['unique_count']
        self.total_clicks_count = qs['total_count']
        self.save(update_fields=['unique_clicks_count', 'total_clicks_count'])
        return (self.unique_clicks_count, self.total_clicks_count)

    def update_opens_count(self) -> tuple:
        """Recompute unique/total open counts from activities and persist.

        :return: (unique_opens_count, total_opens_count)
        """
        qs = self.activities.filter(activity_type=ActivityTypes.OPENED) \
            .values('subscriber_id') \
            .order_by('subscriber_id') \
            .aggregate(unique_count=Count('subscriber_id', distinct=True), total_count=Count('subscriber_id'))
        self.unique_opens_count = qs['unique_count']
        self.total_opens_count = qs['total_count']
        self.save(update_fields=['unique_opens_count', 'total_opens_count'])
        return (self.unique_opens_count, self.total_opens_count)
class Link(models.Model):
    """A tracked URL extracted from an email, used for click analytics."""
    uuid = models.UUIDField(default=uuid.uuid4, unique=True, editable=False)
    # SET_NULL (not CASCADE) so click statistics survive campaign deletion;
    # see can_delete below for the implications.
    email = models.ForeignKey(
        Email,
        on_delete=models.SET_NULL,
        null=True,
        related_name='links',
        verbose_name=_('email')
    )
    url = models.URLField(_('URL'), max_length=2048)
    unique_clicks_count = models.PositiveIntegerField(_('unique clicks count'), default=0, editable=False)
    total_clicks_count = models.PositiveIntegerField(_('total clicks count'), default=0, editable=False)
    # Position of the link within the email; part of the lookup key in
    # Email._enable_click_tracking so the same URL may appear more than once.
    index = models.PositiveSmallIntegerField(_('index'), default=0)

    class Meta:
        verbose_name = _('link')
        verbose_name_plural = _('links')
        db_table = 'colossus_links'

    def __str__(self) -> str:
        return self.url

    def delete(self, using=None, keep_parents=False):
        """
        Prevent links from being deleted after they are sent. Otherwise it may
        cause broken links in the emails.
        """
        if self.can_delete:
            return super().delete(using, keep_parents)

    @property
    def can_delete(self) -> bool:
        """
        Determines if the link can be deleted or not. First check if the email
        field is null. It should never be null unless the campaign was deleted
        by the user and all relationship cascaded. Except for the link as it
        should set to null. In that case, assume that the email/campaign was
        already sent, as the links are created during the sending process.
        The other case is when email is not null, so we can check the status of
        the campaign.
        :return: True if it's safe to delete the link, False otherwise.
        """
        if self.email is None or self.email.campaign.status != CampaignStatus.DRAFT:
            return False
        return True

    @property
    def short_uuid(self) -> str:
        """
        A short identifier to be used in the links reports.
        :return: The first eight characters of the link UUID.
        """
        return str(self.uuid)[:8]

    def update_clicks_count(self) -> tuple:
        """
        Query the database and update the link click statistics based on
        subscribers activities.
        :return: A tuple containing two values: unique clicks and total clicks
        """
        qs = self.activities.values('subscriber_id').order_by('subscriber_id').aggregate(
            unique_count=Count('subscriber_id', distinct=True),
            total_count=Count('subscriber_id')
        )
        self.unique_clicks_count = qs['unique_count']
        self.total_clicks_count = qs['total_count']
        self.save(update_fields=['unique_clicks_count', 'total_clicks_count'])
        return (self.unique_clicks_count, self.total_clicks_count)
|
"""A tiny TCP server that echoes each received line back, timestamped.

Ported from Python 2 (`SocketServer`, print statements, implicit str I/O)
to Python 3: the module is now `socketserver`, the request streams are
binary so the timestamp prefix must be encoded, and the server startup is
guarded by `__main__` so importing this module does not block.
"""
from time import ctime
from socketserver import TCPServer as TCP, StreamRequestHandler as SRH

host = ""          # empty host = bind on all interfaces
port = 4134
addrs = (host, port)


class MyRequestHandler(SRH):
    """Reads one line from the client and writes it back with a timestamp."""

    def handle(self):
        print("connected from " + str(self.client_address))
        # rfile/wfile are binary streams in Python 3: readline() yields
        # bytes, and the textual "[<time>] " prefix must be encoded.
        line = self.rfile.readline()
        self.wfile.write(("[" + ctime() + "] ").encode("utf-8") + line)


if __name__ == "__main__":
    ts = TCP(addrs, MyRequestHandler)
    print("waiting for incoming connection....")
    ts.serve_forever()
|
# Packaging configuration (setuptools) for the `src` project.
from setuptools import find_packages, setup

setup(
    name='src',
    packages=find_packages(),  # auto-discover every importable package
    version='0.1.0',
    description='To gather historical and ongoing rainfall data to use in the prediction of Australian wheat crop production.',
    author='Rodney J Baker',
    license='MIT',
)
|
"""Test creation of tensor datasets.
Test target:
- :py:meth:`lmp.util.dset.FastTensorDset`.
- :py:meth:`lmp.util.dset.SlowTensorDset`.
"""
import torch
import lmp.util.dset
from lmp.dset import WikiText2Dset
from lmp.tknzr import WsTknzr
def test_fast_tensor_dset(max_seq_len: int) -> None:
    """Load dataset in memory and convert to tensor."""
    tokenizer = WsTknzr(is_uncased=True, max_vocab=-1, min_count=10)
    tokenizer.build_vocab(batch_txt=['a', 'b', 'c'])
    source_dset = WikiText2Dset(ver='valid')
    tensor_dset = lmp.util.dset.FastTensorDset(dset=source_dset, max_seq_len=max_seq_len, tknzr=tokenizer)
    assert isinstance(tensor_dset, lmp.util.dset.FastTensorDset)
    assert len(tensor_dset) == len(source_dset)
    expected_size = torch.Size([max_seq_len])
    for sample_idx, sample in enumerate(tensor_dset):
        assert isinstance(sample, torch.Tensor), 'Each sample in the tensor dataset must be tensor.'
        assert sample.size() == expected_size, 'Each sample in the tensor dataset must have same length.'
        assert torch.all(tensor_dset[sample_idx] == sample), 'Support ``__getitem__`` and ``__iter__``.'
def test_slow_tensor_dset(max_seq_len: int) -> None:
    """Load dataset and convert to tensor on the fly."""
    tokenizer = WsTknzr(is_uncased=True, max_vocab=-1, min_count=10)
    tokenizer.build_vocab(batch_txt=['a', 'b', 'c'])
    source_dset = WikiText2Dset(ver='valid')
    tensor_dset = lmp.util.dset.SlowTensorDset(dset=source_dset, max_seq_len=max_seq_len, tknzr=tokenizer)
    assert isinstance(tensor_dset, lmp.util.dset.SlowTensorDset)
    assert len(tensor_dset) == len(source_dset)
    expected_size = torch.Size([max_seq_len])
    for sample_idx, sample in enumerate(tensor_dset):
        assert isinstance(sample, torch.Tensor), 'Each sample in the tensor dataset must be tensor.'
        assert sample.size() == expected_size, 'Each sample in the tensor dataset must have same length.'
        assert torch.all(tensor_dset[sample_idx] == sample), 'Support ``__getitem__`` and ``__iter__``.'
|
#!/usr/bin/python
"""Generate some amusing data."""
# It is your choice for the type of data you would like to send. It could
# be bitcoin prices at a given time, it could be the CPU utilization on the
# server at a given time, it could be temperature in your city at a given
# time. Keep in mind that this data will need to keep running for a few
# days. It is up to you where you want to get this data, whether you read
# from a csv, whether you make requests to another service to get this
# data. It doesn't matter to us, but we will judge your creativity in this
# process.
# This code runs on 2.x and 3.x
# We really only need 2.x, but my tools are configured to use 3.x now.
from __future__ import print_function
import os
import time
import pprint
import itertools
import port
# It might be nice to do Redhat stock price data. However, I haven't yet
# had much luck finding a module or REST interface that still works as
# described. This might be a reasonable approximation, though it seems to
# update only seldom, if at all:
#
# $ curl 'http://download.finance.yahoo.com/d/quotes.csv?s=rht&f=price'
# below cmd output started 2017 Sat Oct 07 08:22:50 AM PDT
# 115.52,70.05,N/A,"+1.54 - +1.33%",1.67
# above cmd output done 2017 Sat Oct 07 08:22:50 AM PDT
def timestamps_forever():
    """Generate timestamps forever."""
    while True:
        yield int(time.time())
def perfect_squares_forever():
    """
    Generate perfect squares forever.

    Consecutive squares differ by successive odd numbers
    ((n + 1)^2 - n^2 = 2n + 1), so each value follows from the previous
    one with a single addition instead of multiplying large numbers.
    """
    value, step = 1, 3
    while True:
        yield value
        value, step = value + step, step + 2
def lcg_random_numbers_forever(seed=None):
    """
    Generate pseudo-random numbers forever with a linear congruential
    generator, using glibc-style multiplier/increment constants.
    (NOTE: the modulus here is 2**31 - 1; glibc's rand() uses 2**31 —
    kept as-is to preserve the original sequence.)
    """
    modulus = (1 << 31) - 1
    multiplier = 1103515245
    increment = 12345
    if seed is None:
        # Weak seed (time * pid) — good enough for amusement purposes.
        seed = int(time.time() * os.getpid()) % modulus
    state = seed
    while True:
        state = (multiplier * state + increment) % modulus
        yield state
def amusing_data():
    """
    Generate amusing data (strings) forever.
    The amusing data is comprised of:
    1) A datum number.
    2) A timestamp: seconds since the epoch And a human-readable time.
    3) Perfect squares that grow forever.
    4) Random numbers that go on forever with a decent-but-not-awesome period.
    """
    streams = port.ZIPPER(
        itertools.count(1),
        timestamps_forever(),
        perfect_squares_forever(),
        lcg_random_numbers_forever(),
    )
    for datumno, timestamp, square, rand in streams:
        yield {
            'datumno': datumno,
            'timestamp': timestamp,
            'perfect-square': square,
            'random-number': rand,
        }
def main():
    """Print the first few amusing-data rows, counted down from `top`."""
    # top = 1000000
    top = 10
    # Print the first "top" values from amusing_data.
    # NOTE(review): port.ZIPPER presumably behaves like zip() across
    # Python 2/3 (izip vs zip) — TODO confirm against the port module.
    for row in port.ZIPPER(range(top), amusing_data()):
        pprint.pprint((top - row[0], row[1]))


if __name__ == '__main__':
    main()
|
""" Bivariate operators"""
from .base import *
from .activation import BoundSqrt, BoundReciprocal
class BoundMul(Bound):
    """Bound propagation for elementwise multiplication z = x * y."""

    def __init__(self, input_name, name, ori_name, attr, inputs, output_index, options, device):
        super().__init__(input_name, name, ori_name, attr, inputs, output_index, options, device)
        self.nonlinear = True

    @Bound.save_io_shape
    def forward(self, x, y):
        self.x_shape = x.shape
        self.y_shape = y.shape
        return x * y

    @staticmethod
    def get_bound_mul(x_l, x_u, y_l, y_u):
        """Linear relaxation of z = x*y over the box [x_l,x_u] x [y_l,y_u].

        Returns coefficients (alpha, beta, gamma) for each side so that
        alpha_l*x + beta_l*y + gamma_l <= x*y <= alpha_u*x + beta_u*y + gamma_u.
        """
        # Lower plane: z >= y_l*x + x_l*y - x_l*y_l.
        alpha_l = y_l
        beta_l = x_l
        gamma_l = -alpha_l * beta_l
        # Upper plane: z <= y_u*x + x_l*y - x_l*y_u.
        alpha_u = y_u
        beta_u = x_l
        gamma_u = -alpha_u * beta_u
        return alpha_l, beta_l, gamma_l, alpha_u, beta_u, gamma_u

    # Special case when input is x * x.
    @staticmethod
    def get_bound_square(x_l, x_u):
        """Linear relaxation of z = x^2 over [x_l, x_u]."""
        # Lower bound is a z=0 line if x_l and x_u have different signs.
        # Otherwise, the lower bound is a tangent line at x_l.
        # The lower bound should always be better than IBP.
        # If both x_l and x_u < 0, select x_u. If both > 0, select x_l.
        # If x_l < 0 and x_u > 0, we use the z=0 line as the lower bound.
        x_m = F.relu(x_l) - F.relu(-x_u)
        alpha_l = 2 * x_m
        gamma_l = - x_m * x_m
        # Upper bound: connect the two points (x_l, x_l^2) and (x_u, x_u^2).
        # The upper bound should always be better than IBP.
        alpha_u = x_l + x_u
        gamma_u = - x_l * x_u
        # Parameters before the second variable are all zeros, not used.
        beta_l = torch.zeros_like(x_l)
        beta_u = beta_l
        return alpha_l, beta_l, gamma_l, alpha_u, beta_u, gamma_u

    @staticmethod
    def _relax(x, y):
        """Pick the appropriate relaxation and broadcast bound tensors."""
        if x is y:
            # A shortcut for x * x.
            return BoundMul.get_bound_square(x.lower, x.upper)
        x_l, x_u = x.lower, x.upper
        y_l, y_u = y.lower, y.upper
        # broadcast: adding then subtracting the other operand expands each
        # bound tensor to the common broadcast shape without changing values.
        for k in [1, -1]:
            x_l = x_l + k * y_l
            x_u = x_u + k * y_u
        for k in [1, -1]:
            y_l = y_l + k * x_l
            y_u = y_u + k * x_u
        return BoundMul.get_bound_mul(x_l, x_u, y_l, y_u)

    def bound_backward(self, last_lA, last_uA, x, y):
        """Propagate linear coefficients backward through the product."""
        alpha_l, beta_l, gamma_l, alpha_u, beta_u, gamma_u = BoundMul._relax(x, y)

        alpha_l, alpha_u = alpha_l.unsqueeze(0), alpha_u.unsqueeze(0)
        beta_l, beta_u = beta_l.unsqueeze(0), beta_u.unsqueeze(0)

        def _bound_oneside(last_A,
                           alpha_pos, beta_pos, gamma_pos,
                           alpha_neg, beta_neg, gamma_neg):
            # Split last_A by sign so positive parts take the matching-side
            # relaxation and negative parts the opposite side.
            if last_A is None:
                return None, None, 0
            last_A_pos, last_A_neg = last_A.clamp(min=0), last_A.clamp(max=0)
            A_x = last_A_pos * alpha_pos + last_A_neg * alpha_neg
            A_y = last_A_pos * beta_pos + last_A_neg * beta_neg
            # NOTE(review): this reshape result is never read afterwards —
            # it looks like dead code and a candidate for removal.
            last_A = last_A.reshape(last_A.shape[0], last_A.shape[1], -1)
            A_x = self.broadcast_backward(A_x, x)
            A_y = self.broadcast_backward(A_y, y)
            bias = self.get_bias(last_A_pos, gamma_pos) + \
                   self.get_bias(last_A_neg, gamma_neg)
            return A_x, A_y, bias

        lA_x, lA_y, lbias = _bound_oneside(
            last_lA, alpha_l, beta_l, gamma_l, alpha_u, beta_u, gamma_u)
        uA_x, uA_y, ubias = _bound_oneside(
            last_uA, alpha_u, beta_u, gamma_u, alpha_l, beta_l, gamma_l)

        return [(lA_x, uA_x), (lA_y, uA_y)], lbias, ubias

    @staticmethod
    def bound_forward(dim_in, x, y):
        """Propagate LinearBound objects forward through the product."""
        x_lw, x_lb, x_uw, x_ub = x.lw, x.lb, x.uw, x.ub
        y_lw, y_lb, y_uw, y_ub = y.lw, y.lb, y.uw, y.ub

        alpha_l, beta_l, gamma_l, alpha_u, beta_u, gamma_u = BoundMul._relax(x, y)

        if x_lw is None: x_lw = 0
        if y_lw is None: y_lw = 0
        if x_uw is None: x_uw = 0
        if y_uw is None: y_uw = 0

        # Combine weights/biases, choosing lower or upper input bounds per
        # the sign of each relaxation coefficient.
        lw = alpha_l.unsqueeze(1).clamp(min=0) * x_lw + alpha_l.unsqueeze(1).clamp(max=0) * x_uw
        lw = lw + beta_l.unsqueeze(1).clamp(min=0) * y_lw + beta_l.unsqueeze(1).clamp(max=0) * y_uw
        lb = alpha_l.clamp(min=0) * x_lb + alpha_l.clamp(max=0) * x_ub + \
             beta_l.clamp(min=0) * y_lb + beta_l.clamp(max=0) * y_ub + gamma_l
        uw = alpha_u.unsqueeze(1).clamp(max=0) * x_lw + alpha_u.unsqueeze(1).clamp(min=0) * x_uw
        uw = uw + beta_u.unsqueeze(1).clamp(max=0) * y_lw + beta_u.unsqueeze(1).clamp(min=0) * y_uw
        ub = alpha_u.clamp(max=0) * x_lb + alpha_u.clamp(min=0) * x_ub + \
             beta_u.clamp(max=0) * y_lb + beta_u.clamp(min=0) * y_ub + gamma_u

        return LinearBound(lw, lb, uw, ub)

    @staticmethod
    def interval_propagate(*v):
        """Interval arithmetic for the product of two intervals."""
        x, y = v[0], v[1]
        if x is y:
            # A shortcut for x * x.
            h_L, h_U = v[0]
            r0 = h_L * h_L
            r1 = h_U * h_U
            # When h_L < 0, h_U > 0, lower bound is 0.
            # When h_L < 0, h_U < 0, lower bound is h_U * h_U.
            # When h_L > 0, h_U > 0, lower bound is h_L * h_L.
            l = F.relu(h_L) - F.relu(-h_U)
            return l * l, torch.max(r0, r1)

        if Interval.use_relative_bounds(x) and Interval.use_relative_bounds(y):
            # Relative (nominal + offset) representation of the product.
            nominal = x.nominal * y.nominal
            lower_offset = (
                x.nominal.clamp(min=0) * (y.lower_offset) +
                x.nominal.clamp(max=0) * (y.upper_offset) +
                y.nominal.clamp(min=0) * (x.lower_offset) +
                y.nominal.clamp(max=0) * (x.upper_offset) +
                torch.min(x.lower_offset * y.upper_offset, x.upper_offset * y.lower_offset))
            upper_offset = (
                x.nominal.clamp(min=0) * (y.upper_offset) +
                x.nominal.clamp(max=0) * (y.lower_offset) +
                y.nominal.clamp(min=0) * (x.upper_offset) +
                y.nominal.clamp(max=0) * (x.lower_offset) +
                torch.max(x.lower_offset * y.lower_offset, x.upper_offset * y.upper_offset))
            return Interval(None, None, nominal=nominal, lower_offset=lower_offset, upper_offset=upper_offset)

        # Classic interval product: extremes are among the four corner products.
        r0, r1, r2, r3 = x[0] * y[0], x[0] * y[1], x[1] * y[0], x[1] * y[1]
        lower = torch.min(torch.min(r0, r1), torch.min(r2, r3))
        upper = torch.max(torch.max(r0, r1), torch.max(r2, r3))
        return lower, upper

    @staticmethod
    def infer_batch_dim(batch_size, *x):
        """Resolve the batch dimension of the output from the two inputs."""
        if x[0] == -1:
            return x[1]
        elif x[1] == -1:
            return x[0]
        else:
            assert x[0] == x[1]
            return x[0]
class BoundDiv(Bound):
    """Bound propagation for elementwise division z = x / y.

    Division is generally handled by rewriting it as x * (1/y) and reusing
    BoundReciprocal and BoundMul, except for an ad-hoc fast path that
    recognizes the layer-normalization pattern (denominator is a BoundSqrt).
    """

    def __init__(self, input_name, name, ori_name, attr, inputs, output_index, options, device):
        super().__init__(input_name, name, ori_name, attr, inputs, output_index, options, device)
        self.nonlinear = True

    @Bound.save_io_shape
    def forward(self, x, y):
        # ad-hoc implementation for layer normalization
        if isinstance(self.inputs[1], BoundSqrt):
            input = self.inputs[0].inputs[0]
            x = input.forward_value
            n = input.forward_value.shape[-1]

            # dev = x_i - mean(x), expressed without recomputing the mean
            # for each element.
            dev = x * (1. - 1. / n) - (x.sum(dim=-1, keepdim=True) - x) / n
            dev_sqr = dev ** 2
            # epsilon (from .base) guards against division by zero.
            s = (dev_sqr.sum(dim=-1, keepdim=True) - dev_sqr) / dev_sqr.clamp(min=epsilon)
            sqrt = torch.sqrt(1. / n * (s + 1))
            return torch.sign(dev) * (1. / sqrt)

        self.x, self.y = x, y
        return x / y

    def bound_backward(self, last_lA, last_uA, x, y):
        """Backward bounds via the x * (1/y) decomposition."""
        reciprocal, mul, y_r = self._convert_to_mul(x, y)
        A, lower_b, upper_b = mul.bound_backward(last_lA, last_uA, x, y_r)

        A_y, lower_b_y, upper_b_y = reciprocal.bound_backward(A[1][0], A[1][1], y)
        upper_b = upper_b + upper_b_y
        lower_b = lower_b + lower_b_y

        return [A[0], A_y[0]], lower_b, upper_b

    def bound_forward(self, dim_in, x, y):
        """Forward bounds via the x * (1/y) decomposition."""
        reciprocal, mul, y_r = self._convert_to_mul(x, y)
        y_r_linear = reciprocal.bound_forward(dim_in, y)
        y_r_linear = y_r_linear._replace(lower=y_r.lower, upper=y_r.upper)
        return mul.bound_forward(dim_in, x, y_r_linear)

    def interval_propagate(self, *v):
        # ad-hoc implementation for layer normalization
        """
        Compute bounds for layer normalization

        Lower bound
            1) (x_i - mu) can be negative
                - 1 / ( sqrt (1/n * sum_j Lower{(x_j-mu)^2/(x_i-mu)^2} ))
            2) (x_i - mu) cannot be negative
                1 / ( sqrt (1/n * sum_j Upper{(x_j-mu)^2/(x_i-mu)^2} ))

        Lower{(x_j-mu)^2/(x_i-mu)^2}
            Lower{sum_j (x_j-mu)^2} / Upper{(x_i-mu)^2}

        Upper{(x_j-mu)^2/(x_i-mu)^2}
            Upper{sum_j (x_j-mu)^2} / Lower{(x_i-mu)^2}
        """
        if isinstance(self.inputs[1], BoundSqrt):
            input = self.inputs[0].inputs[0]
            n = input.forward_value.shape[-1]

            h_L, h_U = input.lower, input.upper

            # Bounds of the deviation (x_i - mean).
            dev_lower = (
                h_L * (1 - 1. / n) -
                (h_U.sum(dim=-1, keepdim=True) - h_U) / n
            )
            dev_upper = (
                h_U * (1 - 1. / n) -
                (h_L.sum(dim=-1, keepdim=True) - h_L) / n
            )

            # Squared deviation bounds; lower is zero when the deviation
            # interval straddles zero.
            dev_sqr_lower = (1 - (dev_lower < 0).float() * (dev_upper > 0).float()) * \
                torch.min(dev_lower.abs(), dev_upper.abs())**2
            dev_sqr_upper = torch.max(dev_lower.abs(), dev_upper.abs())**2

            sum_lower = (dev_sqr_lower.sum(dim=-1, keepdim=True) - dev_sqr_lower) / dev_sqr_upper.clamp(min=epsilon)
            sqrt_lower = torch.sqrt(1. / n * (sum_lower + 1))
            sum_upper = (dev_sqr_upper.sum(dim=-1, keepdim=True) - dev_sqr_upper) / \
                dev_sqr_lower.clamp(min=epsilon)
            sqrt_upper = torch.sqrt(1. / n * (sum_upper + 1))

            lower = (dev_lower < 0).float() * (-1. / sqrt_lower) + (dev_lower > 0).float() * (1. / sqrt_upper)
            upper = (dev_upper > 0).float() * (1. / sqrt_lower) + (dev_upper < 0).float() * (-1. / sqrt_upper)

            return lower, upper

        x, y = v[0], v[1]
        # The denominator interval must be strictly positive for these
        # monotone endpoint formulas to be valid.
        assert (y[0] > 0).all()
        return x[0] / y[1], x[1] / y[0]

    def _convert_to_mul(self, x, y):
        """Build throwaway BoundReciprocal/BoundMul nodes for x * (1/y)."""
        try:
            reciprocal = BoundReciprocal(self.input_name, self.name + '/reciprocal', self.ori_name, {}, [], 0, None,
                                         self.device)
            mul = BoundMul(self.input_name, self.name + '/mul', self.ori_name, {}, [], 0, None, self.device)
        except:
            # to make it compatible with previous code
            # NOTE(review): a bare except is overly broad — narrowing it to
            # the actual signature error would be safer.
            reciprocal = BoundReciprocal(self.input_name, self.name + '/reciprocal', None, {}, [], 0, None, self.device)
            mul = BoundMul(self.input_name, self.name + '/mul', None, {}, [], 0, None, self.device)
        reciprocal.output_shape = mul.output_shape = self.output_shape
        reciprocal.batch_dim = mul.batch_dim = self.batch_dim

        y_r = copy.copy(y)
        if isinstance(y_r, LinearBound):
            y_r = y_r._replace(lower=1. / y.upper, upper=1. / y.lower)
        else:
            y_r.lower = 1. / y.upper
            y_r.upper = 1. / y.lower
        return reciprocal, mul, y_r

    def infer_batch_dim(self, batch_size, *x):
        return BoundMul.infer_batch_dim(batch_size, *x)
class BoundAdd(Bound):
    """Bound propagation for elementwise addition z = x + y."""

    def __init__(self, input_name, name, ori_name, attr, inputs, output_index, options, device):
        super().__init__(input_name, name, ori_name, attr, inputs, output_index, options, device)
        self.mode = options.get("conv_mode", "matrix")

    @Bound.save_io_shape
    def forward(self, x, y):
        self.x_shape = x.shape
        self.y_shape = y.shape
        return x + y

    def bound_backward(self, last_lA, last_uA, x, y):
        """Addition is linear: pass coefficients through to both inputs."""
        def _bound_oneside(last_A, w):
            if last_A is None:
                return None
            return self.broadcast_backward(last_A, w)

        uA_x = _bound_oneside(last_uA, x)
        uA_y = _bound_oneside(last_uA, y)
        lA_x = _bound_oneside(last_lA, x)
        lA_y = _bound_oneside(last_lA, y)
        return [(lA_x, uA_x), (lA_y, uA_y)], 0, 0

    def bound_forward(self, dim_in, x, y):
        x_lw, x_lb, x_uw, x_ub = Bound.broadcast_forward(dim_in, x, self.output_shape)
        y_lw, y_lb, y_uw, y_ub = Bound.broadcast_forward(dim_in, y, self.output_shape)
        lw, lb = x_lw + y_lw, x_lb + y_lb
        uw, ub = x_uw + y_uw, x_ub + y_ub
        return LinearBound(lw, lb, uw, ub)

    def interval_propagate(self, x, y):
        assert (not isinstance(y, torch.Tensor))

        if Interval.use_relative_bounds(x) and Interval.use_relative_bounds(y):
            return Interval(
                None, None,
                x.nominal + y.nominal,
                x.lower_offset + y.lower_offset,
                x.upper_offset + y.upper_offset)

        # Interval sum: endpoints add componentwise.
        return x[0] + y[0], x[1] + y[1]

    def infer_batch_dim(self, batch_size, *x):
        return BoundMul.infer_batch_dim(batch_size, *x)
class BoundSub(Bound):
    """Bound propagation for elementwise subtraction z = x - y."""

    def __init__(self, input_name, name, ori_name, attr, inputs, output_index, options, device):
        super().__init__(input_name, name, ori_name, attr, inputs, output_index, options, device)

    @Bound.save_io_shape
    def forward(self, x, y):
        self.x_shape = x.shape
        self.y_shape = y.shape
        return x - y

    def bound_backward(self, last_lA, last_uA, x, y):
        """Linear: pass coefficients through, negated for the subtrahend."""
        def _bound_oneside(last_A, w, sign=-1):
            if last_A is None:
                return None
            return self.broadcast_backward(sign * last_A, w)

        uA_x = _bound_oneside(last_uA, x, sign=1)
        uA_y = _bound_oneside(last_uA, y, sign=-1)
        lA_x = _bound_oneside(last_lA, x, sign=1)
        lA_y = _bound_oneside(last_lA, y, sign=-1)
        return [(lA_x, uA_x), (lA_y, uA_y)], 0, 0

    def bound_forward(self, dim_in, x, y):
        x_lw, x_lb, x_uw, x_ub = Bound.broadcast_forward(dim_in, x, self.output_shape)
        y_lw, y_lb, y_uw, y_ub = Bound.broadcast_forward(dim_in, y, self.output_shape)
        # Lower side of x pairs with the upper side of y, and vice versa.
        lw, lb = x_lw - y_uw, x_lb - y_ub
        uw, ub = x_uw - y_lw, x_ub - y_lb
        return LinearBound(lw, lb, uw, ub)

    def interval_propagate(self, x, y):
        if Interval.use_relative_bounds(x) and Interval.use_relative_bounds(y):
            return Interval(
                None, None,
                x.nominal - y.nominal,
                x.lower_offset - y.upper_offset,
                x.upper_offset - y.lower_offset)

        # Interval difference: [x_l - y_u, x_u - y_l].
        return x[0] - y[1], x[1] - y[0]

    def infer_batch_dim(self, batch_size, *x):
        return BoundMul.infer_batch_dim(batch_size, *x)
class BoundEqual(Bound):
    """Elementwise equality comparison; no relaxation is defined for it."""

    def __init__(self, input_name, name, ori_name, attr, inputs, output_index, options, device):
        super().__init__(input_name, name, ori_name, attr, inputs, output_index, options, device)

    @Bound.save_io_shape
    def forward(self, x, y):
        return x == y

    def infer_batch_dim(self, batch_size, *x):
        return BoundMul.infer_batch_dim(batch_size, *x)
# Copyright 2001 by Tarjei Mikkelsen. All rights reserved.
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
"""
This module provides code to convert Bio.Pathway.System objects into
a text file that can be used as input for the MetaTool program.
For more information on MetaTool, please refer to:
http://www2.bioinf.mdc-berlin.de/metabolic/metatool/
"""
from Bio.Pathway import Reaction
from Bio.Pathway import System
# NOTE: Change enzyme name creation to allow for a function to be passed
# in that can be applied to r.data to extract a name
def system_to_metatool(system, metext=None, metint=None, generate_names=1):
    """Convert a Bio.Pathway.System object to a MetaTool input string.

    Note that to be a valid input string, the enzyme names of the reactions
    in the system must conform to the MetaTool requirements.

    Enzyme names are automatically generated from the catalysts attribute of
    each reaction using the following scheme:

        enzyme_name = "_".join([str(x[0]) for x in r.catalysts])

    If an enzyme name has already been used, a positive integer will be
    appended to this name to meet the MetaTool input requirements. If this
    behaviour is undesired, set the optional parameter 'generate_names' to
    false. All enzyme names will then be 'E_x', where x is a unique integer.

    The optional parameters metext and metint can be used to specify the
    external and internal metabolites according to the following rules:

    1. If metext is set, the species in it will be considered external.
       All other species will be considered internal.
    2. Otherwise, if metint is set, the species in it will be considered
       internal. All other species will be considered external.
    3. Otherwise, all species will be considered external.

    If specified, metext and metint must not contain species that are not
    contained in the input system.

    Raises TypeError if system is not a System, and ValueError if metext
    or metint mention an unknown metabolite.
    """
    # None defaults instead of mutable [] defaults (shared across calls).
    metext = [] if metext is None else metext
    metint = [] if metint is None else metint
    if not isinstance(system, System):
        raise TypeError("Input is not a System object")
    # Build the ENZREV and ENZIRREV strings:
    enzrev = []     # names of reversible enzymes
    enzirrev = []   # names of irreversible enzymes
    enz_names = {}  # base name -> times reused, for uniquifying collisions
    enz_map = {}    # unique enzyme name -> Reaction
    for r in system.reactions():
        # build a unique enzyme name
        enz_name = ""
        if r.catalysts and generate_names:
            enz_name += "_".join([str(x[0]) for x in r.catalysts])
        else:
            enz_name += "E"
        if enz_name in enz_names:
            # name collision: append a counter to keep names unique
            enz_names[enz_name] += 1
            enz_name += "_" + str(enz_names[enz_name])
        else:
            enz_names[enz_name] = 0
        # keep (name, reaction) pair for later
        enz_map[enz_name] = r
        # add to the corresponding list
        if r.reversible:
            enzrev.append(enz_name)
        else:
            enzirrev.append(enz_name)
    # create the actual strings:
    enzrev_str = "-ENZREV\n" + " ".join(enzrev) + "\n"
    enzirrev_str = "-ENZIRREV\n" + " ".join(enzirrev) + "\n"
    # Build the METINT and METEXT strings:
    # copy, so classifying species below does not mutate the system's list
    metabolites = list(system.species())
    metint_str = "-METINT\n"
    metext_str = "-METEXT\n"
    if metext:
        # rule 1: metext wins; whatever is not listed there is internal
        for m in metext:
            if m in metabolites:
                metabolites.remove(m)
            else:
                raise ValueError("metext contains an unknown metabolite")
        for m in metint:
            if m not in metabolites:
                raise ValueError("metint contains an unknown metabolite")
        metext_str += " ".join([str(m) for m in metext]) + "\n"
        metint_str += " ".join([str(m) for m in metabolites]) + "\n"
    elif metint:
        # rule 2: metint given; whatever is not listed there is external
        for m in metint:
            if m in metabolites:
                metabolites.remove(m)
            else:
                raise ValueError("metint contains an unknown metabolite")
        for m in metext:
            if m not in metabolites:
                raise ValueError("metext contains an unknown metabolite")
        metint_str += " ".join([str(m) for m in metint]) + "\n"
        metext_str += " ".join([str(m) for m in metabolites]) + "\n"
    else:
        # rule 3: no classification given - treat every species as external
        metext_str += " ".join([str(m) for m in metabolites]) + "\n"
        metint_str += "\n"
    # Build the CAT string
    cat_str = "-CAT\n"
    for e in enz_map.keys():
        r = enz_map[e]
        cat_str += e + " : "
        # change the reaction string rep to the MetaTool format
        reaction_str = str(r)
        reaction_str = reaction_str.replace("-->", "=")
        reaction_str = reaction_str.replace("<=>", "=")
        cat_str += reaction_str + " .\n"
    # Return the complete MetaTool input string:
    return (enzrev_str + "\n"
            + enzirrev_str + "\n"
            + metint_str + "\n"
            + metext_str + "\n"
            + cat_str + "\n")
|
#
# PySNMP MIB module MBG-SNMP-XPT-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/MBG-SNMP-XPT-MIB
# Produced by pysmi-0.3.4 at Wed May 1 14:10:49 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
# Runtime symbol imports. ``mibBuilder`` is injected into this module's
# namespace by pysnmp when the compiled MIB is loaded; it is not defined here.
Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsUnion, SingleValueConstraint, ValueSizeConstraint, ConstraintsIntersection, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsUnion", "SingleValueConstraint", "ValueSizeConstraint", "ConstraintsIntersection", "ValueRangeConstraint")
mbgSnmpRoot, = mibBuilder.importSymbols("MBG-SNMP-ROOT-MIB", "mbgSnmpRoot")
NotificationGroup, ObjectGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ObjectGroup", "ModuleCompliance")
IpAddress, MibIdentifier, Integer32, Bits, ModuleIdentity, NotificationType, iso, TimeTicks, ObjectIdentity, Counter64, Unsigned32, Gauge32, MibScalar, MibTable, MibTableRow, MibTableColumn, Counter32 = mibBuilder.importSymbols("SNMPv2-SMI", "IpAddress", "MibIdentifier", "Integer32", "Bits", "ModuleIdentity", "NotificationType", "iso", "TimeTicks", "ObjectIdentity", "Counter64", "Unsigned32", "Gauge32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Counter32")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
# Module identity rooted at the Meinberg enterprise OID 1.3.6.1.4.1.5597.10.
mbgXPT = ModuleIdentity((1, 3, 6, 1, 4, 1, 5597, 10))
mbgXPT.setRevisions(('2012-01-25 00:00', '2006-01-20 00:00',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    if mibBuilder.loadTexts: mbgXPT.setRevisionsDescriptions(('Update to new format referencing MBG-SNMP-ROOT-MIB', 'Covering LAN-XPT and SCU-XPT modules from Meinberg',))
if mibBuilder.loadTexts: mbgXPT.setLastUpdated('201201250000Z')
if mibBuilder.loadTexts: mbgXPT.setOrganization('www.meinberg.de')
if mibBuilder.loadTexts: mbgXPT.setContactInfo('postal: Meinberg Funkuhren Auf der Landwehr 22 31812 Bad Pyrmont Germany email: info@meinberg.de')
if mibBuilder.loadTexts: mbgXPT.setDescription('Top-level infrastructure of the MBG-SNMP project enterprise MIB tree')
# Subtree identifiers: GPS refclock 1 (.2), GPS refclock 2 (.3),
# switchover card SCU (.4) and the trap subtree (.5).
mbgGPSRefclock1 = MibIdentifier((1, 3, 6, 1, 4, 1, 5597, 10, 2))
mbgGPSRefclock2 = MibIdentifier((1, 3, 6, 1, 4, 1, 5597, 10, 3))
mbgSCU = MibIdentifier((1, 3, 6, 1, 4, 1, 5597, 10, 4))
mbgXPTTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 5597, 10, 5))
# Read-only scalars for GPS refclock 1 (subtree ...5597.10.2).
mbgGPSRefclock1Type = MibScalar((1, 3, 6, 1, 4, 1, 5597, 10, 2, 1), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mbgGPSRefclock1Type.setStatus('current')
if mibBuilder.loadTexts: mbgGPSRefclock1Type.setDescription('Type of clock')
mbgGPSRefclock1TypeVal = MibScalar((1, 3, 6, 1, 4, 1, 5597, 10, 2, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mbgGPSRefclock1TypeVal.setStatus('current')
if mibBuilder.loadTexts: mbgGPSRefclock1TypeVal.setDescription('Type of refclock as value')
mbgGPSRefclock1Mode = MibScalar((1, 3, 6, 1, 4, 1, 5597, 10, 2, 3), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mbgGPSRefclock1Mode.setStatus('current')
if mibBuilder.loadTexts: mbgGPSRefclock1Mode.setDescription('current Mode of refclock')
mbgGPSRefclock1ModeVal = MibScalar((1, 3, 6, 1, 4, 1, 5597, 10, 2, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mbgGPSRefclock1ModeVal.setStatus('current')
if mibBuilder.loadTexts: mbgGPSRefclock1ModeVal.setDescription('current Mode of refclock as value')
mbgGPSRef1GpsState = MibScalar((1, 3, 6, 1, 4, 1, 5597, 10, 2, 5), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mbgGPSRef1GpsState.setStatus('current')
if mibBuilder.loadTexts: mbgGPSRef1GpsState.setDescription('current State of GPS refclock ')
mbgGPSRef1GpsStateVal = MibScalar((1, 3, 6, 1, 4, 1, 5597, 10, 2, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mbgGPSRef1GpsStateVal.setStatus('current')
if mibBuilder.loadTexts: mbgGPSRef1GpsStateVal.setDescription('current State of GPS refclock as value')
mbgGPSRef1GpsPosition = MibScalar((1, 3, 6, 1, 4, 1, 5597, 10, 2, 7), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mbgGPSRef1GpsPosition.setStatus('current')
if mibBuilder.loadTexts: mbgGPSRef1GpsPosition.setDescription('current Position of GPS refclock ')
mbgGPSRef1GpsSatellites = MibScalar((1, 3, 6, 1, 4, 1, 5597, 10, 2, 8), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mbgGPSRef1GpsSatellites.setStatus('current')
if mibBuilder.loadTexts: mbgGPSRef1GpsSatellites.setDescription('current Satellites in view and good of GPS refclock ')
mbgGPSRef1GpsSatellitesGood = MibScalar((1, 3, 6, 1, 4, 1, 5597, 10, 2, 9), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mbgGPSRef1GpsSatellitesGood.setStatus('current')
if mibBuilder.loadTexts: mbgGPSRef1GpsSatellitesGood.setDescription('current good Satellites of GPS refclock as value')
mbgGPSRef1GpsSatellitesInView = MibScalar((1, 3, 6, 1, 4, 1, 5597, 10, 2, 10), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mbgGPSRef1GpsSatellitesInView.setStatus('current')
if mibBuilder.loadTexts: mbgGPSRef1GpsSatellitesInView.setDescription('current satellites in view of GPS refclock as value')
# Read-only scalars for GPS refclock 2 (subtree ...5597.10.3); mirrors refclock 1.
mbgGPSRefclock2Type = MibScalar((1, 3, 6, 1, 4, 1, 5597, 10, 3, 1), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mbgGPSRefclock2Type.setStatus('current')
if mibBuilder.loadTexts: mbgGPSRefclock2Type.setDescription('Type of clock')
mbgGPSRefclock2TypeVal = MibScalar((1, 3, 6, 1, 4, 1, 5597, 10, 3, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mbgGPSRefclock2TypeVal.setStatus('current')
if mibBuilder.loadTexts: mbgGPSRefclock2TypeVal.setDescription('Type of refclock as value')
mbgGPSRefclock2Mode = MibScalar((1, 3, 6, 1, 4, 1, 5597, 10, 3, 3), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mbgGPSRefclock2Mode.setStatus('current')
if mibBuilder.loadTexts: mbgGPSRefclock2Mode.setDescription('current Mode of refclock')
mbgGPSRefclock2ModeVal = MibScalar((1, 3, 6, 1, 4, 1, 5597, 10, 3, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mbgGPSRefclock2ModeVal.setStatus('current')
if mibBuilder.loadTexts: mbgGPSRefclock2ModeVal.setDescription('current Mode of refclock as value')
mbgGPSRef2GpsState = MibScalar((1, 3, 6, 1, 4, 1, 5597, 10, 3, 5), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mbgGPSRef2GpsState.setStatus('current')
if mibBuilder.loadTexts: mbgGPSRef2GpsState.setDescription('current State of GPS refclock ')
mbgGPSRef2GpsStateVal = MibScalar((1, 3, 6, 1, 4, 1, 5597, 10, 3, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mbgGPSRef2GpsStateVal.setStatus('current')
if mibBuilder.loadTexts: mbgGPSRef2GpsStateVal.setDescription('current State of GPS refclock as value')
mbgGPSRef2GpsPosition = MibScalar((1, 3, 6, 1, 4, 1, 5597, 10, 3, 7), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mbgGPSRef2GpsPosition.setStatus('current')
if mibBuilder.loadTexts: mbgGPSRef2GpsPosition.setDescription('current Position of GPS refclock ')
mbgGPSRef2GpsSatellites = MibScalar((1, 3, 6, 1, 4, 1, 5597, 10, 3, 8), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mbgGPSRef2GpsSatellites.setStatus('current')
if mibBuilder.loadTexts: mbgGPSRef2GpsSatellites.setDescription('current Satellites in view and good of GPS refclock ')
mbgGPSRef2GpsSatellitesGood = MibScalar((1, 3, 6, 1, 4, 1, 5597, 10, 3, 9), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mbgGPSRef2GpsSatellitesGood.setStatus('current')
if mibBuilder.loadTexts: mbgGPSRef2GpsSatellitesGood.setDescription('current good Satellites of GPS refclock as value')
mbgGPSRef2GpsSatellitesInView = MibScalar((1, 3, 6, 1, 4, 1, 5597, 10, 3, 10), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mbgGPSRef2GpsSatellitesInView.setStatus('current')
if mibBuilder.loadTexts: mbgGPSRef2GpsSatellitesInView.setDescription('current satellites in view of GPS refclock as value')
# Read-only scalars for the SCU switchover card (subtree ...5597.10.4).
mbgSCUType = MibScalar((1, 3, 6, 1, 4, 1, 5597, 10, 4, 1), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mbgSCUType.setStatus('current')
if mibBuilder.loadTexts: mbgSCUType.setDescription('Type of clock')
mbgSCUTypeVal = MibScalar((1, 3, 6, 1, 4, 1, 5597, 10, 4, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mbgSCUTypeVal.setStatus('current')
if mibBuilder.loadTexts: mbgSCUTypeVal.setDescription('Type of Switchcard as value')
mbgSCUMaster = MibScalar((1, 3, 6, 1, 4, 1, 5597, 10, 4, 3), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mbgSCUMaster.setStatus('current')
if mibBuilder.loadTexts: mbgSCUMaster.setDescription('current selected masterclock of switchcard')
mbgSCUMasterVal = MibScalar((1, 3, 6, 1, 4, 1, 5597, 10, 4, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mbgSCUMasterVal.setStatus('current')
if mibBuilder.loadTexts: mbgSCUMasterVal.setDescription('current selected masterclock of switchcard as value')
mbgSCUMasterselect = MibScalar((1, 3, 6, 1, 4, 1, 5597, 10, 4, 5), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mbgSCUMasterselect.setStatus('current')
if mibBuilder.loadTexts: mbgSCUMasterselect.setDescription('current masterselect mode of GPS Switchcard ')
mbgSCUMasterselectVal = MibScalar((1, 3, 6, 1, 4, 1, 5597, 10, 4, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mbgSCUMasterselectVal.setStatus('current')
if mibBuilder.loadTexts: mbgSCUMasterselectVal.setDescription('current masterselect mode of GPS switchcard as value')
mbgSCUTimeSync1 = MibScalar((1, 3, 6, 1, 4, 1, 5597, 10, 4, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mbgSCUTimeSync1.setStatus('current')
if mibBuilder.loadTexts: mbgSCUTimeSync1.setDescription('current time sync status of clock 1')
mbgSCUTimeSync2 = MibScalar((1, 3, 6, 1, 4, 1, 5597, 10, 4, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mbgSCUTimeSync2.setStatus('current')
if mibBuilder.loadTexts: mbgSCUTimeSync2.setDescription('current time sync status of clock 2')
mbgSCUTimelimitError = MibScalar((1, 3, 6, 1, 4, 1, 5597, 10, 4, 9), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mbgSCUTimelimitError.setStatus('current')
if mibBuilder.loadTexts: mbgSCUTimelimitError.setDescription('current state of time limit alarm (not used)')
mbgSCUDisableOutputs = MibScalar((1, 3, 6, 1, 4, 1, 5597, 10, 4, 10), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mbgSCUDisableOutputs.setStatus('current')
if mibBuilder.loadTexts: mbgSCUDisableOutputs.setDescription('current state of outputs (0=outputs disabled, 1=outputs enabled)')
mbgSCUSelectedInput = MibScalar((1, 3, 6, 1, 4, 1, 5597, 10, 4, 11), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mbgSCUSelectedInput.setStatus('current')
if mibBuilder.loadTexts: mbgSCUSelectedInput.setDescription('current selected clock for status queries as a string')
mbgSCUSelectedInputVal = MibScalar((1, 3, 6, 1, 4, 1, 5597, 10, 4, 12), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mbgSCUSelectedInputVal.setStatus('current')
if mibBuilder.loadTexts: mbgSCUSelectedInputVal.setDescription('current selected clock for status queries as an integer')
mbgSCUACOMode = MibScalar((1, 3, 6, 1, 4, 1, 5597, 10, 4, 13), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mbgSCUACOMode.setStatus('current')
if mibBuilder.loadTexts: mbgSCUACOMode.setDescription('current state of ACO (access control override)')
mbgSCUPSUStatus = MibScalar((1, 3, 6, 1, 4, 1, 5597, 10, 4, 14), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mbgSCUPSUStatus.setStatus('current')
if mibBuilder.loadTexts: mbgSCUPSUStatus.setDescription('current status of power supply units as a string')
mbgSCUPSU1Status = MibScalar((1, 3, 6, 1, 4, 1, 5597, 10, 4, 15), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mbgSCUPSU1Status.setStatus('current')
if mibBuilder.loadTexts: mbgSCUPSU1Status.setDescription('current status of power supply unit 1')
mbgSCUPSU2Status = MibScalar((1, 3, 6, 1, 4, 1, 5597, 10, 4, 16), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mbgSCUPSU2Status.setStatus('current')
if mibBuilder.loadTexts: mbgSCUPSU2Status.setDescription('current status of power supply unit 2')
# Notification (trap) definitions (subtree ...5597.10.5).
mbgGPSTrapColdBoot = NotificationType((1, 3, 6, 1, 4, 1, 5597, 10, 5, 1))
if mibBuilder.loadTexts: mbgGPSTrapColdBoot.setStatus('current')
if mibBuilder.loadTexts: mbgGPSTrapColdBoot.setDescription('trap to be sent when Refclock is in Cold Boot mode')
mbgGPSTrapWarmBoot = NotificationType((1, 3, 6, 1, 4, 1, 5597, 10, 5, 2))
if mibBuilder.loadTexts: mbgGPSTrapWarmBoot.setStatus('current')
if mibBuilder.loadTexts: mbgGPSTrapWarmBoot.setDescription('trap to be sent when Refclock is in Warm Boot mode')
mbgGPSNavSolved = NotificationType((1, 3, 6, 1, 4, 1, 5597, 10, 5, 3))
if mibBuilder.loadTexts: mbgGPSNavSolved.setStatus('current')
if mibBuilder.loadTexts: mbgGPSNavSolved.setDescription('trap to be sent when Refclock calculated its actual position')
mbgGPSTrapReceiverNotResponding = NotificationType((1, 3, 6, 1, 4, 1, 5597, 10, 5, 4))
if mibBuilder.loadTexts: mbgGPSTrapReceiverNotResponding.setStatus('current')
if mibBuilder.loadTexts: mbgGPSTrapReceiverNotResponding.setDescription('trap to be sent when GPS receiver is not responding ')
mbgGPSTrapReceiverNotSync = NotificationType((1, 3, 6, 1, 4, 1, 5597, 10, 5, 5))
if mibBuilder.loadTexts: mbgGPSTrapReceiverNotSync.setStatus('current')
if mibBuilder.loadTexts: mbgGPSTrapReceiverNotSync.setDescription('trap to be sent when GPS receiver is not synchronised ')
mbgGPSTrapAntennaFaulty = NotificationType((1, 3, 6, 1, 4, 1, 5597, 10, 5, 6))
if mibBuilder.loadTexts: mbgGPSTrapAntennaFaulty.setStatus('current')
if mibBuilder.loadTexts: mbgGPSTrapAntennaFaulty.setDescription('trap to be sent when connection to antenna is broken ')
mbgGPSTrapAntennaReconnect = NotificationType((1, 3, 6, 1, 4, 1, 5597, 10, 5, 7))
if mibBuilder.loadTexts: mbgGPSTrapAntennaReconnect.setStatus('current')
if mibBuilder.loadTexts: mbgGPSTrapAntennaReconnect.setDescription('trap to be sent when antenna has been reconnected ')
mbgGPSTrapLANXPTBoot = NotificationType((1, 3, 6, 1, 4, 1, 5597, 10, 5, 8))
if mibBuilder.loadTexts: mbgGPSTrapLANXPTBoot.setStatus('current')
if mibBuilder.loadTexts: mbgGPSTrapLANXPTBoot.setDescription('trap to be sent when LANXPT has been rebooted')
mbgGPSTrapLeapSecondAnnounced = NotificationType((1, 3, 6, 1, 4, 1, 5597, 10, 5, 9))
if mibBuilder.loadTexts: mbgGPSTrapLeapSecondAnnounced.setStatus('current')
if mibBuilder.loadTexts: mbgGPSTrapLeapSecondAnnounced.setDescription('trap to be sent when a leap second has been announced ')
mbgGPSTrapMasterclockSwitchover = NotificationType((1, 3, 6, 1, 4, 1, 5597, 10, 5, 10))
if mibBuilder.loadTexts: mbgGPSTrapMasterclockSwitchover.setStatus('current')
if mibBuilder.loadTexts: mbgGPSTrapMasterclockSwitchover.setDescription('trap to be sent when masterclock changes ')
mbgGPSTrapPowerSupplyFailure = NotificationType((1, 3, 6, 1, 4, 1, 5597, 10, 5, 11))
if mibBuilder.loadTexts: mbgGPSTrapPowerSupplyFailure.setStatus('current')
if mibBuilder.loadTexts: mbgGPSTrapPowerSupplyFailure.setDescription('trap to be sent when a power supply unit fails')
mbgGPSTrapPowerSupplyOK = NotificationType((1, 3, 6, 1, 4, 1, 5597, 10, 5, 12))
if mibBuilder.loadTexts: mbgGPSTrapPowerSupplyOK.setStatus('current')
if mibBuilder.loadTexts: mbgGPSTrapPowerSupplyOK.setDescription('trap to be sent when a power supply unit restores operation')
# Test trap uses the reserved sub-id 99.
mbgGPSTrapTestNotification = NotificationType((1, 3, 6, 1, 4, 1, 5597, 10, 5, 99))
if mibBuilder.loadTexts: mbgGPSTrapTestNotification.setStatus('current')
if mibBuilder.loadTexts: mbgGPSTrapTestNotification.setDescription('trap to be sent when a test notification has been requested ')
# Conformance: compliance statement plus object/notification groups, then
# the module's symbol exports back into the MIB builder.
mbgXPTConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 5597, 10, 90))
mbgXPTCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 5597, 10, 90, 1))
mbgXPTGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 5597, 10, 90, 2))
mbgXPTCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 5597, 10, 90, 1, 1)).setObjects(("MBG-SNMP-XPT-MIB", "mbgXPTObjectsGroup"), ("MBG-SNMP-XPT-MIB", "mbgXPTTrapsGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    mbgXPTCompliance = mbgXPTCompliance.setStatus('current')
if mibBuilder.loadTexts: mbgXPTCompliance.setDescription('The compliance statement for SNMP entities which implement version 2 of the XPT MIB')
mbgXPTObjectsGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 5597, 10, 90, 2, 1)).setObjects(("MBG-SNMP-XPT-MIB", "mbgGPSRefclock1Type"), ("MBG-SNMP-XPT-MIB", "mbgGPSRefclock1TypeVal"), ("MBG-SNMP-XPT-MIB", "mbgGPSRefclock1Mode"), ("MBG-SNMP-XPT-MIB", "mbgGPSRefclock1ModeVal"), ("MBG-SNMP-XPT-MIB", "mbgGPSRef1GpsState"), ("MBG-SNMP-XPT-MIB", "mbgGPSRef1GpsStateVal"), ("MBG-SNMP-XPT-MIB", "mbgGPSRef1GpsPosition"), ("MBG-SNMP-XPT-MIB", "mbgGPSRef1GpsSatellites"), ("MBG-SNMP-XPT-MIB", "mbgGPSRef1GpsSatellitesGood"), ("MBG-SNMP-XPT-MIB", "mbgGPSRef1GpsSatellitesInView"), ("MBG-SNMP-XPT-MIB", "mbgGPSRefclock2Type"), ("MBG-SNMP-XPT-MIB", "mbgGPSRefclock2TypeVal"), ("MBG-SNMP-XPT-MIB", "mbgGPSRefclock2Mode"), ("MBG-SNMP-XPT-MIB", "mbgGPSRefclock2ModeVal"), ("MBG-SNMP-XPT-MIB", "mbgGPSRef2GpsState"), ("MBG-SNMP-XPT-MIB", "mbgGPSRef2GpsStateVal"), ("MBG-SNMP-XPT-MIB", "mbgGPSRef2GpsPosition"), ("MBG-SNMP-XPT-MIB", "mbgGPSRef2GpsSatellites"), ("MBG-SNMP-XPT-MIB", "mbgGPSRef2GpsSatellitesGood"), ("MBG-SNMP-XPT-MIB", "mbgGPSRef2GpsSatellitesInView"), ("MBG-SNMP-XPT-MIB", "mbgSCUType"), ("MBG-SNMP-XPT-MIB", "mbgSCUTypeVal"), ("MBG-SNMP-XPT-MIB", "mbgSCUMaster"), ("MBG-SNMP-XPT-MIB", "mbgSCUMasterVal"), ("MBG-SNMP-XPT-MIB", "mbgSCUMasterselect"), ("MBG-SNMP-XPT-MIB", "mbgSCUMasterselectVal"), ("MBG-SNMP-XPT-MIB", "mbgSCUTimeSync1"), ("MBG-SNMP-XPT-MIB", "mbgSCUTimeSync2"), ("MBG-SNMP-XPT-MIB", "mbgSCUTimelimitError"), ("MBG-SNMP-XPT-MIB", "mbgSCUDisableOutputs"), ("MBG-SNMP-XPT-MIB", "mbgSCUSelectedInput"), ("MBG-SNMP-XPT-MIB", "mbgSCUSelectedInputVal"), ("MBG-SNMP-XPT-MIB", "mbgSCUACOMode"), ("MBG-SNMP-XPT-MIB", "mbgSCUPSUStatus"), ("MBG-SNMP-XPT-MIB", "mbgSCUPSU1Status"), ("MBG-SNMP-XPT-MIB", "mbgSCUPSU2Status"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    mbgXPTObjectsGroup = mbgXPTObjectsGroup.setStatus('current')
if mibBuilder.loadTexts: mbgXPTObjectsGroup.setDescription('The collection of objects for the MBG XPT MIB')
mbgXPTTrapsGroup = NotificationGroup((1, 3, 6, 1, 4, 1, 5597, 10, 90, 2, 2)).setObjects(("MBG-SNMP-XPT-MIB", "mbgGPSTrapColdBoot"), ("MBG-SNMP-XPT-MIB", "mbgGPSTrapWarmBoot"), ("MBG-SNMP-XPT-MIB", "mbgGPSNavSolved"), ("MBG-SNMP-XPT-MIB", "mbgGPSTrapReceiverNotResponding"), ("MBG-SNMP-XPT-MIB", "mbgGPSTrapReceiverNotSync"), ("MBG-SNMP-XPT-MIB", "mbgGPSTrapAntennaFaulty"), ("MBG-SNMP-XPT-MIB", "mbgGPSTrapAntennaReconnect"), ("MBG-SNMP-XPT-MIB", "mbgGPSTrapLANXPTBoot"), ("MBG-SNMP-XPT-MIB", "mbgGPSTrapLeapSecondAnnounced"), ("MBG-SNMP-XPT-MIB", "mbgGPSTrapMasterclockSwitchover"), ("MBG-SNMP-XPT-MIB", "mbgGPSTrapPowerSupplyFailure"), ("MBG-SNMP-XPT-MIB", "mbgGPSTrapPowerSupplyOK"), ("MBG-SNMP-XPT-MIB", "mbgGPSTrapTestNotification"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    mbgXPTTrapsGroup = mbgXPTTrapsGroup.setStatus('current')
if mibBuilder.loadTexts: mbgXPTTrapsGroup.setDescription('The collection of traps for the MBG XPT MIB')
mibBuilder.exportSymbols("MBG-SNMP-XPT-MIB", mbgGPSRef1GpsSatellites=mbgGPSRef1GpsSatellites, mbgSCUTimelimitError=mbgSCUTimelimitError, mbgGPSTrapLANXPTBoot=mbgGPSTrapLANXPTBoot, mbgSCUMaster=mbgSCUMaster, mbgGPSRef1GpsPosition=mbgGPSRef1GpsPosition, mbgGPSTrapTestNotification=mbgGPSTrapTestNotification, mbgXPT=mbgXPT, mbgSCUPSU2Status=mbgSCUPSU2Status, mbgGPSTrapMasterclockSwitchover=mbgGPSTrapMasterclockSwitchover, mbgXPTTraps=mbgXPTTraps, mbgGPSTrapColdBoot=mbgGPSTrapColdBoot, mbgXPTObjectsGroup=mbgXPTObjectsGroup, mbgSCUPSU1Status=mbgSCUPSU1Status, mbgGPSRefclock1=mbgGPSRefclock1, mbgSCUTimeSync1=mbgSCUTimeSync1, mbgXPTConformance=mbgXPTConformance, mbgXPTGroups=mbgXPTGroups, mbgGPSRefclock2TypeVal=mbgGPSRefclock2TypeVal, mbgGPSTrapAntennaFaulty=mbgGPSTrapAntennaFaulty, mbgGPSRef1GpsState=mbgGPSRef1GpsState, mbgGPSRefclock2=mbgGPSRefclock2, mbgXPTCompliance=mbgXPTCompliance, mbgGPSTrapWarmBoot=mbgGPSTrapWarmBoot, mbgGPSRef2GpsStateVal=mbgGPSRef2GpsStateVal, mbgGPSRef2GpsSatellitesGood=mbgGPSRef2GpsSatellitesGood, mbgSCUPSUStatus=mbgSCUPSUStatus, mbgSCUMasterselectVal=mbgSCUMasterselectVal, mbgSCUTypeVal=mbgSCUTypeVal, mbgGPSRefclock2Type=mbgGPSRefclock2Type, mbgSCUMasterselect=mbgSCUMasterselect, mbgXPTCompliances=mbgXPTCompliances, mbgGPSTrapReceiverNotSync=mbgGPSTrapReceiverNotSync, mbgGPSRef1GpsSatellitesGood=mbgGPSRef1GpsSatellitesGood, mbgSCUTimeSync2=mbgSCUTimeSync2, mbgGPSRefclock2ModeVal=mbgGPSRefclock2ModeVal, mbgGPSRefclock1TypeVal=mbgGPSRefclock1TypeVal, mbgGPSTrapReceiverNotResponding=mbgGPSTrapReceiverNotResponding, mbgGPSTrapAntennaReconnect=mbgGPSTrapAntennaReconnect, mbgGPSRef2GpsState=mbgGPSRef2GpsState, mbgGPSRef2GpsSatellites=mbgGPSRef2GpsSatellites, mbgSCUACOMode=mbgSCUACOMode, mbgGPSRef2GpsPosition=mbgGPSRef2GpsPosition, mbgGPSRefclock1Type=mbgGPSRefclock1Type, mbgGPSNavSolved=mbgGPSNavSolved, mbgGPSTrapPowerSupplyFailure=mbgGPSTrapPowerSupplyFailure, mbgSCUSelectedInputVal=mbgSCUSelectedInputVal, 
mbgGPSRefclock1ModeVal=mbgGPSRefclock1ModeVal, mbgSCU=mbgSCU, mbgSCUType=mbgSCUType, mbgGPSRefclock2Mode=mbgGPSRefclock2Mode, mbgSCUMasterVal=mbgSCUMasterVal, mbgGPSTrapLeapSecondAnnounced=mbgGPSTrapLeapSecondAnnounced, mbgXPTTrapsGroup=mbgXPTTrapsGroup, PYSNMP_MODULE_ID=mbgXPT, mbgGPSRef1GpsSatellitesInView=mbgGPSRef1GpsSatellitesInView, mbgSCUDisableOutputs=mbgSCUDisableOutputs, mbgGPSTrapPowerSupplyOK=mbgGPSTrapPowerSupplyOK, mbgGPSRefclock1Mode=mbgGPSRefclock1Mode, mbgSCUSelectedInput=mbgSCUSelectedInput, mbgGPSRef2GpsSatellitesInView=mbgGPSRef2GpsSatellitesInView, mbgGPSRef1GpsStateVal=mbgGPSRef1GpsStateVal)
|
import itertools
import csv
import os
import requests
import click
from importers import CSVImportCommand
from utils import transform_polygons_to_multipolygon
import hmrc_addressbase
class CambridgeLandsImportCommand(CSVImportCommand):
    """Import Cambridge NDR (vacant-property) CSV rows as Locations.

    For each vacant row this cross-references three HTTP services:
    the Land Registry API (title polygons by UPRN or by lat/long),
    the VOA API (floor space by billing-authority reference) and the
    local hmrc_addressbase lookup (address/location by UPRN). It can
    also write a diagnostic CSV of the vacant sites it saw.
    """
    def __init__(
        self, file_names, api_url, token,
        lr_api_url, lr_token,
        voa_api_url, voa_token,
        skip_header=True, encoding=None,
        filter_uprn=None, vacant_csv_filename=None,
    ):
        # Target Locations API plus the two lookup services and their tokens.
        self.api_url = api_url
        self.token = token
        self.lr_api_url = lr_api_url
        self.lr_token = lr_token
        self.voa_api_url = voa_api_url
        self.voa_token = voa_token
        self.file_names = file_names
        self.skip_header = skip_header
        self.encoding = encoding
        # When set, process_row ignores every row with a different UPRN.
        self.filter_uprn = filter_uprn
        if vacant_csv_filename:
            # Diagnostic CSV of vacant sites; file is closed in postprocess().
            self.vacant_csv_filename = vacant_csv_filename
            self.vacant_csv_file = open(vacant_csv_filename, 'w', newline='')
            self.vacant_header = VACANT_CSV_HEADER
            self.vacant_csv = csv.DictWriter(
                self.vacant_csv_file, fieldnames=self.vacant_header,
                dialect='excel')
            self.vacant_csv.writeheader()
        else:
            self.vacant_csv = None
            self.vacant_csv_file = None
        # URL building below relies on the trailing slash.
        assert self.lr_api_url.endswith('/api/')
    def get_lr_data(self, uprn):
        """Fetch Land Registry data for a UPRN; None when the LR API 404s."""
        url = '{0}uprns/{1}/'.format(self.lr_api_url, uprn)
        headers = {'Authorization': 'Token {0}'.format(self.lr_token)}
        response = requests.get(url, headers=headers)
        if response.status_code == 404:
            return None
        response.raise_for_status()
        return response.json()
    def get_lr_polygons_from_point(self, lat, long):
        """Query LR polygons covering a point.

        Unlike get_lr_data, a 404 here is not treated specially - any
        error status raises.
        """
        url = '{url}polygons-from-point?lat={lat}&long={long}'.format(
            url=self.lr_api_url, lat=lat, long=long)
        headers = {'Authorization': 'Token {0}'.format(self.lr_token)}
        response = requests.get(url, headers=headers)
        response.raise_for_status()
        return response.json()
    def get_voa_data(self, ba_ref):
        """Fetch VOA data for a billing-authority ref; None on 404."""
        url = '{0}{1}'.format(self.voa_api_url, ba_ref)
        headers = {'Authorization': 'Token {0}'.format(self.voa_token)}
        response = requests.get(url, headers=headers)
        if response.status_code == 404:
            return None
        response.raise_for_status()
        return response.json()
    def process_row(self, row):
        """Process one CSV row; returns a short status string.

        Expected columns (0-based): 0 authority, 3 UPRN, 4 BA ref,
        6 name, 7 address, 13/14 vacancy markers (VOID / EPRN / EPRI).
        """
        uprn = row[3]
        if self.filter_uprn and uprn != self.filter_uprn:
            return 'Filtered out by uprn'
        # Only rows flagged vacant are of interest.
        if not (row[14] == 'EPRN' or row[14] == 'EPRI' or row[13] == 'VOID'):
            return 'Ignore - not vacant'
        def format_uprn_for_csv(uprn):
            # Leading apostrophe forces Excel to treat the UPRN as text.
            return "'{}".format(int(uprn)) if uprn else ''
        def lookup_uprn_in_lr(uprn):
            # Looks up the UPRN in Land Registry, recording the outcome in
            # the enclosing site_info; raises FinishedProcessingRow on a
            # transport/server error (UPRN redacted from the message).
            try:
                lr_data = self.get_lr_data(uprn)
            except Exception as e:
                site_info['uprn_status'] = 'Error - contacting LR server'
                raise FinishedProcessingRow(
                    'Error accessing LR server: {}'.format(
                        repr(e).replace(uprn, '<UPRN>')))
            if lr_data:
                site_info['uprn_status'] = 'ok'
            return lr_data
        # NOTE(review): the try/finally below is unusual - the whole success
        # path (addressbase lookup, VOA lookup, API POST) lives in the
        # ``finally`` block, so its ``return`` statements override the inner
        # except's ``return str(ex)``. Tread carefully when modifying.
        try:
            site_info = dict(
                name=row[6],
                address=row[7],
                ba_ref=row[4],
                uprn=format_uprn_for_csv(row[3]),
            )
            if (uprn in ('', '0')):
                site_info['uprn_status'] = 'Error - none'
                return 'Error - no uprn'
            try:
                lr_data = lookup_uprn_in_lr(uprn)
            except FinishedProcessingRow as ex:
                return str(ex)
        finally:
            # Runs even when the LR lookup failed, so the vacant CSV still
            # gets a row and an "assumed" UPRN can be derived from location.
            addressbase_response = \
                hmrc_addressbase.lookup_uprn_in_addressbase(uprn)
            if addressbase_response:
                if len(addressbase_response) > 1:
                    print('Warning too many addresses for single uprn')
                site_info['address_from_addressbase_uprn'] = \
                    hmrc_addressbase.serialize_address(
                        addressbase_response[0]['address'])
                lat_long = addressbase_response[0]['location']
                site_info['latlong_from_addressbase_uprn'] = lat_long
                # lookup lat/long in LR data
                results = self.get_lr_polygons_from_point(
                    lat=lat_long[0], long=lat_long[1])
                titles = [
                    result['title']
                    for result in results
                    if result['title']]
                uprns = list(itertools.chain.from_iterable([
                    result['uprns']
                    for result in results]))
                site_info['num_polygons_from_latlong_addressbase_uprn'] = \
                    len(results)
                polygons = [result['polygon'] for result in results]
                site_info['num_unique_polygons_from_latlong_addressbase_uprn'] = \
                    len(set(polygons))
                # site_info['polygon_from_latlong_addressbase_uprn'] = \
                #     [result['polygon'] for result in results]
                site_info['num_titles_from_latlong_addressbase_uprn'] = \
                    len(titles)
                site_info['titles_from_latlong_addressbase_uprn'] = \
                    titles
                site_info['num_uprns_from_latlong_addressbase_uprn'] = \
                    len(uprns)
                site_info['uprns_from_latlong_addressbase_uprn'] = \
                    uprns
                if site_info.get('uprn_status') != 'ok':
                    if len(uprns) > 0:
                        # we have derived the UPRN that Land Registry DOES
                        # have with a polygon in the same place.
                        uprn = uprns[0]
                        site_info['assumed_uprn'] = \
                            format_uprn_for_csv(uprn)
                        try:
                            lr_data = lookup_uprn_in_lr(uprn)
                        except FinishedProcessingRow as ex:
                            return str(ex) + ' (assumed uprn)'
            if self.vacant_csv and not self.filter_uprn:
                self.write_vacant_csv_row(site_info)
            # NOTE(review): if the initial LR lookup raised and no assumed
            # UPRN was derived, ``lr_data`` is unbound here - this looks
            # like it would raise UnboundLocalError; confirm with callers.
            if not lr_data:
                # We can't find polygons without a matching uprn, therefore
                # discard
                site_info['uprn_status'] = 'Error - LR has no matching title'
                return 'Error - no LR data for uprn'
            ba_ref = row[4]
            try:
                voa_data = self.get_voa_data(ba_ref)
            except Exception as e:
                # URL incorrect? Internet down? Do not fail silently
                return 'Error accessing VOA server: {}'.format(e)
            if voa_data:
                estimated_floor_space = voa_data.get('total_area')
                voa_status = 'with voa data'
            else:
                estimated_floor_space = 0
                voa_status = 'without voa data'
            # Payload for the Locations API; polygons from all LR titles are
            # merged into one multipolygon.
            data = {
                "uprn": uprn,
                "ba_ref": ba_ref,
                "name": row[6],
                "geom": transform_polygons_to_multipolygon(
                    list(itertools.chain.from_iterable(
                        title['polygons'] for title in lr_data['titles']
                    ))
                ),
                "authority": row[0],
                #"owner": 'Cambridge',
                "full_address": row[7],
                "estimated_floor_space": estimated_floor_space,
                "srid": 4326
            }
            headers = {'Authorization': 'Token {0}'.format(self.token)}
            response = requests.post(
                self.api_url,
                json=data,
                headers=headers)
            if response.status_code == 201:
                print('{0} imported correctly'.format(uprn))
                return 'processed ' + voa_status
            else:
                print(
                    'ERROR: could not import {0} because of {1}'.format(
                        uprn, response.text))
                return 'Error saving data to API: {} {}'.format(
                    response.status_code, response.text)
    def write_vacant_csv_row(self, site_dict):
        # Assumes the vacant CSV was opened (vacant_csv_filename given).
        self.vacant_csv.writerow(site_dict)
    def postprocess(self):
        """Close the diagnostic CSV (skipped when filtering by UPRN)."""
        if self.vacant_csv_file and not self.filter_uprn:
            self.vacant_csv_file.close()
            print('Written {}'.format(self.vacant_csv_filename))
# Column order for the diagnostic vacant-sites CSV written by
# CambridgeLandsImportCommand.write_vacant_csv_row().
VACANT_CSV_HEADER = [
    'name', 'address', 'ba_ref', 'uprn', 'uprn_status',
    'address_from_addressbase_uprn',
    'latlong_from_addressbase_uprn',
    'num_polygons_from_latlong_addressbase_uprn',
    'num_unique_polygons_from_latlong_addressbase_uprn',
    #'polygon_from_latlong_addressbase_uprn',
    'num_titles_from_latlong_addressbase_uprn',
    'titles_from_latlong_addressbase_uprn',
    'num_uprns_from_latlong_addressbase_uprn',
    'uprns_from_latlong_addressbase_uprn',
    'assumed_uprn',
]
class FinishedProcessingRow(Exception):
    """Control-flow exception -- presumably raised to stop processing of a
    single input row early; confirm against the raise sites."""
    pass
# CLI entry point: click maps each @click.option below onto the matching
# keyword argument of import_cambridge (dashes become underscores).
@click.command()
@click.argument('filenames', nargs=-1, type=click.Path())
@click.option(
    '--apiurl',
    default='http://localhost:8000/api/locations/', help='API url')
@click.option('--apitoken', help='API authentication token')
@click.option(
    '--lrapiurl',
    default='http://localhost:8001/api/',
    help='Land Registry API base url')
@click.option('--lrtoken', help='Land Registry API authentication token')
@click.option(
    '--voaapiurl',
    default='http://localhost:8002/api/voa/',
    help='VOA API url')
@click.option('--voatoken', help='VOA API authentication token')
@click.option('-u', '--filter-uprn', help='Filter rows for a particular UPRN')
@click.option('--vacant-csv', metavar='FILENAME',
              help='Output the vacant sites to a CSV')
def import_cambridge(
        filenames, apiurl, apitoken, lrapiurl, lrtoken, voaapiurl, voatoken,
        filter_uprn, vacant_csv):
    '''Import Cambridge vacant properties data as Locations.

    1. Get data from:
    https://www.cambridge.gov.uk/open-data
    titled "NDR accounts".
    e.g. https://www.cambridge.gov.uk/sites/default/files/nndr_accounts_2017-04.xlsx
    According to the council:
    > You can find the date declared vacant in column F. A property is
    > vacant if it states EPRN or EPRI in column O, or if it states VOID in
    > column N.
    2. Open in Excel and export as CSV
    3. Run this import
    '''
    # All the work is delegated to the command object; filter_uprn restricts
    # the run to one property, vacant_csv optionally captures vacant sites.
    command = CambridgeLandsImportCommand(
        filenames, apiurl, apitoken, lrapiurl, lrtoken, voaapiurl, voatoken,
        vacant_csv_filename=vacant_csv, filter_uprn=filter_uprn)
    command.run()
if __name__ == '__main__':
    # Invoke the click command only when this file is run as a script.
    import_cambridge()
|
import motor.motor_asyncio
from fastapi import FastAPI
from fastapi_users import FastAPIUsers, models
from fastapi_users.authentication import JWTAuthentication
from fastapi_users.db import MongoDBUserDatabase
from httpx_oauth.clients.google import GoogleOAuth2
from starlette.requests import Request
# Connection string for the MongoDB instance backing user storage.
DATABASE_URL = "mongodb://localhost:27017"
# NOTE(review): hard-coded JWT signing secret -- anyone holding this string
# can forge tokens; move it to the environment before deploying.
SECRET = "SECRET"
# OAuth2 client for "Sign in with Google"; placeholder credentials.
google_oauth_client = GoogleOAuth2("CLIENT_ID", "CLIENT_SECRET")
# User model hierarchy required by fastapi-users: one shared base (with OAuth
# account support) plus the create/update/db variants consumed by the routers.
class User(models.BaseUser, models.BaseOAuthAccountMixin):
    """Public user representation, including linked OAuth accounts."""
    pass


class UserCreate(User, models.BaseUserCreate):
    """Payload model for the registration endpoint."""
    pass


class UserUpdate(User, models.BaseUserUpdate):
    """Payload model for the user-update endpoint."""
    pass


class UserDB(User, models.BaseUserDB):
    """Persisted user document, as stored in MongoDB."""
    pass
# Mongo client / collection that fastapi-users persists accounts into.
client = motor.motor_asyncio.AsyncIOMotorClient(DATABASE_URL)
db = client["database_name"]
collection = db["users"]
user_db = MongoDBUserDatabase(UserDB, collection)

# JWT bearer authentication with one-hour tokens.
auth_backends = [
    JWTAuthentication(secret=SECRET, lifetime_seconds=3600),
]

app = FastAPI()
fastapi_users = FastAPIUsers(
    user_db, auth_backends, User, UserCreate, UserUpdate, UserDB, SECRET,
)
# Mount the stock user-management routes and the Google OAuth flow.
app.include_router(fastapi_users.router, prefix="/users", tags=["users"])
google_oauth_router = fastapi_users.get_oauth_router(google_oauth_client, SECRET)
app.include_router(google_oauth_router, prefix="/google-oauth", tags=["users"])
@fastapi_users.on_after_register()
def on_after_register(user: User, request: Request):
    # Post-registration hook; currently just logs the new user's id.
    print(f"User {user.id} has registered.")
@fastapi_users.on_after_forgot_password()
def on_after_forgot_password(user: User, token: str, request: Request):
    # Forgot-password hook. NOTE(review): printing the reset token is fine in
    # a demo but leaks credentials in real logs -- confirm before shipping.
    print(f"User {user.id} has forgot their password. Reset token: {token}")
|
from __future__ import absolute_import
import sys
import unittest
from django.test import TestCase
from django.conf import settings
from django.core.urlresolvers import clear_url_caches
from six.moves import reload_module
def reload_urls():
    """Flush Django's URL resolver caches and re-import the root URLconf."""
    clear_url_caches()
    root_urlconf = sys.modules[settings.ROOT_URLCONF]
    reload_module(root_urlconf)
class TestGraphQL(TestCase):
    """Smoke tests for /graphql negotiated as application/json."""

    body = {
        'query': '{projects{name}}'
    }

    def _fetch(self, method, dev):
        """Issue a GET/POST to /graphql as application/json with DEV=dev."""
        with self.settings(DEV=dev):
            request = getattr(self.client, method)
            return request('/graphql', self.body, HTTP_ACCEPT="application/json")

    def test_graphql_dev_get(self):
        self.assertEqual(self._fetch('get', True).status_code, 200)

    def test_graphql_dev_post(self):
        self.assertEqual(self._fetch('post', True).status_code, 200)

    @unittest.skip('Overriding DEV does not work.')
    def test_graphql_prod_get(self):
        self.assertEqual(self._fetch('get', False).status_code, 200)

    @unittest.skip('Overriding DEV does not work.')
    def test_graphql_prod_post(self):
        self.assertEqual(self._fetch('post', False).status_code, 200)
class TestGraphiQL(TestCase):
    """Smoke tests for the GraphiQL UI (text/html negotiation on /graphql)."""

    body = {
        'query': '{projects{name}}'
    }

    def _fetch(self, method, dev, reload_conf=False):
        """Hit /graphql as text/html; optionally reload the URLconf first."""
        with self.settings(DEV=dev):
            if reload_conf:
                reload_urls()
            request = getattr(self.client, method)
            return request('/graphql', self.body, HTTP_ACCEPT="text/html")

    def test_graphiql_dev_get(self):
        self.assertEqual(self._fetch('get', True).status_code, 200)

    def test_graphiql_dev_post(self):
        self.assertEqual(self._fetch('post', True).status_code, 200)

    @unittest.skip('Overriding DEV does not work.')
    def test_graphiql_prod_get(self):
        self.assertEqual(
            self._fetch('get', False, reload_conf=True).status_code, 400)

    @unittest.skip('Overriding DEV does not work.')
    def test_graphiql_prod_post(self):
        self.assertEqual(
            self._fetch('post', False, reload_conf=True).status_code, 400)
|
#!/usr/bin/env python
# coding: utf-8
# # Homework 3
# ## Download the Module 3 homework here: [Homework3](https://sites.psu.edu/math452/files/2022/02/HWWeek3.pdf)
# In[ ]:
|
from .JsonScraper import JSONScraper
from ._utils import get_minutes, normalize_string, dateCleaner
class PinchOfYum(JSONScraper):
    """Scraper for pinchofyum.com recipe pages.

    Accessors read the schema.org "Recipe" JSON blob that JSONScraper exposes
    as ``self.data`` (sample payload kept below for reference). Accessors
    raise KeyError when the page omits a field (e.g. ``nutrition``).
    """

    @classmethod
    def host(cls):
        """Domain this scraper handles."""
        return 'pinchofyum.com'

    def title(self):
        return self.data["name"]

    # Reference payload captured from a live page:
    '''{
    "@context": "https:\/\/schema.org\/",
    "@type": "Recipe",
    "name": "Chopped Thai Noodle Salad with Peanut Dressing",
    "description": "Thai Noodle Salad with Peanut Lime Dressing - veggies, chicken, brown rice noodles, and an easy homemade dressing. My favorite salad ever!",
    "author": {
    "@type": "Thing",
    "name": "Pinch of Yum"
    },
    "image": [
    "https:\/\/pinchofyum.com\/wp-content\/uploads\/Thai-Salad-Recipe-225x225.jpg",
    "https:\/\/pinchofyum.com\/wp-content\/uploads\/Thai-Salad-Recipe-260x195.jpg",
    "https:\/\/pinchofyum.com\/wp-content\/uploads\/Thai-Salad-Recipe-320x180.jpg",
    "https:\/\/pinchofyum.com\/wp-content\/uploads\/Thai-Salad-Recipe.jpg"
    ],
    "url": "https:\/\/pinchofyum.com\/thai-noodle-salad",
    "recipeIngredient": [
    "1\/2 cup canola oil",
    "2 large cloves garlic, peeled",
    "1\/3 cup low sodium soy sauce",
    "1\/4 cup white distilled vinegar",
    "2 tablespoons water",
    "2 tablespoons honey",
    "2 tablespoons sesame oil",
    "1 tablespoon lemongrass or ginger paste",
    "a couple BIG squeezes of lime juice (to taste)",
    "1\/4 cup peanut butter",
    "4 ounces brown rice noodles (affiliate link)",
    "1 lb. boneless skinless chicken breasts",
    "5-6 cups baby kale or spinach",
    "3 large carrots, cut into small, thin pieces*",
    "3 bell peppers, cut into small, thin pieces*",
    "1 cup packed cilantro leaves, chopped",
    "4 green onions, green parts only, chopped",
    "1\/2 cup cashews or peanuts"
    ],
    "recipeInstructions": [
    "PREP: Start soaking the rice noodles in a bowl of cold water. Preheat the oven to 400 degrees.",
    "DRESSING: Pulse all the dressing ingredients in a food processor EXCEPT peanut butter. Place the chicken in a plastic bag and use about 1\/4 to 1\/2 cup of the dressing (without peanut butter) to marinate the chicken in the fridge for about 15-30 minutes. Add the peanut butter to the dressing in the food processor; pulse, then taste and adjust. Set aside.",
    "VEGGIES: Prep all your veggies and toss together in a bowl.",
    "CHICKEN: Bake the marinated chicken for 15-20 minutes. Rest for 5-10 minutes, then cut and add to the veggies.",
    "NOODLES: Drain the noodles (they should be softened at this point). Finish cooking them in a skillet over medium high heat. Add a little oil and a little dressing and toss them around until they are soft and pliable (if you need to add a little water to soften them, that works, too).",
    "ASSEMBLY: Toss stir-fried noodles with the chicken and veggie mixture. Serve hot or cold. Top with extra peanuts and cilantro (and dressing, and lime juice, and sesame seeds, and...)"
    ],
    "prepTime": "PT45M",
    "cookTime": "PT20M",
    "totalTime": "PT1H5M",
    "recipeYield": "6",
    "aggregateRating": {
    "@type": "AggregateRating",
    "reviewCount": 34,
    "ratingValue": 4.7
    }
    '''

    def datePublished(self):
        # The source JSON carries no datePublished; dateCleaner("null", 6)
        # presumably yields a placeholder date -- confirm its semantics.
        return dateCleaner("null", 6)

    def description(self):
        return self.data["description"]

    def total_time(self):
        # Bug fix: `data` was referenced without `self.`, raising NameError.
        return get_minutes(self.data["totalTime"])

    def ingredients(self):
        """Return ingredients as a newline-terminated string, one per line."""
        return "".join(item + "\n" for item in self.data['recipeIngredient'])

    def instructions(self):
        """Return instruction steps as a newline-terminated string, one per line."""
        return "".join(step + "\n" for step in self.data['recipeInstructions'])

    def category(self):
        return self.data["recipeCategory"][0]

    def imgURL(self):
        # Index 3 is the full-size image variant (see sample payload above).
        return self.data["image"][3]

    def sodium(self):
        return self.data["nutrition"]["sodiumContent"]

    def fat(self):
        return self.data["nutrition"]["fatContent"]

    def carbs(self):
        return self.data["nutrition"]["carbohydrateContent"]

    def calories(self):
        return self.data["nutrition"]["calories"]
|
from __future__ import print_function
import numpy as np
from . import checkers
from . import preference as pref
from . import common
class BipartiteGraphAnnealerBase:
    """Python-side wrapper around a C-extension bipartite-graph annealer.

    Every method forwards to ``self._cext`` with ``self.dtype`` selecting the
    numeric precision. NOTE(review): ``self._cobj`` is never created here; it
    is presumably set up by a subclass before these methods run -- confirm.
    """

    def __init__(self, cext, dtype, b0, b1, W, optimize, prefdict):
        """Store the extension handle and, if W is given, set the QUBO.

        :param cext: C-extension module implementing the annealer.
        :param dtype: numpy dtype used for all numeric buffers.
        :param b0, b1, W: QUBO biases and coupling matrix (W may be None).
        :param optimize: optimization direction (see preference module).
        :param prefdict: initial preferences dict (may be None).
        """
        self._cext = cext
        self.dtype = dtype
        if W is not None:
            self.set_qubo(b0, b1, W, optimize)
        self.set_preferences(prefdict)

    def __del__(self):
        # Guard: _cobj may not exist if construction failed early.
        if hasattr(self, '_cobj'):
            self._cext.delete(self._cobj, self.dtype)

    def seed(self, seed):
        """Seed the extension's random number generator."""
        self._cext.seed(self._cobj, seed, self.dtype)

    def set_qubo(self, b0, b1, W, optimize=pref.minimize):
        """Validate, coerce and install a QUBO problem."""
        checkers.bipartite_graph.qubo(b0, b1, W)
        b0, b1, W = common.fix_type([b0, b1, W], self.dtype)
        self._cext.set_qubo(self._cobj, b0, b1, W, optimize, self.dtype)
        self._optimize = optimize

    def get_optimize_dir(self):
        """Return the current optimization direction."""
        return self._optimize

    def get_problem_size(self):
        """Return (N0, N1), the sizes of the two partitions."""
        return self._cext.get_problem_size(self._cobj, self.dtype)

    def set_preferences(self, prefdict=None, **prefs):
        """Apply preferences from a dict and/or keyword arguments."""
        if prefdict is not None:
            self._cext.set_preferences(self._cobj, prefdict, self.dtype)
        self._cext.set_preferences(self._cobj, prefs, self.dtype)

    def get_preferences(self):
        return self._cext.get_preferences(self._cobj, self.dtype)

    def get_E(self):
        """Return the energies computed by the last calculate_E()."""
        return self._cext.get_E(self._cobj, self.dtype)

    def get_x(self):
        """Return the current solution in QUBO (0/1) representation."""
        return self._cext.get_x(self._cobj, self.dtype)

    def set_q(self, qpair):
        """Set one spin configuration; coerces both halves to int8."""
        q0, q1 = qpair[0], qpair[1]
        if q0.dtype != np.int8:
            q0 = np.asarray(q0, np.int8)
        if q1.dtype != np.int8:
            q1 = np.asarray(q1, np.int8)
        self._cext.set_q(self._cobj, (q0, q1), self.dtype)

    def set_qset(self, qpairset):
        """Set a batch of spin configurations; coerces each pair to int8."""
        qin = []
        for qpair in qpairset:
            q0, q1 = qpair[0], qpair[1]
            if q0.dtype != np.int8:
                q0 = np.asarray(q0, np.int8)
            if q1.dtype != np.int8:
                q1 = np.asarray(q1, np.int8)
            qin.append((q0, q1))
        self._cext.set_qset(self._cobj, qin, self.dtype)

    # Ising model / spins

    def get_hamiltonian(self):
        """Return (h0, h1, J, c): the Ising-model view of the problem."""
        N0, N1 = self.get_problem_size()
        h0 = np.empty((N0), self.dtype)
        h1 = np.empty((N1), self.dtype)
        J = np.empty((N1, N0), self.dtype)
        c = np.empty((1), self.dtype)
        self._cext.get_hamiltonian(self._cobj, h0, h1, J, c, self.dtype)
        return h0, h1, J, c[0]

    def set_hamiltonian(self, h0, h1, J, c):
        """Validate, coerce and install an Ising Hamiltonian (minimization)."""
        checkers.bipartite_graph.hJc(h0, h1, J, c)
        h0, h1, J = common.fix_type([h0, h1, J], self.dtype)
        c = self.dtype(c)
        self._cext.set_hamiltonian(self._cobj, h0, h1, J, c, self.dtype)
        self._optimize = pref.minimize

    def get_q(self):
        """Return the current solution in Ising (-1/+1) representation."""
        return self._cext.get_q(self._cobj, self.dtype)

    def randomize_spin(self):
        self._cext.randomize_spin(self._cobj, self.dtype)

    def calculate_E(self):
        self._cext.calculate_E(self._cobj, self.dtype)

    def prepare(self):
        self._cext.prepare(self._cobj, self.dtype)

    def make_solution(self):
        self._cext.make_solution(self._cobj, self.dtype)

    def get_system_E(self, G, beta):
        """Return the system energy at transverse field G, inverse temp beta."""
        G, beta = self.dtype(G), self.dtype(beta)
        return self._cext.get_system_E(self._cobj, G, beta, self.dtype)

    def anneal_one_step(self, G, beta):
        """Advance the annealer by one step at the given (G, beta)."""
        G, beta = self.dtype(G), self.dtype(beta)
        self._cext.anneal_one_step(self._cobj, G, beta, self.dtype)
|
import discord
from discord.ext import commands, menus
from extra.menu import ConfirmSkill
import os
from typing import Dict, Any, List
from mysqldb import the_database
# Role IDs pulled from the environment at import time.
# NOTE(review): int(os.getenv(...)) raises TypeError if a variable is unset --
# presumably deployment guarantees these exist; confirm.
mod_role_id = int(os.getenv('MOD_ROLE_ID'))
admin_role_id = int(os.getenv('ADMIN_ROLE_ID'))
owner_role_id = int(os.getenv('OWNER_ROLE_ID'))
event_manager_role_id = int(os.getenv('EVENT_MANAGER_ROLE_ID'))
preference_role_id = int(os.getenv('PREFERENCE_ROLE_ID'))
class EventManagement(commands.Cog):
    """ A category for event related commands. """

    def __init__(self, client) -> None:
        """ Class initializing method.

        :param client: the bot client this cog is attached to. """
        self.client = client

    @commands.Cog.listener()
    async def on_ready(self) -> None:
        """ Tells when the cog is ready to use. """
        print("EventManagement cog is online!")

    async def get_event_permissions(self, guild: discord.Guild) -> Dict[Any, Any]:
        """ Gets permissions for event rooms.

        Builds the overwrite map shared by every event channel: @everyone is
        locked out, preference holders may read, event managers and mods get
        management rights.

        :param guild: the guild whose roles are resolved. """
        # Get some roles
        event_manager_role = discord.utils.get(guild.roles, id=event_manager_role_id)
        preference_role = discord.utils.get(guild.roles, id=preference_role_id)
        mod_role = discord.utils.get(guild.roles, id=mod_role_id)

        overwrites = {}
        overwrites[guild.default_role] = discord.PermissionOverwrite(
            read_messages=False, send_messages=False, connect=False,
            speak=False, view_channel=False)
        overwrites[preference_role] = discord.PermissionOverwrite(
            read_messages=True, send_messages=False, connect=False, view_channel=True)
        overwrites[event_manager_role] = discord.PermissionOverwrite(
            read_messages=True, send_messages=True, manage_messages=True,
            mute_members=True, embed_links=True, connect=True,
            speak=True, move_members=True, view_channel=True,
            manage_permissions=True)
        overwrites[mod_role] = discord.PermissionOverwrite(
            read_messages=True, send_messages=True, manage_messages=True,
            mute_members=True, embed_links=True, connect=True,
            speak=True, move_members=True, view_channel=True)
        return overwrites

    # CREATE EVENT
    @commands.group()
    async def create_event(self, ctx) -> None:
        """ Creates an event. """
        if ctx.invoked_subcommand:
            return

        # No subcommand given: list the available subcommands in an embed.
        cmd = self.client.get_command('create_event')
        prefix = self.client.command_prefix
        subcommands = [f"{prefix}{c.qualified_name}" for c in cmd.commands]
        subcommands = '\n'.join(subcommands)
        embed = discord.Embed(
            # Typo fix: was "Subcommads".
            title="Subcommands",
            description=f"```apache\n{subcommands}```",
            color=ctx.author.color,
            timestamp=ctx.message.created_at
        )
        await ctx.send(embed=embed)

    async def _create_event_rooms(
            self, ctx, event_label: str, role_env_var: str, channel_name: str) -> None:
        """ Shared flow for the themed `create_event` subcommands.

        Confirms with the user, grants the event's club role access, then
        creates a text + voice channel pair and records them in the DB.

        :param ctx: the command context.
        :param event_label: human label used in the confirmation prompt.
        :param role_env_var: env var holding the club role ID to grant access.
        :param channel_name: name for both the text and the voice channel. """
        if await self.get_event_room_by_user_id(ctx.author.id):
            return await ctx.send(f"**{ctx.author.mention}, you already have an event room going on!**")

        confirm = await ConfirmSkill(f"Do you want to create a `{event_label}`?").prompt(ctx)
        if not confirm:
            return await ctx.send("**Not creating it then!**")

        overwrites = await self.get_event_permissions(ctx.guild)

        # Adds some perms to the event's club role
        club_role = discord.utils.get(
            ctx.guild.roles, id=int(os.getenv(role_env_var))
        )
        overwrites[club_role] = discord.PermissionOverwrite(
            read_messages=True, send_messages=True,
            connect=True, speak=True, view_channel=True)

        events_category = discord.utils.get(
            ctx.author.guild.categories, id=int(os.getenv('EVENTS_CAT_ID')))

        try:
            # Creating text channel
            text_channel = await events_category.create_text_channel(
                name=channel_name,
                overwrites=overwrites)
            # Creating voice channel
            voice_channel = await events_category.create_voice_channel(
                name=channel_name,
                user_limit=None,
                overwrites=overwrites)
            # Inserts it into the database
            await self.insert_event_room(
                user_id=ctx.author.id, vc_id=voice_channel.id, txt_id=text_channel.id)
        except Exception as e:
            print(e)
            await ctx.send(f"**{ctx.author.mention}, something went wrong, try again later!**")
        else:
            await ctx.send(f"**{ctx.author.mention}, {text_channel.mention} is up and running!**")

    @create_event.command()
    @commands.has_any_role(*[event_manager_role_id, mod_role_id, admin_role_id, owner_role_id])
    @commands.cooldown(1, 60, commands.BucketType.user)
    async def movie(self, ctx) -> None:
        """ Creates a Movie Night voice and text channel. """
        await self._create_event_rooms(
            ctx, event_label="Movie Night", role_env_var='TV_CLUB_ROLE_ID',
            channel_name="๐ฅ Movie Night ๐ฅ")

    @create_event.command()
    @commands.has_any_role(*[event_manager_role_id, mod_role_id, admin_role_id, owner_role_id])
    @commands.cooldown(1, 60, commands.BucketType.user)
    async def karaoke(self, ctx) -> None:
        """ Creates a Karaoke Night voice and text channel. """
        await self._create_event_rooms(
            ctx, event_label="Karaoke Night", role_env_var='KARAOKE_CLUB_ROLE_ID',
            channel_name="๐ค Karaoke Night ๐ค")

    @create_event.command()
    @commands.has_any_role(*[event_manager_role_id, mod_role_id, admin_role_id, owner_role_id])
    @commands.cooldown(1, 60, commands.BucketType.user)
    async def culture(self, ctx) -> None:
        """ Creates a Culture Event voice and text channel. """
        await self._create_event_rooms(
            ctx, event_label="Culture Night", role_env_var='CULTURE_CLUB_ROLE_ID',
            channel_name="๐ Culture Event ๐")

    @create_event.command()
    @commands.has_any_role(*[event_manager_role_id, mod_role_id, admin_role_id, owner_role_id])
    @commands.cooldown(1, 60, commands.BucketType.user)
    async def reading(self, ctx) -> None:
        """ Creates a Reading Session voice and text channel. """
        await self._create_event_rooms(
            ctx, event_label="Reading Session", role_env_var='READING_CLUB_ROLE_ID',
            channel_name="๐ Reading Session ๐")

    # DELETE EVENT
    @commands.command()
    @commands.has_any_role(*[event_manager_role_id, mod_role_id, admin_role_id, owner_role_id])
    async def delete_event(self, ctx) -> None:
        """ Deletes an event room.

        Must be run inside the event's text channel; allowed for the room's
        owner, administrators and mods. """
        member = ctx.author
        perms = ctx.channel.permissions_for(member)

        if not (room := await self.get_event_room_by_txt_id(ctx.channel.id)):
            return await ctx.send(f"**{member.mention}, this is not an event room**")

        # Checks whether member can delete room.
        # Row layout is (user_id, vc_id, txt_id).
        delete = (
            room[0] == member.id  # owner of the room
            or perms.administrator
            or mod_role_id in [r.id for r in member.roles]  # staff member
        )

        if delete:
            confirm = await ConfirmSkill(f"**{member.mention}, are you sure you want to delete the event rooms?**").prompt(ctx)
            if confirm:
                try:
                    await self.client.get_channel(room[1]).delete()
                    await self.client.get_channel(room[2]).delete()
                    await self.delete_event_room_by_txt_id(ctx.channel.id)
                except Exception as e:
                    print(e)
                    await ctx.send(f"**Something went wrong with it, try again later, {member.mention}!**")
            else:
                await ctx.send(f"**Not deleting them, then, {member.mention}!**")

    # ======
    # INSERT

    async def insert_event_room(self, user_id: int, vc_id: int = None, txt_id: int = None) -> None:
        """ Inserts an Event Room.

        :param user_id: The ID of the user who's gonna be attached to the rooms.
        :param vc_id: The ID of the VC.
        :param txt_id: The ID of the txt. """
        mycursor, db = await the_database()
        await mycursor.execute("""
            INSERT INTO EventRooms (user_id, vc_id, txt_id)
            VALUES (%s, %s, %s)""", (user_id, vc_id, txt_id))
        await db.commit()
        await mycursor.close()

    # GET
    async def get_event_room_by_user_id(self, user_id: int) -> List[int]:
        """ Gets an Event Room by user ID.

        :param user_id: The ID of the user that you are looking for. """
        mycursor, db = await the_database()
        await mycursor.execute("SELECT * FROM EventRooms WHERE user_id = %s", (user_id,))
        event_room = await mycursor.fetchone()
        await mycursor.close()
        return event_room

    async def get_event_room_by_vc_id(self, vc_id: int) -> List[int]:
        """ Gets an Event Room by VC ID.

        :param vc_id: The ID of the VC that you are looking for. """
        mycursor, db = await the_database()
        await mycursor.execute("SELECT * FROM EventRooms WHERE vc_id = %s", (vc_id,))
        event_room = await mycursor.fetchone()
        await mycursor.close()
        return event_room

    async def get_event_room_by_txt_id(self, txt_id: int) -> List[int]:
        """ Gets an Event Room by text-channel ID.

        :param txt_id: The ID of the txt that you are looking for. """
        mycursor, db = await the_database()
        await mycursor.execute("SELECT * FROM EventRooms WHERE txt_id = %s", (txt_id,))
        event_room = await mycursor.fetchone()
        await mycursor.close()
        return event_room

    # DELETE
    async def delete_event_room_by_user_id(self, user_id: int) -> None:
        """ Deletes an Event Room by user ID.

        :param user_id: The ID of the user that you want to delete event rooms from. """
        mycursor, db = await the_database()
        await mycursor.execute("DELETE FROM EventRooms WHERE user_id = %s", (user_id,))
        await db.commit()
        await mycursor.close()

    async def delete_event_room_by_vc_id(self, vc_id: int) -> None:
        """ Deletes an Event Room by VC ID.

        :param vc_id: The ID of the VC that you want to delete. """
        mycursor, db = await the_database()
        await mycursor.execute("DELETE FROM EventRooms WHERE vc_id = %s", (vc_id,))
        await db.commit()
        await mycursor.close()

    async def delete_event_room_by_txt_id(self, txt_id: int) -> None:
        """ Deletes an Event Room by text-channel ID.

        :param txt_id: The ID of the txt that you want to delete. """
        mycursor, db = await the_database()
        await mycursor.execute("DELETE FROM EventRooms WHERE txt_id = %s", (txt_id,))
        await db.commit()
        await mycursor.close()

    @commands.command(hidden=True)
    @commands.has_permissions(administrator=True)
    async def create_table_event_rooms(self, ctx) -> None:
        """ (ADM) Creates the EventRooms table. """
        if await self.table_event_rooms_exists():
            return await ctx.send("**The `EventRooms` table already exists!**")
        mycursor, db = await the_database()
        await mycursor.execute("""
            CREATE TABLE EventRooms (
                user_id BIGINT NOT NULL, vc_id BIGINT DEFAULT NULL,
                txt_id BIGINT DEFAULT NULL
            )""")
        await db.commit()
        await mycursor.close()
        await ctx.send("**Created `EventRooms` table!**")

    @commands.command(hidden=True)
    @commands.has_permissions(administrator=True)
    async def drop_table_event_rooms(self, ctx) -> None:
        """ (ADM) Drops the EventRooms table. """
        if not await self.table_event_rooms_exists():
            return await ctx.send("**The `EventRooms` table doesn't exist!**")
        mycursor, db = await the_database()
        await mycursor.execute("DROP TABLE EventRooms")
        await db.commit()
        await mycursor.close()
        await ctx.send("**Dropped `EventRooms` table!**")

    @commands.command(hidden=True)
    @commands.has_permissions(administrator=True)
    async def reset_table_event_rooms(self, ctx) -> None:
        """ (ADM) Resets the EventRooms table. """
        if not await self.table_event_rooms_exists():
            return await ctx.send("**The `EventRooms` table doesn't exist yet!**")
        mycursor, db = await the_database()
        await mycursor.execute("DELETE FROM EventRooms")
        await db.commit()
        await mycursor.close()
        await ctx.send("**Reset `EventRooms` table!**")

    async def table_event_rooms_exists(self) -> bool:
        """ Checks whether the EventRooms table exists. """
        mycursor, db = await the_database()
        await mycursor.execute("SHOW TABLE STATUS LIKE 'EventRooms'")
        table_info = await mycursor.fetchall()
        await mycursor.close()
        return len(table_info) != 0
def setup(client) -> None:
    """ Cog's setup function.

    Called by discord.py when this extension is loaded; registers the cog. """
    client.add_cog(EventManagement(client))
|
# -*- coding: utf-8 -*-
"""
rio.blueprints.event.controllers
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
"""
import json
import logging
from uuid import uuid4
from rio.core import cache
from rio.core import graph
from rio.models import get_data_by_slug_or_404
from rio.tasks import exec_event
# Module-level logger used to audit event emission.
logger = logging.getLogger('rio.event')


class MissingSender(Exception):
    """Raised when the named sender does not exist."""
    pass


class WrongSenderSecret(Exception):
    """Raised when the sender's secret fails verification."""
    pass


class NotAllowed(Exception):
    """Raised when the sender may not emit this action to the project."""
    pass
def emit_event(project_slug, action_slug, payload, sender_name, sender_secret,
               event_uuid=None):
    """Emit Event.

    :param project_slug: the slug of the project
    :param action_slug: the slug of the action
    :param payload: the payload that emit with action
    :param sender_name: name that identified the sender
    :param sender_secret: secret string
    :param event_uuid: optional pre-assigned UUID; a fresh uuid4 is generated
        when omitted
    :return: dict with task_id and event_uuid

    raise MissingSender if sender does not exist
    raise WrongSenderSecret if sender_secret is wrong
    raise NotAllowed if sender is not allowed to emit action to project
    """
    # Resolve the project graph and authenticate the sender before any work.
    project_graph = graph.get_project_graph(project_slug)
    project_graph.verify_sender(sender_name, sender_secret)
    action = project_graph.get_action(action_slug)
    project = project_graph.project

    # execute event: fan the payload out to the action's webhooks as a task
    event_uuid = event_uuid or uuid4()
    event = {'uuid': event_uuid, 'project': project['slug'], 'action': action['slug']}
    res = exec_event(event, action['webhooks'], payload)

    logger.info('EMIT %s "%s" "%s" %s',
                event_uuid, project_slug, action_slug, json.dumps(payload))

    return dict(
        task=dict(
            id=res.id,
        ),
        event=dict(
            uuid=event_uuid,
        ),
    )
|
# A variable is something stored in the computer's memory. It holds a piece of
# information that can be recalled later -- something as simple as a text, or
# even something more elaborate such as a function.
# To create a variable, just declare its name, add an equals sign, and then
# whatever you want to put inside the variable.
nome = "Paulo"
print(nome)
# It is possible to inspect the class (type) of a variable.
print(type(nome))
# As mentioned before, a variable can also hold the result of an expression.
# Spaces around the assignment are not required between the name and its
# value; they are used purely for readability.
soma=2+2
print(soma)
# Variables can be reassigned along the way in Python.
nome = "Joรฃo"
print(nome)
import numpy as np
import pandas as pd
from scipy.optimize import curve_fit
from sklearn.linear_model import LinearRegression
from sklearn.preprocessing import PolynomialFeatures
# Public API of this module; anything else is an implementation detail.
__all__ = ['make_df_for_ml',
           'make_simple_poly',
           'get_linear_regression',
           'get_cost_curve_coefs']
def make_df_for_ml(df1):
    """Expand the transposed rows of ``df1`` into degree-3 polynomial features.

    The frame is transposed and each resulting column becomes one sample, run
    through PolynomialFeatures(3, include_bias=False).

    :param df1: input DataFrame; not modified (a deep copy is taken).
    :return: DataFrame of polynomial features named Feature1..FeatureN plus
        the generated interaction/power terms.
    """
    poly = PolynomialFeatures(3, include_bias=False)
    samples = df1.copy(deep=True).T
    X = np.array([list(samples[column]) for column in samples.columns])
    X_poly = poly.fit_transform(X)
    # Generalized: one base name per actual input feature instead of the
    # previous hard-coded three, which crashed for any other width.
    # NOTE(review): get_feature_names was removed in scikit-learn 1.2 in
    # favour of get_feature_names_out -- confirm the pinned version.
    feature_names = poly.get_feature_names(
        ['Feature' + str(i) for i in range(1, X.shape[1] + 1)]
    )
    return pd.DataFrame(X_poly, columns=feature_names)
def make_simple_poly(df, y_value):
    """Fit a degree-3 polynomial LinearRegression of df[y_value] on the
    remaining columns.

    NOTE(review): this mutates the caller's frame (renames columns to
    Feature1..FeatureN and moves y_value to 'y') -- preserved from the
    original; confirm callers rely on it before changing.

    Fix: the original computed the whole polynomial frame twice and threw the
    first result away; the dead duplicate (and an unused RMSE) are removed.

    :param df: input DataFrame (mutated, see note).
    :param y_value: name of the target column.
    :return: (fitted LinearRegression model, DataFrame of coefficients).
    """
    # Move the target into a canonical 'y' column.
    df['y'] = df[y_value]
    del df[y_value]

    # Rename every remaining column to Feature1..FeatureN.
    n = 0
    for c_name in df.columns:
        if c_name != 'y':
            n = n + 1
            df[('Feature%s' % n)] = df[c_name]
            del df[c_name]

    # Build the degree-3 polynomial design matrix from the features.
    poly_features = PolynomialFeatures(3, include_bias=False)
    df1 = df.copy(deep=True)
    del df1['y']
    df1 = df1.T
    X = np.array([list(df1[column_name]) for column_name in df1.columns])
    X_poly = poly_features.fit_transform(X)
    # Generalized: name one base feature per input column instead of the
    # previous hard-coded three.
    feature_names = poly_features.get_feature_names(
        ['Feature' + str(i) for i in range(1, X.shape[1] + 1)]
    )
    df_poly = pd.DataFrame(X_poly, columns=feature_names)
    df_poly['y'] = df['y']

    X_train = df_poly.drop('y', axis=1)
    y_train = df_poly['y']

    # NOTE(review): normalize= was removed from LinearRegression in
    # scikit-learn 1.2 -- confirm the pinned version.
    poly = LinearRegression(normalize=True)
    model_poly = poly.fit(X_train, y_train)
    coeff_poly = pd.DataFrame(
        model_poly.coef_,
        index=df_poly.drop('y', axis=1).columns,
        columns=['Coefficients'],
    )
    return poly, coeff_poly
def get_linear_regression(x_values, y_values, variable=None):
    """Return (slope, intercept) of the ordinary-least-squares line y = a*x + b.

    :param x_values: sequence of x coordinates.
    :param y_values: sequence of y coordinates (same length).
    :param variable: unused; kept for backward compatibility with callers.
    :return: tuple (slope, intercept) as floats.
    """
    x = np.asarray(x_values, dtype=float)
    y = np.asarray(y_values, dtype=float)
    # np.polyfit with deg=1 computes the same OLS fit as
    # sklearn.linear_model.LinearRegression, without the reshape dance.
    slope, intercept = np.polyfit(x, y, 1)
    return float(slope), float(intercept)
def power_law(x, a, b):
    """Power-law model a * x**b, used as the curve_fit target function."""
    scaled = np.power(x, b)
    return a * scaled
def get_cost_curve_coefs(flow_in=None, data_id=None, xs=None, ys=None):
    """Fit a power-law cost curve ``y = a * x**b`` and report its fit quality.

    Either pass xs/ys directly (data_id=None), or pass flow_in/data_id to
    load the series from the chlorine-dose CSV.

    Bug fix: previously, when data_id was given, the function loaded the CSV
    but returned before fitting, so `pars`, `r2_result` and `ys_new` were
    undefined (NameError). The fit now always runs after the data is chosen.

    :return: (pars, r2_result, xs, ys, ys_new) where pars is (a, b) and
        r2_result is the correlation between observed and fitted values.
    """
    if data_id is not None:
        df = pd.read_csv('data/chlorine_dose_cost_twb.csv')
        xs = df[((df.Flow_mgd == flow_in) & (df.VariableID == 2))].Value.values
        ys = df[((df.Cost == data_id) & (df.VariableID == 1))].Value.values
    pars, cov = curve_fit(f=power_law, xdata=xs, ydata=ys)
    ys_new = pars[0] * xs ** pars[1]
    r2_result = np.corrcoef(ys, ys_new)[0, 1]
    return pars, r2_result, xs, ys, ys_new
# def main():
# print('importing something')
# if __name__ == '__main__':
# main() |
#!/usr/bin/env python3
import sys
import Pyro4
import publisher
def main():
    """Wire two publishers to Pyro intermediaries and publish once each."""
    # Show full remote tracebacks from Pyro calls instead of opaque proxies.
    sys.excepthook = Pyro4.util.excepthook
    p1 = publisher.Publisher('P1', 'X')
    p2 = publisher.Publisher('P2', 'Y')
    with Pyro4.locateNS() as ns:
        # Locating the name server first fails fast if it is unreachable.
        i1 = Pyro4.Proxy('PYRONAME:intermediary1')
        i2 = Pyro4.Proxy('PYRONAME:intermediary2')  # NOTE(review): unused — confirm intent
        i3 = Pyro4.Proxy('PYRONAME:intermediary3')
        p1.intermediary = i1
        p2.intermediary = i3
        p1.publish()
        # Bug fix: the original bare `print` was a no-op reference to the
        # function (Python 2 leftover); print() emits the intended blank line.
        print()
        p2.publish()
if __name__ == '__main__':
main()
|
# -*- coding: utf-8 -*-
"""Copyright Header Details
Copyright
---------
Copyright (C) Guya , PLC - All Rights Reserved (As Of Pending...)
Unauthorized copying of this file, via any medium is strictly prohibited
Proprietary and confidential
LICENSE
-------
This file is subject to the terms and conditions defined in
file 'LICENSE.txt', which is part of this source code package.
Authors
-------
* [Simon Belete](https://github.com/Simonbelete)
Project
-------
* Name:
- Guya E-commerce & Guya Express
* Sub Project Name:
- Catalog Service
* Description
    - Catalog Service
"""
"""Package details
Application features:
--------------------
Python 3.7
Flask
PEP-8 for code style
"""
import pytest
from faker import Faker
from catalog import create_app
from catalog.repository.category import Category
class TestCategoryApi:
    """End-to-end tests for the /api/v1/categories endpoints."""

    # Amharic sample words used to build localized category names.
    AM_WORD_LIST = (
        'แ แแแแ', 'แ แจแแต', 'แจแแตแฉแแต', 'แ แฉแ', 'แแแแฃแต',
        'แ แจแฉ', 'แฐแแทแ', 'แขแตแฎแตแซ', 'แแตแฅ', 'แแต', 'แจแฐแ'
    )

    def setup_class(self):
        # Runs once per class: build the fake-data generator and a Flask
        # test client to exercise the API without a real server.
        self.faker = Faker()
        self.app = create_app().test_client()

    def test_categories_get_api(self):
        # Listing categories should always succeed.
        response = self.app.get('/api/v1/categories')
        assert response.status_code == 200

    def test_categories_post_api(self):
        # Creating a category with localized names and an image returns 201.
        payload = {
            'names': {
                'en': self.faker.sentence(),
                'am': self.faker.sentence(ext_word_list=self.AM_WORD_LIST),
            },
            'image': {
                'src': self.faker.url(),
                'priority': 1,
                'height': self.faker.random_int(min=100, max=1900),
                'width': self.faker.random_int(min=100, max=1900),
            },
        }
        response = self.app.post('/api/v1/categories', json=payload)
        assert response.status_code == 201

    def test_categories_post_without_image(self):
        # The image block is optional when creating a category.
        payload = {
            'names': {
                'en': self.faker.sentence(),
                'am': self.faker.sentence(ext_word_list=self.AM_WORD_LIST),
            },
        }
        response = self.app.post('/api/v1/categories', json=payload)
        assert response.status_code == 201

    def test_category_get_api(self):
        # TODO: fetch an existing category once fixtures are available.
        pass

    def test_category_get_api_with_invalid_id(self):
        # A non-ID string in the path must be rejected with 400.
        bogus_id = self.faker.sentence()
        response = self.app.get(f'/api/v1/categories/{bogus_id}')
        assert response.status_code == 400

    def test_category_get_api_with_empty_id(self):
        # NOTE(review): uses a random sentence like the invalid-id test —
        # presumably meant to use an empty segment; confirm intent.
        bogus_id = self.faker.sentence()
        response = self.app.get(f'/api/v1/categories/{bogus_id}')
        assert response.status_code == 400

    def test_category_put_api(self):
        # TODO: update an existing category once fixtures are available.
        pass

    def test_category_put_api_with_invalid_id(self):
        bogus_id = self.faker.sentence()
        response = self.app.put(f'/api/v1/categories/{bogus_id}')
        assert response.status_code == 400

    def test_category_put_api_with_empty_id(self):
        bogus_id = self.faker.sentence()
        response = self.app.put(f'/api/v1/categories/{bogus_id}')
        assert response.status_code == 400

    def test_category_delete_api(self):
        # DELETE is not allowed on this resource path.
        bogus_id = self.faker.sentence()
        response = self.app.delete(f'/api/v1/categories/{bogus_id}')
        assert response.status_code == 405
|
#
# Chip8
#
import pygame
import random, math
import time
import code
# Display geometry: the CHIP-8 screen is 64x32 monochrome pixels, scaled
# up by windowScale for the host window.
windowScale = 5
windowH = 32 * windowScale
windowW = 64 * windowScale
pixQt = windowH*windowW  # total host pixels (apparently unused below — kept for reference)
# Built-in 4x5 hex font, 5 bytes per glyph for digits 0-F; loaded at
# memory address 0x000 by the interpreter.
chip8_fontset = (
    0xF0, 0x90, 0x90, 0x90, 0xF0, # 0
    0x20, 0x60, 0x20, 0x20, 0x70, # 1
    0xF0, 0x10, 0xF0, 0x80, 0xF0, # 2
    0xF0, 0x10, 0xF0, 0x10, 0xF0, # 3
    0x90, 0x90, 0xF0, 0x10, 0x10, # 4
    0xF0, 0x80, 0xF0, 0x10, 0xF0, # 5
    0xF0, 0x80, 0xF0, 0x90, 0xF0, # 6
    0xF0, 0x10, 0x20, 0x40, 0x40, # 7
    0xF0, 0x90, 0xF0, 0x90, 0xF0, # 8
    0xF0, 0x90, 0xF0, 0x10, 0xF0, # 9
    0xF0, 0x90, 0xF0, 0x90, 0x90, # A
    0xE0, 0x90, 0xE0, 0x90, 0xE0, # B
    0xF0, 0x80, 0x80, 0x80, 0xF0, # C
    0xE0, 0x90, 0x90, 0x90, 0xE0, # D
    0xF0, 0x80, 0xF0, 0x80, 0xF0, # E
    0xF0, 0x80, 0xF0, 0x80, 0x80  # F
)
def fhex(i, l=4):
    """Format *i* as zero-padded hexadecimal, *l* digits plus the '0x' prefix."""
    width = l + 2  # two extra characters for the leading '0x'
    return format(i, '#0{}x'.format(width))
class screen(object):
    """Thin pygame wrapper that renders the 64x32 CHIP-8 framebuffer, scaled."""

    def __init__(self):
        pygame.init()
        self.window = pygame.display.set_mode([windowW, windowH])

    def drawPixel(self, i):
        """Fill the scaled rectangle for framebuffer index *i*."""
        row, col = divmod(i, windowW)
        rect = (col * windowScale, row * windowScale,
                windowScale, windowScale)
        pygame.draw.rect(self.window, (0, 255, 255), rect)

    def drawFrame(self, display):
        """Draw every lit pixel of *display*, then flip the buffer."""
        for idx, lit in enumerate(display):
            if lit:
                self.drawPixel(idx)
        pygame.display.flip()
class Chip8(object):
    """CHIP-8 interpreter core: 4 KB RAM, sixteen 8-bit V registers, a 16-bit
    index register I, a call stack, two 60 Hz timers, a 16-key hex keypad and
    a 64x32 monochrome framebuffer rendered through `screen`.
    """

    def __init__(self):
        self.level = 0
        self.memory = [0]*4096       # RAM
        self.V = [0]*16              # GP Reg (each holds an 8-bit value)
        self.I = 0                   # Index Reg
        self.PC = 0x200              # Program Counter (ROMs load at 0x200)
        self.DelayT = 0              # Delay Timer
        self.SoundT = 0              # Sound Timer
        self.Stack = [0]*16          # Program Counter Stack
        self.SP = 0                  # Stack Pointer
        self.Keys = [0]*16           # Keyboard storage
        self.display = [0]*64*32
        self.updateDisplay = False
        self.Screen = screen()
        # The built-in hex font lives at the bottom of memory (0x000-0x04F).
        for i, byte in enumerate(chip8_fontset):
            self.memory[i] = byte

    def loadProgram(self, file):
        """Load a ROM image into memory starting at 0x200."""
        with open(file, 'rb') as game:
            for i, byte in enumerate(game.read()):
                self.memory[0x200+i] = byte
        print("Loaded file:", file)
        if __debug__:
            print(" Addr | Instr. | V Reg.")

    def runOp(self, offset):
        """Decode and execute the two-byte opcode stored at *offset*."""
        opCode = self.memory[offset] << 8 | self.memory[offset+1]
        if __debug__:
            print("-"*self.level, fhex(offset, 3), "|", fhex(opCode), "|",
                  self.V, "|", [hex(x) for x in self.Stack if x != 0])
        fo = opCode & 0xF000
        if fo == 0x0000:
            if opCode & 0xFFF == 0xE0:
                # 0x00E0 Clear the display
                self.display = [0]*32*64
                self.updateDisplay = True
                self.PC += 2
            elif opCode & 0xFFF == 0xEE:
                # 0x00EE Return from subroutine
                self.PC = self.Stack[self.SP] + 2
                self.Stack[self.SP] = 0
                self.SP -= 1
            else:
                # 0x0NNN (native RCA 1802 call) is not supported.
                print("Unsupported 0NNN opcode", fhex(opCode))
                assert False
        elif fo == 0x1000:
            # 0x1NNN goto
            self.PC = opCode & 0x0FFF
        elif fo == 0x2000:
            # 0x2NNN Call subroutine
            self.SP += 1
            self.Stack[self.SP] = self.PC
            self.PC = opCode & 0x0FFF
        elif fo == 0x3000:
            # 0x3XNN Skip next if Vx == NN
            self.PC += 2
            if self.V[(opCode & 0xF00) >> 8] == opCode & 0xFF:
                self.PC += 2
        elif fo == 0x4000:
            # 0x4XNN Skip next if Vx != NN
            self.PC += 2
            if self.V[(opCode & 0xF00) >> 8] != opCode & 0xFF:
                self.PC += 2
        elif fo == 0x5000:
            # 0x5XY0 Skip next if Vx == Vy
            self.PC += 2
            if self.V[(opCode & 0xF00) >> 8] == self.V[(opCode & 0x0F0) >> 4]:
                self.PC += 2
        elif fo == 0x6000:
            # 0x6XNN Set Vx to NN
            self.V[(opCode & 0xF00) >> 8] = opCode & 0xFF
            self.PC += 2
        elif fo == 0x7000:
            # 0x7XNN Add NN to Vx (no carry flag)
            self.V[(opCode & 0xF00) >> 8] += opCode & 0xFF
            self.V[(opCode & 0xF00) >> 8] &= 0xFF
            self.PC += 2
        elif fo == 0x8000:
            # 0x8XY_ 2-Var math.
            # Bug fix: '>>' binds tighter than '&' in Python, so the original
            # `opCode&0xF00 >> 8` computed `opCode & 0xF` — wrong register.
            x = (opCode & 0xF00) >> 8
            y = (opCode & 0x0F0) >> 4
            selector = opCode & 0x00F
            if selector == 0x0:
                # 0x8XY0 Set Vx to Vy
                self.V[x] = self.V[y]
            elif selector == 0x1:
                # 0x8XY1 Vx |= Vy
                self.V[x] |= self.V[y]
            elif selector == 0x2:
                # 0x8XY2 Vx &= Vy
                self.V[x] &= self.V[y]
            elif selector == 0x3:
                # 0x8XY3 Vx ^= Vy
                self.V[x] ^= self.V[y]
            elif selector == 0x4:
                # 0x8XY4 Vx += Vy; VF = carry.
                # Bug fix: the carry flag and the 8-bit mask were missing.
                total = self.V[x] + self.V[y]
                self.V[x] = total & 0xFF
                self.V[0xF] = 1 if total > 0xFF else 0
            elif selector == 0x5:
                # 0x8XY5 Vx -= Vy; VF = NOT borrow.
                # Bug fix: no flag and V could go negative (no mask).
                no_borrow = 1 if self.V[x] >= self.V[y] else 0
                self.V[x] = (self.V[x] - self.V[y]) & 0xFF
                self.V[0xF] = no_borrow
            elif selector == 0x6:
                # 0x8XY6 Vx = Vy >> 1; VF = shifted-out bit.
                # Bug fix: the original also clobbered Vy.
                bit = self.V[y] & 0x1
                self.V[x] = self.V[y] >> 1
                self.V[0xF] = bit
            elif selector == 0x7:
                # 0x8XY7 Vx = Vy - Vx; VF = NOT borrow.
                # Bug fix: original referenced bare `V[x]` (NameError) and
                # used nibble comparisons for the flag.
                no_borrow = 1 if self.V[y] >= self.V[x] else 0
                self.V[x] = (self.V[y] - self.V[x]) & 0xFF
                self.V[0xF] = no_borrow
            elif selector == 0xE:
                # 0x8XYE Vx = Vy << 1; VF = shifted-out bit.
                # Bug fix: mask to 8 bits and do not clobber Vy.
                bit = (self.V[y] >> 7) & 0x1
                self.V[x] = (self.V[y] << 1) & 0xFF
                self.V[0xF] = bit
            self.PC += 2
        elif fo == 0x9000:
            # 0x9XY0 Skip next if Vx != Vy
            self.PC += 2
            if self.V[(opCode & 0xF00) >> 8] != self.V[(opCode & 0x0F0) >> 4]:
                self.PC += 2
        elif fo == 0xA000:
            # 0xANNN Set I to NNN
            self.I = opCode & 0xFFF
            self.PC += 2
        elif fo == 0xB000:
            # 0xBNNN Jump to V0 + NNN.
            # Bug fix: '+' binds tighter than '&', so the original computed
            # (V0 + opCode) & 0xFFF instead of V0 + (opCode & 0xFFF).
            self.PC = self.V[0] + (opCode & 0xFFF)
        elif fo == 0xC000:
            # 0xCXNN Set Vx to (Rand & NN)
            self.V[(opCode & 0xF00) >> 8] = random.randint(0x00, 0xFF) \
                & (opCode & 0xFF)
            self.PC += 2
        elif fo == 0xD000:
            # 0xDXYN Draw 8xN sprite at (Vx, Vy); VF = collision.
            x0 = self.V[(opCode & 0xF00) >> 8]
            y0 = self.V[(opCode & 0x0F0) >> 4]
            h = opCode & 0x00F
            self.V[0xF] = 0  # Collision flag
            for row in range(h):
                # Bug fix: sprite rows are consecutive bytes (memory[I+row]);
                # the original read memory[I + row*8].
                bits = [int(b) for b in
                        bin(self.memory[self.I + row])[2:].rjust(8, '0')]
                for col, new in enumerate(bits):
                    # Bug fix: the row offset was missing from the display
                    # index, so every sprite row landed on the same line.
                    # Coordinates wrap around the 64x32 screen.
                    idx = ((y0 + row) % 32) * 64 + ((x0 + col) % 64)
                    old = self.display[idx]
                    self.display[idx] = new ^ old  # XOR drawing per spec
                    if new and old:
                        self.V[0xF] = 1  # Collision made
            self.updateDisplay = True
            self.PC += 2
        elif fo == 0xE000:
            # 0xE000 Keyboard Handling
            if opCode & 0xFF == 0x9E:
                # 0xEX9E Skip next if key in Vx is pressed
                self.PC += 2
                if self.Keys[self.V[(opCode & 0xF00) >> 8]]:
                    self.PC += 2
            else:
                # 0xEXA1 Skip next if key in Vx is not pressed
                self.PC += 2
                if not self.Keys[self.V[(opCode & 0xF00) >> 8]]:
                    self.PC += 2
        elif fo == 0xF000:
            # 0xF000 Misc Functions
            selector = opCode & 0xFF
            # Bug fix: same '&' vs '>>' precedence error as in 0x8XXX.
            x = (opCode & 0xF00) >> 8
            if selector == 0x07:
                # 0xFX07 Set Vx to value of DelayT
                self.V[x] = self.DelayT
                self.PC += 2
            elif selector == 0x0A:
                # 0xFX0A Block until a key is pressed; store its INDEX in Vx.
                # Bug fix: the original never left its while-loop (break only
                # exited the inner for) and stored a boolean instead of the
                # key index.
                pressed = None
                while pressed is None:
                    self.updateKeys()
                    for i, down in enumerate(self.Keys):
                        if down:
                            pressed = i
                            break
                self.V[x] = pressed
                self.PC += 2
            elif selector == 0x15:
                # 0xFX15 Set DelayT to Vx
                self.DelayT = self.V[x]
                self.PC += 2
            elif selector == 0x18:
                # 0xFX18 Set SoundT to Vx
                self.SoundT = self.V[x]
                self.PC += 2
            elif selector == 0x1E:
                # 0xFX1E Add Vx to I; VF = 1 on 12-bit overflow
                self.I = self.I + self.V[x]
                if self.I > 0xFFF:
                    self.I &= 0xFFF
                    self.V[0xF] = 1
                else:
                    self.V[0xF] = 0
                self.PC += 2
            elif selector == 0x29:
                # 0xFX29 Point I at the 5-byte font glyph for the digit in Vx
                self.I = self.V[x]*5
                self.PC += 2
            elif selector == 0x33:
                # 0xFX33 Store BCD of Vx at I, I+1, I+2
                num = self.V[x]
                self.memory[self.I] = int(num / 100)
                self.memory[self.I+1] = int((num / 10) % 10)
                self.memory[self.I+2] = (num % 100) % 10
                self.PC += 2
            elif selector == 0x55:
                # 0xFX55 Dump V0..Vx into memory starting at I.
                # Bug fix: range(x) skipped Vx, and `self.I += num` corrupted
                # the index register with a wrong stride.
                for num in range(x + 1):
                    self.memory[self.I + num] = self.V[num]
                self.PC += 2
            elif selector == 0x65:
                # 0xFX65 Load V0..Vx from memory starting at I (same fixes).
                for num in range(x + 1):
                    self.V[num] = self.memory[self.I + num]
                self.PC += 2
            else:
                print(hex(opCode), "not found. Address", offset)
                assert False
        else:
            # Unhandled Opcode.
            # Bug fix: the original mixed .format() with a %-style template;
            # use a plain format string.
            print("Cannot parse {}. Opcode not found.".format(hex(opCode)))
            assert False

    def updateKeys(self):
        """Snapshot the 16-key hex keypad from the host keyboard."""
        # Bug fix: without pumping the event queue pygame never refreshes
        # key state (and the window becomes unresponsive).
        pygame.event.pump()
        cKey = pygame.key.get_pressed()
        self.Keys = [
            cKey[pygame.K_1], cKey[pygame.K_2],
            # Bug fix: key 2 was mapped to K_4 twice; the K_3 binding was lost.
            cKey[pygame.K_3], cKey[pygame.K_4],
            cKey[pygame.K_q], cKey[pygame.K_w],
            cKey[pygame.K_e], cKey[pygame.K_r],
            cKey[pygame.K_a], cKey[pygame.K_s],
            cKey[pygame.K_d], cKey[pygame.K_f],
            cKey[pygame.K_z], cKey[pygame.K_x],
            cKey[pygame.K_c], cKey[pygame.K_v]]

    def runCycle(self):
        """Execute one instruction and refresh keys, timers and the display."""
        self.updateKeys()
        self.runOp(self.PC)
        # Bug fix: the delay/sound timers were never decremented, so any game
        # waiting on 0xFX07 would hang forever. They tick once per cycle here
        # (the main loop runs at ~60 Hz).
        if self.DelayT > 0:
            self.DelayT -= 1
        if self.SoundT > 0:
            self.SoundT -= 1
        if self.updateDisplay:
            self.Screen.drawFrame(self.display)
            self.updateDisplay = False  # Bug fix: avoid redrawing every cycle
if __name__ == "__main__":
    # Boot the emulator with a bundled ROM and step it continuously.
    emu = Chip8()
    emu.loadProgram('pong2.c8')
    #code.interact(local=locals())
    while True:
        emu.runCycle()
        # ~60 instructions per second; CPU clock and timers share this rate.
        time.sleep(1.0/60)
# Morse-code symbol -> English character. '|' encodes a word space and
# '-.-.--' the exclamation mark.
malphabet = {".-": 'a', "-...": 'b', "-.-.": 'c', "-..": 'd', '.': 'e', "..-.": 'f', "--.": 'g', "....": 'h', "..": 'i',
             ".---": 'j', "-.-": 'k', ".-..": 'l', "--": 'm', "-.": 'n', "---": 'o', ".--.": 'p', "--.-": 'q',
             ".-.": 'r', "...": 's', '-': 't', "..-": 'u', "...-": 'v', ".--": 'w', "-..-": 'x', "-.--": 'y',
             "--..": 'z', "-----": '0', ".----": '1', "..---": '2', "...--": '3', "....-": '4', ".....": '5',
             "-....": '6', "--...": '7', "---..": '8', "----.": '9', "-.-.--": '!', '|': ' '}
# Reverse lookup: English character -> morse symbol (the mapping is bijective).
ealphabet = {letter: code for code, letter in malphabet.items()}
def morse2english(morse):
    """Decode a space-separated morse string to English text.

    '_' is accepted as an alias for '-' and '/' for the word separator '|'.
    Unknown symbols are silently skipped (matching the original behavior).

    Args:
        morse: morse string, e.g. "... --- ...".

    Returns:
        The decoded lowercase English string ("" for empty input).
    """
    if morse == "":
        return ""
    # Normalize the two accepted alias characters before decoding.
    normalized = morse.replace('_', '-').replace('/', '|')
    # Idiom fix: use the `in` operator instead of calling __contains__.
    return "".join(malphabet[code]
                   for code in normalized.split(' ')
                   if code in malphabet)
def english2morse(english):
    """Encode English text to morse code.

    Each encodable character becomes its morse symbol followed by a single
    space; characters without a morse mapping are skipped (matching the
    original behavior), so the result carries a trailing space when any
    character was encoded.

    Args:
        english: the text to encode (case-insensitive).

    Returns:
        The morse string ("" for empty input).
    """
    if english == "":
        return ""
    pieces = []
    # Idiom fix: use the `in` operator instead of calling __contains__.
    for ch in english.lower():
        if ch in ealphabet:
            pieces.append(ealphabet[ch] + ' ')
    return "".join(pieces)
# Quick round-trip demo: English -> morse -> English.
print('\n')
message = "this is a cool python program!"
print(message)
encoded = english2morse(message)
print(encoded)
print(morse2english(encoded))
import requests
import json
import os
import time
import sys,argparse
from datetime import datetime
# Command-line interface: all four arguments are mandatory.
parser=argparse.ArgumentParser()
parser.add_argument('--apikey', help='IBM Cloud API Key',required=True)
parser.add_argument('--rgid', help='Resource group ID',required=True)
parser.add_argument('--region', help='Region of cluster',required=True)
parser.add_argument('--cluster', help='name of the cluster',required=True)
# Read supplied arguments
args = parser.parse_args()
api_key = args.apikey              # IAM API key used to mint bearer tokens
resource_group_id = args.rgid      # resource group the cluster belongs to
region = args.region               # e.g. us-south
cluster = args.cluster             # cluster name or ID
################# Get IAM token ################
def get_token(api_key):
    """Exchange an IBM Cloud API key for an IAM bearer token.

    Args:
        api_key: the IBM Cloud API key.

    Returns:
        The access token string extracted from the IAM response.
    """
    get_token_url = 'https://iam.cloud.ibm.com/identity/token'
    request_headers = {
        "Accept": "application/json",
        "Content-Type": "application/x-www-form-urlencoded",
    }
    payload = {
        "grant_type": 'urn:ibm:params:oauth:grant-type:apikey',
        "apikey": api_key,
    }
    response = requests.post(get_token_url, headers=request_headers, params=payload)
    return response.json()['access_token']
###################################################
# Poll the IBM Kubernetes Service API until the cluster master reports
# 'Ready', re-authenticating on 401 and giving up after one hour.
token = get_token(api_key)
get_cluster_url = 'https://containers.bluemix.net/v1/clusters/'+cluster
headers={ "Authorization":"Bearer "+token ,
          "X-Region": region,
          #"Account":account,
          "X-Auth-Resource-Group": resource_group_id,
        }
count=0
print("****************** Monitoring status **********************")
loop_startTime = time.time()
masterStatusModifiedTime = 0
cluster_created_date = 0
timeout = time.time() + 60*60  # poll for at most one hour
status_old = ""
ready_flag = 0                 # set to 1 once the master reaches 'Ready'
while(time.time() < timeout):
    # NOTE(review): there is no sleep between iterations, so this polls the
    # API as fast as possible — consider adding a short delay.
    r=requests.get(get_cluster_url , headers=headers)
    #print("Returned Status Code: ",r.status_code)
    if r.status_code == 200:
        data = r.json()
        #status_new = data['masterStatus']
        # Only log when the master status actually changes.
        if(status_old != data['masterStatus']):
            print(datetime.utcnow().strftime("%d-%b-%Y (%H:%M:%S.%f)")+ " -> Master Status of the Cluster - ",data['masterStatus'])
            status_old = data['masterStatus']
        if(data['masterStatus']=='Ready'):
            cluster_created_date = data['createdDate']
            masterStatusModifiedTime = data['masterStatusModifiedDate']
            ready_flag = 1
            break
    elif r.status_code == 404:
        # Cluster not found: nothing to wait for, bail out.
        print("Status Code: ",r.status_code)
        print("Reason: ",r.reason)
        break
    elif r.status_code == 401:
        # IAM token expired: fetch a fresh one and rebuild the headers.
        print("Status Code: ",r.status_code)
        print("Reason: ",r.reason)
        token = get_token(api_key)
        headers = {"Authorization":"Bearer "+token ,
                   "X-Region": region,
                   # "Account":account,
                   "X-Auth-Resource-Group": resource_group_id,
                  }
        continue
    else:
        # Transient/unknown error: log and keep polling until timeout.
        print("Status Code: ",r.status_code)
        print("Get Cluster Info API call failed. Reason: ",r.reason)
if(ready_flag==1):
    print("LOOP ENDED")
if(ready_flag==0):
    print("LOOP TIMED OUT - Cluster Master Status did not reach 'Ready' State in 1 hr after kps is enabled")
    print("Latest Master Status polled - ", status_old)
loop_endTime = time.time()
elapsedTime = (loop_endTime - loop_startTime)
print("Elapsed Time (seconds) = %s" % elapsedTime)
print("Master Status Modified Time: ", masterStatusModifiedTime)
|
from .ParameterSearch import ParameterSearch
import concurrent.futures
class ThreadedParameterSearch(ParameterSearch):
    """Threaded version of :py:class:`alpenglow.utils.ParameterSearch`.

    Evaluates parameter configurations concurrently using either a process
    pool or a thread pool.
    """

    def __init__(self, model, Score, threads=4, use_process_pool=True):
        """
        Args:
            model: the model to evaluate (passed to ParameterSearch).
            Score: the scoring class (passed to ParameterSearch).
            threads: number of worker threads/processes.
            use_process_pool: use a ProcessPoolExecutor when True, otherwise
                a ThreadPoolExecutor.
        """
        ParameterSearch.__init__(self, model, Score)
        self.threads = threads
        self.use_process_pool = use_process_pool

    def run(self, *run_parameters, **run_kw_parameters):
        """Run all configurations concurrently and collect the results.

        Returns:
            The dataframe produced by ``self._to_dataframe``.
        """
        self.results = []
        configurations = self._get_configurations()
        parameter_tuples = [(parameters, (run_parameters, run_kw_parameters))
                            for parameters in configurations]
        # Bug fix: the executor was never shut down, leaking worker
        # threads/processes; the context manager guarantees cleanup even
        # if a worker raises.
        pool_cls = (concurrent.futures.ProcessPoolExecutor
                    if self.use_process_pool
                    else concurrent.futures.ThreadPoolExecutor)
        with pool_cls(self.threads) as executor:
            results = list(executor.map(self._mapper, parameter_tuples))
        return self._to_dataframe(results)
|
import discord
from discord.ext import commands
import asyncio
class Mod(commands.Cog):
    """Moderation cog: bans, purges, mutes, lockdowns and channel blocks."""

    def __init__(self, bot):
        self.bot = bot

    async def format_mod_embed(self, ctx, user, success, method, duration=None, location=None):
        '''Helper func to format an embed to prevent extra code'''
        emb = discord.Embed(timestamp=ctx.message.created_at)
        emb.set_author(name=method.title(), icon_url=user.avatar_url)
        emb.color = discord.Colour.orange()
        emb.set_footer(text=f'User ID: {user.id}')
        if success:
            if method == 'ban' or method == 'hackban':
                emb.description = f'{user} was just {method}ned.'
            elif method == 'unmute':
                emb.description = f'{user} was just {method}d.'
            elif method == 'mute':
                emb.description = f'{user} was just {method}d for {duration}.'
            elif method == 'channel-lockdown' or method == 'server-lockdown':
                emb.description = f'`{location.name}` is now in lockdown mode!'
            else:
                emb.description = f'{user} was just {method}ed.'
        else:
            # Bug fix: the original tested `method == 'lockdown' or
            # 'channel-lockdown'`, which is always truthy, so every failure
            # rendered the lockdown message.
            if method == 'channel-lockdown' or method == 'server-lockdown':
                emb.description = f"You do not have the permissions to {method} `{location.name}`"
            else:
                emb.description = f"You do not have the permissions to {method} {user.name}."
        # Bug fix: the embed was built but never returned, so every caller
        # ended up sending `embed=None`.
        return emb

    @commands.command()
    @commands.has_permissions(ban_members=True)
    async def ban(self, ctx, member: discord.Member, *, reason=None):
        '''Ban a member from the server.'''
        await ctx.guild.ban(member, reason=reason)
        # Consistency fix: the two original branches built identical embeds
        # except for the Reason field; merged into one.
        em = discord.Embed(
            title=f':exclamation: | Ban {member}',
            description=f'{member} was banned from the server by {ctx.author}',
            color=discord.Colour.green())
        em.add_field(name="Target", value=f"{member}")
        em.add_field(name="Moderator", value=f"{ctx.author}")
        em.add_field(name="Reason", value=f"{reason}" if reason is not None else "none")
        await ctx.send(embed=em)

    @ban.error
    async def ban_error(self, msg, error):
        # Error handler for the ban command (missing permissions).
        if isinstance(error, commands.MissingPermissions):
            # Bug fix: `em=` is not a valid kwarg (must be `embed=`), the title
            # had a typo ('exclamarion') and the description was empty.
            return await msg.send(embed=discord.Embed(
                title=":exclamation: | Error",
                description="You do not have the permissions to ban members."))

    @commands.command()
    @commands.has_permissions(ban_members=True)
    async def unban(self, ctx, name_or_id, *, reason=None):
        '''Unban someone from the server.'''
        ban = await ctx.get_ban(name_or_id)
        try:
            await ctx.guild.unban(ban.user, reason=reason)
        except discord.HTTPException:
            success = False
        else:
            success = True
        emb = await self.format_mod_embed(ctx, ban.user, success, 'unban')
        await ctx.send(embed=emb)

    @commands.command(aliases=['del', 'prune'])
    @commands.has_permissions(kick_members=True)
    async def purge(self, ctx, limit: int, member: discord.Member = None):
        '''Clean a number of messages'''
        if member is None:
            # Bug fix: Context has no purge(); it lives on the channel.
            await ctx.channel.purge(limit=limit+1)
        else:
            async for message in ctx.channel.history(limit=limit+1):
                # Bug fix: compare with ==, not identity (`is`), which can
                # fail for distinct Member objects of the same user.
                if message.author == member:
                    await message.delete()

    @commands.command()
    async def clean(self, ctx, quantity: int):
        ''' Clean a number of your own messages
        Usage: {prefix}clean 5 '''
        # Cap the search window at 16 messages (quantity + the invocation).
        total = quantity + 1 if quantity <= 15 else 6
        async for message in ctx.channel.history(limit=total):
            if message.author == ctx.author:
                await message.delete()
                await asyncio.sleep(3.0)  # stay well under the rate limit

    @commands.command()
    async def bans(self, ctx):
        '''See a list of banned users in the guild'''
        try:
            bans = await ctx.guild.bans()
        except discord.Forbidden:
            return await ctx.send('You dont have the perms to see bans.')
        em = discord.Embed(title=f'List of Banned Members ({len(bans)}):')
        em.description = ', '.join([str(b.user) for b in bans])
        em.color = discord.Colour.orange()
        await ctx.send(embed=em)

    @commands.command()
    @commands.has_permissions(ban_members=True)
    async def baninfo(self, ctx, *, name_or_id):
        '''Check the reason of a ban from the audit logs.'''
        ban = await ctx.get_ban(name_or_id)
        em = discord.Embed()
        em.color = discord.Colour.orange()
        em.set_author(name=str(ban.user), icon_url=ban.user.avatar_url)
        em.add_field(name='Reason', value=ban.reason or 'None')
        em.set_thumbnail(url=ban.user.avatar_url)
        em.set_footer(text=f'User ID: {ban.user.id}')
        await ctx.send(embed=em)

    @commands.command()
    @commands.has_permissions(manage_roles=True)
    async def addrole(self, ctx, member: discord.Member, *, rolename: str):
        '''Add a role to someone else.'''
        role = discord.utils.find(lambda m: rolename.lower() in m.name.lower(), ctx.message.guild.roles)
        if not role:
            return await ctx.send('That role does not exist.')
        try:
            await member.add_roles(role)
            await ctx.send(f'Added: `{role.name}`')
        except discord.HTTPException:
            await ctx.send("I don't have the perms to add that role.")

    @commands.command()
    @commands.has_permissions(manage_roles=True)
    async def removerole(self, ctx, member: discord.Member, *, rolename: str):
        '''Remove a role from someone else.'''
        role = discord.utils.find(lambda m: rolename.lower() in m.name.lower(), ctx.message.guild.roles)
        if not role:
            return await ctx.send('That role does not exist.')
        try:
            await member.remove_roles(role)
            await ctx.send(f'Removed: `{role.name}`')
        except discord.HTTPException:
            # Consistency fix: this said "add that role" in the original.
            await ctx.send("I don't have the perms to remove that role.")

    @commands.command()
    @commands.has_permissions(administrator=True)
    async def mute(self, ctx, member: discord.Member, duration, *, reason=None):
        '''Denies someone from chatting in all text channels and talking in voice channels for a specified duration'''
        unit = duration[-1]
        if unit == 's':
            time = int(duration[:-1])
            longunit = 'seconds'
        elif unit == 'm':
            time = int(duration[:-1]) * 60
            longunit = 'minutes'
        elif unit == 'h':
            time = int(duration[:-1]) * 60 * 60
            longunit = 'hours'
        else:
            await ctx.send('Invalid Unit! Use `s`, `m`, or `h`.')
            return
        progress = await ctx.send('Muting user!')
        try:
            for channel in ctx.guild.text_channels:
                await channel.set_permissions(member, overwrite=discord.PermissionOverwrite(send_messages=False), reason=reason)
            for channel in ctx.guild.voice_channels:
                await channel.set_permissions(member, overwrite=discord.PermissionOverwrite(speak=False), reason=reason)
        except discord.Forbidden:
            success = False
        else:
            success = True
        emb = await self.format_mod_embed(ctx, member, success, 'mute', f'{str(duration[:-1])} {longunit}')
        # Bug fix: Message.delete() is a coroutine and must be awaited.
        await progress.delete()
        await ctx.send(embed=emb)
        # Automatically lift the mute once the duration elapses.
        await asyncio.sleep(time)
        try:
            for channel in ctx.guild.channels:
                await channel.set_permissions(member, overwrite=None, reason=reason)
        except discord.HTTPException:
            pass  # best effort: the member/channel may be gone by now

    @commands.command()
    @commands.has_permissions(administrator=True)
    async def unmute(self, ctx, member: discord.Member, *, reason=None):
        '''Removes channel overrides for specified member'''
        progress = await ctx.send('Unmuting user!')
        try:
            for channel in ctx.message.guild.channels:
                await channel.set_permissions(member, overwrite=None, reason=reason)
        except discord.Forbidden:
            success = False
        else:
            success = True
        emb = await self.format_mod_embed(ctx, member, success, 'unmute')
        # Bug fix: delete() must be awaited.
        await progress.delete()
        await ctx.send(embed=emb)

    @commands.group(invoke_without_command=True)
    @commands.has_permissions(administrator=True)
    async def lockdown(self, ctx):
        """Server/Channel lockdown"""
        pass

    @lockdown.command(aliases=['channel'])
    async def chan(self, ctx, channel: discord.TextChannel = None, *, reason=None):
        '''Lock down a single text channel (defaults to the current one).'''
        if channel is None:
            channel = ctx.channel
        try:
            await channel.set_permissions(ctx.guild.default_role, overwrite=discord.PermissionOverwrite(send_messages=False), reason=reason)
        except discord.Forbidden:
            success = False
        else:
            success = True
        emb = await self.format_mod_embed(ctx, ctx.author, success, 'channel-lockdown', 0, channel)
        await ctx.send(embed=emb)

    @lockdown.command()
    async def server(self, ctx, server: discord.Guild = None, *, reason=None):
        '''Lock down every channel of the server (defaults to the current one).'''
        if server is None:
            server = ctx.guild
        progress = await ctx.send(f'Locking down {server.name}')
        try:
            for channel in server.channels:
                await channel.set_permissions(ctx.guild.default_role, overwrite=discord.PermissionOverwrite(send_messages=False), reason=reason)
        except discord.Forbidden:
            success = False
        else:
            success = True
        emb = await self.format_mod_embed(ctx, ctx.author, success, 'server-lockdown', 0, server)
        # Bug fix: delete() must be awaited.
        await progress.delete()
        await ctx.send(embed=emb)

    @commands.command()
    @commands.has_permissions(administrator=True)
    async def block(self, ctx, user: discord.Member):
        """
        Blocks a user from chatting in current channel.
        Similar to mute but instead of restricting access
        to all channels it restricts in current channel.
        """
        if not user:  # checks if there is user
            return await ctx.send("You must specify a user")
        await ctx.channel.set_permissions(user, send_messages=False)
        embed = discord.Embed(title="Block", description=f"{user} user can no longer write to this channel!")
        await ctx.send(embed=embed)

    @commands.command()
    @commands.has_permissions(administrator=True)
    async def unblock(self, ctx, user: discord.Member):
        """Unblocks a user from current channel"""
        if not user:  # checks if there is user
            return await ctx.send("You must specify a user")
        # Bug fix: the original immediately re-applied send_messages=False
        # after unblocking, leaving the user blocked.
        await ctx.channel.set_permissions(user, send_messages=True)
        embed = discord.Embed(title="Unblock", description=f"{user} user can write to this channel!")
        await ctx.send(embed=embed)
def setup(bot):
    # Entry point used by discord.py's extension loader (bot.load_extension):
    # registers the moderation cog on the bot.
    bot.add_cog(Mod(bot))
def ackermann(m, n):
    """Compute the Ackermann function A(m, n), memoized in 'ack_vals.txt'.

    Each computed value is appended to the cache file as "m n result", and
    looked up there first on subsequent calls (including recursive ones).

    Fixes over the original:
      * crashed with FileNotFoundError when the cache file did not exist yet;
      * printed every cache line (debug leftover);
      * files are now managed with context managers.

    Args:
        m: first argument (non-negative int).
        n: second argument (non-negative int).

    Returns:
        The value of the Ackermann function A(m, n).
    """
    # Check the on-disk cache first; a missing file just means no cache yet.
    try:
        with open('ack_vals.txt', 'r') as cache:
            for line in cache:
                parts = line.split()
                # Each valid line is "m n result"; skip blank/garbled lines.
                if len(parts) == 3 and int(parts[0]) == m and int(parts[1]) == n:
                    return int(parts[2])
    except FileNotFoundError:
        pass
    # Cache miss: apply the Ackermann recurrence.
    if m == 0:
        ans = n + 1
    elif n == 0:
        ans = ackermann(m - 1, 1)
    else:
        ans = ackermann(m - 1, ackermann(m, n - 1))
    # Persist the result for future calls ('a' creates the file if needed).
    with open('ack_vals.txt', 'a') as cache:
        cache.write(str(m) + ' ' + str(n) + ' ' + str(ans) + '\n')
    return ans
if __name__ == '__main__':
    # Interactive driver: A(m, n) grows hyper-exponentially in m, so warn
    # the user before reading the inputs.
    print("Be careful with the values, the function grows beyond exponentially,")
    print("This function might not find the value in any reasonable amount of time")
    m = int(input("Enter the value of m: "))
    n = int(input("Enter the value of n: "))
    print(ackermann(m,n))
|
import numpy as np
import pandas as pd
import neurokit2 as nk
# =============================================================================
# Events
# =============================================================================
def test_events_find():
    """events_find should locate onsets and honor duration/interval filters."""
    cosine = np.cos(np.linspace(start=0, stop=20, num=1000))
    found = nk.events_find(cosine)
    assert list(found["Onset"]) == [0, 236, 550, 864]
    # Filtering by minimum duration drops the truncated edge events.
    found = nk.events_find(cosine, duration_min=150)
    assert list(found["Onset"]) == [236, 550]
    # Filtering by minimum inter-event interval merges close events.
    found = nk.events_find(cosine, inter_min=300)
    assert list(found["Onset"]) == [0, 550, 864]
def test_events_to_mne():
    """events_to_mne should produce a default single-label event id map."""
    cosine = np.cos(np.linspace(start=0, stop=20, num=1000))
    found = nk.events_find(cosine)
    _, event_id = nk.events_to_mne(found)
    assert event_id == {'Event': 0}
def test_plot_events_in_signal():
    """plot_events_in_signal should return one onset marker per sample."""
    cosine = np.cos(np.linspace(start=0, stop=20, num=1000))
    found = nk.events_find(cosine)
    plotted = nk.plot_events_in_signal(cosine, found, show=False)
    assert len(plotted['Event_Onset']) == 1000
"""A simple entrypoint-free plugin system for python"""
from typing import Any, Callable, Dict, Iterable, List, Optional, Tuple
import inspect
import warnings
from importlib import import_module
from collections import namedtuple
from enum import Enum
from diot import OrderedDiot
try: # pragma: no cover
import importlib_metadata
except ImportError: # pragma: no cover
# pylint: disable=ungrouped-imports
from importlib import metadata as importlib_metadata
__version__ = '0.0.6'
SimplugImpl = namedtuple('SimplugImpl', ['impl', 'has_self'])
SimplugImpl.__doc__ = """A namedtuple wrapper for hook implementation.
This is used to mark the method/function to be an implementation of a hook.
Args:
impl: The hook implementation
"""
# --- Exception hierarchy (all rooted at SimplugException) --------------------
class SimplugException(Exception):
    """Base exception class for simplug"""

class NoSuchPlugin(SimplugException):
    """When a plugin cannot be imported"""

class PluginRegistered(SimplugException):
    """When a plugin with a name already registered"""

class NoPluginNameDefined(SimplugException):
    """When the name of the plugin cannot be found"""

class HookSignatureDifferentFromSpec(SimplugException):
    """When the hook signature is different from spec"""

class NoSuchHookSpec(SimplugException):
    """When implemented a undefined hook or calling a non-exist hook"""

class HookRequired(SimplugException):
    """When a required hook is not implemented"""

class HookSpecExists(SimplugException):
    """When a hook has already been defined"""

class SyncImplOnAsyncSpecWarning(Warning):
    """When a sync implementation on an async hook"""

class SimplugResult(Enum):
    """Way to get the results from the hooks

    Attributes:
        ALL: Get all the results from the hook, as a list
            including `NONE`s
        ALL_BUT_NONE: Get all the results from the hook, as a list
            not including `NONE`s
        FIRST: Get the none-`None` result from the first plugin only
            (ordered by priority)
        LAST: Get the none-`None` result from the last plugin only
    """
    ALL = 'all'
    ALL_BUT_NONE = 'all_but_none'
    FIRST = 'first'
    LAST = 'last'
class SimplugWrapper:
"""A wrapper for plugin
Args:
plugin: A object or a string indicating the plugin as a module
batch_index: The batch_index when the plugin is registered
>>> simplug = Simplug()
>>> simplug.register('plugin1', 'plugin2') # batch 0
>>> # index:0, index:1
>>> simplug.register('plugin3', 'plugin4') # batch 1
>>> # index:0, index:1
index: The index when the plugin is registered
Attributes:
plugin: The raw plugin object
priority: A 2-element tuple used to prioritize the plugins
- If `plugin.priority` is specified, use it as the first element
and batch_index will be the second element
- Otherwise, batch_index the first and index the second.
- Smaller number has higher priority
- Negative numbers allowed
Raises:
NoSuchPlugin: When a string is passed in and the plugin cannot be
imported as a module
"""
def __init__(self, plugin: Any, batch_index: int, index: int):
self.plugin = self._name = None
if isinstance(plugin, str):
try:
self.plugin = import_module(plugin)
except ImportError as exc:
raise NoSuchPlugin(plugin).with_traceback(
exc.__traceback__
) from None
elif isinstance(plugin, tuple):
# plugin load from entrypoint
# name specified as second element explicitly
self.plugin, self._name = plugin
else:
self.plugin = plugin
priority = getattr(self.plugin, 'priority', None)
self.priority = ((batch_index, index)
if priority is None
else (priority, batch_index)) # type: Tuple[int, int]
self.enabled = True # type: bool
@property
def version(self) -> Optional[str]:
"""Try to get the version of the plugin.
If the attribute `version` is definied, use it. Otherwise, try to check
if `__version__` is defined. If neither is defined, return None.
Returns:
In the priority order of plugin.version, plugin.__version__ and None
"""
return getattr(self.plugin,
'version',
getattr(self.plugin, '__version__', None))
__version__ = version
@property
def name(self) -> str:
"""Try to get the name of the plugin.
A lowercase name is recommended.
if `<plugin>.name` is defined, then the name is used. Otherwise,
`<plugin>.__name__` is used. Finally, `<plugin>.__class__.__name__` is
tried.
Raises:
NoPluginNameDefined: When a name cannot be retrieved.
Returns:
The name of the plugin
"""
if self._name is not None:
return self._name
try:
return self.plugin.name
except AttributeError:
pass
try:
return self.plugin.__name__.lower()
except AttributeError:
pass
try:
return self.plugin.__class__.__name__.lower()
except AttributeError: # pragma: no cover
pass
raise NoPluginNameDefined(str(self.plugin)) # pragma: no cover
    def enable(self) -> None:
        """Enable this plugin: its hook implementations will be called."""
        self.enabled = True
    def disable(self) -> None:
        """Disable this plugin: its hook implementations are skipped."""
        self.enabled = False
def hook(self, name: str) -> Optional[SimplugImpl]:
"""Get the hook implementation of this plugin by name
Args:
name: The name of the hook
Returns:
The wrapper of the implementation. If the implementation is not
found or it's not decorated by `simplug.impl`, None will be
returned.
"""
ret = getattr(self.plugin, name, None)
if not isinstance(ret, SimplugImpl):
return None
return ret
def __eq__(self, other: Any) -> bool:
if not isinstance(other, self.__class__):
return False
return self.plugin is other.plugin
def __ne__(self, other: Any) -> bool:
return not self.__eq__(other)
class SimplugHook:
    """A hook of a plugin.

    Args:
        simplug_hooks: The SimplugHooks object
        spec: The specification of the hook
        required: Whether this hook is required to be implemented
        result: Way to collect the results from the hook
        warn_sync_impl_on_async: Whether to warn about sync implementations
            of an async hook

    Attributes:
        name: The name of the hook (taken from the spec function)
    """

    def __init__(self,  # pylint: disable=too-many-arguments
                 simplug_hooks: "SimplugHooks",
                 spec: Callable,
                 required: bool,
                 result: SimplugResult,
                 warn_sync_impl_on_async: bool = False):
        self.simplug_hooks = simplug_hooks
        self.spec = spec
        self.name = spec.__name__
        self.required = required
        self.result = result
        self.warn_sync_impl_on_async = warn_sync_impl_on_async

    def _get_results(self, results: List[Any]) -> Any:
        """Collapse the raw per-plugin results according to self.result."""
        if self.result == SimplugResult.ALL:
            return results
        non_null = [item for item in results if item is not None]
        if self.result == SimplugResult.FIRST:
            return non_null[0] if non_null else None
        if self.result == SimplugResult.LAST:
            return non_null[-1] if non_null else None
        # SimplugResult.ALL_BUT_NONE
        return non_null

    def __call__(self, *args, **kwargs):
        """Call the hook on every enabled plugin, in priority order.

        Args:
            *args: args for the hook
            **kwargs: kwargs for the hook

        Returns:
            Depending on `self.result`:
            - SimplugResult.ALL: all results, as a list, including `None`s
            - SimplugResult.ALL_BUT_NONE: all results, excluding `None`s
            - SimplugResult.FIRST: the none-`None` result from the first
              plugin only (ordered by priority)
            - SimplugResult.LAST: the none-`None` result from the last
              plugin only
        """
        self.simplug_hooks._sort_registry()
        collected = []
        for plugin in self.simplug_hooks._registry.values():
            if not plugin.enabled:
                continue
            impl_wrapper = plugin.hook(self.name)
            if impl_wrapper is None:
                continue
            # Implementations written as methods get the raw plugin as `self`.
            call_args = (plugin.plugin, *args) if impl_wrapper.has_self else args
            collected.append(impl_wrapper.impl(*call_args, **kwargs))
        return self._get_results(collected)
class SimplugHookAsync(SimplugHook):
    """Async flavour of `SimplugHook`: awaits coroutine implementations."""
    # invalid-overridden-method
    # pylint: disable=bad-option-value,W0236
    async def __call__(self, *args, **kwargs):
        """Call the hook on every enabled plugin asynchronously.

        Args:
            *args: args for the hook
            **kwargs: kwargs for the hook

        Returns:
            The collected results, shaped by `self.result` exactly as
            described for `SimplugHook.__call__`.
        """
        self.simplug_hooks._sort_registry()
        collected = []
        for plugin in self.simplug_hooks._registry.values():
            if not plugin.enabled:
                continue
            impl_wrapper = plugin.hook(self.name)
            if impl_wrapper is None:
                continue
            call_args = (plugin.plugin, *args) if impl_wrapper.has_self else args
            outcome = impl_wrapper.impl(*call_args, **kwargs)
            # Sync implementations are tolerated on async hooks; only
            # genuine coroutines get awaited.
            if inspect.iscoroutine(outcome):
                outcome = await outcome
            collected.append(outcome)
        return self._get_results(collected)
class SimplugHooks:
    """The hooks manager
    Methods in this class are prefixed with a underscore to keep attributes
    clean for hooks.
    To call a hook in your system:
        >>> simplug.hooks.<hook_name>(<args>)
    Attributes:
        _registry: The plugin registry
        _specs: The registry for the hook specs
        _registry_sorted: Whether the plugin registry has been sorted already
    """
    def __init__(self):
        self._registry = OrderedDiot()  # type: OrderedDiot
        self._specs = {}  # type: Dict[str, SimplugHook]
        self._registry_sorted = False  # type: bool
    def _register(self, plugin: SimplugWrapper) -> None:
        """Register a plugin (already wrapped by SimplugWrapper)
        Args:
            plugin: The plugin wrapper
        Raises:
            PluginRegistered: When a different plugin with the same name
                has already been registered
            HookRequired: When a required hook is not implemented
            HookSignatureDifferentFromSpec: When the arguments of a hook
                implementation is different from its specification
        """
        if (plugin.name in self._registry and
                plugin != self._registry[plugin.name]):
            raise PluginRegistered(f'Another plugin named {plugin.name} '
                                   'has already been registered.')
        # check if required hooks implemented
        # and signature
        for specname, spec in self._specs.items():
            hook = plugin.hook(specname)
            if spec.required and hook is None:
                raise HookRequired(f'{specname}, but not implemented '
                                   f'in plugin {plugin.name}')
            if hook is None:
                continue
            impl_params = list(inspect.signature(hook.impl).parameters.keys())
            spec_params = list(inspect.signature(spec.spec).parameters.keys())
            # Fix: a zero-argument spec or implementation used to crash
            # here with IndexError; guard the 'self'-stripping against
            # empty parameter lists.
            if impl_params and impl_params[0] == 'self':
                impl_params = impl_params[1:]
            if spec_params and spec_params[0] == 'self':
                spec_params = spec_params[1:]
            if impl_params != spec_params:
                raise HookSignatureDifferentFromSpec(
                    f'{specname!r} in plugin {plugin.name}\n'
                    f'Expect {spec_params}, '
                    f'but got {impl_params}'
                )
            if (isinstance(spec, SimplugHookAsync) and
                    spec.warn_sync_impl_on_async and
                    not inspect.iscoroutinefunction(hook.impl)):
                warnings.warn(f"Sync implementation on async hook "
                              f"{specname!r} in plugin {plugin.name}",
                              SyncImplOnAsyncSpecWarning)
        self._registry[plugin.name] = plugin
    def _sort_registry(self) -> None:
        """Sort the registry by the priority only once"""
        if self._registry_sorted:
            return
        orderedkeys = self._registry.__diot__['orderedkeys']
        self._registry.__diot__['orderedkeys'] = sorted(
            orderedkeys,
            key=lambda plug: self._registry[plug].priority
        )
        self._registry_sorted = True
    def __getattr__(self, name: str) -> "SimplugHook":
        """Get the hook by name
        Args:
            name: The hook name
        Returns:
            The SimplugHook object
        Raises:
            NoSuchHookSpec: When the hook has no specification defined.
        """
        try:
            return self._specs[name]
        except KeyError as exc:
            raise NoSuchHookSpec(name).with_traceback(
                exc.__traceback__
            ) from None
class SimplugContext:
    """The context manager for enabling or disabling a set of plugins"""
    def __init__(self, simplug: "Simplug", plugins: Optional[Iterable[Any]]):
        # plugins=None means "do nothing at all" on enter/exit.
        self.plugins = plugins
        if plugins is not None:
            self.simplug = simplug
            # Snapshot the registry and every plugin's enabled flag so
            # __exit__ can restore the exact pre-context state.
            self.orig_registry = simplug.hooks._registry.copy()
            self.orig_status = {name: plugin.enabled
                                for name, plugin in self.orig_registry.items()}
    def __enter__(self):
        if self.plugins is None:
            return
        orig_registry = self.orig_registry.copy()
        # raw
        orig_names = list(orig_registry.keys())
        orig_raws = [plugin.plugin for plugin in orig_registry.values()]
        # Each listed plugin may be given by name, as a wrapper, or as the
        # raw plugin object; whichever matches is enabled and removed from
        # the working copy.  Whatever remains afterwards gets disabled.
        for plugin in self.plugins:
            if isinstance(plugin, str) and plugin in orig_registry:
                orig_registry[plugin].enable()
                del orig_registry[plugin]
            elif plugin in orig_registry.values():
                plugin.enable()
                del orig_registry[plugin.name]
            elif plugin in orig_raws:
                name = orig_names[orig_raws.index(plugin)]
                orig_registry[name].enable()
                del orig_registry[name]
            else:
                # Unknown plugin: register it on the fly.
                self.simplug.register(plugin)
        for plugin in orig_registry.values():
            plugin.disable()
    def __exit__(self, *exc):
        if self.plugins is None:
            return
        # Restore the snapshot taken in __init__ (registry membership and
        # each plugin's enabled flag).
        self.simplug.hooks._registry = self.orig_registry
        for name, status in self.orig_status.items():
            self.simplug.hooks._registry[name].enabled = status
class _SimplugContextOnly(SimplugContext):
    """The context manager with only the given plugins enabled.
    Inherits both __enter__ (enable listed, disable the rest) and
    __exit__ (restore the snapshot) from SimplugContext unchanged.
    """
class _SimplugContextBut(SimplugContext):
    """The context manager with only the given plugins disabled.
    Mirror image of SimplugContext.__enter__: the listed plugins are
    disabled, every other registered plugin is enabled.  __exit__ is
    inherited and restores the snapshot.
    """
    def __enter__(self):
        if self.plugins is None:
            return
        orig_registry = self.orig_registry.copy()
        # raw
        orig_names = list(orig_registry.keys())
        orig_raws = [plugin.plugin for plugin in orig_registry.values()]
        for plugin in self.plugins:
            # Accept a name, a wrapper, or the raw plugin object.
            if isinstance(plugin, str) and plugin in orig_registry:
                orig_registry[plugin].disable()
                del orig_registry[plugin]
            elif plugin in orig_registry.values():
                plugin.disable()
                del orig_registry[plugin.name]
            elif plugin in orig_raws:
                name = orig_names[orig_raws.index(plugin)]
                orig_registry[name].disable()
                del orig_registry[name]
            # ignore plugin not existing
        for plugin in orig_registry.values():
            plugin.enable()
class Simplug:
    """The plugin manager for simplug
    Attributes:
        PROJECTS: The projects registry, to make sure the same `Simplug`
            object by the name project name.
        _batch_index: The batch index for plugin registration
        hooks: The hooks manager
        _inited: Whether `__init__` has already been called. Since the
            `__init__` method will be called after `__new__`, this is used to
            avoid `__init__` to be called more than once
    """
    PROJECTS: Dict[str, "Simplug"] = {}
    def __new__(cls, project: str) -> "Simplug":
        # Per-project singleton: the same project name always yields the
        # same Simplug instance.
        if project not in cls.PROJECTS:
            obj = super().__new__(cls)
            obj.__init__(project)
            cls.PROJECTS[project] = obj
        return cls.PROJECTS[project]
    def __init__(self, project: str):
        # Python re-invokes __init__ after __new__ even for the cached
        # instance; the _inited flag makes the second call a no-op.
        if getattr(self, '_inited', None):
            return
        self._batch_index = 0
        self.hooks = SimplugHooks()
        self.project = project
        self._inited = True
    def load_entrypoints(self, group: Optional[str] = None):
        """Load plugins from setuptools entry_points
        Args:
            group: The entry-point group to scan; defaults to the
                project name.
        """
        group = group or self.project
        for dist in importlib_metadata.distributions():
            for epoint in dist.entry_points:
                if epoint.group != group:
                    continue
                plugin = epoint.load()
                # Register as (plugin, name) so the entry-point name takes
                # precedence over any attribute-derived name.
                self.register((plugin, epoint.name))
    def register(self, *plugins: Any) -> None:
        """Register plugins
        Args:
            *plugins: The plugins, each of which could be a str, indicating
                that the plugin is a module and will be imported by
                `__import__`; or an object with the hook implementations as
                its attributes.
        """
        for i, plugin in enumerate(plugins):
            plugin = SimplugWrapper(plugin, self._batch_index, i)
            self.hooks._register(plugin)
        # One batch per register() call: the default priority orders
        # plugins first by batch, then by position within the batch.
        self._batch_index += 1
    def get_plugin(self, name: str, raw: bool = False) -> object:
        """Get the plugin wrapper or the raw plugin object
        Args:
            name: The name of the plugin
            raw: Get the raw plugin object (the one when it's registered)
                If a plugin is a module and registered by its name, the
                module is returned
        Raises:
            NoSuchPlugin: When the plugin does not exist
        Returns:
            The plugin wrapper or raw plugin
        """
        if name not in self.hooks._registry:
            raise NoSuchPlugin(name)
        wrapper = self.hooks._registry[name]
        return wrapper.plugin if raw else wrapper
    def get_all_plugins(self,
                        raw: bool = False) -> Dict[str, SimplugWrapper]:
        """Get a mapping of all plugins
        Args:
            raw: Whether return the raw plugin or not
                (the one when it's registered)
                If a plugin is registered as a module by its name, the module
                is returned.
        Returns:
            The mapping of all plugins
        """
        if not raw:
            return self.hooks._registry
        return OrderedDiot([(name, plugin.plugin)
                            for name, plugin
                            in self.hooks._registry.items()])
    def get_enabled_plugins(self,
                            raw: bool = False) -> Dict[str, SimplugWrapper]:
        """Get a mapping of all enabled plugins
        Args:
            raw: Whether return the raw plugin or not
                (the one when it's registered)
                If a plugin is registered as a module by its name, the module
                is returned.
        Returns:
            The mapping of all enabled plugins
        """
        return OrderedDiot([(name, plugin.plugin if raw else plugin)
                            for name, plugin
                            in self.hooks._registry.items()
                            if plugin.enabled])
    def get_all_plugin_names(self) -> List[str]:
        """Get the names of all plugins
        Returns:
            The names of all plugins
        """
        return list(self.hooks._registry.keys())
    def get_enabled_plugin_names(self) -> List[str]:
        """Get the names of all enabled plugins
        Returns:
            The names of all enabled plugins
        """
        return [name for name, plugin in self.hooks._registry.items()
                if plugin.enabled]
    def plugins_only_context(
            self,
            plugins: Optional[Iterable[Any]]
    ) -> _SimplugContextOnly:
        """A context manager with only given plugins enabled
        Args:
            plugins: The plugin names or plugin objects
                If the given plugin does not exist, register it.
                None to not enable or disable anything
        Returns:
            The context manager
        """
        return _SimplugContextOnly(self, plugins)
    def plugins_but_context(
            self,
            plugins: Optional[Iterable[Any]]
    ) -> _SimplugContextBut:
        """A context manager with all plugins but given plugins
        enabled
        Args:
            plugins: The plugin names or plugin objects to exclude
                If the given plugin does not exist, ignore it
        Returns:
            The context manager
        """
        return _SimplugContextBut(self, plugins)
    def enable(self, *names: str) -> None:
        """Enable plugins by names
        Args:
            *names: The names of the plugin
        """
        for name in names:
            self.get_plugin(name).enable()
    def disable(self, *names: str) -> None:
        """Disable plugins by names
        Args:
            *names: The names of the plugin
        """
        for name in names:
            self.get_plugin(name).disable()
    def spec(self,
             hook: Optional[Callable] = None,
             required: bool = False,
             result: SimplugResult = SimplugResult.ALL_BUT_NONE,
             warn_sync_impl_on_async: bool = True) -> Callable:
        """A decorator to define the specification of a hook
        Args:
            hook: The hook spec. If it is None, that means this decorator is
                called with arguments, and it should be keyword arguments.
                Otherwise, it is called like this `simplug.spec`
            required: Whether this hook is required to be implemented.
            result: How should we collect the results from the plugins
        Raises:
            HookSpecExists: If a hook spec with the same name (`hook.__name__`)
                is already defined.
        Returns:
            A decorator function if no hook is passed, or the hook spec
            itself.
        """
        def decorator(hook_func: Callable):
            hook_name = hook_func.__name__
            if hook_name in self.hooks._specs:
                raise HookSpecExists(hook_name)
            # Coroutine specs get the async hook wrapper so their
            # implementations can be awaited when the hook is called.
            if inspect.iscoroutinefunction(hook_func):
                self.hooks._specs[hook_name] = SimplugHookAsync(
                    self.hooks, hook_func, required, result,
                    warn_sync_impl_on_async
                )
            else:
                self.hooks._specs[hook_name] = SimplugHook(
                    self.hooks, hook_func, required, result
                )
            return hook_func
        return decorator(hook) if hook else decorator
    def impl(self, hook: Callable):
        """A decorator for the implementation of a hook
        Args:
            hook: The hook implementation
        Raises:
            NoSuchHookSpec: When no specification is defined for this hook
        Returns:
            The wrapped hook implementation by `SimplugImpl`
        """
        if hook.__name__ not in self.hooks._specs:
            raise NoSuchHookSpec(hook.__name__)
        return SimplugImpl(hook, 'self' in inspect.signature(hook).parameters)
|
import logging
logger = logging.getLogger(__name__)
START_REWARD = 200
class LandType:
    """Enumeration of board-square kinds (plain class-level int codes)."""
    CONSTRUCTABLE = 0
    INFRASTRUCTURE = 1
    START = 2
    PARKING = 3
    JAIL = 4
    CHANCE = 5

    @staticmethod
    def description(val):
        """Human-readable label for the land-type code *val*."""
        # NOTE: the trailing space in "Parking " is reproduced verbatim
        # from the original -- it is a runtime string.
        labels = ("Constructable",
                  "Infrastructure",
                  "New Start",
                  "Parking ",
                  "AIV Jail",
                  "Chance Card")
        return labels[val]
class Land:
    """One board square: its position, display label and content object."""

    def __init__(self, pos, description, content):
        self.pos = pos
        self.description = description
        self.content = content

    @property
    def type(self):
        # The square's kind is delegated to its content (Start, Jail, ...).
        return self.content.type

    def evaluate(self):
        """Monetary value of the square; only ownable kinds have one."""
        ownable = [LandType.INFRASTRUCTURE, LandType.CONSTRUCTABLE]
        return self.content.valuation if self.type in ownable else 0

    def __repr__(self):
        return (f"Land [position: {self.pos}, "
                f"description: {self.description}, "
                f"content: {self.content}]")

    def __str__(self):
        return self.description
class BuildingType:
    """What is currently built on a constructable square."""
    HOUSE = 0
    HOTEL = 1
    NOTHING = 2  # bare land, nothing built yet
class Constructable:
    """An ownable square that can be developed with houses, then a hotel.

    Rent is derived from the invested capital divided by a ratio that
    shrinks as the square is developed, so developed squares pay more.
    """
    RATIO_RENT_TO_PRICE_NOTHING = 5
    RATIO_RENT_TO_PRICE_FOR_HOUSE = 4
    RATIO_RENT_TO_PRICE_FOR_HOTEL = 2
    HOTEL_CONSTRUCTION_COST = 150
    HOUSE_CONSTRUCTION_COST = 100

    def __init__(self, price):
        self.price = price
        self.property_type = BuildingType.NOTHING
        self.building_num = 0
        self.owner = None

    @property
    def valuation(self):
        """Purchase price plus the cost of everything built on it."""
        total = self.price
        if self.property_type == BuildingType.HOUSE:
            total += Constructable.HOUSE_CONSTRUCTION_COST * self.building_num
        elif self.property_type == BuildingType.HOTEL:
            # A hotel implies the three houses that preceded it.
            total += (Constructable.HOUSE_CONSTRUCTION_COST * 3
                      + Constructable.HOTEL_CONSTRUCTION_COST)
        return total

    @property
    def toll(self):
        """Rent a visitor owes, scaled by the development level."""
        if self.property_type == BuildingType.NOTHING:
            rent = self.price / Constructable.RATIO_RENT_TO_PRICE_NOTHING
        elif self.property_type == BuildingType.HOTEL:
            rent = ((self.price + Constructable.HOTEL_CONSTRUCTION_COST)
                    / Constructable.RATIO_RENT_TO_PRICE_FOR_HOTEL)
        else:
            rent = ((self.price
                     + self.building_num * Constructable.HOUSE_CONSTRUCTION_COST)
                    / Constructable.RATIO_RENT_TO_PRICE_FOR_HOUSE)
        logger.info(f"[Constructable: {self}, toll: {rent}]")
        return rent

    @property
    def type(self):
        return LandType.CONSTRUCTABLE

    @property
    def construction_price(self):
        """Cost of the next building (a hotel once three houses stand)."""
        if self.property_type == BuildingType.HOUSE and self.building_num == 3:
            return Constructable.HOTEL_CONSTRUCTION_COST
        return Constructable.HOUSE_CONSTRUCTION_COST

    def clear_properties(self):
        """Demolish everything, back to bare land."""
        self.property_type = BuildingType.NOTHING
        self.building_num = 0

    def is_constructable(self):
        """A hotel is the final stage; anything less can still grow."""
        return self.property_type != BuildingType.HOTEL

    def incr_property(self) -> bool:
        """Add one development step; False once a hotel already stands."""
        if self.property_type == BuildingType.HOTEL:
            return False
        if self.property_type == BuildingType.NOTHING or (
                self.property_type == BuildingType.HOUSE
                and self.building_num < 3):
            # Bare land or fewer than three houses: add a house.
            self.building_num += 1
            self.property_type = BuildingType.HOUSE
            return True
        if self.property_type == BuildingType.HOUSE and self.building_num == 3:
            # Three houses are traded in for a single hotel.
            self.property_type = BuildingType.HOTEL
            self.building_num = 1
            return True
class Infrastructure:
    """An ownable utility square: fixed price, toll is a quarter of it."""

    def __init__(self, price):
        self.price = price
        self.owner = None

    @property
    def type(self):
        return LandType.INFRASTRUCTURE

    @property
    def payment(self):
        # Toll charged to visitors: 25% of the purchase price.
        return self.price / 4

    @property
    def valuation(self):
        # Utilities cannot be developed, so value equals the price.
        return self.price
class OwnRejection:
    """Mixin for squares that can never be owned."""

    @property
    def owner(self):
        # Ownership is rejected outright: there is never an owner.
        return None
class Start(OwnRejection):
    """The start square; carries the reward paid when it is passed."""

    def __init__(self, reward):
        self.reward = reward

    @property
    def type(self):
        return LandType.START
class Jail(OwnRejection):
    """The jail square.

    NOTE(review): `stops` is presumably the number of turns a visitor is
    held -- confirm against the game loop that consumes it.
    """

    def __init__(self, stops: int):
        self.stops = stops

    @property
    def type(self):
        return LandType.JAIL
class Parking(OwnRejection):
    """The free-parking square: unownable, no state of its own."""

    @property
    def type(self):
        return LandType.PARKING
class Chance(OwnRejection):
    """The chance-card square: unownable, no state of its own."""

    @property
    def type(self):
        return LandType.CHANCE
|
"""
Deprecated stuff. This is expected to be removed partially or
completely at some point, so don't pollute main logic with it.
"""
import os
import distutils.dir_util
from logging import getLogger
from .iotprovision_main import STATUS_SUCCESS, STATUS_FAILURE
from .config import Config
def deprecated(args):
    """
    Deal with deprecation oddities

    :param args: Invocation arguments
    :return: Possibly modified arguments, or None on failure
    """
    logger = getLogger(__name__)
    # FIXME: DSG-1382 when can this be removed?
    if _move_certs_dir() != STATUS_SUCCESS:
        return None
    if args.cloud_provider == "aws" and args.provision_method == "custom":
        # Early versions of iotprovision used "custom" for AWS JITR, but
        # after MAR was introduced the user must be explicit.
        logger.warning("")
        logger.warning('AWS provisioning method name "custom" is deprecated - use "jitr" instead')
        logger.warning("")
        args.provision_method = "jitr"
    if args.cloud_provider == "azure" and "wincupgrade" not in args.actions:
        # Always do winc upgrade for Azure (until all kits have new winc
        # FW from factory?)
        args.actions.append("wincupgrade")
    return args
def _move_certs_dir():
    """
    DSG-1382: Check if the previously used certificate-folder exists, in case
    rename it before proceeding. Things get a little dodgy if both exist,
    try to handle it sanely.
    This function must be called prior to instantiating Provisioner object.
    The certs-folder move was done in iotprovision version 1.0.0
    :return: STATUS_SUCCESS or STATUS_FAILURE
    """
    logger = getLogger(__name__)
    # DSG-1382: Name of certificates folder used prior to version 1.0
    _OLD_CERTS_DIR = os.path.join(os.path.expanduser("~"), ".avr-iot-aws")
    # name of backup folder, created if files had to be copied.
    _BACKUP_CERTS_DIR = Config.Certs.certs_dir + "-backup"
    if os.path.isdir(_OLD_CERTS_DIR):
        if os.path.exists(Config.Certs.certs_dir):
            # Dodginess follows
            # Both old and new folders exist: merge old into new (only
            # newer files are copied, update=1), then keep the old folder
            # around as a backup.
            logger.info("Copying content from '%s' to '%s'", _OLD_CERTS_DIR, Config.Certs.certs_dir)
            try:
                # NOTE(review): distutils is deprecated and removed in
                # Python 3.12; shutil.copytree(..., dirs_exist_ok=True) is
                # the successor but lacks the update-only semantics --
                # confirm minimum supported Python before migrating.
                distutils.dir_util.copy_tree(_OLD_CERTS_DIR, Config.Certs.certs_dir,
                                             update=1)
                logger.info("Create backup certs folder in %s", _BACKUP_CERTS_DIR)
                os.rename(_OLD_CERTS_DIR, _BACKUP_CERTS_DIR)
                logger.info("")
            except Exception as e:
                logger.error("Copy certificate folder failed: %s", e)
                return STATUS_FAILURE
        else:
            # This should be the normal case.
            logger.info("Rename certificate folder '%s' to '%s'", _OLD_CERTS_DIR, Config.Certs.certs_dir)
            try:
                os.rename(_OLD_CERTS_DIR, Config.Certs.certs_dir)
            except Exception as e:
                logger.error("Rename certificate folder failed: %s", e)
                return STATUS_FAILURE
        logger.info("")
    return STATUS_SUCCESS
|
from .cameras import Camera
from .scene import Scene, split_scene

# add to __all__ as per pep8
# Fix: __all__ must contain *names* (strings), not the objects themselves.
# The original listed the objects, which makes `from <package> import *`
# raise TypeError ("Item in __all__ must be str").
__all__ = ["Camera", "Scene", "split_scene"]
|
#!/usr/bin/env python3
from collections import OrderedDict
try:
from lxml import etree
except:
from xml.etree import ElementTree as etree
class Node(object):
    """One element name in the merged structural tree of many XML files.

    Tracks which source documents contained this element (``sources``) and
    the child names seen beneath it (``children``, insertion-ordered).
    Attributes are recorded as ``@attr`` children, text content as
    ``#text`` and comments as ``#comment``.
    """

    def __init__(self, name=None, depth=0):
        self.name = name
        self.depth = depth
        self.sources = set()
        self.children = OrderedDict()

    def process(self, elem, source):
        """Merge *elem* (and its whole subtree) from *source* into this node."""
        self.sources.add(source)
        for attr in elem.attrib:
            self.add('@' + attr, source)
        # Only leaf elements with non-blank text get a '#text' marker.
        if not list(elem) and elem.text and elem.text.strip():
            self.add('#text', source)
        for child in elem:
            tag = child.tag if child.tag != etree.Comment else '#comment'
            self.add(tag, source).process(child, source)

    def add(self, name, source):
        """Return the child node for *name*, creating it on first sight."""
        child = self.children.setdefault(name, Node(name, self.depth + 1))
        child.sources.add(source)
        return child
def visualize(node, indent=2, source_count=0, parent_ratio=None):
    """Pretty-print the merged tree, one indented name per line.

    A ``# in N%`` note is appended when an element occurs in only a
    fraction of the source files and that fraction differs from its
    parent's (to avoid repeating the same percentage down a chain).
    """
    source_count = source_count or len(node.sources)
    ratio = len(node.sources) / source_count
    if node.name:
        if ratio >= 1.0 or ratio == parent_ratio:
            annotation = ""
        else:
            annotation = " # in %.3f%%" % (ratio * 100)
        print((' ' * indent) * node.depth + node.name + annotation)
    for child in node.children.values():
        visualize(child, indent, source_count, ratio)
if __name__ == '__main__':
    import glob, os, sys
    args = sys.argv[1:]
    tree = Node()
    # Fix: lxml raises etree.XMLSyntaxError, but the stdlib ElementTree
    # fallback (see the try/except import above) has no such attribute --
    # it raises ParseError -- so the original except clause itself crashed
    # with AttributeError whenever lxml was unavailable.
    XML_ERROR = getattr(etree, 'XMLSyntaxError',
                        getattr(etree, 'ParseError', SyntaxError))
    for pth in args:
        # A directory argument is walked recursively (skipping dotfiles);
        # anything else is treated as a glob pattern.
        sources = glob.glob(pth) if not os.path.isdir(pth) else (
            os.path.join(dirpath, fname)
            for dirpath, dirnames, filenames in os.walk(pth)
            for fname in filenames if not fname.startswith('.'))
        for source in sources:
            try:
                print("Processing", source, file=sys.stderr)
                root = etree.parse(source).getroot()
            except XML_ERROR:
                print("XML syntax error in:", source, file=sys.stderr)
            else:
                tree.name = root.tag
                tree.process(root, source)
    visualize(tree)
|
# Keras
from keras.preprocessing.text import Tokenizer
from keras.preprocessing.sequence import pad_sequences
from keras.models import Sequential
from keras.layers import Dense, Flatten, LSTM, Conv1D, MaxPooling1D, Dropout, Activation
from keras.layers.embeddings import Embedding
## Plot
import plotly.offline as py
import plotly.graph_objs as go
py.init_notebook_mode(connected=True)
import matplotlib as plt
# NLTK
import nltk
from nltk.corpus import stopwords
from nltk.stem import SnowballStemmer
# Other
import re
import string
import numpy as np
import pandas as pd
from sklearn.manifold import TSNE
from sklearn.preprocessing import LabelBinarizer
# Load the labelled task data; drop rows with any NA, then rows whose
# Task text is empty.
df = pd.read_csv('data/train.csv')
df= df.dropna()
# df = df[df['SOC_A'].apply(lambda x: x.isnumeric())]
# df = df[df['SOC_A'].apply(lambda x: x !="")]
df = df[df.Task.apply(lambda x: x !="")]
print(df.head())
# One-hot encode the SOC_A target labels (LabelBinarizer needs a column
# vector, hence the reshape).
enc = LabelBinarizer()
labels = np.array(df.SOC_A).reshape(-1,1)
labels = enc.fit_transform(labels)
# NOTE(review): bare expression below only displays in a notebook; it is
# a no-op when run as a script.
labels.shape
def clean_text(text):
    """Normalise a free-text task description for tokenisation:
    lower-case, drop English stopwords and short words, expand common
    contractions, space out operators, collapse whitespace, and
    Snowball-stem every remaining word.
    """
    ## Remove puncuation
    # NOTE(review): this translate() call is effectively a no-op --
    # str.translate expects a table mapping *ordinals*, so passing
    # string.punctuation only remaps control characters 0-31.  The intended
    # call is text.translate(str.maketrans('', '', string.punctuation)),
    # but fixing it would also strip apostrophes and make the contraction
    # rules below dead -- confirm intent before changing.
    text = text.translate(string.punctuation)
    ## Convert words to lower case and split them
    text = text.lower().split()
    ## Remove stop words
    stops = set(stopwords.words("english"))
    text = [w for w in text if not w in stops and len(w) >= 3]
    text = " ".join(text)
    # Clean the text
    text = re.sub(r"[^A-Za-z0-9^,!.\/'+-=]", " ", text)
    text = re.sub(r"what's", "what is ", text)
    text = re.sub(r"\'s", " ", text)
    text = re.sub(r"\'ve", " have ", text)
    text = re.sub(r"n't", " not ", text)
    text = re.sub(r"i'm", "i am ", text)
    text = re.sub(r"\'re", " are ", text)
    text = re.sub(r"\'d", " would ", text)
    text = re.sub(r"\'ll", " will ", text)
    text = re.sub(r",", " ", text)
    text = re.sub(r"\.", " ", text)
    text = re.sub(r"!", " ! ", text)
    text = re.sub(r"\/", " ", text)
    text = re.sub(r"\^", " ^ ", text)
    text = re.sub(r"\+", " + ", text)
    text = re.sub(r"\-", " - ", text)
    text = re.sub(r"\=", " = ", text)
    text = re.sub(r"'", " ", text)
    # Expand "5k" style abbreviations to "5000".
    text = re.sub(r"(\d+)(k)", r"\g<1>000", text)
    text = re.sub(r":", " : ", text)
    text = re.sub(r" e g ", " eg ", text)
    text = re.sub(r" b g ", " bg ", text)
    text = re.sub(r" u s ", " american ", text)
    # NOTE(review): r"\0s" is a NUL byte followed by 's' -- probably meant
    # r"0s"; left untouched pending confirmation.
    text = re.sub(r"\0s", "0", text)
    text = re.sub(r" 9 11 ", "911", text)
    text = re.sub(r"e - mail", "email", text)
    text = re.sub(r"j k", "jk", text)
    text = re.sub(r"\s{2,}", " ", text)
    text = text.split()
    stemmer = SnowballStemmer('english')
    stemmed_words = [stemmer.stem(word) for word in text]
    text = " ".join(stemmed_words)
    return text
# Clean every task description, then fit a 20k-word tokenizer and pad
# every sequence to length 200.
df['Task'] = df['Task'].map(lambda x: clean_text(x))
print(df['Task'])
vocabulary_size = 20000
tokenizer = Tokenizer(num_words= vocabulary_size)
tokenizer.fit_on_texts(df['Task'])
sequences = tokenizer.texts_to_sequences(df['Task'])
data = pad_sequences(sequences, maxlen=200)
# Baseline model: Embedding -> LSTM -> 22-way softmax.
model_lstm = Sequential()
model_lstm.add(Embedding(20000, 100, input_length=200))
model_lstm.add(LSTM(100, dropout=0.2, recurrent_dropout=0.2))
model_lstm.add(Dense(22, activation='softmax'))
# NOTE(review): binary_crossentropy with a 22-way softmax output looks
# like it should be categorical_crossentropy, and the reported accuracy
# will read misleadingly high this way -- confirm before changing.
model_lstm.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])
model_lstm.fit(data, np.array(labels), validation_split=0.4, epochs=3)
def create_conv_model():
    """Build (untrained) Conv1D + LSTM classifier over 200-token inputs."""
    net = Sequential()
    net.add(Embedding(vocabulary_size, 200, input_length=200))
    net.add(Dropout(0.2))
    net.add(Conv1D(64, 5, activation='relu'))
    net.add(MaxPooling1D(pool_size=4))
    net.add(LSTM(100))
    net.add(Dense(22, activation='softmax'))
    net.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])
    return net
# Train the convolutional model, then persist the padded sequences with
# their one-hot labels for later reuse.
model_conv = create_conv_model()
model_conv.fit(data, np.array(labels), validation_split=0.4, epochs = 3)
df_save = pd.DataFrame(data)
df_label = pd.DataFrame(np.array(labels))
result = pd.concat([df_save, df_label], axis = 1)
result.to_csv('train_dense_word_vectors.csv', index=False)
# Load pre-trained GloVe vectors (word -> 100-d float32 array).
# Fix: the file was opened and closed manually, so a malformed line (or
# any exception in the loop) leaked the handle; a context manager closes
# it on every path.
embeddings_index = dict()
with open('glove.6B/glove.6B.100d.txt') as f:
    for line in f:
        values = line.split()
        word = values[0]
        coefs = np.asarray(values[1:], dtype='float32')
        embeddings_index[word] = coefs
print('Loaded %s word vectors.' % len(embeddings_index))
# create a weight matrix for words in training docs
embedding_matrix = np.zeros((vocabulary_size, 100))
for word, index in tokenizer.word_index.items():
    if index > vocabulary_size - 1:
        break
    else:
        embedding_vector = embeddings_index.get(word)
        if embedding_vector is not None:
            embedding_matrix[index] = embedding_vector
# Frozen-GloVe variant of the conv model.
# NOTE(review): input_length=50 disagrees with the sequences padded to
# 200 above -- confirm intent.
model_glove = Sequential()
model_glove.add(Embedding(vocabulary_size, 100, input_length=50, weights=[embedding_matrix], trainable=False))
model_glove.add(Dropout(0.2))
model_glove.add(Conv1D(64, 5, activation='relu'))
model_glove.add(MaxPooling1D(pool_size=4))
model_glove.add(LSTM(100))
model_glove.add(Dense(22, activation='sigmoid'))
model_glove.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])
# Extract the learned embedding matrices and the vocabulary word order
# for the t-SNE plots below.
lstm_embds = model_lstm.layers[0].get_weights()[0]
conv_embds = model_conv.layers[0].get_weights()[0]
glove_emds = model_glove.layers[0].get_weights()[0]
word_list = []
for word, i in tokenizer.word_index.items():
    word_list.append(word)
def plot_words(data, start, stop, step):
    """Scatter-plot 2-D embedding coordinates for a slice of the vocabulary,
    hover text taken from the module-level ``word_list``."""
    window = slice(start, stop, step)
    trace = go.Scatter(
        x=data[window, 0],
        y=data[window, 1],
        mode='markers',
        text=word_list[window],
    )
    layout = dict(
        title='t-SNE 1 vs t-SNE 2',
        yaxis=dict(title='t-SNE 2'),
        xaxis=dict(title='t-SNE 1'),
        hovermode='closest',
    )
    py.iplot(dict(data=[trace], layout=layout))
# Tokenize with NLTK and train a Word2Vec model on the token lists.
from gensim.models import Word2Vec
import nltk
nltk.download('punkt')
# NOTE(review): the dataframe has no 'text' column at this point (earlier
# code only creates 'Task') -- this line presumably raises KeyError;
# confirm against the intended input.
df['tokenized'] = df.apply(lambda row : nltk.word_tokenize(row['text']), axis=1)
df.head()
# NOTE(review): `size=` and `model[vocab]` are gensim<4 APIs (renamed to
# vector_size / removed in gensim 4) -- this pins an old gensim version.
model_w2v = Word2Vec(df['tokenized'], size=100)
X = model_w2v[model_w2v.wv.vocab]
# Reduce the embedding matrix to 5 components with truncated SVD.
from sklearn.decomposition import TruncatedSVD
tsvd = TruncatedSVD(n_components=5, n_iter=10)
result = tsvd.fit_transform(X)
|
import os
import re
import sys, time
import numpy as np
final=''#global accumulator: delop() writes the normalised op chain here
fresult=''#global accumulator: apparently for "for"-statement results (unused in this chunk)
fcall=''#global accumulator: apparently for call results (unused in this chunk)
def check(newcontext):
    """Append whatever closing brackets are needed to balance the string.

    Each bracket kind is counted independently, so interleaving is not
    validated -- missing closers are simply appended at the end, in the
    order ')' then ']' then '}'.
    """
    # TODO: cannot deal with multiple problems, need help
    # (A disabled variant also appended "except Exception: pass" after
    # dangling try blocks.)
    balanced = newcontext
    for opener, closer in (('(', ')'), ('[', ']'), ('{', '}')):
        deficit = balanced.count(opener) - balanced.count(closer)
        if deficit > 0:
            balanced += closer * deficit
    return balanced
def recheck(l):
    """Strip string/byte literals, literal subscripts/arguments and other
    noise from a source line, leaving just the identifier structure.

    The substitution order is significant and preserved verbatim from the
    original rule chain.
    """
    line = l
    rules = (
        ('return ', ''),
        ('\[\'.*\'\]', ''),
        ('\[\".*\"\]', ''),
        ('\(\'.*\'\)', ''),
        ('\(\".*\"\)', ''),
        ('\[[0-9\.\-\s\:]+\]', ''),
        ('\([0-9\.\-\s\:]+\)', ''),
        ('\{[0-9\.\-\s\:]+\}', ''),
        ('\[.*[\+\:]+.*\]', ''),
        ('\+\=', '='),   # augmented assignment becomes plain assignment
        ('r\'.*\'\,*\s*', ''),
        ('b\'.*\'\,*\s*', ''),
        ('rb\'.*\'\,*\s*', ''),
        ('f\'.*\'\,*\s*', ''),
        ('\'.*\'\,*\s*', ''),
        ('\".*\"\,*\s*', ''),
        ('r\".*\"\,*\s*', ''),
        ('b\".*\"\,*\s*', ''),
        ('rb\".*\"\,*\s*', ''),
        ('f\".*\"\,*\s*', ''),
        ('\(\)', ''),
        ('\{\}', ''),
        ('\[\]', ''),
    )
    for pattern, replacement in rules:
        line = re.sub(pattern, replacement, line)
    return line.strip()
def del_arg_op(op):
    """Turn the last '(...)' group of a keyword-style call into
    'callee-->args', keeping any trailing text."""
    open_at = op.rfind('(')
    close_at = op.rfind(')')
    # The original scan defaulted both indices to 0 when a bracket is
    # absent; rfind() returns -1, so clamp to preserve that behaviour.
    if open_at < 0:
        open_at = 0
    if close_at < 0:
        close_at = 0
    return op[:open_at] + '-->' + op[open_at + 1:close_at] + op[close_at + 1:len(op)]
def dealarg_for(ty):
    """Rewrite the first *non-empty* top-level '(...)' group as
    'callee-->args'; text after that group's closing paren is appended.
    When no such group exists the callee part collapses to ''."""
    start = end = 0
    opened = closed = 0
    rewritten = ''
    for pos, ch in enumerate(ty):
        if ch == '(':
            if opened == closed:
                # Top-level opener: remember where the group starts.
                start = pos
            opened += 1
        elif ch == ')':
            if opened == closed + 1:
                end = pos
                closed += 1
                if start + 1 < end:
                    # Non-empty argument list: emit callee-->args and stop.
                    rewritten = rewritten + ty[:start] + "-->" + ty[start + 1:end]
                    break
            else:
                closed += 1
    # NOTE: if ty contains no ')' at all, end stays 0 and the tail starts
    # at index 1 -- quirk preserved from the original.
    return rewritten + ty[(end + 1):len(ty)]
def dealarg_call(ty):
    """Like dealarg_for, but the tail after the group stays inside the
    rewritten result, and ty is returned unchanged when no non-empty
    top-level '(...)' group exists."""
    start = end = 0
    opened = closed = 0
    rewritten = ''
    for pos, ch in enumerate(ty):
        if ch == '(':
            if opened == closed:
                start = pos
            opened += 1
        elif ch == ')':
            if opened == closed + 1:
                end = pos
                closed += 1
                if start + 1 < end:
                    rewritten = (rewritten + ty[:start] + "-->"
                                 + ty[start + 1:end] + ty[end + 1:len(ty)])
                    break
            else:
                closed += 1
    if rewritten == '':
        return ty
    return rewritten
def dealarg(ty):
    """Rewrite the outermost '(...)' span (first '(' to last ')') as
    'callee-->inner', keeping any tail after the closing paren."""
    open_at = ty.find('(')
    if open_at < 0:
        open_at = 0
    close_at = ty.rfind(')')
    # The original reverse scan stopped before index 0, but a ')' there
    # yields the same 0 as the default -- rfind is equivalent.
    if close_at < 0:
        close_at = 0
    return ty[:open_at] + "-->" + ty[open_at + 1:close_at] + ty[close_at + 1:len(ty)]
#Apart from considering the data-flow relationship, also consider which variable is more relevant to the target API, so the order of the list is the inverse of the argument order.
def dealist(ty):
    """Rewrite the outermost '[...]' span as 'base-->inner'.
    Anything after the closing bracket is intentionally dropped."""
    open_at = ty.find('[')
    if open_at < 0:
        open_at = 0
    close_at = ty.rfind(']')
    if close_at < 0:
        close_at = 0
    return ty[:open_at] + '-->' + ty[open_at + 1:close_at]
def deallist(ty):
    """Rewrite the *last* '[...]' pair as 'prefix-->inner' (tail dropped)."""
    open_at = ty.rfind('[')
    close_at = ty.rfind(']')
    if open_at < 0:
        open_at = 0
    if close_at < 0:
        close_at = 0
    return ty[:open_at] + "-->" + ty[open_at + 1:close_at]
def del_multi_arg(ty):
    """Split a multi-argument call 'f(a=b,c)' into 'b-->a|c-->f':
    each keyword argument becomes value-->name, pieces are joined with
    '|', and the callee goes last (data flows into the call)."""
    open_at = ty.find('(')
    close_at = ty.rfind(')')
    if open_at < 0:
        open_at = 0
    if close_at < 0:
        close_at = 0
    args = ty[open_at + 1:close_at]
    pieces = []
    for arg in args.split(','):
        if '=' in arg:
            parts = arg.split('=')
            # value flows into the parameter name: value-->name
            pieces.append(parts[1] + '-->' + parts[0])
        else:
            pieces.append(arg)
    return '|'.join(pieces) + '-->' + ty[:open_at]
def addty(ty, i, lsy):
    """Rebuild the '-->' chain *lsy* with element *i* replaced by *ty*.

    When the chain has a single element, *ty* is returned on its own
    (whatever *i* is); *lsy* itself is never mutated.
    """
    if len(lsy) == 1:
        return ty
    rebuilt = list(lsy)
    rebuilt[i] = ty
    return '-->'.join(rebuilt)
def delop(op):
    """Recursively rewrite one assignment/expression string *op* into a
    '-->'-separated data-flow chain, storing the running result in the
    module-global ``final``.

    Each pass splits the chain on '-->' and rewrites the first segment that
    still contains a compound construct — multi-argument keyword call,
    comma list, single-keyword call, assignment, plain call, subscript, or
    attribute access — then recurses until nothing matches.  Relies on
    helpers ``del_multi_arg``/``del_arg_op``/``dealarg_for``/``dealist``
    and on ``re``, all expected to be defined elsewhere in this file.
    NOTE(review): the recursion has no explicit depth guard — presumably
    inputs are short enough in practice; confirm with callers.
    """
    lsop = op.split('-->')
    global final
    for i in range(0, len(lsop)):
        ty = lsop[i]
        # f(a, k=v, ...): multi-argument call containing keyword arguments
        if re.match('[_a-zA-Z0-9\.\[\]\|]+\(.*\)', ty) and ',' in ty and '=' in ty:
            ty = del_multi_arg(ty)
            op = addty(ty, i, lsop)
            final = op
            delop(op)
        elif ',' in ty:
            # bare comma list -> compound node 'a|b'
            ty = re.sub(',', '|', ty)
            op = addty(ty, i, lsop)
            final = op
            delop(op)
        elif re.match('[_a-zA-Z0-9\.\[\]\|]+\(.*=.*\)', ty):
            # call with a single keyword argument
            ty = del_arg_op(ty)
            op = addty(ty, i, lsop)
            final = op
            delop(op)
        elif '=' in ty:
            # assignment: right-hand side flows into the left-hand side
            lr = ty.split('=')
            ty = lr[1] + '-->' + lr[0]
            op = addty(ty, i, lsop)
            final = op
            delop(op)
        elif re.match('[_a-zA-Z0-9\.\[\]]+\(.*\)', ty):
            # plain call: argument flows into the callee
            ty = dealarg_for(ty)
            op = addty(ty, i, lsop)
            final = op
            delop(op)
        elif re.match('[_a-zA-Z0-9\.\[\]]+\[.*\]', ty):
            # subscript: container flows into the indexed value
            ty = dealist(ty)
            op = addty(ty, i, lsop)
            final = op
            delop(op)
        elif '.' in ty:
            # attribute chains become flow edges
            ty = re.sub('\.', '-->', ty)
            op = addty(ty, i, lsop)
            final = op
            delop(op)
def GetMiddleStr(content, startStr, endStr):
    """Return the substring of *content* strictly between the first
    occurrence of *startStr* and the first occurrence of *endStr* AFTER it.

    Raises ValueError (from ``str.index``) when either marker is missing;
    callers (e.g. delfor) rely on this and wrap the call in try/except.

    Fixes over the original: the dead ``startIndex >= 0`` check is removed
    (``index`` raises instead of returning -1), and *endStr* is searched
    starting after *startStr*, so an *endStr* occurring earlier in the
    string no longer produces an empty/garbled slice.
    """
    startIndex = content.index(startStr) + len(startStr)
    endIndex = content.index(endStr, startIndex)
    return content[startIndex:endIndex]
def prex(x):
    """Flatten a for-target tuple pattern.

    Removes spaces; when the target is a comma pair involving at least one
    parenthesised tuple (``(a,b),c`` / ``c,(a,b)`` / ``(a,b),(c,d)``) all
    parentheses are stripped so the members form one flat comma list.
    """
    x = x.replace(' ', '')
    pair = '\(.*,.*\)'
    plain = '[a-zA-Z0-9_\'\"\(\)|]+'
    is_tuple_pair = (re.match(pair + '\,' + plain, x) or
                     re.match(plain + '\,' + pair, x) or
                     re.match(pair + '\,' + pair, x))
    if is_tuple_pair:
        x = re.sub('[\(\)]+', '', x)
    return x
def dealtuple(ty):
    """Convert a tuple literal such as ``(a, b, c)`` into ``a|b|c``."""
    # drop spaces, then the surrounding parentheses
    inner = ty.replace(' ', '')[1:-1]
    return '|'.join(inner.split(','))
def deald(ty):
    """Commas become '|' (parallel values inside one flow node)."""
    return ty.replace(',', '|')
def dealcall(ty):
    """Attribute access ``a.b`` becomes the flow edge ``a-->b``."""
    return ty.replace('.', '-->')
def rbl(tempy):
    """Return 1 when *tempy* has a ')' before its matching '(' — i.e. at
    some prefix the close-paren count exceeds the open-paren count — and 0
    otherwise (short for "right-bracket leads").
    """
    last_open = 0
    last_close = 0
    n_open = 0
    n_close = 0
    for pos in range(0, len(tempy)):
        ch = tempy[pos]
        if ch == '(':
            last_open = pos
            n_open += 1
        elif ch == ')':
            last_close = pos
            n_close += 1
            if n_close > n_open:
                return 1
            elif last_close < last_open:
                # kept from the original; cannot trigger after the count
                # check above, but preserved for fidelity
                return 1
    return 0
def findcircle_call(tempy):
    """Recursively normalise a call expression into a '-->' flow chain,
    accumulating the result in the module-global ``fcall``.

    Clears ``fcall`` and bails out when parentheses are unbalanced or a
    ')' precedes its '(' (see ``rbl``).  ``recheck`` is defined elsewhere
    in this file.  NOTE(review): no recursion-depth guard — presumably the
    expressions are short; confirm with callers.
    """
    global fcall
    if tempy.count('(') != tempy.count(')') or rbl(tempy) != 0:
        fcall = ''
        return
    tempy = recheck(tempy)
    ls = tempy.split('-->')
    for i in range(0, len(ls)):
        ty = ls[i]
        ty = re.sub(' ', '', ty)
        if ',' in ty:
            # comma list -> compound node 'a|b'
            ty = re.sub(',', '|', ty)
            tempy = addty(ty, i, ls)
            fcall = tempy
            findcircle_call(tempy)
        elif '.' in ty and not re.match('.*\(.*\..*\).*', ty):
            # dotted name with no dot hidden inside parentheses
            ty = re.sub('\.', '-->', ty)
            tempy = addty(ty, i, ls)
            fcall = tempy
            findcircle_call(tempy)
        elif re.match('.*[a-zA-Z0-9_]+\(.*[a-zA-Z0-9_\'\"\(\)\|\-\>\:\[\]\,\.]+\).*', ty) and re.match('.*\(.*[a-zA-Z0-9_]+.*\).*', ty):
            # call with a real argument: first strip empty '()' / '([])'
            ty = re.sub('\(\)', '', ty)
            ty = re.sub('\(\[\]\)', '', ty)
            if not (re.match('.*[a-zA-Z0-9_]+\(.*[a-zA-Z0-9_\'\"\(\)\|\-\>\:\[\]\,\.]+\).*', ty) and re.match('.*\(.*[a-zA-Z0-9_]+.*\).*', ty)):
                tempy = addty(ty, i, ls)
                # NOTE(review): assigns a *local* `final`, unlike the
                # global `fcall` used everywhere else in this function —
                # looks unintended; left as-is.
                final = tempy
                findcircle_call(tempy)
                continue
            ty = dealarg_call(ty)
            tempy = addty(ty, i, ls)
            fcall = tempy
            findcircle_call(tempy)
        elif '.' in ty:
            # remaining dotted names (dot inside parens case)
            ty = re.sub('\.', '-->', ty)
            tempy = addty(ty, i, ls)
            fcall = tempy
            findcircle_call(tempy)
        elif re.match('[a-zA-Z0-9_]+\[[a-zA-Z0-9_]+\]', ty):
            # simple subscript 'name[key]'
            ty = deallist(ty)
            tempy = addty(ty, i, ls)
            fcall = tempy
            findcircle_call(tempy)
def del_call(line):
    """Extract the first call expression from source *line*, strip
    literal/constant arguments, and return its data-flow chain.

    The chain is produced by ``findcircle_call`` and read back from the
    module-global ``fcall``; returns '' when the line contains no call.
    ``recheck`` is defined elsewhere in this file.
    """
    calls = re.findall('[_a-zA-Z0-9\.\[\]\'\"\(\)\{\}\,\:]+\(.*\)', line)
    call = ''
    if len(calls) > 0:
        call = calls[0]
    else:
        return call
    # drop string/number literal arguments and indexes — they carry no flow
    call = re.sub('\(\'.*\'\)', '', call)
    call = re.sub('\(\".*\"\)', '', call)
    call = re.sub('\[\'.*\'\]', '', call)
    call = re.sub('\[\".*\"\]', '', call)
    call = re.sub('\(\)', '', call)
    call = re.sub('\([0-9]+\)', '', call)
    call = re.sub('\[[0-9:\-]+\]', '', call)
    call = call.strip()
    call = re.sub(' ', '', call)
    call = recheck(call)
    findcircle_call(call)
    return fcall
def findcircle(tempy):
    """Recursively normalise one flow-node string into a '-->' chain,
    accumulating the result in the module-global ``fresult``.

    Handles, in priority order: tuple literals, comma lists (except quoted
    commas), calls with arguments (via ``dealarg_for``, defined elsewhere
    in this file), dotted names, and literal/numeric subscripts (which are
    simply stripped).  NOTE(review): no recursion-depth guard.
    """
    global fresult
    lsy = tempy.split("-->")
    for i in range(0, len(lsy)):
        ty = lsy[i]
        ty = ty.strip()
        if re.match(r'\(.*,.*\)', ty):
            # tuple literal -> 'a|b'
            ty = dealtuple(ty)
            tempy = addty(ty, i, lsy)
            fresult = tempy
            findcircle(tempy)
        elif ',' in ty and '\',\'' not in ty:
            # comma list (but not a quoted comma literal)
            ty = deald(ty)
            tempy = addty(ty, i, lsy)
            fresult = tempy
            findcircle(tempy)
        elif re.match('.*[a-zA-Z0-9_]+\(.*[a-zA-Z0-9_\'\"\(\)\|\-\>\:]+\).*', ty):
            # call with a non-empty argument
            ty = dealarg_for(ty)
            tempy = addty(ty, i, lsy)
            fresult = tempy
            findcircle(tempy)
        elif '.' in ty and '\'\.\'' not in ty:
            # dotted name -> flow edges
            ty = dealcall(ty)
            tempy = addty(ty, i, lsy)
            fresult = tempy
            findcircle(tempy)
        elif re.match('.*\[\'.*\'\].*', ty) or re.match('.*\[\".*\"\].*', ty) or re.match('.*\[[0-9:]+\].*', ty):
            # constant subscript carries no flow: drop it and recurse on
            # the single remaining node
            tempy = re.sub('\[.*\]', '', ty)
            fresult = tempy
            findcircle(tempy)
def delfor(line):
    """Extract a data-flow string from a ``for <target> in <iterable>``
    statement: returns ``"<iterable-flow>-->/<target-flow>\\n"`` or '' when
    the line cannot be parsed.

    Both sides are scrubbed of literals/indexing with regex passes, then
    normalised through ``findcircle`` (result read from the module-global
    ``fresult``).  ``recheck`` is defined elsewhere in this file.
    """
    ops = re.findall('for\s[_a-zA-Z0-9\.\,\s]+\sin\s[_a-zA-Z0-9\,\.\[\]\(\)\{\}\s]+', line)
    s = ''
    if len(ops) > 0:
        s = ops[0]
    else:
        return s
    # trim a trailing comma / unbalanced closing bracket picked up greedily
    if s.endswith(','):
        s = s[:-1]
    if (s.endswith(']') and s.count('[') < s.count(']')) or (s.endswith(')') and s.count('(') < s.count(')')) or (s.endswith('}') and s.count('{') < s.count('}')):
        s = s[:-1]
    s = recheck(s)
    if s.strip().endswith('in'):
        # iterable was scrubbed away entirely
        return ''
    try:
        x = GetMiddleStr(s, 'for ', ' in ')
    except Exception:
        return ''
    x = x.strip()
    y = s.split(' in ')[1].strip()
    # strip spaces, empty calls, literal args/indexes from both sides
    y = re.sub(' ', '', y)
    x = re.sub(' ', '', x)
    x = re.sub('\(\)', '', x)
    y = re.sub('\(\)', '', y)
    y = re.sub('\[\'.*\'\]', '', y)
    y = re.sub('\[\".*\"\]', '', y)
    y = re.sub('\(\'.*\'\)', '', y)
    y = re.sub('\(\".*\"\)', '', y)
    y = re.sub('\[[0-9:]+\]', '', y)
    y = re.sub('\([0-9:]+\)', '', y)
    y = re.sub('\[.*[\+\:]+.*\]', '', y)
    y = re.sub('\+\=', '', y)
    y = re.sub('r\'.*\'\,', '', y)
    x = re.sub('\[\'.*\'\]', '', x)
    x = re.sub('\[\".*\"\]', '', x)
    x = re.sub('\(\'.*\'\)', '', x)
    x = re.sub('\(\".*\"\)', '', x)
    x = re.sub('\[[0-9:]+\]', '', x)
    x = re.sub('\([0-9:]+\)', '', x)
    x = re.sub('\[.*[\+\:]+.*\]', '', x)
    x = re.sub('\+\=', '', x)
    x = re.sub('r\'.*\'\,', '', x)
    # normalise the iterable side
    y = recheck2(y)
    findcircle(y)
    global fresult
    if fresult == '':
        rety = y
    else:
        rety = fresult
    fresult = ''
    # normalise the target side
    x = prex(x)
    findcircle(x)
    if fresult == '':
        retx = x
    else:
        retx = fresult
    fresult = ''
    forx = rety + "-->" + retx
    s2 = rety + "-->" + retx + "\n"
    return s2
def finalcheck(s):
    """Final normalisation of a flow string: drop '**' and the star in
    '*args', map arithmetic operators to '|', strip newlines, and discard
    trivial single-edge self-loops ('a-->a' becomes '')."""
    s = re.sub('\*\*', '', s)
    s = re.sub('\*args', 'args', s)
    s = re.sub('[\+\/\*]', '|', s)
    s = re.sub('\n', '', s)
    if s.count('-->') == 1:
        left, right = s.split('-->')
        if left == right:
            return ''
    return s
class ShowProcess():
    """In-place console progress bar: ``[>>>---]42.00%`` redrawn via '\\r'."""
    # class-level defaults (kept for interface compatibility)
    i = 0
    max_steps = 0
    max_arrow = 50
    infoDone = 'done'

    def __init__(self, max_steps, infoDone='Done'):
        self.max_steps = max_steps
        self.i = 0
        self.infoDone = infoDone

    def show_process(self, i=None):
        """Advance to step *i* (or by one) and redraw the bar; finishing
        triggers close()."""
        self.i = i if i is not None else self.i + 1
        filled = int(self.i * self.max_arrow / self.max_steps)
        bar = '[' + '>' * filled + '-' * (self.max_arrow - filled) + ']'
        percent = self.i * 100.0 / self.max_steps
        sys.stdout.write(bar + '%.2f' % percent + '%' + '\r')
        sys.stdout.flush()
        if self.i >= self.max_steps:
            self.close()

    def close(self):
        """Print the completion message and reset the counter."""
        print('')
        print(self.infoDone)
        self.i = 0
def recheck2(l):
    """Strip constructs irrelevant to data-flow extraction from a source
    line: 'return', bracketed groups, '+=' (to '='), and raw/byte/f-string
    literals.  The substitution ORDER matters (prefixed string forms must
    be removed before the bare-quote forms) and is preserved exactly."""
    cleaned = l
    for pattern, repl in [
        ('return ', ''),
        ('\[.*\]', ''),
        ('\(.*\)', ''),
        ('\{.*\}', ''),
        ('\+\=', '='),
        ('r\'.*\'\,*\s*', ''),
        ('b\'.*\'\,*\s*', ''),
        ('rb\'.*\'\,*\s*', ''),
        ('f\'.*\'\,*\s*', ''),
        ('\'.*\'\,*\s*', ''),
        ('\".*\"\,*\s*', ''),
        ('r\".*\"\,*\s*', ''),
        ('b\".*\"\,*\s*', ''),
        ('rb\".*\"\,*\s*', ''),
        ('f\".*\"\,*\s*', ''),
    ]:
        cleaned = re.sub(pattern, repl, cleaned)
    return cleaned.strip()
def get_current_dataflow2(current_context, caller):
    """Extract '-->' data-flow chains from the source snippet
    *current_context*, regex-only variant (no ``delop``/``del_call``
    recursion — contrast ``get_current_dataflow``).

    Only lines mentioning *caller* are considered, unless caller is the
    sentinel '__all__'.  Returns all collected flows for '__all__',
    otherwise only those containing the placeholder token 'unknown_api'.
    ``recheck``/``recheck2``/``delfor``/``finalcheck`` are defined
    elsewhere in this file.
    """
    dataflows = []
    lines = current_context.split('\n')
    for line in lines:
        # ---- filter lines that cannot contribute a flow ----
        if (not caller in line) and (caller != '__all__'):
            continue
        if not ('.' in line and '(' in line):
            continue
        line = line.strip()
        if line == '' or line.endswith('='):
            continue
        tpline = line
        if line.startswith('#') or line.startswith('def ') or line.startswith('class '):
            continue
        elif 'lambda' in line:
            continue
        elif re.match('.*=\s*[0-9\.\:\-]+', line):
            continue
        line2 = re.sub(' ', '', line)
        # skip assignments of plain literals and comparisons
        if re.match('.*=\'.*\'.*', line2) or re.match('.*=\".*\".*', line2) or re.match('.*=[0-9\.]+.*', line2) or re.match('.*=None.*', line2) or re.match('.*=True.*', line2) or re.match('.*=False.*', line2) or "==" in line2 or line2.endswith('='):
            continue
        line = re.sub('#.*', '', line)  # strip trailing comment
        if '=' in line:
            # ---- assignment: rhs flows into lhs ----
            line = recheck2(line)
            if line.endswith('='):
                continue
            text = re.compile(r".*[a-zA-Z]$")
            if not text.match(line):
                continue
            ops = re.findall('[_a-zA-Z0-9\.\[\]\"\'\(\)\{\}]+\s*=\s*[_a-zA-Z0-9\.\[\]\"\'\(\)\{\}\*\/\-\%\*\,\=\s\+]+', line)
            if len(ops) == 0:
                continue
            line = ops[0]
            line = re.sub('[\+\-\/\*]+', '|', line)
            ls = line.split('=')
            x = ls[0]
            y = ls[1]
            x = re.sub('\.', '-->', x)
            y = re.sub('\.', '-->', y)
            tf = y + '-->' + x
            # reject anything still containing brackets/quotes
            opps = re.findall('[\(\{\)\}\[\]\'\"]', tf)
            if len(opps) != 0:
                continue
            tf = tf.strip()
            if tf != '' and not tf in dataflows:
                dataflows.append(tf)
        elif re.match('.*for\s.*\sin\s.*', line):
            # ---- for-loop: iterable flows into the loop target ----
            line = recheck(line)
            fors = delfor(line)
            tff = str(fors)
            tff = finalcheck(tff)
            opps = re.findall('[\(\{\)\}\[\]\'\"]', tff)
            if len(opps) != 0:
                continue
            tff = tff.strip()
            if tff != '' and not tff in dataflows:
                dataflows.append(tff)
        elif re.match('.*[_a-zA-Z0-9\.\[\]\'\"\(\)\{\}\,\:]+\(.*\).*', line) and not line.startswith('def ') and not line.startswith('class '):
            # ---- bare call expression: dotted receiver chain is the flow ----
            calls = re.findall('[_a-zA-Z0-9\.\[\]\'\"\(\)\{\}\,\:]+\(.*\)', line)
            call = ''
            if len(calls) > 0:
                call = calls[0]
            else:
                continue
            line = recheck2(call)
            line = re.sub('[\+\-\/]+', '|', line)
            cas = re.sub('\.', '-->', line)
            opps = re.findall('[\(\{\)\}\[\]\'\"]', cas)
            if len(opps) != 0:
                continue
            if not '-->' in cas:
                continue
            cas = cas.strip()
            if cas != '' and not cas in dataflows:
                dataflows.append(cas)
    # ---- chain flows whose endpoints join: end(i) matches head(j) ----
    newflows = []
    oldflows = dataflows
    lens = 5 * len(dataflows)
    used = [0] * lens
    for i in range(0, len(dataflows)):
        current_flow_end = dataflows[i].split('-->')[-1]
        current_flow_head = dataflows[i].split('-->')[0]
        if current_flow_end == current_flow_head:
            continue
        for j in range(i, len(dataflows)):
            current_flow_end = dataflows[i].split('-->')[-1]
            next_flow_head = dataflows[j].split('-->')[0]
            # the '|' variants match one member of a compound node
            s1 = current_flow_end + '|'
            s2 = '|' + current_flow_end
            s3 = next_flow_head + '|'
            s4 = '|' + next_flow_head
            if current_flow_end == next_flow_head or s1 in next_flow_head or s2 in next_flow_head:
                y = dataflows[j].replace(next_flow_head, '', 1)
                newflow = dataflows[i] + y
                if not newflow in newflows:
                    tmp = [i, newflow]
                    newflows.append(tmp)
            elif s3 in current_flow_end or s4 in current_flow_end:
                x = dataflows[i].replace(current_flow_end, '')
                newflow = x + dataflows[j]
                if not newflow in newflows:
                    tmp = [i, newflow]
                    newflows.append(tmp)
    # ---- try to extend the merged flows one more hop ----
    updateflow = []
    for i in range(0, len(newflows)):
        pos = newflows[i][0]
        flow = newflows[i][1]
        for j in range(pos + 1, len(dataflows)):
            current_flow_end = flow.split('-->')[-1]
            next_flow_head = dataflows[j].split('-->')[0]
            s1 = current_flow_end + '|'
            s2 = '|' + current_flow_end
            s3 = next_flow_head + '|'
            s4 = '|' + next_flow_head
            if current_flow_end == next_flow_head or s1 in next_flow_head or s2 in next_flow_head:
                y = dataflows[j].replace(next_flow_head, '', 1)
                newflow = flow + y
                if not newflow in updateflow:
                    updateflow.append(newflow)
            elif s3 in current_flow_end or s4 in current_flow_end:
                x = flow.replace(current_flow_end, '')
                newflow = x + dataflows[j]
                if not newflow in updateflow:
                    updateflow.append(newflow)
    for i in range(0, len(newflows)):
        flow = newflows[i][1]
        dataflows.append(flow)
    # NOTE(review): `updateflow` is built but never appended to the result —
    # looks unfinished; left as-is.
    retflow = []
    for flow in dataflows:
        if 'unknown_api' in flow:
            retflow.append(flow)
    if caller == '__all__':
        return dataflows
    else:
        return retflow
def get_current_dataflow(current_context, caller):
    """Extract '-->' data-flow chains from *current_context* using the full
    recursive machinery (``delop`` for assignments — result read from the
    module-global ``final`` — ``delfor`` for loops, ``del_call`` for bare
    calls).

    Only lines mentioning *caller* are considered, unless caller is
    '__all__'.  Returns all flows for '__all__', otherwise only those
    containing 'unknown_api'.  ``recheck`` and ``check`` are defined
    elsewhere in this file.
    """
    dataflows = []
    lines = current_context.split('\n')
    for line in lines:
        # ---- filter lines that cannot contribute a flow ----
        if (not caller in line) and (caller != '__all__'):
            continue
        if line.strip() == '':
            continue
        tpline = line.strip()
        line = line.strip()
        if line.startswith('#') or line.startswith('def ') or line.startswith('class '):
            continue
        elif line.endswith('(') or line.endswith('[') or line.endswith('{'):
            # continuation line: drop the dangling opener
            line = line[:-1]
        elif line.startswith(')') or line.startswith('}') or line.startswith(']'):
            continue
        elif line.count('(') != line.count(')') or line.count('[') != line.count(']') or line.count('{') != line.count('}'):
            continue
        elif 'lambda' in line:
            continue
        elif re.match('.*=\s*[0-9\.]+', line.strip()):
            continue
        line2 = re.sub(' ', '', line)
        # skip literal assignments, empty-container inits and comparisons
        if re.match('.*=\'.*\'.*', line2) or re.match('.*=\".*\".*', line2) or re.match('.*=[0-9\.]+.*', line2) or re.match('.*=None.*', line2) or re.match('.*=True.*', line2) or re.match('.*=False.*', line2) or re.match('.*=\{\}.*', line2) or re.match('.*=\(\).*', line2) or re.match('.*=\[\].*', line2) or "==" in line2 or line2.endswith('='):
            continue
        line = re.sub('#.*', '', line)  # strip trailing comment
        if '=' in line:
            # ---- assignment: delegate to the recursive delop rewriter ----
            line = recheck(line)
            if line.endswith('='):
                continue
            if line.endswith(',') or line.endswith(':') or line.endswith('+') or line.endswith('-') or line.endswith('*') or line.endswith('/'):
                line = line[:-1].strip()
            ops = re.findall('[_a-zA-Z0-9\.\[\]\"\'\(\)\{\}]+\s*=\s*[_a-zA-Z0-9\.\[\]\"\'\(\)\{\}\*\/\-\%\*\,\=\s\+]+', line)
            if len(ops) > 0:
                s = ops[0]
                s = recheck(s)
                rs = s.split('=')[1]
                ps = re.findall('[\,\-\+\*\/\%]+', rs)
                if len(ps) == 0 and rs.count(' ') > 1:
                    # rhs looks like free text, not an expression
                    continue
                elif s.endswith(')') and s.count(')') - s.count('(') == 1:
                    s = s[:-1]
                elif s.endswith(', )'):
                    s = s[:-3] + ')'
                s = re.sub('\)\,.*$', '', s)
                s = check(s)
                if s.count('(') != s.count(')') or s.count('[') != s.count(']') or s.count('{') != s.count('}'):
                    continue
                else:
                    s = re.sub(' ', '', s)
                    delop(s)
                    # delop leaves its result in the module-global `final`
                    tf = final
                    tf = finalcheck(tf)
                    if tf != '' and not tf in dataflows:
                        dataflows.append(tf)
        elif re.match('.*for\s.*\sin\s.*', line):
            # ---- for-loop: iterable flows into the loop target ----
            line = recheck(line)
            fors = delfor(line)
            tff = str(fors)
            tff = finalcheck(tff)
            if tff != '' and not tff in dataflows:
                dataflows.append(tff)
        elif re.match('.*[_a-zA-Z0-9\.\[\]\'\"\(\)\{\}\,\:]+\(.*\).*', line) and not line.startswith('def ') and not line.startswith('class '):
            # ---- bare call expression ----
            cas = del_call(line)
            cas = finalcheck(cas)
            if cas != '' and not cas in dataflows:
                dataflows.append(cas)
    # ---- chain flows whose endpoints join: end(i) matches head(j) ----
    newflows = []
    oldflows = dataflows
    lens = 5 * len(dataflows)
    used = [0] * lens
    for i in range(0, len(dataflows)):
        current_flow_end = dataflows[i].split('-->')[-1]
        current_flow_head = dataflows[i].split('-->')[0]
        if current_flow_end == current_flow_head:
            continue
        for j in range(i, len(dataflows)):
            current_flow_end = dataflows[i].split('-->')[-1]
            next_flow_head = dataflows[j].split('-->')[0]
            # the '|' variants match one member of a compound node
            s1 = current_flow_end + '|'
            s2 = '|' + current_flow_end
            s3 = next_flow_head + '|'
            s4 = '|' + next_flow_head
            if current_flow_end == next_flow_head or s1 in next_flow_head or s2 in next_flow_head:
                y = dataflows[j].replace(next_flow_head, '', 1)
                newflow = dataflows[i] + y
                if not newflow in newflows:
                    tmp = [i, newflow]
                    newflows.append(tmp)
            elif s3 in current_flow_end or s4 in current_flow_end:
                x = dataflows[i].replace(current_flow_end, '')
                newflow = x + dataflows[j]
                if not newflow in newflows:
                    tmp = [i, newflow]
                    newflows.append(tmp)
        '''
        if flag==0 and used[i]==0:
            if not dataflows[i] in newflows:
                newflows.append(dataflows[i])
        if flag==1:
            i=i-1
        '''
    # ---- try to extend the merged flows one more hop ----
    updateflow = []
    for i in range(0, len(newflows)):
        pos = newflows[i][0]
        flow = newflows[i][1]
        for j in range(pos + 1, len(dataflows)):
            current_flow_end = flow.split('-->')[-1]
            next_flow_head = dataflows[j].split('-->')[0]
            s1 = current_flow_end + '|'
            s2 = '|' + current_flow_end
            s3 = next_flow_head + '|'
            s4 = '|' + next_flow_head
            if current_flow_end == next_flow_head or s1 in next_flow_head or s2 in next_flow_head:
                y = dataflows[j].replace(next_flow_head, '', 1)
                newflow = flow + y
                if not newflow in updateflow:
                    updateflow.append(newflow)
            elif s3 in current_flow_end or s4 in current_flow_end:
                x = flow.replace(current_flow_end, '')
                newflow = x + dataflows[j]
                if not newflow in updateflow:
                    updateflow.append(newflow)
    for i in range(0, len(newflows)):
        flow = newflows[i][1]
        dataflows.append(flow)
    # NOTE(review): `updateflow` is built but never appended to the result —
    # looks unfinished; left as-is.
    retflow = []
    for flow in dataflows:
        if 'unknown_api' in flow:
            retflow.append(flow)
    if caller == '__all__':
        return dataflows
    else:
        return retflow
def lcs(X, Y):
    """Length of the longest common subsequence of X and Y.

    Classic O(m*n) dynamic programme: table[r][c] is the LCS length of
    X[:r] and Y[:c]; the answer is table[m][n].
    """
    rows = len(X)
    cols = len(Y)
    table = [[0] * (cols + 1) for _ in range(rows + 1)]
    for r in range(1, rows + 1):
        for c in range(1, cols + 1):
            if X[r - 1] == Y[c - 1]:
                table[r][c] = table[r - 1][c - 1] + 1
            else:
                table[r][c] = max(table[r - 1][c], table[r][c - 1])
    return table[rows][cols]
# end of function lcs
def get_sim_score(api, token, d):
    """Distance-weighted textual similarity between *api* and *token*:
    2*LCS(api, token) / (d * (len(api) + len(token))), where *d* is the
    positional distance weight (larger distance -> smaller score)."""
    overlap = float(lcs(api, token))
    return float((overlap * 2.0) / (float(d) * float(len(api) + len(token))))
def get_tosim_score(api, maxflow):
    """Average similarity of *api* to the tokens of the flow *maxflow*.

    When *maxflow* is a space-separated list of flows, the one containing
    'unknown_api' is used; each other token's similarity (see
    get_sim_score) is weighted by its distance to the 'unknown_api' slot.

    Fix: previously, when *maxflow* contained spaces but NO flow held
    'unknown_api', the selected flow variable was never assigned and the
    function crashed with UnboundLocalError.  It now falls back to using
    *maxflow* itself.
    """
    mfx = maxflow  # fallback: fixes UnboundLocalError when no flow matches
    if ' ' in maxflow:
        for flow in maxflow.split(' '):
            if 'unknown_api' in flow:
                mfx = flow
                break
    ls = mfx.split('-->')
    # position of the unknown-API placeholder (defaults past the end)
    apindex = len(ls)
    for k in range(0, len(ls)):
        if 'unknown_api' in ls[k]:
            apindex = k
    tosim = 0.0
    for i in range(0, len(ls)):
        if i != apindex:
            tosim += get_sim_score(api, ls[i], abs(apindex - i))
    return float(tosim / float(len(ls)))
def standard(scsk):
    """Standardise a score dict: 10**value, z-score over all entries, then
    a logistic squash into (0, 1).

    Returns the ORIGINAL dict unchanged when it is empty or when the
    normalisation fails (e.g. zero variance -> division by zero).

    Fix: the original aliased the input and mutated it in place, so the
    early-return/exception paths handed back a half-transformed dict; we
    now work on a copy, leaving the caller's dict untouched on failure.
    """
    scs = dict(scsk)  # copy — never mutate the caller's dict
    data = []
    for k in scs.keys():
        scs[k] = pow(10, scs[k])
        data.append(scs[k])
    lenth = len(data)
    if lenth == 0:
        return scsk
    total = sum(data)
    ave = float(total) / lenth
    tempsum = sum([pow(data[i] - ave, 2) for i in range(lenth)])
    tempsum = pow(float(tempsum) / lenth, 0.5)  # population std-dev
    try:
        for k in scs.keys():
            scs[k] = (scs[k] - ave) / tempsum
            scs[k] = 1 / (1 + np.exp(-scs[k]))  # sigmoid
    except Exception:
        # degenerate data (zero variance etc.): give back the input as-is
        return scsk
    return scs
def get_ngram_scores(flows, apis, callee):
    """Score candidate *apis* with the SRILM 4-gram language model.

    Writes *flows* to ``output/test.txt``, shells out to the ``ngram``
    binary (model ``trainfile.lm``) producing ``output/<callee>.ppl``,
    then scrapes the per-sentence ``logprob=`` value for the first line
    mentioning each API.  APIs never found score 0.0.

    NOTE(review): *callee* is interpolated into shell commands unescaped —
    safe only while callee is an identifier; confirm callers never pass
    attacker-controlled text.  Also assumes the srilm binary and the
    ``output/`` directory exist relative to the CWD.
    """
    s = ''
    ngramscore = {}
    for flow in flows:
        s = s + flow + '\n'
    with open('output/test.txt', 'w+') as f:
        f.write(s)
    # run the SRILM n-gram scorer; -debug 2 emits per-sentence logprobs
    os.system('srilm-1.7.2/lm/bin/i686-m64/ngram -ppl output/test.txt -order 4 -lm trainfile.lm -debug 2 > output/' + callee + '.ppl')
    with open('output/' + callee + '.ppl', encoding='ISO-8859-1') as f:
        lines = f.readlines()
    for key in apis:
        flag = 0
        for i in range(0, len(lines)):
            kname = lines[i].strip().split(' ')
            for item in kname:
                if item == key:
                    flag = 1
                    break
            if flag == 1:
                # the logprob for this sentence follows a few lines below
                j = i + 1
                while 'logprob=' not in lines[j]:
                    j = j + 1
                score = re.findall('logprob=\s[0-9\-\.]+', lines[j])
                ngramscore[key] = float(score[0][9:])
                break
        if flag == 0:
            # API never appeared in the scored flows
            ngramscore[key] = 0.0
    os.system('rm output/' + callee + '.ppl')
    return ngramscore
def get_ngram_score(apis, current_dataflow, baseflag, basetype, callee):
    """Build the sentences to feed the n-gram model and score the APIs.

    When baseflag == 1 each API becomes the two-token sentence
    "<basetype> <api>"; otherwise every flow in *current_dataflow* has its
    'unknown_api' placeholder replaced by each API and its '-->' arrows
    replaced by spaces.  Dunder names, ALL-CAPS constants and bare '_' are
    skipped.  Delegates the actual scoring to get_ngram_scores.
    """
    def _skip(api):
        # names that are never real API candidates
        return api.startswith('__') or re.match('[A-Z0-9_]+$', api) or api.strip() == '_'

    flows = []
    if baseflag == 1:
        for api in apis:
            if _skip(api):
                continue
            flows.append(basetype + ' ' + api)
    else:
        for flow in current_dataflow:
            for api in apis:
                if _skip(api):
                    continue
                sentence = re.sub('unknown_api', api, flow)
                flows.append(re.sub('-->', ' ', sentence))
    return get_ngram_scores(flows, apis, callee)
def get_api_scores(apis, maxflow, current_dataflow, ft, callee):
    """Rank candidate *apis* for the call site and return the 1-based rank
    of the ground-truth *callee* under two scorers: (drank, nrank) =
    (n-gram rank, token-similarity rank); 21 means "not in the top 20".

    *ft* is the receiver's type annotation; built-in/base types (including
    ``List[...]``/``Dict[...]``) switch the n-gram model to "<type> <api>"
    pseudo-flows.  Prints the top-20 of both scoreboards as a side effect.

    NOTE(review): the ranking loop indexes both sorted lists with the same
    bound k — assumes they have equal length (both are filtered from the
    same *apis*); confirm, otherwise an IndexError is possible.
    """
    dataflow_ngram_score = {}
    basetypes = ['int', 'str', 'float', 'list', 'dict', 'set', 'tuple', 'buffer', 'frozenset', 'complex', 'bool', 'unicode', 'bytes', 'bytearray']
    basetype = ''
    baseflag = 0
    for bt in basetypes:
        if bt == ft:
            basetype = bt
    # typing-style annotations collapse to their base container type
    if re.match('List\[.*\]', ft):
        basetype = 'list'
        ft = 'list'
    elif re.match('Dict\[.*\]', ft):
        basetype = 'dict'
        ft = 'dict'
    if basetype != '':
        baseflag = 1
    dataflow_ngram_scores = get_ngram_score(apis, current_dataflow, baseflag, ft, callee)
    final_scores = {}
    tosim_scores = {}
    for api in apis:
        # skip dunder names, ALL-CAPS constants and bare '_'
        if api.startswith('__') or re.match('[A-Z0-9_]+$', api) or api.strip() == '_':
            continue
        tosim_scores[api] = get_tosim_score(api, maxflow)
    tosim_scores = standard(tosim_scores)
    # sort both scoreboards best-first
    dataflow_ngram_scores = sorted(dataflow_ngram_scores.items(), key=lambda x: x[1], reverse=True)
    tosim_scores = sorted(tosim_scores.items(), key=lambda x: x[1], reverse=True)
    print("NGRAM-SCORE: ", dataflow_ngram_scores[:20])
    print("SIMILAR-SCORE: ", tosim_scores[:20])
    # rank of the true callee in each top-20 (21 = missing)
    drank = 21
    nrank = 21
    if len(dataflow_ngram_scores) < 20:
        k = len(dataflow_ngram_scores)
    else:
        k = 20
    for i in range(0, k):
        if dataflow_ngram_scores[i][0] == callee:
            drank = i + 1
        if tosim_scores[i][0] == callee:
            nrank = i + 1
    print(drank, nrank)
    return drank, nrank
def get_dataflow_scores(apis, maxflow, current_dataflow, ft, callee):
    """Score candidate APIs with the n-gram language model only.

    Mirrors the type set-up of get_api_scores: when *ft* is a built-in
    base type (or a ``List[...]``/``Dict[...]`` annotation) the model is
    queried with "<type> <api>" pseudo-flows.  *maxflow* is unused here;
    it is kept for signature parity with get_tosim_scores.
    """
    base_type = ''
    for candidate in ['int', 'str', 'float', 'list', 'dict', 'set', 'tuple',
                      'buffer', 'frozenset', 'complex', 'bool', 'unicode',
                      'bytes', 'bytearray']:
        if candidate == ft:
            base_type = candidate
    if re.match('List\[.*\]', ft):
        base_type = 'list'
        ft = 'list'
    elif re.match('Dict\[.*\]', ft):
        base_type = 'dict'
        ft = 'dict'
    use_base_type = 1 if base_type != '' else 0
    return get_ngram_score(apis, current_dataflow, use_base_type, ft, callee)
def get_tosim_scores(apis, maxflow, current_dataflow, ft, callee):
    """Per-API token-similarity scores against *maxflow* (no n-gram model).

    Dunder names, ALL-CAPS constants and bare '_' are skipped.
    *current_dataflow*, *ft* and *callee* are unused; they are kept for
    signature parity with get_dataflow_scores.
    """
    scores = {}
    for api in apis:
        if api.startswith('__') or re.match('[A-Z0-9_]+$', api) or api.strip() == '_':
            continue
        scores[api] = get_tosim_score(api, maxflow)
    return scores
|
import math
from numba import cuda
SQRT_TWOPI = 2.506628274631
@cuda.jit
def get_mhat(mag, err, sed, sed_err, output):
    """CUDA kernel: per (object, template) inverse-variance-weighted mean
    offset between observed magnitudes and an SED template.

    Thread (pos, col) averages (mag - sed) over the bands where both the
    magnitude and its error are non-NaN, weighting each band by
    1 / (err^2 + sed_err^2), and stores the result in output[pos, col].

    NOTE(review): when an object has no valid band at all, part2 stays 0
    and the final division yields inf/nan on the device — confirm upstream
    guarantees at least one valid band per row.
    """
    pos, col = cuda.grid(2)
    if col >= len(sed) or pos >= len(mag):
        return  # thread outside the (objects x templates) domain
    part1 = 0
    part2 = 0
    for i in range(sed.shape[1]):
        if not math.isnan(mag[pos, i]) and not math.isnan(err[pos, i]):
            denom = 1. / (err[pos, i]**2 + sed_err[col, i]**2)
            part1 += (mag[pos, i] - sed[col, i]) * denom
            part2 += denom
    output[pos, col] = part1 / part2
@cuda.jit
def get_p_zyt(m, err, mhat, w, mu, sigma, p_zyt_out):
    """CUDA kernel: calculate (A4) — the (unnormalised) membership
    probability of object `row` in mixture component `col`.

    Starts from the component weight w[col] and multiplies in a Gaussian
    likelihood factor per valid (non-NaN) band, using the per-band
    variance err^2 + sigma^2 and the offset mhat from get_mhat.
    Normalisation across components is done later by ``balance``.
    """
    row, col = cuda.grid(2)
    if col >= len(w) or row >= len(m):
        return  # thread outside the (objects x components) domain
    p = w[col]
    for b in range(m.shape[1]):
        if not math.isnan(m[row, b]) and not math.isnan(err[row, b]):
            delta2 = err[row, b]**2 + sigma[col, b]**2
            p *= math.exp(-0.5 *
                          (m[row, b] - mhat[row, col] - mu[col, b]) *
                          (m[row, b] - mhat[row, col] - mu[col, b])
                          / delta2) / (SQRT_TWOPI * math.sqrt(delta2))
    p_zyt_out[row, col] = p
@cuda.jit
def get_chy2(m, err, mhat, w, mu, sigma, chy2_out):
    """CUDA kernel: chi-square of object `row` against component `col` —
    the sum over valid bands of the squared standardised residual
    (m - mhat - mu)^2 / (err^2 + sigma^2).

    Same residual as the exponent in get_p_zyt, without the Gaussian
    normalisation.  `w` is unused here; kept for signature parity.
    """
    row, col = cuda.grid(2)
    if col >= len(w) or row >= len(m):
        return  # thread outside the (objects x components) domain
    p = 0
    for b in range(m.shape[1]):
        if not math.isnan(m[row, b]) and not math.isnan(err[row, b]):
            delta2 = err[row, b]**2 + sigma[col, b]**2
            p += (m[row, b] - mhat[row, col] - mu[col, b]) * \
                 (m[row, b] - mhat[row, col] - mu[col, b]) / delta2
    chy2_out[row, col] = p
@cuda.jit
def balance(p_zyt_out):
    """CUDA kernel: calculate (A5) — normalise each row of p_zyt_out in
    place so the component probabilities of one object sum to 1.

    One thread per object (1-D launch); rows whose sum is 0 are left
    untouched to avoid dividing by zero.
    """
    # Thread id in a 1D block
    tx = cuda.threadIdx.x
    # Block id in a 1D grid
    ty = cuda.blockIdx.x
    # Block width, i.e. number of threads per block
    bw = cuda.blockDim.x
    # Compute flattened index inside the array
    pos = tx + ty * bw
    if pos < p_zyt_out.shape[0]:
        summ = 0
        for i in range(p_zyt_out.shape[1]):
            summ += p_zyt_out[pos, i]
        if summ > 0:
            for i in range(p_zyt_out.shape[1]):
                p_zyt_out[pos, i] = p_zyt_out[pos, i] / summ
@cuda.jit
def fill_2and3(p1, p2, p3, sigma, delta):
    """CUDA kernel: calculate p1 / (1 + delta^2/sigma^2)^n for n=1 (p2)
    and n=2 (p3), per (object, component, band).

    These precomputed factors speed up (A9) and (A11) in the update step.
    """
    row, col = cuda.grid(2)
    if col < p1.shape[1] and row < p1.shape[0]:
        for b in range(p2.shape[2]):
            # Update to speed-up (A9) and (A11)
            p2[row, col, b] = p1[row, col] / \
                (1.0 + (delta[row, b]/sigma[col, b])**2)
            p3[row, col, b] = p1[row, col] / \
                (1.0 + (delta[row, b]/sigma[col, b])**2)**2
@cuda.jit
def update_params(p2, p3, mags, mhat, mu, tmu, tsigma):
    """CUDA kernel: accumulate the M-step numerators for mu_t and sigma_t.

    One thread per (component, band): tmu gets the p2-weighted sum of
    residuals (mags - mhat), tsigma the p3-weighted sum of squared
    residuals (mags - mhat - mu)^2, skipping NaN entries.  The division
    by the weight totals happens later in ``balance_params``.
    """
    component, band = cuda.grid(2)
    if component >= p2.shape[1] or band >= p2.shape[2]:
        return  # thread outside the (components x bands) domain
    tmu[component, band] = 0
    tsigma[component, band] = 0
    for i in range(p2.shape[0]):
        if not math.isnan(mags[i, band]) and \
           not math.isnan(p2[i, component, band]) and \
           not math.isnan(p3[i, component, band]):
            tmu[component, band] += (mags[i, band] - mhat[i, component]) * \
                p2[i, component, band]
            tsigma[component, band] += \
                (mags[i, band] - mhat[i, component] -
                 mu[component, band])**2 * \
                p3[i, component, band]
@cuda.jit
def balance_params(p_zyt, p2, tw, tmu, tsigma):
    """CUDA kernel: finish the M-step — component weight tw is the mean
    membership probability over objects; tmu/tsigma are divided by the
    total p2 weight for their (component, band), with tsigma square-rooted
    back to a standard deviation.

    NOTE(review): the first count can be 0 if every p_zyt entry is NaN,
    which would divide by zero — confirm upstream keeps p_zyt finite.
    """
    component, band = cuda.grid(2)
    if component >= tmu.shape[0] or band >= tmu.shape[1]:
        return  # thread outside the (components x bands) domain
    count = 0
    total = 0
    for i in range(p_zyt.shape[0]):
        if not math.isnan(p_zyt[i, component]):
            count += 1
            total += p_zyt[i, component]
    tw[component] = total / count
    count = 0
    total = 0
    for i in range(p2.shape[0]):
        if not math.isnan(p2[i, component, band]):
            count += 1
            total += p2[i, component, band]
    if count > 0:
        tmu[component, band] = tmu[component, band] / total
        tsigma[component, band] = math.sqrt(tsigma[component, band] / total)
@cuda.jit
def rebalance_params(tw, tmu, tsigma, min_sigma):
    """CUDA kernel: final clean-up of the updated parameters.

    Thread 0 renormalises the component weights tw to sum to 1; every
    thread (one per component) then zero-centres its tmu row (subtracting
    the mean over valid bands) and clamps tsigma to min_sigma.

    NOTE(review): thread 0 rescales tw while other threads may run
    concurrently, but only thread 0 touches tw — the per-component rows of
    tmu/tsigma are disjoint per thread, so there is no cross-thread write
    conflict visible here.
    """
    pos = cuda.grid(1)
    if pos >= len(tw):
        return
    if pos == 0:
        # single thread normalises the weight vector
        total = 0
        for i in range(len(tw)):
            total += tw[i]
        for i in range(len(tw)):
            tw[i] = tw[i] / total
    total = 0
    count = 0
    for i in range(tmu.shape[1]):
        if not math.isnan(tmu[pos, i]):
            total += tmu[pos, i]
            count += 1
    for i in range(tmu.shape[1]):
        tmu[pos, i] -= total / count
        if tsigma[pos, i] < min_sigma:
            tsigma[pos, i] = min_sigma
|
# Copyright (c) 2017. Zuercher Hochschule fuer Angewandte Wissenschaften
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the 'License'); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http:#www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# AUTHOR: Bruno Grazioli
import unittest
from .fake_compose_file import COMPOSE_WITHOUT_NETWORK_AND_VOLUMES, \
COMPOSE_WITH_NETWORK_AND_VOLUMES
from .fake_api import MockDockerAPI
from swarm.stack import Stack
from swarm.service import Service
from swarm.network import Network
from swarm.volume import Volume
class StackTest(unittest.TestCase):
    """Unit tests for ``swarm.stack.Stack.create()`` against a mocked
    Docker API client.

    Fix: the deprecated ``assertEquals`` alias (removed in Python 3.12)
    is replaced by the canonical ``assertEqual`` throughout.
    """

    def setUp(self):
        # fresh fake client per test; stack resources are namespaced by name
        self.fake_client = MockDockerAPI()
        self.stack_name = 'stack'
        self.stack_label = {'com.docker.stack.namespace': self.stack_name}

    def test_create_stack_without_volumes_and_networks(self):
        """A minimal compose file yields one service, the implicit default
        network, and no volumes."""
        compose = COMPOSE_WITHOUT_NETWORK_AND_VOLUMES
        stack = Stack(stack_name=self.stack_name,
                      compose_file=compose,
                      client=self.fake_client)
        svcs, nets, vols = stack.create()
        self.assertIsInstance(svcs, list)
        for svc in svcs:
            self.assertIsInstance(svc, Service)
            self.assertEqual(svc.name, 'stack_test')
            self.assertEqual(svc.image, 'test:1.13')
            self.assertEqual(svc.ports, ['8080:8080'])
            self.assertEqual(svc.environment, {'LABEL': 'Test'})
        self.assertIsInstance(nets, list)
        for net in nets:
            self.assertIsInstance(net, Network)
            self.assertEqual(net.name, 'stack_default')
        self.assertEqual(vols, [])

    def test_create_stack_with_volumes_and_networks(self):
        """Explicit networks/volumes in the compose file are created and
        namespaced with the stack name."""
        compose = COMPOSE_WITH_NETWORK_AND_VOLUMES
        stack = Stack(stack_name=self.stack_name,
                      compose_file=compose,
                      client=self.fake_client)
        svcs, nets, vols = stack.create()
        self.assertIsInstance(svcs, list)
        for svc in svcs:
            self.assertIsInstance(svc, Service)
            self.assertEqual(svc.name, 'stack_test')
            self.assertEqual(svc.image, 'test:1.13')
        self.assertIsInstance(nets, list)
        for net in nets:
            self.assertIsInstance(net, Network)
            self.assertIn(net.name, ['stack_backend', 'stack_default'])
        self.assertIsInstance(vols, list)
        for vol in vols:
            self.assertIsInstance(vol, Volume)
            self.assertEqual(vol.name, 'stack_test-volume')
|
# Uses python3
import sys
import numpy as np
def get_fibonacci_huge_naive(n, m):
    """Return F(n) mod m by direct iteration.

    Keeps every intermediate value reduced mod m so the numbers stay small
    even for large n (the original accumulated the full Fibonacci numbers and
    reduced only at the end — extremely slow for huge n — and returned an
    unreduced base case, which is wrong when m == 1).
    """
    if n <= 1:
        return n % m  # F(0)=0, F(1)=1; reduce so m == 1 works too
    previous = 0
    current = 1
    for _ in range(n - 1):
        previous, current = current, (previous + current) % m
    return current
def get_fibonacci_mod(n, m):
    """Return F(n) mod m using the Pisano period.

    F(i) mod m is periodic and every period starts with 0, 1. Build the mod
    sequence until the "..., 0, 1" marker reappears (i >= 2), then reduce n
    modulo the period length. If n is reached before a full period is seen,
    the last computed value is already the answer.

    Improvements over the original: uses `and` instead of bitwise `&` on
    booleans, and stores only the mod sequence instead of the full (huge)
    Fibonacci numbers.
    """
    mod_arr = []
    full_pattern = False
    for i in range(n + 1):
        # F(i) mod m from the two previous residues (F(0)=0, F(1)=1).
        mod = i % m if i <= 1 else (mod_arr[i - 1] + mod_arr[i - 2]) % m
        mod_arr.append(mod)
        if i >= 2 and mod_arr[i - 1] == 0 and mod_arr[i] == 1:
            full_pattern = True
            break
    if full_pattern:
        # Drop the trailing "0, 1" marker: what remains is one full period.
        period = len(mod_arr) - 2
        return mod_arr[n % period]
    return mod_arr[-1]
if __name__ == '__main__':
    # Read "n m" from one stdin line; avoid shadowing the built-in input().
    line = sys.stdin.readline()
    n, m = map(int, line.split())
    print(get_fibonacci_mod(n, m))
|
from django.apps import AppConfig
class MedicoConfig(AppConfig):
    """Django application configuration for the `medico` app."""
    name = 'medico'
|
import pytest
from django.contrib.auth.models import Permission
from django.test import override_settings
from django.urls import reverse
from django_otp import login as otp_login
from django_otp.plugins.otp_totp.models import TOTPDevice
from wagtail_2fa.middleware import VerifyUserMiddleware, VerifyUserPermissionsMiddleware
def test_verified_request(rf, superuser):
    """A request whose user completed OTP login passes the middleware."""
    request = rf.get("/admin/")
    request.user = superuser
    device = TOTPDevice.objects.create(user=superuser, confirmed=True)
    otp_login(request, device)
    middleware = VerifyUserMiddleware()
    response = middleware.process_request(request)
    # None means "no redirect": the request may proceed.
    assert response is None
def test_superuser_force_mfa_auth(rf, superuser):
    """With a confirmed device but no OTP login, the user is redirected to auth."""
    request = rf.get("/admin/")
    request.user = superuser
    TOTPDevice.objects.create(user=superuser, confirmed=True)
    middleware = VerifyUserMiddleware(lambda x: x)
    with override_settings(WAGTAIL_2FA_REQUIRED=True):
        response = middleware(request)
    assert response.url == "%s?next=/admin/" % reverse("wagtail_2fa_auth")
def test_superuser_require_register_device(rf, superuser):
    """With no device at all, the user is redirected to device registration."""
    request = rf.get("/admin/")
    request.user = superuser
    middleware = VerifyUserMiddleware(lambda x: x)
    with override_settings(WAGTAIL_2FA_REQUIRED=True):
        response = middleware(request)
    assert response.url == "%s?next=/admin/" % reverse("wagtail_2fa_device_new")
def test_superuser_dont_require_register_device(rf, superuser, settings):
    """When 2FA is not required, the request passes without a device."""
    settings.WAGTAIL_2FA_REQUIRED = False
    request = rf.get("/admin/")
    request.user = superuser
    middleware = VerifyUserMiddleware(lambda x: x)
    response = middleware.process_request(request)
    assert response is None
def test_adding_new_device_requires_verification_when_user_has_device(
    rf, superuser, settings, django_assert_num_queries
):
    """Visiting the new-device page with an existing device forces auth first.

    Also pins the middleware at 2 DB queries for this path.
    """
    TOTPDevice.objects.create(user=superuser, confirmed=True)
    with django_assert_num_queries(2):
        url_new_device = reverse("wagtail_2fa_device_new")
        url_auth = reverse("wagtail_2fa_auth")
        request = rf.get(url_new_device)
        request.user = superuser
        middleware = VerifyUserMiddleware(lambda x: x)
        with override_settings(WAGTAIL_2FA_REQUIRED=True):
            response = middleware(request)
        assert response.url == f"{url_auth}?next={url_new_device}"
def test_adding_new_device_does_not_require_verification_when_user_has_no_device(
    rf, superuser, settings, django_assert_num_queries
):
    """Visiting the new-device page with no device passes straight through.

    Pins the middleware at a single DB query for this path.
    """
    with django_assert_num_queries(1):
        url_new_device = reverse("wagtail_2fa_device_new")
        reverse("wagtail_2fa_auth")
        request = rf.get(url_new_device)
        request.user = superuser
        middleware = VerifyUserMiddleware(lambda x: x)
        with override_settings(WAGTAIL_2FA_REQUIRED=True):
            response = middleware(request)
        # get_response is the identity lambda, so the request comes back.
        assert response is request
@pytest.mark.skip
def test_always_require_verification_when_user_has_device(rf, user, settings):
    """Skipped upstream: non-superuser with a device should be sent to auth."""
    TOTPDevice.objects.create(user=user, confirmed=True)
    url_auth = reverse("wagtail_2fa_auth")
    request = rf.get("/admin/")
    request.user = user
    middleware = VerifyUserMiddleware(lambda x: x)
    with override_settings(WAGTAIL_2FA_REQUIRED=True):
        response = middleware(request)
    assert response.url == f"{url_auth}?next=/admin/"
class TestVerifyUserPermissionsMiddleware:
    """Permission-aware middleware tests: verification is required when the
    user holds the enable_2fa permission OR already owns a device."""
    def test_enable_2fa_permission_does_require_verification(self, rf, staff_user):
        """Holding enable_2fa is enough to require verification."""
        enable_2fa_permission = Permission.objects.get(codename="enable_2fa")
        user_no_2fa = staff_user
        user_no_2fa.user_permissions.add(enable_2fa_permission)
        request = rf.get("/admin/")
        request.user = user_no_2fa
        middleware = VerifyUserPermissionsMiddleware(lambda x: x)
        with override_settings(WAGTAIL_2FA_REQUIRED=True):
            result = middleware._require_verified_user(request)
        assert result is True
    def test_no_enable_2fa_permission_no_device_does_not_require_verification(
        self, rf, staff_user
    ):
        """No permission and no device: verification is not required."""
        user_2fa = staff_user
        request = rf.get("/admin/")
        request.user = user_2fa
        middleware = VerifyUserPermissionsMiddleware(lambda x: x)
        with override_settings(WAGTAIL_2FA_REQUIRED=True):
            result = middleware._require_verified_user(request)
        assert result is False
    def test_no_enable_2fa_permission_with_device_does_require_verification(
        self, rf, staff_user
    ):
        """Owning a confirmed device requires verification even without the permission."""
        user_2fa = staff_user
        TOTPDevice.objects.create(user=user_2fa, confirmed=True)
        request = rf.get("/admin/")
        request.user = user_2fa
        middleware = VerifyUserPermissionsMiddleware(lambda x: x)
        with override_settings(WAGTAIL_2FA_REQUIRED=True):
            result = middleware._require_verified_user(request)
        assert result is True
    def test_process_request_enable_2fa_permission_sets_attribute_on_user_to_true(
        self, rf, staff_user
    ):
        """process_request annotates request.user.enable_2fa = True when permitted."""
        enable_2fa_permission = Permission.objects.get(codename="enable_2fa")
        user_no_2fa = staff_user
        user_no_2fa.user_permissions.add(enable_2fa_permission)
        request = rf.get("/admin/")
        request.user = user_no_2fa
        middleware = VerifyUserPermissionsMiddleware(lambda x: x)
        with override_settings(WAGTAIL_2FA_REQUIRED=True):
            middleware.process_request(request)
        assert request.user.enable_2fa is True
    def test_process_no_request_enable_2fa_permission_sets_attribute_on_user_to_false(
        self, rf, staff_user
    ):
        """process_request annotates request.user.enable_2fa = False otherwise."""
        user_2fa = staff_user
        request = rf.get("/admin/")
        request.user = user_2fa
        middleware = VerifyUserPermissionsMiddleware(lambda x: x)
        with override_settings(WAGTAIL_2FA_REQUIRED=True):
            middleware.process_request(request)
        assert request.user.enable_2fa is False
|
from dataclasses import replace
from typing import Any, Iterator
from unittest.mock import patch
import pytest
import black
from tests.util import (
DEFAULT_MODE,
PY36_VERSIONS,
THIS_DIR,
assert_format,
dump_to_stderr,
read_data,
)
# Data-file test cases formatted under the default mode.
SIMPLE_CASES = [
    "beginning_backslash",
    "bracketmatch",
    "class_blank_parentheses",
    "class_methods_new_line",
    "collections",
    "comments",
    "comments2",
    "comments3",
    "comments4",
    "comments5",
    "comments6",
    "comments_non_breaking_space",
    "comment_after_escaped_newline",
    "composition",
    "composition_no_trailing_comma",
    "docstring",
    "empty_lines",
    "expression",
    "fmtonoff",
    "fmtonoff2",
    "fmtonoff3",
    "fmtonoff4",
    "fmtskip",
    "fmtskip2",
    "fmtskip3",
    "fmtskip4",
    "fmtskip5",
    "fmtskip6",
    "fstring",
    "function",
    "function2",
    "function_trailing_comma",
    "import_spacing",
    "remove_parens",
    "slices",
    "string_prefixes",
    "tricky_unicode_symbols",
    "tupleassign",
]
# Cases that require the Python 2 target (run under the `python2` marker).
SIMPLE_CASES_PY2 = [
    "numeric_literals_py2",
    "python2",
    "python2_unicode_literals",
]
# Cases gated behind Mode(experimental_string_processing=True).
EXPERIMENTAL_STRING_PROCESSING_CASES = [
    "cantfit",
    "comments7",
    "long_strings",
    "long_strings__edge_case",
    "long_strings__regression",
    "percent_precedence",
]
# Cases that need the Python 3.10 grammar (pattern matching etc.).
PY310_CASES = [
    "pattern_matching_simple",
    "pattern_matching_complex",
    "pattern_matching_extras",
    "parenthesized_context_managers",
]
# The project's own files, checked to already be black-formatted.
SOURCES = [
    "src/black/__init__.py",
    "src/black/__main__.py",
    "src/black/brackets.py",
    "src/black/cache.py",
    "src/black/comments.py",
    "src/black/concurrency.py",
    "src/black/const.py",
    "src/black/debug.py",
    "src/black/files.py",
    "src/black/linegen.py",
    "src/black/lines.py",
    "src/black/mode.py",
    "src/black/nodes.py",
    "src/black/numerics.py",
    "src/black/output.py",
    "src/black/parsing.py",
    "src/black/report.py",
    "src/black/rusty.py",
    "src/black/strings.py",
    "src/black/trans.py",
    "src/blackd/__init__.py",
    "src/black_primer/cli.py",
    "src/black_primer/lib.py",
    "src/blib2to3/pygram.py",
    "src/blib2to3/pytree.py",
    "src/blib2to3/pgen2/conv.py",
    "src/blib2to3/pgen2/driver.py",
    "src/blib2to3/pgen2/grammar.py",
    "src/blib2to3/pgen2/literals.py",
    "src/blib2to3/pgen2/parse.py",
    "src/blib2to3/pgen2/pgen.py",
    "src/blib2to3/pgen2/tokenize.py",
    "src/blib2to3/pgen2/token.py",
    "setup.py",
    "tests/test_black.py",
    "tests/test_blackd.py",
    "tests/test_format.py",
    "tests/test_primer.py",
    "tests/optional.py",
    "tests/util.py",
    "tests/conftest.py",
]
@pytest.fixture(autouse=True)
def patch_dump_to_file(request: Any) -> Iterator[None]:
    """Redirect black.dump_to_file to stderr for every test in this module."""
    with patch("black.dump_to_file", dump_to_stderr):
        yield
def check_file(filename: str, mode: black.Mode, *, data: bool = True) -> None:
    """Format *filename* under *mode* and assert the output matches expectations."""
    src, want = read_data(filename, data=data)
    assert_format(src, want, mode, fast=False)
@pytest.mark.parametrize("filename", SIMPLE_CASES_PY2)
@pytest.mark.python2
def test_simple_format_py2(filename: str) -> None:
    """Python 2 simple cases format to their expected output."""
    check_file(filename, DEFAULT_MODE)
@pytest.mark.parametrize("filename", SIMPLE_CASES)
def test_simple_format(filename: str) -> None:
    """Simple cases format to their expected output under the default mode."""
    check_file(filename, DEFAULT_MODE)
@pytest.mark.parametrize("filename", EXPERIMENTAL_STRING_PROCESSING_CASES)
def test_experimental_format(filename: str) -> None:
    """Cases requiring experimental string processing to be enabled."""
    check_file(filename, black.Mode(experimental_string_processing=True))
@pytest.mark.parametrize("filename", SOURCES)
def test_source_is_formatted(filename: str) -> None:
    """Black's own source files are already formatted (data=False: real paths)."""
    path = THIS_DIR.parent / filename
    check_file(str(path), DEFAULT_MODE, data=False)
# =============== #
# Complex cases
# ============= #
def test_empty() -> None:
    """Empty input formats to empty output."""
    source = expected = ""
    assert_format(source, expected)
def test_pep_572() -> None:
    """Walrus-operator code (PEP 572) requires at least Python 3.8."""
    source, expected = read_data("pep_572")
    assert_format(source, expected, minimum_version=(3, 8))
def test_long_first_line() -> None:
    """Long first lines with tab indentation."""
    source, expected = read_data("long_first_line")
    assert_format(source, expected, mode=black.Mode(use_tabs=True))
def test_docstring_tabs() -> None:
    """Docstring handling when the file uses tabs."""
    source, expected = read_data("docstring_tabs")
    assert_format(source, expected, mode=black.Mode(use_tabs=True))
def test_pep_572_remove_parens() -> None:
    """Redundant parentheses around walrus expressions are removed."""
    source, expected = read_data("pep_572_remove_parens")
    assert_format(source, expected, minimum_version=(3, 8))
def test_pep_572_do_not_remove_parens() -> None:
    """Required walrus parentheses are kept."""
    source, expected = read_data("pep_572_do_not_remove_parens")
    # the AST safety checks will fail, but that's expected, just make sure no
    # parentheses are touched
    assert_format(source, expected, fast=True)
@pytest.mark.parametrize("major, minor", [(3, 9), (3, 10)])
def test_pep_572_newer_syntax(major: int, minor: int) -> None:
    """Walrus in syntax only valid on newer interpreters."""
    source, expected = read_data(f"pep_572_py{major}{minor}")
    assert_format(source, expected, minimum_version=(major, minor))
def test_pep_570() -> None:
    """Positional-only parameters (PEP 570) require Python 3.8."""
    source, expected = read_data("pep_570")
    assert_format(source, expected, minimum_version=(3, 8))
@pytest.mark.parametrize("filename", PY310_CASES)
def test_python_310(filename: str) -> None:
    """3.10-only grammar cases formatted with the PY310 target."""
    source, expected = read_data(filename)
    mode = black.Mode(target_versions={black.TargetVersion.PY310})
    assert_format(source, expected, mode, minimum_version=(3, 10))
def test_docstring_no_string_normalization() -> None:
    """Like test_docstring but with string normalization off."""
    source, expected = read_data("docstring_no_string_normalization")
    mode = replace(DEFAULT_MODE, string_normalization=False)
    assert_format(source, expected, mode)
def test_long_strings_flag_disabled() -> None:
    """Tests for turning off the string processing logic."""
    source, expected = read_data("long_strings_flag_disabled")
    mode = replace(DEFAULT_MODE, experimental_string_processing=False)
    assert_format(source, expected, mode)
def test_numeric_literals() -> None:
    """Numeric literal normalization with 3.6+ targets (underscores allowed)."""
    source, expected = read_data("numeric_literals")
    mode = replace(DEFAULT_MODE, target_versions=PY36_VERSIONS)
    assert_format(source, expected, mode)
def test_numeric_literals_ignoring_underscores() -> None:
    """Numeric literals with existing underscores left alone."""
    source, expected = read_data("numeric_literals_skip_underscores")
    mode = replace(DEFAULT_MODE, target_versions=PY36_VERSIONS)
    assert_format(source, expected, mode)
@pytest.mark.python2
def test_python2_print_function() -> None:
    """print-as-function code under the PY27 target."""
    source, expected = read_data("python2_print_function")
    mode = replace(DEFAULT_MODE, target_versions={black.TargetVersion.PY27})
    assert_format(source, expected, mode)
def test_stub() -> None:
    """.pyi stub formatting."""
    mode = replace(DEFAULT_MODE, is_pyi=True)
    source, expected = read_data("stub.pyi")
    assert_format(source, expected, mode)
def test_python38() -> None:
    """3.8-only syntax formats correctly."""
    source, expected = read_data("python38")
    assert_format(source, expected, minimum_version=(3, 8))
def test_python39() -> None:
    """3.9-only syntax formats correctly."""
    source, expected = read_data("python39")
    assert_format(source, expected, minimum_version=(3, 9))
|
import sys
def solution(R):
    """Count indices i where R[i] is smaller than the value three places later."""
    count = 0
    for i in range(len(R) - 3):
        if R[i] < R[i + 3]:
            count += 1
    return count
# Read one integer per stdin line and print the count for the whole input.
print(solution([*map(int, sys.stdin.readlines())]))
|
# Tuples tutorial (ported from Python 2 to Python 3; output preserved).
# Tuples are immutable: their values cannot change.
tup2 = (1, 2, 3, 4, 5, 6, 7)
print("tup2[1:4]: ", tup2[1:4])
print("-" * 70)
print(len((1, 2, "a", 3)))  # number of values
tup3 = ("a", "b", "c", "perro", "e", "f", "g", "i", "loro")
tup4 = tup2 + tup3
print(tup4)
print("-" * 70)
tup5 = tup4[0]
tup6 = tup4[1]
suma = tup5 + tup6
cantidad = len(tup3)
print(cantidad)
print(("-" + "^") * 40)
# BUG FIX: the original `if "loro" and "gato" in tup3:` always evaluated the
# literal "loro" as truthy instead of testing its membership. Test both
# memberships explicitly ("gato" is absent, so the else branch still runs).
if "loro" in tup3 and "gato" in tup3:
    print(tup3[2])
else:
    print(tup3[0])
print("-," * 30)
a = []  # list: its values can change
for x in tup2:
    L = x
    V = x * 1
    if x <= 5:
        a.append(x)
print(a)
print(V)
print("-+" * 30)
tup7 = ()
tup1 = ()
for i in range(1, 10, 3):
    tup7 += (i,)  # accumulates the range into a tuple; += appends here
print(tup7)
print("-" * 70)
# list(...) keeps the Python 2 output, where range() printed a full list.
print(list(range(0, 16)))
print("-.." * 30)
for i in range(2, 10, 2):  # iterates start, stop, step
    tup1 += (i,)
    print(tup1)  # printed on every iteration, as in the original
print("._" * 30)
lst = []
for i in range(0, 10, 2):
    lst.append(i)
print("lst: ", lst)
tup = tuple(lst)
print("tuple: ", tup)
print("_" * 30)
lsta = list(range(1, 10, 2))  # list(...) preserves the Python 2 list behaviour
tupla = tuple(range(1, 10, 2))
tuplaLC = [i for i in range(1, 11, 3)]  # list comprehension (a list, despite the name)
tuplaLCif = tuple(g ** 2 for g in range(0, 10, 2) if g <= 6)
listaLCif = [g ** 2 for g in range(0, 10, 2) if g <= 6]
rrr = tuple(i for i in range(1, 10, 2))
print(lsta)
print(tupla)
print(tuplaLC)
print(tuplaLCif)
print(listaLCif)
print(rrr)
|
from tempfile import TemporaryDirectory
import os
from forest.common import proc_utils
# Name/path of the cmake executable, and extra arguments prepended to every
# configure invocation (set via _set_default_args).
cmake_command = 'cmake'
default_args = list()
def _construct(self, srcdir, builddir):
    # Initializer helper: remember the source and build directories.
    # NOTE(review): these module-level functions take `self` — presumably they
    # are bound onto a builder class elsewhere; confirm against the caller.
    self.srcdir = srcdir
    self.builddir = builddir
def _is_configured(self):
return os.path.exists(os.path.join(self.builddir, 'Makefile'))
def _set_default_args(args):
    # Replace the module-wide list of extra cmake arguments used by _configure.
    global default_args
    default_args = args
def _configure(self, args):
    # Run the cmake configure step inside the build directory; `args` (plus
    # the module-level default_args) are forwarded to cmake.
    if args is None:
        args = list()
    return _call_cmake([self.srcdir] + args + default_args, cwd=self.builddir)
def _build(self, target, jobs):
    # Build `target` with `jobs` parallel make jobs; `jobs` may be an int,
    # since _call_cmake stringifies every argument.
    return _call_cmake(['--build', self.builddir, '--target', target, '--', '-j', jobs])
def _call_cmake(args, cwd='.', print_on_error=True):
    # Invoke the cmake executable with stringified arguments via proc_utils;
    # returns whatever proc_utils.call_process returns.
    args_str = list(map(str, args))
    return proc_utils.call_process(args=[cmake_command] + args_str, cwd=cwd, print_on_error=print_on_error)
def _find_package(pkg_name: str):
    """Check whether CMake can find_package() the given package.

    Renders the find_package template into a temporary CMake project and runs
    a configure step there; returns the result of the cmake invocation.
    """
    with TemporaryDirectory(prefix="foresttmp-") as tmpdir:
        # dir of current file
        cmake_tools_dir = os.path.dirname(os.path.abspath(__file__))
        # open template
        with open(os.path.join(cmake_tools_dir, 'template', 'find_package.txt'), 'r') as f:
            find_package_tpl = f.read()
        # configure template
        # NOTE(review): the placeholder looks mojibake-encoded ('ยฃ' is likely
        # a mis-decoded '£'); it must match the template file's bytes exactly,
        # so it is left untouched here — verify against the template.
        srcdir = find_package_tpl.replace('ยฃPKG_NAME', pkg_name)
        # write it to tmp dir
        cmakelists_path = os.path.join(tmpdir, 'CMakeLists.txt')
        with open(cmakelists_path, 'w') as f:
            f.write(srcdir)
        # BUG FIX: pass the argument as a list — _call_cmake maps str() over
        # its argument, and a bare string only worked by accident because '.'
        # is a single character.
        return _call_cmake(['.'], cwd=tmpdir, print_on_error=False)
|
import json
import os
class Sample:
    """A helper class for managing static sample JSON returns from MBTA API V3.

    Wraps a JSON document shaped like {"data": ...} and exposes the "data"
    payload via indexing and string conversion.
    """
    def __init__(self, json_path):
        # Use a context manager so the file handle is closed promptly
        # (the original open() leaked the handle).
        with open(json_path, 'rt') as handle:
            self.raw_json = json.load(handle)
    def __repr__(self):
        # __repr__/__str__ must return a str; the payload is typically a
        # dict/list, which the original returned unconverted (TypeError).
        return str(self.raw_json['data'])
    def __str__(self):
        return str(self.raw_json['data'])
    def __getitem__(self, key):
        return self.raw_json['data'][key]
# Resolve the sample_responses directory relative to THIS file. The original
# used __name__ (the module name, not a path), so abspath() resolved it
# against the current working directory instead of the package location.
BASE = os.path.dirname(os.path.abspath(__file__))
BASE = os.path.join(BASE, 'tests', 'sample_responses')
samp_predictions = Sample(os.path.join(BASE, 'predictions.json'))
samp_schedules = Sample(os.path.join(BASE, 'schedules.json'))
del BASE
|
from ..factory import Method
class viewTrendingStickerSets(Method):
    """Generated TL method stub — presumably marks trending sticker sets as
    viewed by id; confirm against the factory.Method base class."""
    # ids of the sticker sets, TL type vector<int64>
    sticker_set_ids = None  # type: "vector<int64>"
|
#!/usr/bin/env python3
"""Prepares a simple TVM library for testing."""
from os import path as osp
import sys
import tvm
def main():
    """Build a trivial vector-add TVM module and save it to <argv[1]>/test.o.

    NOTE(review): uses the legacy pre-0.7 TVM API (tvm.var/placeholder/
    compute/create_schedule at top level) — confirm the installed version.
    """
    # symbolic length so the kernel works for any 1-D size
    n = tvm.var('n')
    A = tvm.placeholder((n,), name='A')
    B = tvm.placeholder((n,), name='B')
    # elementwise C = A + B
    C = tvm.compute(A.shape, lambda *i: A(*i) + B(*i), name='C')
    s = tvm.create_schedule(C.op)
    # parallelize the single outer axis
    s[C].parallel(s[C].op.axis[0])
    print(tvm.lower(s, [A, B, C], simple_mode=True))
    # '--system-lib' embeds the module for static linking into the test binary
    tvm.build(s, [A, B, C], 'llvm --system-lib').save(osp.join(sys.argv[1], 'test.o'))
if __name__ == '__main__':
    main()
|
#
# Build various blueprints
#
from javascript import require
from botlib import *
Vec3 = require('vec3').Vec3
from blueprint import *
from workarea import *
import blueprint_data
#
# Main class that has the build, analyze and helper functions
#
class BuildBot:
    """Build/analyze helpers for a Mineflayer bot.

    NOTE(review): relies on attributes and helpers provided elsewhere
    (self.chat, self.bot, self.safeWalk, self.safePlaceBlock, self.wieldItem,
    self.restockFromChest, self.stopActivity, self.endActivity, and the
    `time` module via the botlib star-import) — confirm against the
    composing class.
    """
    def __init__(self):
        self.blueprintList = []
        print('build ', end='')
        blueprint_data.init(self)
    def learnBlueprint(self,b):
        # Register a blueprint so getBlueprint() can find it by name.
        self.blueprintList.append(b)
    def listBlueprints(self):
        # NOTE(review): lists the global Blueprint.bpList rather than
        # self.blueprintList — confirm this asymmetry is intended.
        return ' '.join([str(n) for n in Blueprint.bpList])
    def getBlueprint(self,name):
        # Linear search by the blueprint's string name; None when absent.
        for b in self.blueprintList:
            if str(b) == name:
                return b
        else:
            return None
    # World blocks that count as empty (free to build into).
    emptyBlocks = {
        "Air",
        "Cave Air",
        "Void Air",
    }
    # Blocks where the inventory item is different from the block in the world
    blockW2I = {
        "Redstone Wire": "Redstone Dust",
        "Redstone Wall Torch": "Redstone Torch",
    }
    # Blocks that require sneak to place against (chests, repeaters etc.)
    interactiveBlocks = [
        "Redstone Repeater",
        "Redstone Comparator",
        "Chest",
        "Hopper",
    ]
    #
    # Translate world blocks to inventory items
    #
    def world2inv(self,block_name):
        if block_name in self.blockW2I:
            return self.blockW2I[block_name]
        else:
            return block_name
    #
    # Build a Thing: run blueprint phases <name>_1, <name>_2, ... until a
    # phase is missing or the activity is stopped.
    #
    def doBuild(self,args):
        if len(args) == 0:
            self.chat('Need to specify blueprint to build.')
        else:
            bp_name = args[0]
            stage = 1
            while not self.stopActivity:
                bp = self.getBlueprint(bp_name+"_"+str(stage))
                # Special handling for the first phase
                if stage == 1:
                    if not bp:
                        print(f'Cant find blueprint {bp_name}.')
                        break
                else:
                    if bp:
                        print(f'Phase {stage} is starting.')
                    else:
                        # Done, no next phase
                        break
                # Define the area - may be different for each phase
                area = workArea(self,bp.width,bp.height,bp.depth)
                if not area.valid:
                    break
                print("Area!")
                # Analyze what we need, and what is already there.
                # NOTE(review): seeding with Bread means `need` is never
                # empty, so the `else` below is unreachable — presumably
                # intentional (always restock food); confirm.
                need= {"Bread":2}
                for v in area.allBlocks():
                    old_b = area.blockAt(v).displayName
                    new_b = bp.blockAt(v)
                    if old_b not in self.emptyBlocks:
                        # something is there already
                        if old_b != new_b:
                            # and it's not what we expect
                            print(f'*** error: wrong object {old_b} instead of {new_b} at position {v}. Clear the area.')
                            self.endActivity()
                            return False
                    elif new_b not in self.emptyBlocks:
                        new_inv = self.world2inv(new_b)
                        need[new_inv] = need.get(new_inv,0) + 1
                    else:
                        # This is an empty space
                        pass
                if need:
                    self.restockFromChest(need)
                else:
                    print("  all needed items already in inventory.")
                sneak = False
                jump = False
                # Build, back to front, bottom to top
                for z in range(bp.depth-1,-1,-1):
                    for y in bp.yRange():
                        for x in bp.xRange():
                            if self.stopActivity:
                                break
                            block_type = bp.blockAt(Vec3(x,y,z))
                            # Just air in the blueprint? Continue...
                            if block_type in self.emptyBlocks:
                                continue
                            # Correct block already there? Continue...
                            if area.blockAt(Vec3(x,y,z)).displayName == block_type:
                                continue
                            bot_v = area.toWorld(x,0,z-1.5)
                            bot_v_center = Vec3(bot_v.x+0.5, bot_v.y, bot_v.z+0.5)
                            # Figure out how we can place this block.
                            # This is complicated...
                            placement_type = "unknown"
                            if bp.buildFunction:
                                spec = bp.buildFunction(x,y,z)
                            else:
                                spec = None
                            if area.blockAt(Vec3(x,y-1,z)).displayName not in self.emptyBlocks:
                                # Easiest case: block below the block we want to place is not empty.
                                against_v = area.toWorld(x,y-1,z)
                                direction_v = Vec3(0,1,0)
                                placement_type = " (top)"
                            elif area.blockAt(Vec3(x,y,z+1)).displayName not in self.emptyBlocks:
                                # Block behind the block we want to place is there, let's place against that
                                against_v = area.toWorld(x,y,z+1)
                                direction_v = Vec3(-area.d.x,0,-area.d.z)
                                placement_type = " (front)"
                            elif not spec:
                                # Nothing works, no special instructions
                                print(f'*** error ({x:3},{y:3},{z:3}) {block_type} no placement strategy found')
                                # BUG FIX: the original wrote `bot.stopActivity`,
                                # referencing an undefined global `bot`.
                                self.stopActivity = True
                                break
                            if spec:
                                placement_type +=" S"
                                if spec.jump:
                                    placement_type +=" jmp"
                                if spec.sneak:
                                    placement_type +=" snk"
                                if spec.placement_type:
                                    placement_type = spec.placement_type
                                if spec.bot_pos:
                                    bot_v = area.toWorldV3(spec.bot_pos)
                                    bot_v_center = Vec3(bot_v.x+0.5, bot_v.y, bot_v.z+0.5)
                                    placement_type +=" pos"
                                if spec.block_against:
                                    against_v = area.toWorldV3(spec.block_against)
                                    placement_type +=" @ "
                                if spec.block_surface:
                                    direction_v = area.dirToWorldV3(spec.block_surface)
                                    placement_type +=" dir"
                                self.safeWalk(bot_v_center,0.1)
                                time.sleep(1)
                            else:
                                self.safeWalk(bot_v_center,0.2)
                                time.sleep(1)
                            against_type = self.bot.blockAt(against_v).displayName
                            # Let's do this
                            print(f'  ({x:3},{y:3},{z:3}) {block_type} -> {against_type} {placement_type}')
                            if not self.wieldItem(self.world2inv(block_type)):
                                print(f'*** aborting, cant wield item {block_type}')
                                self.stopActivity = True
                                break
                            if against_type in self.interactiveBlocks:
                                self.bot.setControlState('sneak', True)
                                time.sleep(0.5)
                                sneak = True
                            if spec and spec.sneak:
                                self.bot.setControlState('sneak', True)
                                time.sleep(0.2)
                                sneak = True
                            if spec and spec.jump:
                                self.bot.setControlState('jump', True)
                                time.sleep(0.2)
                                jump = True
                            if not self.safePlaceBlock(against_v,direction_v):
                                print(f'*** aborting, cant place block {block_type} at {x}, {y}, {z}')
                                self.stopActivity = True
                                break
                            if sneak:
                                self.bot.setControlState('sneak', False)
                                sneak = False
                            if jump:
                                self.bot.setControlState('jump', False)
                                jump = False
                stage += 1
            self.safeWalk(area.start)
            time.sleep(1)
            self.endActivity()
    #
    # Analyze the area in front and print it as a Python blueprint literal
    #
    def analyzeBuild(self,width=3,height=4, depth=6):
        area = workArea(self,width,height,depth)
        if not area.valid:
            return False
        w2 = int((width-1)/2)   # offset from center for width
        print(f'# Minebot Blueprint')
        print(f'# {width} x {height} x {depth}')
        print(f'')
        print(f'bpData = [')
        for z in range(0,depth):
            print(f'  [')
            for y in range(height-1,-1,-1):
                print(f'    [',end="")
                for x in range(-w2, w2+1):
                    b = self.bot.blockAt(area.toWorld(x,y,z))
                    s = '"'+b.displayName+'"'
                    print(f'{s:15}',end=", ")
                print(f'],')
            print(f'  ],')
        print(f']')
|
"""
Test of integrating torch Conv2d and LSTM modules
"""
from numbers import Number
import numpy as np
import torch
import torch.nn as nn
if __name__ == "__main__":
    # Hyper-parameters for the synthetic image-sequence experiment.
    MAX_LENGTH = 100
    MIN_LENGTH = 10
    NUM_SAMPLES = 45
    CHANNELS = 3
    WIDTH = 128
    HEIGHT = 128
    HIDDEN_SIZE = 32
    HIDDEN_LAYERS = 2
    # (out_channels, kernel, stride) for each conv layer
    CONV_FILTERS = [
        (8, (5, 5), 2),
        (8, (5, 5), 2),
        (8, (5, 5), 2)
    ]
    # Generate random sequence data
    samples = []
    for _ in range(NUM_SAMPLES):
        seq_len = np.random.randint(MIN_LENGTH, MAX_LENGTH)
        seq = np.random.uniform(0.0, 0.1, (seq_len, CHANNELS, WIDTH, HEIGHT))
        samples.append(seq)
    # Construct model
    input_channels = CHANNELS
    image_shape = [WIDTH, HEIGHT]
    layers = []
    for l, (channels, kernel, stride) in enumerate(CONV_FILTERS):
        kernel = list(kernel)
        padding = []
        for d in range(len(kernel)):
            # Force odd kernel sizes so the half-kernel padding is symmetric.
            kernel[d] += (kernel[d] + 1) % 2
            padding.append(kernel[d] // 2)
        layers.append(nn.Conv2d(
            input_channels,
            channels,
            kernel,
            stride=stride,
            padding=padding
        ))
        input_channels = channels
        if isinstance(stride, Number):
            stride = [stride] * len(image_shape)
        for d, s in enumerate(stride):
            # ceil division: spatial output size of a strided same-padded conv
            image_shape[d] = -(-image_shape[d] // s)
        print(f"Conv2D: predicted output shape: {image_shape}")
        if l < len(CONV_FILTERS) - 1:
            layers.append(nn.ReLU())
    conv = nn.Sequential(*layers)
    num_features = int(input_channels * np.prod(image_shape))
    lstm = nn.LSTM(num_features, HIDDEN_SIZE, HIDDEN_LAYERS)
    # Process sequences: pad to a common length, then pack by true lengths.
    seq_lens = [sample.shape[0] for sample in samples]
    samples = [torch.as_tensor(sample, dtype=torch.float32) for sample in samples]
    samples = nn.utils.rnn.pad_sequence(samples)
    print(f"\nPadded sequence shape: {samples.shape}")
    samples = nn.utils.rnn.pack_padded_sequence(samples, seq_lens, enforce_sorted=False)
    print(f"\nPacked sequence type: {samples.data.shape}")
    print(f"packed type: {type(samples)}")
    # Run the conv stack directly on the packed frames, then re-wrap the
    # flattened features as a PackedSequence for the LSTM.
    data = conv(samples.data)
    print(f"\nConv output shape: {data.shape}")
    data = torch.flatten(data, start_dim=1)
    samples = nn.utils.rnn.PackedSequence(data, samples.batch_sizes, samples.sorted_indices, samples.unsorted_indices)
    print(f"flattened shape: {samples.data.shape}")
    hidden_shape = (HIDDEN_LAYERS, NUM_SAMPLES, HIDDEN_SIZE)
    state = (torch.zeros(hidden_shape, dtype=torch.float32), torch.zeros(hidden_shape, dtype=torch.float32))
    output, (hidden, cell) = lstm(samples, state)
    print(f"\nLSTM output shape: {output.data.shape}")
    print(f"hidden shape: {hidden.shape}")
    print(f"cell shape: {cell.shape}")
    output, _ = nn.utils.rnn.pad_packed_sequence(output)
    print(f"\nFinal output shape: {output.shape}")
|
"""
A title to keep sphinx-gallery happy
====================================
"""
from matplotlib import pyplot as plt
# Minimal example: render an empty figure so sphinx-gallery captures a plot.
plt.plot()
plt.show()
|
import subprocess
# open Terminal and call run-mbed-cli.sh
# open Terminal and call run-mbed-cli.sh
# "-W" waits for the opened app to exit, "-n" opens a new instance, "--fresh"
# ignores saved window state; the script path is resolved relative to the CWD.
subprocess.call(
    ["/usr/bin/open", "-W", "-n", "--fresh","-a", "/Applications/Utilities/Terminal.app", "run-mbed-cli.sh"]
)
# # call shell script that calls terminal
# subprocess.call(
#     ["/bin/sh","/Users/ausbla01/mbed/mbed-cli-osx-installer/run-mbed-cli.sh"]
# )
|
from __future__ import annotations
from typing import List, Tuple, Dict
import docker
from docker.models.containers import Container
from docker.models.images import Image
from docker.client import DockerClient
from docker.errors import APIError
import re
import io
import tarfile
import os
from datetime import datetime
import uuid
import time
class DockerContainerInstanceAlreadyExistsException(Exception):
    """Raised when creating a container instance whose name already exists.

    The original __init__ only forwarded *args to Exception.__init__, which
    is exactly what the base class does by default — removed as redundant.
    """
class FailedToFindContainerException(Exception):
    """Raised when a container lookup by name finds nothing.

    The redundant pass-through __init__ was removed; Exception already
    stores *args.
    """
class DockerContainerAlreadyRemovedException(Exception):
    """Raised when operating on a container that was already removed.

    The redundant pass-through __init__ was removed; Exception already
    stores *args.
    """
class DockerContainerInstance():
def __init__(self, *, name: str, docker_client: DockerClient, docker_container: Container, is_docker_socket_needed: bool):
self.__name = name
self.__docker_client = docker_client
self.__docker_container = docker_container
self.__is_docker_socket_needed = is_docker_socket_needed
self.__stdout = None
self.__docker_container_logs_sent_length = 0
self.__is_duplicate = False
def get_stdout(self) -> bytes:
if self.__docker_container is None:
raise DockerContainerAlreadyRemovedException(f"Docker container was previously removed.")
logs = self.__docker_container.logs()
if logs != b"":
sending_length = len(logs)
unsent_logs = logs[self.__docker_container_logs_sent_length:sending_length]
self.__docker_container_logs_sent_length = sending_length
if self.__stdout is None:
self.__stdout = b""
self.__stdout += unsent_logs
if self.__stdout is None:
return None
else:
line = self.__stdout
self.__stdout = None
return line
def duplicate_container(self, *, name: str, override_entrypoint_arguments: List[str] = None) -> DockerContainerInstance:
duplicate_docker_image = self.__docker_container.commit(
repository=name
) # type: Image
if override_entrypoint_arguments is not None and len(override_entrypoint_arguments) != 0:
concat_entrypoint_arguments = ""
for entrypoint_argument_index, entrypoint_argument in enumerate(override_entrypoint_arguments):
if entrypoint_argument_index != 0:
concat_entrypoint_arguments += " "
concat_entrypoint_arguments += f"{entrypoint_argument}"
if self.__is_docker_socket_needed:
duplicate_docker_container = self.__docker_client.containers.create(
image=duplicate_docker_image,
name=name,
detach=True,
command=concat_entrypoint_arguments,
volumes=["/var/run/docker.sock:/var/run/docker.sock"]
)
else:
duplicate_docker_container = self.__docker_client.containers.create(
image=duplicate_docker_image,
name=name,
detach=True,
command=concat_entrypoint_arguments
)
else:
duplicate_docker_container = self.__docker_client.containers.create(
image=duplicate_docker_image
)
duplicate_docker_container_instance = DockerContainerInstance(
name=name,
docker_client=self.__docker_client,
docker_container=duplicate_docker_container,
is_docker_socket_needed=self.__is_docker_socket_needed
)
return duplicate_docker_container_instance
def execute_command(self, *, command: str):
if self.__docker_container is None:
raise DockerContainerAlreadyRemovedException(f"Docker container was previously removed.")
is_successful = False
is_duplicate_required = False
try:
lines = self.__docker_container.exec_run(command, stderr=True, stdout=True)
if "exec failed" in str(lines) or "cannot exec in a stopped state" in str(lines):
is_duplicate_required = True
is_successful = True
except APIError as ex:
if "409 Client Error" in str(ex) and " is not running" in str(ex):
is_duplicate_required = True
else:
raise ex
if is_duplicate_required:
docker_clone_uuid = f"duplicate_{str(uuid.uuid4()).lower()}"
original_stdout = self.get_stdout()
duplicate_docker_container = self.duplicate_container(
name=docker_clone_uuid,
override_entrypoint_arguments=[command]
)
duplicate_docker_container.start()
duplicate_docker_container.wait()
self.__stdout = original_stdout
if self.__is_duplicate:
output = duplicate_docker_container.get_stdout()
if self.__stdout is None:
self.__stdout = output
else:
self.__stdout += output
# remove current container
self.__docker_container.remove()
self.__docker_client.images.remove(self.__name)
# take over duplicated container
self.__docker_container = duplicate_docker_container.__docker_container
self.__name = duplicate_docker_container.__name
# alter current container to behave correctly as a duplicate
self.__is_duplicate = True
self.__docker_container_logs_sent_length = duplicate_docker_container.__docker_container_logs_sent_length
elif is_successful:
for line in lines:
if isinstance(line, int):
pass
else:
if self.__stdout is None:
self.__stdout = b""
print(f"line: {line}")
self.__stdout += line
def copy_file(self, *, source_file_path: str, destination_directory_path: str):
if self.__docker_container is None:
raise DockerContainerAlreadyRemovedException(f"Docker container was previously removed.")
stream = io.BytesIO()
with tarfile.open(fileobj=stream, mode="w|") as tar, open(source_file_path, "rb") as source_file_handle:
tar_info = tar.gettarinfo(fileobj=source_file_handle)
tar_info.name = os.path.basename(source_file_path)
tar.addfile(tar_info, source_file_handle)
self.__docker_container.put_archive(destination_directory_path, stream.getvalue())
def wait(self):
if self.__docker_container is None:
raise DockerContainerAlreadyRemovedException(f"Docker container was previously removed.")
self.__docker_container.wait()
def is_running(self) -> bool:
if self.__docker_container is None:
raise DockerContainerAlreadyRemovedException(f"Docker container was previously removed.")
return self.__docker_container.status in ["running", "created"]
def stop(self):
if self.__docker_container is None:
raise DockerContainerAlreadyRemovedException(f"Docker container was previously removed.")
if self.is_running():
self.__docker_container.stop()
#print(f"docker_manager: stop: self.__docker_container.status: {self.__docker_container.status}")
def start(self):
    """Start (or restart) the underlying container."""
    container = self.__docker_container
    if container is None:
        raise DockerContainerAlreadyRemovedException(f"Docker container was previously removed.")
    container.start()
def remove(self):
    """Stop the container, then delete both the container and its image.

    After this call the instance is unusable; further method calls raise
    DockerContainerAlreadyRemovedException.
    """
    if self.__docker_container is None:
        raise DockerContainerAlreadyRemovedException(f"Docker container already removed.")
    self.stop()
    self.__docker_container.remove()
    # the image was built per-instance (tagged with the container name), so drop it too
    self.__docker_client.images.remove(self.__name)
    self.__docker_container = None
class DockerManager():
    """Builds images and launches containers from a Dockerfile directory.

    Each container is built and run under a single name (used both as the
    image tag and the container name).
    """

    def __init__(self, *, dockerfile_directory_path: str, is_docker_socket_needed: bool):
        self.__dockerfile_directory_path = dockerfile_directory_path
        self.__is_docker_socket_needed = is_docker_socket_needed
        self.__is_docker_client_from_environment = True
        self.__docker_client = docker.from_env()  # type: DockerClient

    def is_image_exists(self, *, name: str) -> bool:
        """Return True when a local image is tagged ``name:latest``."""
        images = self.__docker_client.images.list()
        for image in images:
            if f"{name}:latest" in image.tags:
                return True
        return False

    def is_container_exists(self, *, name: str) -> bool:
        """Return True when a container named ``name`` exists.

        NOTE(review): ``containers.list()`` only returns *running*
        containers by default; pass ``all=True`` if stopped containers
        should also count — TODO confirm intended semantics.
        """
        containers = self.__docker_client.containers.list()  # type: List[Container]
        for container in containers:
            if container.name == name:
                return True
        return False

    def get_existing_docker_container_instance_from_name(self, *, name: str) -> DockerContainerInstance:
        """Wrap an already-existing container in a DockerContainerInstance.

        Raises FailedToFindContainerException when no container has the
        given name.
        """
        if not self.is_container_exists(
            name=name
        ):
            raise FailedToFindContainerException(f"Failed to find container based on name \"{name}\".")
        # BUGFIX: DockerClient.containers is a collection object, not a
        # callable; the original `self.__docker_client.containers()` raised
        # TypeError. Listing the collection is the supported API.
        containers = self.__docker_client.containers.list()  # type: List[Container]
        found_container = None
        for container in containers:
            if container.name == name:
                found_container = container
                break
        if found_container is None:
            raise FailedToFindContainerException(f"Unexpected missing container after already finding it by name \"{name}\".")
        docker_container_instance = DockerContainerInstance(
            name=name,
            docker_client=self.__docker_client,
            docker_container=found_container,
            is_docker_socket_needed=self.__is_docker_socket_needed
        )
        return docker_container_instance

    def start(self, *, name: str) -> DockerContainerInstance:
        """Build the image and run a detached container, both named ``name``.

        Raises DockerContainerInstanceAlreadyExistsException when either an
        image or a container with that name already exists.
        """
        if re.search(r"\s", name):
            raise Exception(f"Name cannot contain whitespace.")
        if self.is_image_exists(
            name=name
        ) or self.is_container_exists(
            name=name
        ):
            raise DockerContainerInstanceAlreadyExistsException(f"Cannot start image/container with the same name \"{name}\".")
        self.__docker_client.images.build(
            path=self.__dockerfile_directory_path,
            tag=name,
            rm=True
        )
        run_kwargs = dict(
            image=name,
            name=name,
            detach=True,
            stdout=True,
            stderr=True
        )
        if self.__is_docker_socket_needed:
            # mount the host docker socket so the container can drive docker itself
            run_kwargs["volumes"] = ["/var/run/docker.sock:/var/run/docker.sock"]
        docker_container = self.__docker_client.containers.run(**run_kwargs)
        docker_container_instance = DockerContainerInstance(
            name=name,
            docker_client=self.__docker_client,
            docker_container=docker_container,
            is_docker_socket_needed=self.__is_docker_socket_needed
        )
        return docker_container_instance

    def dispose(self):
        """Close the underlying docker client connection."""
        self.__docker_client.close()
|
# __version__.py
# Package metadata constants; read by packaging tooling and importers.
__title__ = 'DirtyWordOfFilter'
__date__ = '2019-03-30'
__description__ = 'Filter out all dirty words.'
__version__ = '0.0.1'
__author__ = 'Residual Mark'
__author_email__ = '1285679912@qq.com'
|
from .redis import Redis
|
import ossaudiodev as sd
import scipy
from scipy.optimize import leastsq
import numpy as np
from IPython.display import Audio
from recipes.misc import is_interactive
from obstools.psf.model import Model
from .spectral import Spectral, normaliser
def rescale(data, interval=(-1, 1)):
    """Linearly rescale *data* so its min/max map onto *interval*."""
    data = np.asarray(data)
    lower, _upper = sorted(interval)
    data_min = data.min()
    # ratio between the target span and the data span
    ratio = np.ptp(interval) / (data.max() - data_min)
    return (data - data_min) * ratio + lower
def best_int_dtype(data):
    """Return the smallest signed-int dtype able to resolve *data*'s range.

    The byte count needed is ceil(log2(ptp)/8), rounded up to a real
    machine integer width (1, 2, 4 or 8 bytes).

    BUGFIX: the original mixed up the divmod quotient/remainder when
    rounding, so e.g. a range of 2**40 mapped to 'i4' (overflow) and a
    range of 256 mapped to 'i1' while 255 mapped to 'i2'.
    """
    span = np.ptp(data)  # np.ptp: ndarray.ptp was removed in NumPy 2.0
    nbytes = max(int(np.ceil(np.log2(span) / 8)), 1)
    # round the byte count up to an actual integer size: 1, 2, 4 or 8
    width = int(2 ** np.ceil(np.log2(nbytes)))
    return np.dtype('i%d' % width)
def rescale_int(data, dtype=None):
    """Convert *data* to an integer array (full dtype range) for wav output."""
    if dtype is None:
        dtype = best_int_dtype(data)
    else:
        dtype = np.dtype(dtype)
    if not isinstance(dtype.type(), np.integer):
        raise ValueError('Please give valid dtype')
    info = np.iinfo(dtype)
    # stretch the data across the dtype's entire representable range
    return rescale(data, (info.min, info.max)).astype(dtype)
def monotone(f, duration=1, fs=44100):
    """A pure sinusoidal tone at frequency *f* Hz.

    duration : length of the tone in seconds (may be fractional)
    fs : sampling rate in Hz

    BUGFIX: np.linspace requires an integer sample count; the original
    passed ``fs * duration`` directly, which fails for float durations.
    """
    n_samples = int(fs * duration)
    t = np.linspace(0, duration, n_samples)
    return np.cos(2 * np.pi * f * t)
# def multitone(frqs, duration=1, fs=44100):
def play(signal, rate):
    """Play *signal*: inline Audio widget when interactive, else the OSS device."""
    if is_interactive():
        return Audio(data=signal, rate=rate, autoplay=True)
    # non-interactive: write straight to the OSS audio device
    with sd.open('w') as device:
        device.setfmt(sd.AFMT_S16_LE)
        device.speed(rate)
        device.writeall(signal)
class PianoKeys():
    """
    Simple class that returns the frequency of keys on the piano when sliced
    Example
    -------
    piano = PianoKeys()
    piano[40] # 261.625 #(middle C)
    piano['C4'] # 261.625 #(middle C)
    piano.to_name(40),
    piano.to_key_nr(piano[40]),
    piano.freq_to_name(piano[40]),
    piano['A0'],
    piano['C#1'],
    piano['B5']
    See:
    ----
    https://en.wikipedia.org/wiki/Piano_key_frequencies
    """
    A = 440  # Hz (concert pitch A4)
    iA = 49  # key number of A4 (the 440 Hz reference key)
    # Build the 12 note names of one octave starting at A: A, A#, B, C, ...
    # B and E have no sharps.
    notes = []
    for n in 'abcdefg'.upper():
        notes.append(n)
        if n not in 'BE':
            notes.append(n + '#')

    def to_freq(self, n):
        """Return the frequency (Hz) of piano key number *n* (1..88)."""
        # https://en.wikipedia.org/wiki/Piano_key_frequencies
        if n < 1 or n > 88:
            raise ValueError('Key nr not in range')
        # equal temperament: each semitone is a factor of 2**(1/12)
        return self.A * pow(2, (n - self.iA) / 12)

    def to_key_nr(self, f):
        """Return the (rounded-down) key number whose frequency is *f*."""
        return int(12 * np.log2(f / self.A) + self.iA)

    def to_name(self, n):
        """Return the note name (e.g. 'C4') of key number *n*."""
        i = n % 12
        # octave boundary falls between B and C; +8 shifts it accordingly
        octave = (n + 8) // 12
        return self.notes[i - 1] + str(octave)

    def name_to_key(self, name):
        """Return the key number for a note name like 'C4' or 'C#1'."""
        ix = 1 + ('#' in name)  # note part is 1 or 2 characters long
        note = name[:ix].upper()
        if not note in self.notes:
            raise ValueError('Unrecognized note %s' % name)
        i = self.notes.index(note)
        octave = name[-1]
        if not octave.isdigit():
            # no octave digit given: default to octave 0 for A/A#/B, else 1
            octave = (i > 2)
        octave = int(octave)
        n = i + 1 + (octave * 12)
        # notes C and above belong to the *next* numbered octave; correct back
        n -= (n % 12 > 3) * 12
        return n

    def freq_to_name(self, f):
        """Return the note name closest to frequency *f*."""
        return self.to_name(self.to_key_nr(f))

    def name_to_freq(self, name):
        """Return the frequency (Hz) of the named note."""
        return self.to_freq(self.name_to_key(name))

    def freq_of(self, key):
        """Dispatch: numeric *key* is a key number, str *key* is a note name."""
        if isinstance(key, (int, np.integer, float, np.floating)):
            return self.to_freq(key)
        elif isinstance(key, str):
            return self.name_to_freq(key)
        else:
            raise KeyError('Invalid key %s' % key)

    def __getitem__(self, key):
        return self.freq_of(key)

    def play(self, key, duration=1):
        """Produce a monotone signal at frequency of *key*"""
        signal = monotone(self.freq_of(key), duration)
        return Audio(data=signal, rate=44100, autoplay=True)
def FrequencyModulator(data, duration, fs=44.1e3, phase=0, fcarrier=None,
                       fdev=None):
    """
    data : information to be transmitted (i.e., the baseband signal)
    duration : signal length in seconds
    fs : sampling rate in Hz
    phase : constant phase offset added to the carrier
    fcarrier : carrier's base frequency
    fdev : frequency deviation (represents the maximum shift away from the carrier frequency)

    BUGFIX: np.linspace requires an integer sample count; the original
    passed ``fs * duration`` directly, which is a float with the default
    fs=44.1e3 and raised TypeError.
    """
    n_samples = int(fs * duration)
    t = np.linspace(0, duration, n_samples)
    dmin, dmax = data.min(), data.max()
    if fcarrier is None:
        # NOTE(review): default carrier/deviation are derived from the data
        # *values* themselves — TODO confirm this is intended.
        fcarrier = np.mean((dmin, dmax))
    if fdev is None:
        fdev = (np.max((dmin, dmax)) - fcarrier) / fs
    # normalize the data range from -1 to 1
    rescaled = rescale(data, (-1, 1))
    # generate FM signal: integrate the (scaled) baseband via cumsum
    return np.cos(
        2 * np.pi * (fcarrier * t + fdev * np.cumsum(rescaled)) + phase)
class AudifySpec(Spectral):
    # Spectral variant that computes and caches FFT spectra of the segments
    # and returns their normalised power.
    def main(self, segments):  # calculate_spectra
        # calculate spectra
        spec = scipy.fftpack.fft(segments)
        # since we are dealing with real signals
        # keep only the non-redundant half of the spectrum
        spec = spec[..., :len(self.frq)]
        self.spectra = spec  # cached for reconstruct_segment
        power = np.square(np.abs(spec))
        power = normaliser(power, self.segments, self.opts.normalise,
                           self.npadded, self.dt)
        return power
    def reconstruct_segment(self, i, duration, rate):
        # Inverse-FFT segment *i* back to a time signal of duration*rate samples.
        # NOTE(review): the result is computed but never returned — this looks
        # unfinished; confirm whether `return ifft` is intended.
        n = int(duration * rate)
        ifft = scipy.fftpack.ifft(self.spectra[i], n)
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.7 on 2017-12-20 19:44
from __future__ import unicode_literals
import c3nav.mapdata.fields
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated Django migration: adds the AccessRestrictionGroup model
    # and a many-to-many `groups` field on AccessRestriction.
    # Do not hand-edit the operations of an applied migration.
    dependencies = [
        ('mapdata', '0064_access_permission_unique_key'),
    ]
    operations = [
        migrations.CreateModel(
            name='AccessRestrictionGroup',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', c3nav.mapdata.fields.I18nField(blank=True, fallback_any=True, fallback_value='{model} {pk}', plural_name='titles', verbose_name='Title')),
            ],
            options={
                'verbose_name': 'Access Restriction Group',
                'verbose_name_plural': 'Access Restriction Groups',
                'default_related_name': 'accessrestrictiongroups',
            },
        ),
        migrations.AddField(
            model_name='accessrestriction',
            name='groups',
            field=models.ManyToManyField(blank=True, related_name='accessrestrictions', to='mapdata.AccessRestrictionGroup', verbose_name='Groups'),
        ),
    ]
|
# Question 4
# Vowel checker (Turkish alphabet)
# BUGFIX: the original vowel list contained mojibake — UTF-8 encoded Turkish
# letters decoded with the wrong codec (e.g. "ฤฑ" in place of "ı") — so the
# non-ASCII vowels could never match. Restored the intended vowels in both
# lower and upper case.
letter = input("Enter a letter: ")
if letter in ["a", "e", "ı", "i", "o", "ö", "u", "ü", "A", "E", "I", "İ", "O", "Ö", "U", "Ü"]:
    print("Passed letter is vowel")
else:
    print("Passed letter is not vowel")
|
""" Tracklet XML file parsing
This code was taken as is from the kitti website link
(http://cvlibs.net/datasets/kitti/downloads/parseTrackletXML.py ).
Minor Pythonic naming changes made and TRUNC_UNSET enum addition.
Original header and author comments follow below.
---
parse XML files containing tracklet info for kitti data base (raw data section)
(http://cvlibs.net/datasets/kitti/raw_data.php)
No guarantees that this code is correct, usage is at your own risk!
created by Christian Herdtweck, Max Planck Institute for Biological Cybernetics
(christian.herdtweck@tuebingen.mpg.de)
requires numpy!
"""
# Version History:
# 4/7/12 Christian Herdtweck: seems to work with a few random test xml tracklet files;
# converts file contents to ElementTree and then to list of Tracklet objects;
# Tracklet objects have str and iter functions
# 5/7/12 ch: added constants for state, occlusion, truncation and added consistency checks
# 30/1/14 ch: create example function from example code
from __future__ import print_function
from xml.etree.ElementTree import ElementTree
import numpy as np
import itertools
from warnings import warn
# Annotation state of a pose: unset, interpolated, or manually labeled.
STATE_UNSET = 0
STATE_INTERP = 1
STATE_LABELED = 2
stateFromText = {'0': STATE_UNSET, '1': STATE_INTERP, '2': STATE_LABELED}
# Occlusion level of the object in a frame.
OCC_UNSET = 255  # -1 as uint8
OCC_VISIBLE = 0
OCC_PARTLY = 1
OCC_FULLY = 2
occFromText = {'-1': OCC_UNSET, '0': OCC_VISIBLE, '1': OCC_PARTLY, '2': OCC_FULLY}
# Truncation of the object relative to the image boundaries.
TRUNC_UNSET = 255  # -1 as uint8, but in xml files the value '99' is used!
TRUNC_IN_IMAGE = 0
TRUNC_TRUNCATED = 1
TRUNC_OUT_IMAGE = 2
TRUNC_BEHIND_IMAGE = 3
truncFromText = {
    '-1': TRUNC_UNSET,  # FIXME RW: Added this
    '99': TRUNC_UNSET,  # FIXME RW: Original code had this but 99 is supposed to be 'behind'???
    '0': TRUNC_IN_IMAGE,
    '1': TRUNC_TRUNCATED,
    '2': TRUNC_OUT_IMAGE,
    '3': TRUNC_BEHIND_IMAGE}
class Tracklet(object):
    r""" Representation of an annotated object track.
    Tracklets are created in function parseXML and can most conveniently used as follows:
    for trackletObj in parseXML(trackletFile):
    for translation, rotation, state, occlusion, truncation, amtOcclusion, amt_borders, absoluteFrameNumber in trackletObj:
    ... your code here ...
    #end: for all frames
    #end: for all tracklets
    absoluteFrameNumber is in range [first_frame, first_frame+num_frames[
    amtOcclusion and amt_borders could be None
    You can of course also directly access the fields objType (string), size (len-3 ndarray), first_frame/num_frames (int),
    trans/rots (num_frames x 3 float ndarrays), states/truncs (len-num_frames uint8 ndarrays), occs (num_frames x 2 uint8 ndarray),
    and for some tracklets amt_occs (num_frames x 2 float ndarray) and amt_borders (num_frames x 3 float ndarray). The last two
    can be None if the xml file did not include these fields in poses
    """
    object_type = None
    size = None  # len-3 float array: (height, width, length)
    first_frame = None
    trans = None  # n x 3 float array (x,y,z)
    rots = None  # n x 3 float array (x,y,z)
    states = None  # len-n uint8 array of states
    occs = None  # n x 2 uint8 array (occlusion, occlusion_kf)
    truncs = None  # len-n uint8 array of truncation
    amt_occs = None  # None or (n x 2) float array (amt_occlusion, amt_occlusion_kf)
    amt_borders = None  # None (n x 3) float array (amt_border_l / _r / _kf)
    num_frames = None

    def __init__(self):
        r""" create Tracklet with no info set """
        self.size = np.nan * np.ones(3, dtype=float)

    def __str__(self):
        r""" return human-readable string representation of tracklet object
        called implicitly in
        print trackletObj
        or in
        text = str(trackletObj)
        """
        return '[Tracklet over {0} frames for {1}]'.format(self.num_frames, self.object_type)

    def __iter__(self):
        r""" returns an iterator that yields tuple of all the available data for each frame
        called whenever code iterates over a tracklet object, e.g. in
        for translation, rotation, state, occlusion, truncation, amtOcclusion, amt_borders, absoluteFrameNumber in trackletObj:
        ...do something ...
        or
        trackDataIter = iter(trackletObj)
        """
        # BUGFIX: itertools.izip does not exist on Python 3 — the builtin
        # zip is lazy on py3 and returns an (iterable) list on py2, so it
        # is a safe replacement in both.
        if self.amt_occs is None:
            return zip(
                self.trans, self.rots, self.states, self.occs, self.truncs,
                itertools.repeat(None), itertools.repeat(None),
                range(self.first_frame, self.first_frame + self.num_frames))
        else:
            return zip(
                self.trans, self.rots, self.states, self.occs, self.truncs,
                self.amt_occs, self.amt_borders, range(self.first_frame, self.first_frame + self.num_frames))
# end: class Tracklet
def parse_xml(tracklet_file):
    r""" parse tracklet xml file and convert results to list of Tracklet objects
    :param tracklet_file: name of a tracklet xml file
    :returns: list of Tracklet objects read from xml file
    """
    # convert tracklet XML data to a tree structure
    etree = ElementTree()
    print('Parsing Tracklet file', tracklet_file)
    with open(tracklet_file) as f:
        etree.parse(f)
    # now convert output to list of Tracklet objects
    tracklets_elem = etree.find('tracklets')
    tracklets = []
    tracklet_idx = 0
    num_tracklets = None
    for tracklet_elem in tracklets_elem:
        if tracklet_elem.tag == 'count':
            num_tracklets = int(tracklet_elem.text)
            print('File contains', num_tracklets, 'Tracklets')
        elif tracklet_elem.tag == 'item_version':
            pass
        elif tracklet_elem.tag == 'item':
            new_track = Tracklet()
            is_finished = False
            has_amt = False
            frame_idx = None
            for info in tracklet_elem:
                if is_finished:
                    raise ValueError('More info on element after finished!')
                if info.tag == 'objectType':
                    new_track.object_type = info.text
                elif info.tag == 'h':
                    new_track.size[0] = float(info.text)
                elif info.tag == 'w':
                    new_track.size[1] = float(info.text)
                elif info.tag == 'l':
                    new_track.size[2] = float(info.text)
                elif info.tag == 'first_frame':
                    new_track.first_frame = int(info.text)
                elif info.tag == 'poses':
                    # this info is the possibly long list of poses
                    for pose in info:
                        if pose.tag == 'count':  # this should come before the others
                            if new_track.num_frames is not None:
                                raise ValueError('There are several pose lists for a single track!')
                            elif frame_idx is not None:
                                raise ValueError('?!')
                            # allocate per-frame arrays now that the count is known
                            new_track.num_frames = int(pose.text)
                            new_track.trans = np.nan * np.ones((new_track.num_frames, 3), dtype=float)
                            new_track.rots = np.nan * np.ones((new_track.num_frames, 3), dtype=float)
                            new_track.states = np.nan * np.ones(new_track.num_frames, dtype='uint8')
                            new_track.occs = np.nan * np.ones((new_track.num_frames, 2), dtype='uint8')
                            new_track.truncs = np.nan * np.ones(new_track.num_frames, dtype='uint8')
                            new_track.amt_occs = np.nan * np.ones((new_track.num_frames, 2), dtype=float)
                            new_track.amt_borders = np.nan * np.ones((new_track.num_frames, 3), dtype=float)
                            frame_idx = 0
                        elif pose.tag == 'item_version':
                            pass
                        elif pose.tag == 'item':
                            # pose in one frame
                            if frame_idx is None:
                                raise ValueError('Pose item came before number of poses!')
                            for poseInfo in pose:
                                if poseInfo.tag == 'tx':
                                    new_track.trans[frame_idx, 0] = float(poseInfo.text)
                                elif poseInfo.tag == 'ty':
                                    new_track.trans[frame_idx, 1] = float(poseInfo.text)
                                elif poseInfo.tag == 'tz':
                                    new_track.trans[frame_idx, 2] = float(poseInfo.text)
                                elif poseInfo.tag == 'rx':
                                    new_track.rots[frame_idx, 0] = float(poseInfo.text)
                                elif poseInfo.tag == 'ry':
                                    new_track.rots[frame_idx, 1] = float(poseInfo.text)
                                elif poseInfo.tag == 'rz':
                                    new_track.rots[frame_idx, 2] = float(poseInfo.text)
                                elif poseInfo.tag == 'state':
                                    new_track.states[frame_idx] = stateFromText[poseInfo.text]
                                elif poseInfo.tag == 'occlusion':
                                    new_track.occs[frame_idx, 0] = occFromText[poseInfo.text]
                                elif poseInfo.tag == 'occlusion_kf':
                                    new_track.occs[frame_idx, 1] = occFromText[poseInfo.text]
                                elif poseInfo.tag == 'truncation':
                                    new_track.truncs[frame_idx] = truncFromText[poseInfo.text]
                                elif poseInfo.tag == 'amt_occlusion':
                                    new_track.amt_occs[frame_idx, 0] = float(poseInfo.text)
                                    has_amt = True
                                elif poseInfo.tag == 'amt_occlusion_kf':
                                    new_track.amt_occs[frame_idx, 1] = float(poseInfo.text)
                                    has_amt = True
                                elif poseInfo.tag == 'amt_border_l':
                                    new_track.amt_borders[frame_idx, 0] = float(poseInfo.text)
                                    has_amt = True
                                elif poseInfo.tag == 'amt_border_r':
                                    new_track.amt_borders[frame_idx, 1] = float(poseInfo.text)
                                    has_amt = True
                                elif poseInfo.tag == 'amt_border_kf':
                                    new_track.amt_borders[frame_idx, 2] = float(poseInfo.text)
                                    has_amt = True
                                else:
                                    raise ValueError('Unexpected tag in poses item: {0}!'.format(poseInfo.tag))
                            frame_idx += 1
                        else:
                            raise ValueError('Unexpected pose info: {0}!'.format(pose.tag))
                elif info.tag == 'finished':
                    is_finished = True
                else:
                    raise ValueError('Unexpected tag in tracklets: {0}!'.format(info.tag))
            # end: for all fields in current tracklet
            # some final consistency checks on new tracklet
            if not is_finished:
                warn('Tracklet {0} was not finished!'.format(tracklet_idx))
            if new_track.num_frames is None:
                warn('Tracklet {0} contains no information!'.format(tracklet_idx))
            elif frame_idx != new_track.num_frames:
                # BUGFIX: the format string used {1} twice, so the actual
                # parsed frame count was never shown.
                warn('Tracklet {0} is supposed to have {1} frames, but parser found {2}!'.format(
                    tracklet_idx, new_track.num_frames, frame_idx))
            # BUGFIX: guard against rots being None (no pose list parsed),
            # which previously raised TypeError on the subscript.
            if new_track.rots is not None and np.abs(new_track.rots[:, :2]).sum() > 1e-16:
                warn('Track contains rotation other than yaw!')
            # if amt_occs / amt_borders are not set, set them to None
            if not has_amt:
                new_track.amt_occs = None
                new_track.amt_borders = None
            # add new tracklet to list
            tracklets.append(new_track)
            tracklet_idx += 1
        else:
            raise ValueError('Unexpected tracklet info')
    # end: for tracklet list items
    print('Loaded', tracklet_idx, 'Tracklets')
    # final consistency check
    if tracklet_idx != num_tracklets:
        warn('According to xml information the file has {0} tracklets, but parser found {1}!'.format(
            num_tracklets, tracklet_idx))
    return tracklets
|
#!/usr/bin/env python3
"""
set_membership_annotation.py
Python 3 code for annotating a junction list with presence in annotation,
gene boundaries, antisense transcripts, and presence in various adult, stem
cell, and developmental samples.
"""
import argparse
from datetime import datetime
import glob
import json
import logging
import os
import pandas as pd
import sqlite3 as sql
import sys
try:
import utilities.utilities as jx_util
except ModuleNotFoundError:
sys.path.append(
os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
)
import utilities.utilities as jx_util
from utilities.utilities import _TCGA_CANCER_TYPES, _SRA_ADULT, _SRA_DEV
from utilities.utilities import _SRA_STEMCELLS
from utilities.utilities import snaptron_results_to_jxs, create_jx_id_map
from utilities.utilities import make_id_name_dict, gtf_to_cds, cds_to_tree
from utilities.utilities import cds_to_antisense, extract_splice_sites
from utilities.utilities import jx_df_from_file, jx_gene_overlap
def collect_cancer_loci(cancer_gene_file, symbol_column, separator):
    """Return the set of gene symbols from one column of a cancer-gene table."""
    symbols = pd.read_table(
        cancer_gene_file, sep=separator, usecols=[symbol_column]
    )[symbol_column]
    return set(symbols.tolist())
def check_cancer_loci(coding_region_entry, cancer_locus_set):
    """Return 1 if any gene in the ';'/',' delimited entry is a known cancer locus, else 0."""
    genes = (
        gene
        for gene_set in coding_region_entry.split(';')
        for gene in gene_set.split(',')
    )
    return int(any(gene in cancer_locus_set for gene in genes))
def load_non_cancer_SRA_jxs(snap_in, min_samps, min_reads, overall_set_count):
    """Load junctions from non-cancer SRA snaptron result files, grouped by cohort.

    :param snap_in: a snaptron raw-results .txt file, or a directory of
        '*rawresults*.txt' files (one per sample-type ontology)
    :param min_samps: minimum sample count for a junction to count as
        present in a single ontology file
    :param min_reads: minimum summed read count for a junction to count as
        present in a single ontology file
    :param overall_set_count: minimum summed sample count across a broad
        category (adult / stem cell / developmental) for a junction to stay
        in that category; junctions below it move to the uncategorized set
    :return: list of 14 junction sets: [adult, developmental, stem cell,
        embryo-all, fetal, zygote, oocyte, placenta, embryo-ectoderm,
        embryo-embryo, late-embryo, embryo-mesenchyme, embryo-myoblast,
        uncategorized]
    """
    sra_adult_jxs = set()
    sra_dev_jxs = set()
    sra_sc_jxs = set()
    sra_emb_all = set()
    sra_fet = set()
    sra_zyg = set()
    sra_oo = set()
    sra_plc = set()
    sra_emb_ect = set()
    sra_emb_emb = set()
    sra_emb_late = set()
    sra_emb_mes = set()
    sra_emb_myo = set()
    # all developmental subsets; filtered together at the end
    dev_sets = [
        sra_dev_jxs, sra_emb_all, sra_fet, sra_zyg, sra_oo, sra_plc,
        sra_emb_ect, sra_emb_emb, sra_emb_late, sra_emb_mes, sra_emb_myo
    ]
    # accept either a directory of result files or a single .txt file
    if not snap_in.endswith('.txt'):
        txt_path = os.path.join(snap_in, '*rawresults*.txt')
        ont_files = glob.glob(txt_path)
    else:
        ont_files = [snap_in]
    # per-junction summed sample counts across each broad category
    adult_samp_counts = {}
    sc_samp_counts = {}
    dev_samp_counts = {}
    for ont in ont_files:
        # recover the ontology name from the file name, e.g.
        # 'metaSRA-runs_<ontology>_rawresults....txt' -> '<ontology>'
        name_tag = os.path.basename(ont).split('.')[0]
        name_tag = name_tag.split('_rawresults')[0]
        try:
            name_tag = name_tag.split('metaSRA-runs_')[1]
        except IndexError:
            pass
        with open(ont) as lines:
            jxs = snaptron_results_to_jxs(
                lines, min_sample_count=min_samps, min_read_count=min_reads
            )
        # an ontology may belong to several categories; all matches apply
        if name_tag in _SRA_ADULT:
            sra_adult_jxs.update(jxs)
            for jx, samp_count in jxs.items():
                try:
                    adult_samp_counts[jx] += samp_count
                except KeyError:
                    adult_samp_counts[jx] = samp_count
        if name_tag in _SRA_DEV:
            sra_dev_jxs.update(jxs)
            for jx, samp_count in jxs.items():
                try:
                    dev_samp_counts[jx] += samp_count
                except KeyError:
                    dev_samp_counts[jx] = samp_count
        if name_tag in _SRA_STEMCELLS:
            sra_sc_jxs.update(jxs)
            for jx, samp_count in jxs.items():
                try:
                    sc_samp_counts[jx] += samp_count
                except KeyError:
                    sc_samp_counts[jx] = samp_count
        # fine-grained developmental subsets
        if name_tag in jx_util._SRA_EMB_ALL:
            sra_emb_all.update(jxs)
        if name_tag in jx_util._SRA_FET:
            sra_fet.update(jxs)
        if name_tag in jx_util._SRA_ZYG:
            sra_zyg.update(jxs)
        if name_tag in jx_util._SRA_OO:
            sra_oo.update(jxs)
        if name_tag in jx_util._SRA_PLC:
            sra_plc.update(jxs)
        if name_tag in jx_util._SRA_EMB_ECT:
            sra_emb_ect.update(jxs)
        if name_tag in jx_util._SRA_EMB_EMB:
            sra_emb_emb.update(jxs)
        if name_tag in jx_util._SRA_EMB_LATE:
            sra_emb_late.update(jxs)
        if name_tag in jx_util._SRA_EMB_MES:
            sra_emb_mes.update(jxs)
        if name_tag in jx_util._SRA_EMB_MYO:
            sra_emb_myo.update(jxs)
    # drop junctions whose summed per-category sample counts fall below the
    # overall threshold; they go to the uncategorized set instead
    uncategorized_jxs = set()
    if overall_set_count > 1:
        for jx, samp_count in adult_samp_counts.items():
            if samp_count < overall_set_count:
                sra_adult_jxs.remove(jx)
                uncategorized_jxs.update([jx])
        for jx, samp_count in sc_samp_counts.items():
            if samp_count < overall_set_count:
                sra_sc_jxs.remove(jx)
                uncategorized_jxs.update([jx])
        dev_removals = set()
        for jx, samp_count in dev_samp_counts.items():
            if samp_count < overall_set_count:
                dev_removals.update([jx])
                uncategorized_jxs.update([jx])
        for dev_set in dev_sets:
            dev_set.difference_update(dev_removals)
    jx_sets = [
        sra_adult_jxs, sra_dev_jxs, sra_sc_jxs, sra_emb_all, sra_fet, sra_zyg,
        sra_oo, sra_plc, sra_emb_ect, sra_emb_emb, sra_emb_late, sra_emb_mes,
        sra_emb_myo, uncategorized_jxs
    ]
    return jx_sets
def load_cancer_SRA_jxs(sra_can, min_samps=1, min_reads=1):
    """Collect the union of junctions from SRA cancer snaptron result files.

    :param sra_can: directory containing '*rawresults*.txt' snaptron files
    :param min_samps: minimum sample count per file for a junction to count
    :param min_reads: minimum summed read count per file for a junction to count
    :return: set of all junctions passing the thresholds in any file

    BUGFIX: min_samps/min_reads were previously undefined free variables
    (NameError unless identically-named module globals happened to exist);
    they are now keyword parameters whose defaults match the argparse
    defaults, so existing single-argument calls keep working.
    """
    txt_path = os.path.join(sra_can, '*rawresults*.txt')
    sra_can_files = glob.glob(txt_path)
    sra_can_jxs = set()
    for results_file in sra_can_files:
        with open(results_file) as lines:
            jxs = set(
                snaptron_results_to_jxs(
                    lines, min_sample_count=min_samps, min_read_count=min_reads
                )
            )
        sra_can_jxs = sra_can_jxs.union(jxs)
    return sra_can_jxs
if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description='Check TCGA junctions for developmental tissue evidence.'
    )
    parser.add_argument(
        '--snaptron-results',
        help='.txt file containing junction results from a previous snaptron '
             'search, or directory containing multiple of these.'
    )
    parser.add_argument(
        '--output-path', '-o', default='./',
        help='give path for output set membership files. subdirectories for '
             'developmental and unexplained junctions will be created within '
             'this directory.'
    )
    parser.add_argument(
        '--db-path', default='./',
        help='give the path for storing the created sql database.'
    )
    parser.add_argument(
        '--log-level', '-l', default='INFO', choices=['INFO'],
        help='choose what logging mode to run (only INFO currently supported)'
    )
    parser.add_argument(
        '--database-junction-directory', '-d',
        help='Specify a directory containing .csv files with junctions '
             'extracted via a jx_indexer query, each containing prevalence '
             'values for one cancer type.'
    )
    parser.add_argument(
        '--nongtex-junctions-directory', '-g',
        help='Specify a directory containing .csv files of cancer type '
             'specific junctions that are not present in GTEx.'
    )
    parser.add_argument(
        '--nonpaired-junctions-directory', '-p',
        help='Specify a directory containing .csv files of cancer type '
             'specific junctions that are not present in the paired normal '
             'tissue type(s) on GTEx.'
    )
    parser.add_argument(
        '--gtf-file',
        help='gtf file containing GENCODE annotation.'
    )
    # BUGFIX: the following three option names had been corrupted (a bad
    # "count" -> "top_x" replacement); the attributes accessed below
    # (args.min_SRA_sample_count etc.) require the "-count" spellings.
    parser.add_argument(
        '--min-SRA-sample-count', default=1, type=int,
        help='provide the minimum number of samples a junction must occur in '
             'in a given SRA sample type cohort to count as "in" the cohort.'
    )
    parser.add_argument(
        '--min-SRA-read-count', default=1, type=int,
        help='provide the minimum read coverage sum a junction must have in a '
             'given SRA sample type cohort to count as "in" the cohort.'
    )
    parser.add_argument(
        '--single-read-jx-json', required=True,
        help='Give the json file containing the list of single-read TCGA jxs, '
             'created by collect_1-read_jxs.py.'
    )
    parser.add_argument(
        '--cancer-sra-directory',
        help='Specify the directory containing snaptron results for junctions '
             'from SRA cancer samples.'
    )
    parser.add_argument(
        '--cancer-gene-census',
        help='Provide the file with COSMIC cancer gene census data, '
             'cancer_gene_census.tsv'
    )
    parser.add_argument(
        '--oncokb-cancer-genes',
        help='Provide the file containing the onco-KB cancer gene list.'
    )
    parser.add_argument(
        '--min-overall-set-count', default=1, type=int,
        help='provide the minimum number of samples a junction must occur in '
             'in a broad SRA category (e.g. adult, stem cell, devlopmental) '
             'for membership in that category.'
    )
    args = parser.parse_args()
    snap_results = args.snaptron_results
    out_path = args.output_path
    db_path = args.db_path
    log_mode = args.log_level
    jx_dir = args.database_junction_directory
    nongtex_dir = args.nongtex_junctions_directory
    nonpair_dir = args.nonpaired_junctions_directory
    gtf_path = args.gtf_file
    min_samps = args.min_SRA_sample_count
    min_reads = args.min_SRA_read_count
    single_read_file = args.single_read_jx_json
    sra_can = args.cancer_sra_directory
    cancer_census = args.cancer_gene_census
    oncokb = args.oncokb_cancer_genes
    overall_set_count = args.min_overall_set_count
    now = datetime.now().strftime('%m-%d-%Y_%H.%M.%S')
    log_file = os.path.join(
        out_path, 'piechart_annotation_log_{}.txt'.format(now)
    )
    logging.basicConfig(filename=log_file, level=log_mode)
    logging.info('input is: {}'.format(' '.join(sys.argv)))
    dev_dir = os.path.join(out_path, 'developmental')
    os.makedirs(dev_dir, exist_ok=True)
    unexpl_dir = os.path.join(out_path, 'unexplained')
    os.makedirs(unexpl_dir, exist_ok=True)
    # load junctions with only 1 read in TCGA
    with open(single_read_file) as recovered_data:
        single_read_jxs = json.load(recovered_data)
    single_read_jxs = set(single_read_jxs)
    # create connection to the database and pull junction-id info
    # BUGFIX: the try previously wrapped only os.path.join, which cannot
    # raise OperationalError; the connect call is what can actually fail.
    db_name = os.path.join(db_path, 'new_jx_index.db')
    try:
        conn = sql.connect(db_name)
    except sql.OperationalError:
        print('If OperationalError is "unable to open database file":')
        print('make sure -d gives the PATH to the database directory,')
        print('not the database itself.')
        raise  # bare raise preserves the original exception and traceback
    index_db = conn.cursor()
    jx_id_map = create_jx_id_map(conn)
    # prepare annotation dictionaries from .gtf file
    coding_regions = gtf_to_cds(gtf_path)
    logging.info('coding regions discovered')
    CDS_interval_tree = cds_to_tree(coding_regions)
    logging.info('CDS tree created')
    antisense_interval_tree = cds_to_antisense(coding_regions)
    logging.info('antisense region tree created')
    id_name_dict = make_id_name_dict(gtf_path)
    jx_annotations = extract_splice_sites(gtf_path)
    # collect cancer genes
    cancer_loci = set()
    cancer_loci.update(collect_cancer_loci(
        cancer_census, symbol_column='Gene Symbol', separator=','
    ))
    cancer_loci.update(collect_cancer_loci(
        oncokb, symbol_column='Hugo Symbol', separator='\t'
    ))
    # Load SRA junctions
    jx_sets = load_non_cancer_SRA_jxs(
        snap_results, min_samps, min_reads, overall_set_count
    )
    sra_adult_jxs, sra_dev_jxs, sra_sc_jxs, sra_emb_all, sra_fet, sra_zyg, \
        sra_oo, sra_plc, sra_emb_ect, sra_emb_emb, sra_emb_late, sra_emb_mes, \
        sra_emb_myo, uncategorized_jxs = jx_sets
    sra_can_jxs = load_cancer_SRA_jxs(sra_can)
    for cancer in _TCGA_CANCER_TYPES:
        annotated_file = os.path.join(
            out_path,
            '{}_piechart_annotation_min_{}_samples.csv'
            ''.format(cancer, overall_set_count)
        )
        # skip cancer types already annotated on a previous run
        if os.path.exists(annotated_file):
            continue
        logging.info('starting {}'.format(cancer))
        all_jxs_name = '{}_all_jxs*.csv'.format(cancer)
        file = glob.glob(os.path.join(jx_dir, all_jxs_name))[0]
        # NOTE(review): `top_x=False` may be another casualty of the same
        # bad search/replace (originally `count=False`?) — confirm against
        # the jx_df_from_file signature in utilities.
        jx_df = jx_df_from_file(
            file, 0.0, 1.0, chunk_it=True, glob_form=all_jxs_name,
            sample=False, top_x=False, drop_ann=False
        )
        jx_df['gencode'] = jx_df.annotation.apply(lambda x: x == 3).astype(int)
        # Load and annotate non-GTEx jxs
        nongtex_name = '{}_all_neojxs*non_GTEx*.txt'.format(cancer)
        nongtex_file = glob.glob(os.path.join(nongtex_dir, nongtex_name))[0]
        nongtex_df = pd.read_table(nongtex_file, sep=',').fillna(0)
        if 'jx' not in nongtex_df.columns.values:
            nongtex_df['jx'] = nongtex_df.jx_id.apply(lambda x: jx_id_map[x])
        nongtex_jxs = set(nongtex_df['jx'].tolist())
        jx_df['gtex'] = jx_df.jx.apply(
            lambda x: x not in nongtex_jxs
        ).astype(int)
        # Load and annotate non-tissue-matched jxs
        nonpair_name = '{}_all_neojxs*non_paired_normal*.txt'.format(cancer)
        nonpair_file = glob.glob(os.path.join(nonpair_dir, nonpair_name))[0]
        nonpair_df = pd.read_table(nonpair_file, sep=',').fillna(0)
        if 'jx' not in nonpair_df.columns.values:
            nonpair_df['jx'] = nonpair_df.jx_id.apply(lambda x: jx_id_map[x])
        nonpair_jxs = set(nonpair_df['jx'].tolist())
        jx_df['paired'] = jx_df.jx.apply(
            lambda x: x not in nonpair_jxs
        ).astype(int)
        # Annotate all SRA cell types
        jx_df['sra_stemcells'] = jx_df.jx.apply(
            lambda x: x in sra_sc_jxs
        ).astype(int)
        jx_df['sra_developmental'] = jx_df.jx.apply(
            lambda x: x in sra_dev_jxs
        ).astype(int)
        jx_df['sra_adult'] = jx_df.jx.apply(
            lambda x: x in sra_adult_jxs
        ).astype(int)
        jx_df['sra_embryo_all'] = jx_df.jx.apply(
            lambda x: x in sra_emb_all
        ).astype(int)
        jx_df['sra_embryo_ectoderm'] = jx_df.jx.apply(
            lambda x: x in sra_emb_ect
        ).astype(int)
        jx_df['sra_embryo_embryo'] = jx_df.jx.apply(
            lambda x: x in sra_emb_emb
        ).astype(int)
        jx_df['sra_embryo_lateembryo'] = jx_df.jx.apply(
            lambda x: x in sra_emb_late
        ).astype(int)
        jx_df['sra_embryo_mesenchyme'] = jx_df.jx.apply(
            lambda x: x in sra_emb_mes
        ).astype(int)
        jx_df['sra_embryo_myoblast'] = jx_df.jx.apply(
            lambda x: x in sra_emb_myo
        ).astype(int)
        jx_df['sra_neonate_fetal'] = jx_df.jx.apply(
            lambda x: x in sra_fet
        ).astype(int)
        jx_df['sra_zygote'] = jx_df.jx.apply(
            lambda x: x in sra_zyg
        ).astype(int)
        jx_df['sra_oocyte'] = jx_df.jx.apply(
            lambda x: x in sra_oo
        ).astype(int)
        jx_df['sra_placenta'] = jx_df.jx.apply(
            lambda x: x in sra_plc
        ).astype(int)
        # Remove single-read TCGA jxs
        poss_unexpl = jx_df[
            (jx_df.gtex == 0) &
            (jx_df.sra_stemcells == 0) &
            (jx_df.sra_adult == 0) &
            (jx_df.sra_developmental == 0)
        ]
        logging.info('init unexplained: {}'.format(len(poss_unexpl)))
        init_unexpl_count = len(poss_unexpl)
        poss_unexpl = poss_unexpl[~poss_unexpl.jx.isin(sra_can_jxs)]
        logging.info('removing sra cancer jxs: {}'.format(len(poss_unexpl)))
        poss_unexpl = poss_unexpl[~poss_unexpl.jx.isin(uncategorized_jxs)]
        logging.info(
            'removing uncategorized sra jxs: {}'.format(len(poss_unexpl))
        )
        jxs_to_kill = set(
            poss_unexpl[poss_unexpl.jx.isin(single_read_jxs)]['jx'].tolist()
        )
        logging.info(
            '{} of these have 1 TCGA read and will be killed:'
            ''.format(len(jxs_to_kill))
        )
        jx_df = jx_df[~jx_df.jx.isin(jxs_to_kill)]
        # Add other annotations
        jx_df['coding_regions'] = jx_df.jx.apply(
            lambda x: jx_gene_overlap(x, CDS_interval_tree, id_name_dict)
        )
        jx_df['antisense_regions'] = jx_df.jx.apply(
            lambda x: jx_gene_overlap(x, antisense_interval_tree, id_name_dict)
        )
        jx_df['sense'] = (jx_df.coding_regions != ';').astype(int)
        jx_df['antisense'] = (jx_df.antisense_regions != ';').astype(int)
        jx_df['cancer_locus'] = jx_df.coding_regions.apply(
            lambda x: check_cancer_loci(x, cancer_loci)
        )
        with open(annotated_file, 'w') as output:
            jx_df.to_csv(output, index=False)
        # select and save developmental jxs
        developmental_df = jx_df[
            (jx_df.gtex == 0) &
            (jx_df.sra_adult == 0) &
            (jx_df.sra_stemcells == 0) &
            (jx_df.sra_developmental == 1)
        ]
        dev_file = os.path.join(
            dev_dir,
            '{}_piechart_annotation_developmental_min_{}_samples.csv'
            ''.format(cancer, overall_set_count)
        )
        with open(dev_file, 'w') as output:
            developmental_df.to_csv(output, index=False)
        # select and save unexplained jxs
        unexplained_df = jx_df[
            (jx_df.gtex == 0) &
            (jx_df.sra_stemcells == 0) &
            (jx_df.sra_adult == 0) &
            (jx_df.sra_developmental == 0)
        ]
        logging.info('{} final unexplained jxs'.format(len(unexplained_df)))
        logging.info(
            'this is {}% of the originals'
            ''.format(len(unexplained_df) / init_unexpl_count)
        )
        unexpl_file = os.path.join(
            unexpl_dir,
            '{}_piechart_annotation_unexplained_min_{}_samples.csv'
            ''.format(cancer, overall_set_count)
        )
        with open(unexpl_file, 'w') as output:
            unexplained_df.to_csv(output, index=False)
|
'''
Manage Azure Resource Manager deployments.
'''
from ... pyaz_utils import _call_az
from . import operation
def create(resource_group, aux_subs=None, aux_tenants=None, handle_extended_json_format=None, mode=None, name=None, no_prompt=None, no_wait=None, parameters=None, rollback_on_error=None, template_file=None, template_uri=None):
    '''
    Start a deployment (wraps `az group deployment create`).
    Required Parameters:
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    Optional Parameters:
    - aux_subs -- Auxiliary subscriptions which will be used during deployment across tenants.
    - aux_tenants -- Auxiliary tenants which will be used during deployment across tenants.
    - handle_extended_json_format -- Support to handle extended template content including multiline and comments in deployment
    - mode -- Incremental (only add resources to resource group) or Complete (remove extra resources from resource group)
    - name -- The deployment name. Default to template file base name
    - no_prompt -- The option to disable the prompt of missing parameters for ARM template. When the value is true, the prompt requiring users to provide missing parameter will be ignored. The default value is false.
    - no_wait -- Do not wait for the long-running operation to finish.
    - parameters -- the deployment parameters
    - rollback_on_error -- The name of a deployment to roll back to on error, or use as a flag to roll back to the last successful deployment.
    - template_file -- a path to a template file or Bicep file in the file system
    - template_uri -- a uri to a remote template file
    Returns whatever `_call_az` produces for the CLI invocation.
    '''
    # locals() forwards every parameter (by name) to the CLI wrapper; do not
    # introduce additional local variables here, they would be forwarded too.
    return _call_az("az group deployment create", locals())
def list(resource_group, filter=None, top=None):
    '''
    List the deployments in a resource group (wraps `az group deployment list`).
    NOTE: the function name `list` and the parameter `filter` shadow Python
    builtins; kept as-is for 1:1 parity with the auto-generated CLI surface.
    Required Parameters:
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    Optional Parameters:
    - filter -- The filter to apply on the operation. For example, you can use $filter=provisioningState eq '{state}'.
    - top -- The number of results to get. If null is passed, returns all deployments.
    Returns whatever `_call_az` produces for the CLI invocation.
    '''
    # locals() forwards every parameter to the CLI wrapper; keep this body a
    # single return so no extra locals leak into the forwarded dict.
    return _call_az("az group deployment list", locals())
def show(name, resource_group):
    '''
    Show the details of a deployment (wraps `az group deployment show`).
    Required Parameters:
    - name -- The deployment name.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    Returns whatever `_call_az` produces for the CLI invocation.
    '''
    # locals() forwards both parameters; do not add local variables here.
    return _call_az("az group deployment show", locals())
def delete(name, resource_group, no_wait=None):
    '''
    Delete a deployment (wraps `az group deployment delete`).
    Required Parameters:
    - name -- The deployment name.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    Optional Parameters:
    - no_wait -- Do not wait for the long-running operation to finish.
    Returns whatever `_call_az` produces for the CLI invocation.
    '''
    # locals() forwards every parameter; keep this body a single return.
    return _call_az("az group deployment delete", locals())
def validate(resource_group, handle_extended_json_format=None, mode=None, no_prompt=None, parameters=None, rollback_on_error=None, template_file=None, template_uri=None):
    '''
    Validate whether a template is syntactically correct (wraps
    `az group deployment validate`).
    Required Parameters:
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    Optional Parameters:
    - handle_extended_json_format -- Support to handle extended template content including multiline and comments in deployment
    - mode -- Incremental (only add resources to resource group) or Complete (remove extra resources from resource group)
    - no_prompt -- The option to disable the prompt of missing parameters for ARM template. When the value is true, the prompt requiring users to provide missing parameter will be ignored. The default value is false.
    - parameters -- the deployment parameters
    - rollback_on_error -- The name of a deployment to roll back to on error, or use as a flag to roll back to the last successful deployment.
    - template_file -- a path to a template file or Bicep file in the file system
    - template_uri -- a uri to a remote template file
    Returns whatever `_call_az` produces for the CLI invocation.
    '''
    # locals() forwards every parameter; keep this body a single return.
    return _call_az("az group deployment validate", locals())
def export(name, resource_group):
    '''
    Export the template used for a deployment (wraps `az group deployment export`).
    Required Parameters:
    - name -- The deployment name.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    Returns whatever `_call_az` produces for the CLI invocation.
    '''
    # locals() forwards both parameters; do not add local variables here.
    return _call_az("az group deployment export", locals())
def wait(name, resource_group, created=None, custom=None, deleted=None, exists=None, interval=None, timeout=None, updated=None):
    '''
    Place the CLI in a waiting state until a deployment condition is met
    (wraps `az group deployment wait`).
    Required Parameters:
    - name -- The deployment name.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    Optional Parameters:
    - created -- wait until created with 'provisioningState' at 'Succeeded'
    - custom -- Wait until the condition satisfies a custom JMESPath query. E.g. provisioningState!='InProgress', instanceView.statuses[?code=='PowerState/running']
    - deleted -- wait until deleted
    - exists -- wait until the resource exists
    - interval -- polling interval in seconds
    - timeout -- maximum wait in seconds
    - updated -- wait until updated with provisioningState at 'Succeeded'
    Returns whatever `_call_az` produces for the CLI invocation.
    '''
    # locals() forwards every parameter; keep this body a single return.
    return _call_az("az group deployment wait", locals())
def cancel(name, resource_group):
    '''
    Cancel a currently running deployment (wraps `az group deployment cancel`).
    Required Parameters:
    - name -- The deployment name.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    Returns whatever `_call_az` produces for the CLI invocation.
    '''
    # locals() forwards both parameters; do not add local variables here.
    return _call_az("az group deployment cancel", locals())
|
# Minimal pymongo smoke test: connect to the local MongoDB server and
# insert one sample host record into python.python_test.
import pymongo

client = pymongo.MongoClient("mongodb://localhost:27017/")
database = client["python"]
collection = database["python_test"]

# Example document describing a host and its address.
record = {"host": "devserver", "ip": "192.168.0.50"}
x = collection.insert_one(record)
|
from rest_framework.common.entity import FileDTO
from rest_framework.common.services import Reader, Printer
import pandas as pd
import numpy as np
from sklearn import preprocessing
'''
์ด์ธ ๋ฐ์,์ด์ธ ๊ฒ๊ฑฐ,๊ฐ๋ ๋ฐ์,๊ฐ๋ ๊ฒ๊ฑฐ,๊ฐ๊ฐ ๋ฐ์,๊ฐ๊ฐ ๊ฒ๊ฑฐ,์ ๋ ๋ฐ์,์ ๋ ๊ฒ๊ฑฐ,ํญ๋ ฅ ๋ฐ์,ํญ๋ ฅ ๊ฒ๊ฑฐ
'''
class Service(Reader):
    """Builds the saved CSV artifacts for the Seoul crime/CCTV analysis.

    Three independent pipeline steps, each reading raw inputs and writing a
    CSV into ./saved_data/:
      - save_police_pos:  geocode police stations, attach district names.
      - save_cctv_pop:    merge CCTV counts with population statistics.
      - save_police_norm: min-max normalize per-district crime statistics.

    NOTE(review): the Korean string literals in this file appear
    mojibake-garbled by an earlier encoding round-trip; they are preserved
    byte-for-byte because they must match the input files' column names.
    Several of them were also split across physical lines by that same
    corruption (a syntax error); they have been rejoined onto single lines.
    """

    def __init__(self):
        self.f = FileDTO()   # reusable file descriptor (context dir + fname)
        self.r = Reader()    # csv / xls / gmaps reader helper
        self.p = Printer()   # debug printer helper
        # Arrest-rate columns for the five crime categories
        # (murder / robbery / rape / theft / violence).
        self.crime_rate_columns = ['์ด์ธ๊ฒ๊ฑฐ์จ','๊ฐ๋๊ฒ๊ฑฐ์จ','๊ฐ๊ฐ๊ฒ๊ฑฐ์จ','์ ๋๊ฒ๊ฑฐ์จ','ํญ๋ ฅ๊ฒ๊ฑฐ์จ']
        # Occurrence-count columns for the same five categories.
        self.crime_columns = ['์ด์ธ','๊ฐ๋','๊ฐ๊ฐ','์ ๋','ํญ๋ ฅ']

    def save_police_pos(self):
        """Geocode each Seoul police station and save ./saved_data/police_pos.csv."""
        f = self.f
        r = self.r
        f.context = './data/'
        f.fname = 'crime_in_seoul'
        crime = r.csv(f)
        # Build the geocoding query: 'Seoul' prefix + station stem + 'police
        # station' suffix (the source column stores an abbreviated name).
        station_names = []
        for name in crime['๊ด์๋ช']:
            station_names.append('์์ธ' + str(name[:-1] + '๊ฒฝ์ฐฐ์'))
        station_addrs = []
        station_lats = []
        station_lngs = []
        gmaps = r.gmaps()
        for name in station_names:
            t = gmaps.geocode(name, language='ko')
            station_addrs.append(t[0].get('formatted_address'))
            t_loc = t[0].get('geometry')
            station_lats.append(t_loc['location']['lat'])
            station_lngs.append(t_loc['location']['lng'])
        # District name = the address token that ends with the district
        # suffix character.
        gu_names = []
        for addr in station_addrs:
            tokens = addr.split()
            gu_name = [gu for gu in tokens if gu[-1] == '๊ตฌ'][0]
            gu_names.append(gu_name)
        crime['๊ตฌ๋ณ'] = gu_names
        # Manual overrides: stations physically located outside the district
        # they are named after.
        # BUG FIX: the original used '==' (a no-op comparison); assignment
        # ('=') is required for these overrides to take effect.
        crime.loc[crime['๊ด์๋ช'] == 'ํํ์', ['๊ตฌ๋ณ']] = '์ข๋ก๊ตฌ'
        crime.loc[crime['๊ด์๋ช'] == '์๋ถ์', ['๊ตฌ๋ณ']] = '์ํ๊ตฌ'
        crime.loc[crime['๊ด์๋ช'] == '๊ฐ์์', ['๊ตฌ๋ณ']] = '์์ฒ๊ตฌ'
        crime.loc[crime['๊ด์๋ช'] == '์ข์์', ['๊ตฌ๋ณ']] = '์ฑ๋ถ๊ตฌ'
        crime.loc[crime['๊ด์๋ช'] == '๋ฐฉ๋ฐฐ์', ['๊ตฌ๋ณ']] = '์์ด๊ตฌ'
        crime.loc[crime['๊ด์๋ช'] == '์์์', ['๊ตฌ๋ณ']] = '๊ฐ๋จ๊ตฌ'
        crime.to_csv('./saved_data/police_pos.csv')

    def save_cctv_pop(self):
        """Merge CCTV counts with population data and save ./saved_data/cctv_pop.csv."""
        f = self.f
        r = self.r
        f.context = './data/'
        f.fname = 'cctv_in_seoul'
        cctv = r.csv(f)
        f.fname = 'pop_in_seoul'
        pop = r.xls(f, 2, 'B, D, G, J, N')
        cctv.rename(columns={cctv.columns[0]: '๊ตฌ๋ณ'}, inplace=True)
        # Rename positional Excel columns: district / population / Korean /
        # foreigner / elderly counts.
        pop.rename(columns={
            pop.columns[0]: '๊ตฌ๋ณ',
            pop.columns[1]: '์ธ๊ตฌ์',
            pop.columns[2]: 'ํ๊ตญ์ธ',
            pop.columns[3]: '์ธ๊ตญ์ธ',
            pop.columns[4]: '๊ณ ๋ น์'
        }, inplace=True)
        print('*' * 100)
        # Row 26 looks like a footer/total row of the source sheet -- TODO confirm.
        pop.drop([26], inplace=True)
        print(pop)
        pop['์ธ๊ตญ์ธ๋น์จ'] = pop['์ธ๊ตญ์ธ'].astype(int) / pop['์ธ๊ตฌ์'].astype(int) * 100
        pop['๊ณ ๋ น์๋น์จ'] = pop['๊ณ ๋ น์'].astype(int) / pop['์ธ๊ตฌ์'].astype(int) * 100
        # Drop the per-year CCTV columns, keeping the district and the total.
        cctv.drop(['2013๋๋ ์ด์ ', '2014๋', '2015๋', '2016๋'], axis=1, inplace=True)
        cctv_pop = pd.merge(cctv, pop, on='๊ตฌ๋ณ')
        cor1 = np.corrcoef(cctv_pop['๊ณ ๋ น์๋น์จ'], cctv_pop['์๊ณ'])
        cor2 = np.corrcoef(cctv_pop['์ธ๊ตญ์ธ๋น์จ'], cctv_pop['์๊ณ'])
        print(f'๊ณ ๋ น์๋น์จ๊ณผ CCTV์ ์๊ด๊ณ์ {str(cor1)} \n'
              f'์ธ๊ตญ์ธ๋น์จ๊ณผ CCTV์ ์๊ด๊ณ์ {str(cor2)} ')
        # Observed correlations are weakly negative (~-0.28 for elderly
        # ratio, ~-0.14 for foreigner ratio); with |r| < 0.3 the linear
        # relationship is weak to negligible.
        cctv_pop.to_csv('./saved_data/cctv_pop.csv')

    def save_police_norm(self):
        """Normalize per-district crime stats and save ./saved_data/police_norm.csv."""
        f = self.f
        r = self.r
        f.context = './saved_data/'
        f.fname = 'police_pos'
        police_pos = r.csv(f)
        print(' ---- 0 ------')
        print(police_pos.columns)
        # Aggregate station-level rows into one summed row per district.
        police = pd.pivot_table(police_pos, index='๊ตฌ๋ณ', aggfunc=np.sum)
        print(' ---- 1 ------')
        print(police.columns)
        # Arrest rate (%) = arrests / occurrences, per crime category.
        police['์ด์ธ๊ฒ๊ฑฐ์จ'] = (police['์ด์ธ ๊ฒ๊ฑฐ'].astype(int) / police['์ด์ธ ๋ฐ์'].astype(int)) * 100
        police['๊ฐ๋๊ฒ๊ฑฐ์จ'] = (police['๊ฐ๋ ๊ฒ๊ฑฐ'].astype(int) / police['๊ฐ๋ ๋ฐ์'].astype(int)) * 100
        police['๊ฐ๊ฐ๊ฒ๊ฑฐ์จ'] = (police['๊ฐ๊ฐ ๊ฒ๊ฑฐ'].astype(int) / police['๊ฐ๊ฐ ๋ฐ์'].astype(int)) * 100
        police['์ ๋๊ฒ๊ฑฐ์จ'] = (police['์ ๋ ๊ฒ๊ฑฐ'].astype(int) / police['์ ๋ ๋ฐ์'].astype(int)) * 100
        police['ํญ๋ ฅ๊ฒ๊ฑฐ์จ'] = (police['ํญ๋ ฅ ๊ฒ๊ฑฐ'].astype(int) / police['ํญ๋ ฅ ๋ฐ์'].astype(int)) * 100
        print(f'type : {type(police)}')
        # 'columns=' already implies the axis; the original redundantly
        # passed axis=1 and used a set literal.
        police.drop(columns=['์ด์ธ ๊ฒ๊ฑฐ', '๊ฐ๋ ๊ฒ๊ฑฐ', '๊ฐ๊ฐ ๊ฒ๊ฑฐ', '์ ๋ ๊ฒ๊ฑฐ', 'ํญ๋ ฅ ๊ฒ๊ฑฐ'], inplace=True)
        print(' ---- 2 ------')
        print(police.columns)
        # Rates above 100% are artifacts of the data period (arrests for
        # older cases); cap them at 100.
        # BUG FIX: the original assigned to a literal column named 1
        # (police.loc[..., 1] = 100) instead of the rate column itself.
        for col in self.crime_rate_columns:
            police.loc[police[col] > 100, col] = 100
        police.rename(columns={
            '์ด์ธ ๋ฐ์': '์ด์ธ',
            '๊ฐ๋ ๋ฐ์': '๊ฐ๋',
            '๊ฐ๊ฐ ๋ฐ์': '๊ฐ๊ฐ',
            '์ ๋ ๋ฐ์': '์ ๋',
            'ํญ๋ ฅ ๋ฐ์': 'ํญ๋ ฅ'
        }, inplace=True)
        # Min-max scale the five count columns to [0, 1] so districts are
        # comparable regardless of absolute magnitudes.
        x = police[self.crime_rate_columns].values
        min_max_scalar = preprocessing.MinMaxScaler()
        x_scaled = min_max_scalar.fit_transform(x.astype(float))
        police_norm = pd.DataFrame(x_scaled, columns=self.crime_columns, index=police.index)
        print(f'police_norm columns {police_norm.columns}')
        police_norm[self.crime_rate_columns] = police[self.crime_rate_columns]
        # Composite indices summed across the five categories.
        police_norm['๋ฒ์ฃ'] = np.sum(police_norm[self.crime_rate_columns], axis=1)
        police_norm['๊ฒ๊ฑฐ'] = np.sum(police_norm[self.crime_columns], axis=1)
        police_norm.to_csv('./saved_data/police_norm.csv', sep=',', encoding='UTF-8')
if __name__ == '__main__':
    # Entry point: regenerate only the normalized police table; the other
    # pipeline steps can be re-enabled when the raw inputs change.
    service = Service()
    # service.save_police_pos()
    # service.save_cctv_pop()
    service.save_police_norm()
|
# -*-coding:utf-8-*-
from apps.core.flask.login_manager import osr_login_required
from apps.core.blueprint import api
from apps.core.flask.permission import permission_required, permissions
from apps.core.flask.response import response_format
from apps.modules.user.process.email import email_update
__author__ = "Allen Woo"
@api.route('/account/email', methods=['PUT'])
@osr_login_required
@permission_required(permissions(["USER"]))
def api_account_email():
    '''
    PUT
    Update the email address bound to the current account.
    Request body fields:
    email:<email>, the new email address to bind
    new_email_code:<str>, verification code received at the new mailbox,
        proving the new mailbox belongs to the user
    current_email_code:<str>, verification code received at the current
        mailbox, proving the change was initiated by the account owner
    password:<str>, the account login password
    :return: the formatted API response wrapping the email_update() result
    '''
    # All validation and persistence happen inside email_update(); this view
    # only adapts its result to the standard response envelope.
    data = email_update()
    return response_format(data)
# Generate test fixtures for a block-sparse GEMM kernel.
#   matrix.npy / matrix_transposed.npy : A (M x K); ~90% of the BLOCK-sized
#       runs are zeroed to create block sparsity (negated in int8 mode)
#   BC.npy  : B (K x N); each row is a constant 0 or 1
#   bias.npy: per-output-row bias; scale.npy: per-row dequant scale (int8)
#   ref.npy : reference output (A @ B + bias) * scale
# Usage: python gen.py M K N <int8: 0|1> BLOCK
import numpy as np
import sys

M = int(sys.argv[1])
K = int(sys.argv[2])
N = int(sys.argv[3])
int8 = int(sys.argv[4]) == 1  # idiomatic boolean; was 'True if ... else False'

if int8:
    # Per-output-row dequantization scale (all-ones keeps results simple).
    scale = np.ones((M)).astype(np.float32)
else:
    scale = 1
bias = np.random.normal(size=(M))

# Build A transposed (K x M) first; magnitudes are >= 1 so zeroed blocks
# are unambiguous against kept entries.
AB = 1 + np.abs(np.random.normal(size=(K, M)).astype(np.float32) * 3)
BLOCK = int(sys.argv[5])
# Pick 9/10 of the BLOCK-aligned run starts and zero each whole run.
# (The original also built an unused 'locs' list here; removed.)
zero_locs = np.random.choice(M * K // BLOCK, M * K // BLOCK // 10 * 9, replace=False) * BLOCK
for i in range(BLOCK):
    indices0 = np.unravel_index(zero_locs + i, (K, M))
    AB[indices0] = 0
AB = AB.transpose().copy()

# B: one constant 0/1 value per row.
BC = np.ones((K, N)).astype(np.float32)
for i in range(K):
    BC[i] = np.random.randint(2)

if int8:
    AB = AB.astype(np.int8)
    BC = BC.astype(np.uint8)
    bias = bias.astype(np.int32)
    AB = -AB
print("density", np.count_nonzero(AB) / M / K)

# Reference result: (A @ B + bias) scaled, cast to the output dtype.
AC = np.dot(AB, BC) + np.expand_dims(bias, 1)
AC = AC.astype(np.float32) * np.expand_dims(scale, 1)
if int8:
    AC = AC.astype(np.int8)

if int8:
    np.save("bias.npy", bias)
else:
    np.save("bias.npy", bias.astype(np.float32))
np.save("matrix.npy", AB)
if int8:
    np.save("scale.npy", scale)
np.save("matrix_transposed.npy", AB.transpose())
np.save("BC.npy", BC)
np.save("ref.npy", AC)
|
"""
Copyright 2013 Steven Diamond
This file is part of CVXPY.
CVXPY is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
CVXPY is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with CVXPY. If not, see <http://www.gnu.org/licenses/>.
"""
import cvxpy.settings as s
import cvxpy.interface as intf
from cvxpy.error import SolverError
import cvxpy.lin_ops.lin_utils as lu
from cvxpy.lin_ops.lin_op import VARIABLE
import cvxpy.utilities.performance_utils as pu
from cvxpy.constraints.nonlinear import NonlinearConstraint
from cvxpy.constraints.utilities import format_elemwise
import cvxopt
import math
class ExpCone(NonlinearConstraint):
    """A reformulated exponential cone constraint.

    Operates elementwise on x, y, z.

    Original cone:
        K = {(x,y,z) | y > 0, ye^(x/y) <= z}
            U {(x,y,z) | x <= 0, y = 0, z >= 0}

    Reformulated cone:
        K = {(x,y,z) | y, z > 0, y * log(y) + x <= y * log(z)}
            U {(x,y,z) | x <= 0, y = 0, z >= 0}

    Attributes
    ----------
    x: Variable x in the exponential cone.
    y: Variable y in the exponential cone.
    z: Variable z in the exponential cone.
    """
    # Matrix interfaces used to build dense/sparse CVXOPT matrices.
    CVXOPT_DENSE_INTF = intf.get_matrix_interface(cvxopt.matrix)
    CVXOPT_SPARSE_INTF = intf.get_matrix_interface(cvxopt.spmatrix)

    def __init__(self, x, y, z):
        """Store the three cone arguments; the constraint is elementwise."""
        self.x = x
        self.y = y
        self.z = z
        # All three arguments share x's size (elementwise constraint).
        self.size = self.x.size
        super(ExpCone, self).__init__(self._solver_hook,
                                      [self.x, self.y, self.z])

    def __str__(self):
        return "ExpCone(%s, %s, %s)" % (self.x, self.y, self.z)

    def format(self, eq_constr, leq_constr, dims, solver):
        """Formats EXP constraints for the solver.

        Parameters
        ----------
        eq_constr : list
            A list of the equality constraints in the canonical problem.
        leq_constr : list
            A list of the inequality constraints in the canonical problem.
        dims : dict
            A dict with the dimensions of the conic constraints.
        solver : str
            The solver being called.

        Raises
        ------
        SolverError
            If the target solver cannot handle exponential cones.
        """
        if solver.name() == s.CVXOPT:
            # CVXOPT consumes the auxiliary equality constraints built by
            # __CVXOPT_format (element 0 of the pair).
            eq_constr += self.__CVXOPT_format[0]
        elif solver.name() == s.SCS:
            # SCS consumes the elementwise inequality formatting
            # (element 1 of the pair).
            leq_constr += self.__SCS_format[1]
        else:
            raise SolverError("Solver does not support exponential cone.")
        # Update dims: one exponential cone per scalar element.
        dims[s.EXP_DIM] += self.size[0]*self.size[1]

    @pu.lazyprop
    def __SCS_format(self):
        # (eq_constraints, leq_constraints): SCS only needs the elementwise
        # interleaving of x, y, z; computed once and cached by lazyprop.
        return ([], format_elemwise([self.x, self.y, self.z]))

    @pu.lazyprop
    def __CVXOPT_format(self):
        # CVXOPT's nonlinear interface requires each cone argument to be a
        # lone variable; non-variable expressions are replaced in vars_ by
        # fresh variables tied back with equality constraints.
        constraints = []
        for i, var in enumerate(self.vars_):
            if not var.type is VARIABLE:
                lone_var = lu.create_var(var.size)
                constraints.append(lu.create_eq(lone_var, var))
                self.vars_[i] = lone_var
        return (constraints, [])

    def _solver_hook(self, vars_=None, scaling=None):
        """A function used by CVXOPT's nonlinear solver.

        Based on f(x,y,z) = y * log(y) + x - y * log(z).

        Parameters
        ----------
        vars_: A cvxopt dense matrix with values for (x,y,z).
        scaling: A scaling for the Hessian.

        Returns
        -------
        _solver_hook() returns the constraint size and a feasible point.
        _solver_hook(x) returns the function value and gradient at x.
        _solver_hook(x, z) returns the function value, gradient,
        and (z scaled) Hessian at x.
        """
        # Number of scalar exponential cones in this constraint.
        entries = self.size[0]*self.size[1]
        if vars_ is None:
            # No-argument call: report the constraint count plus a strictly
            # feasible starting point (x=0, y=0.5, z=1 for every element).
            x_init = entries*[0.0]
            y_init = entries*[0.5]
            z_init = entries*[1.0]
            return self.size[0], cvxopt.matrix(x_init + y_init + z_init)
        # Unpack vars_: values are stacked as [x; y; z].
        x = vars_[0:entries]
        y = vars_[entries:2*entries]
        z = vars_[2*entries:]
        # Out of domain (log undefined); returning None signals infeasible
        # point to CVXOPT.
        # TODO what if y == 0.0?
        if min(y) <= 0.0 or min(z) <= 0.0:
            return None
        # Evaluate the function elementwise.
        f = self.CVXOPT_DENSE_INTF.zeros(entries, 1)
        for i in range(entries):
            f[i] = x[i] - y[i]*math.log(z[i]) + y[i]*math.log(y[i])
        # Compute the gradient (Jacobian row per cone element; columns are
        # grouped as [d/dx | d/dy | d/dz]).
        Df = self.CVXOPT_DENSE_INTF.zeros(entries, 3*entries)
        for i in range(entries):
            Df[i, i] = 1.0
            Df[i, entries+i] = math.log(y[i]) - math.log(z[i]) + 1.0
            Df[i, 2*entries+i] = -y[i]/z[i]
        if scaling is None:
            return f, Df
        # Compute the Hessian: block-diagonal, one scaled 3x3 block per
        # element, scattered with stride 'entries'.
        big_H = self.CVXOPT_SPARSE_INTF.zeros(3*entries, 3*entries)
        for i in range(entries):
            H = cvxopt.matrix([
                [0.0, 0.0, 0.0],
                [0.0, 1.0/y[i], -1.0/z[i]],
                [0.0, -1.0/z[i], y[i]/(z[i]**2)],
            ])
            big_H[i:3*entries:entries, i:3*entries:entries] = scaling[i]*H
        return f, Df, big_H
|
import torch
from torch.optim import Optimizer
from meta_learning.backend.pytorch.optimizer.memory_static import MemoryStatic
class MetaSGD(Optimizer):
    """SGD variant that can rescale each gradient element with a learned
    memory module (MemoryStatic) instead of a fixed learning rate.

    Arguments:
        params (iterable): iterable of parameters to optimize or dicts
            defining parameter groups
        use_memory (bool): if True, a per-parameter MemoryStatic module
            produces an elementwise learning rate; if False, plain SGD with
            gradient clipping (default: True)
        lr (float, optional): learning rate (default: 1e-3)
        betas (Tuple[float, float], optional): stored in the param-group
            defaults but not used by step() (default: (0.9, 0.999))
        eps (float, optional): stored in the param-group defaults but not
            used by step() (default: 1e-8)
        weight_decay (float, optional): stored in the param-group defaults
            but not applied by step() (default: 0)
    """

    def __init__(self, params, use_memory=True, lr=1e-3,
                 betas=(0.9, 0.999), eps=1e-8,
                 weight_decay=0):
        defaults = dict(lr=lr, betas=betas, eps=eps,
                        weight_decay=weight_decay)
        super(MetaSGD, self).__init__(params, defaults)
        # Gradients are clamped elementwise to [-clip_grad, clip_grad]
        # before any update is computed.
        self.clip_grad = 2
        self.use_memory = use_memory

    def step(self, closure=None):
        """Performs a single optimization step.

        Arguments:
            closure (callable, optional): A closure that reevaluates the
                model and returns the loss.
        """
        loss = None
        if closure is not None:
            loss = closure()
        for group in self.param_groups:
            for p in group['params']:
                if p.grad is None:
                    continue
                p.grad.data.clamp_(min=-self.clip_grad,
                                   max=self.clip_grad)
                grad = p.grad.data
                state = self.state[p]
                # State initialization on first step for this parameter.
                if len(state) == 0:
                    state['step'] = 0
                    # zeros_like keeps dtype/device; the original allocated
                    # an uninitialized CPU FloatTensor (never read before
                    # the first copy_, but wrong device for GPU params).
                    state['prev_grad'] = torch.zeros_like(grad)
                    if self.use_memory:
                        state['memory'] = MemoryStatic(self.clip_grad,
                                                       20,
                                                       0.1,
                                                       0.01,
                                                       group['lr'],
                                                       True)
                if self.use_memory:
                    memory = state['memory']
                    # Feed the previous gradient to the memory before
                    # querying it, except on the very first step.
                    if state['step'] > 0:
                        prev_grad = state['prev_grad']
                        memory.update_memory(prev_grad.view(-1), grad.view(-1))
                    lr = memory.compute_eta(grad.view(-1))
                    scaled_grad = lr.view(grad.size()) * grad
                    state['prev_grad'].copy_(grad)
                else:
                    lr = group['lr']
                    scaled_grad = lr * grad
                # Modern signature; the positional add_(-1.0, tensor)
                # overload is deprecated.
                p.data.add_(scaled_grad, alpha=-1.0)
                state['step'] += 1
        return loss
class MetaAdam(Optimizer):
    """Intended Adam variant with the learned-memory learning rate.

    NOTE(review): the body is currently an exact copy of MetaSGD — no Adam
    moment estimates are computed despite the betas/eps arguments. TODO:
    implement the Adam update or drop this class.

    Arguments:
        params (iterable): iterable of parameters to optimize or dicts
            defining parameter groups
        use_memory (bool): if True, a per-parameter MemoryStatic module
            produces an elementwise learning rate; if False, plain clipped
            SGD (default: True)
        lr (float, optional): learning rate (default: 1e-3)
        betas (Tuple[float, float], optional): stored in the param-group
            defaults but not used by step() (default: (0.9, 0.999))
        eps (float, optional): stored in the param-group defaults but not
            used by step() (default: 1e-8)
        weight_decay (float, optional): stored in the param-group defaults
            but not applied by step() (default: 0)

    .. _Adam\: A Method for Stochastic Optimization:
        https://arxiv.org/abs/1412.6980
    """

    def __init__(self, params, use_memory=True, lr=1e-3,
                 betas=(0.9, 0.999), eps=1e-8,
                 weight_decay=0):
        defaults = dict(lr=lr, betas=betas, eps=eps,
                        weight_decay=weight_decay)
        # BUG FIX: the original called super(MetaSGD, self).__init__, which
        # raises TypeError because self is not a MetaSGD instance, making
        # this class impossible to construct.
        super(MetaAdam, self).__init__(params, defaults)
        # Gradients are clamped elementwise to [-clip_grad, clip_grad].
        self.clip_grad = 2
        self.use_memory = use_memory

    def step(self, closure=None):
        """Performs a single optimization step.

        Arguments:
            closure (callable, optional): A closure that reevaluates the
                model and returns the loss.
        """
        loss = None
        if closure is not None:
            loss = closure()
        for group in self.param_groups:
            for p in group['params']:
                if p.grad is None:
                    continue
                p.grad.data.clamp_(min=-self.clip_grad,
                                   max=self.clip_grad)
                grad = p.grad.data
                state = self.state[p]
                # State initialization on first step for this parameter.
                if len(state) == 0:
                    state['step'] = 0
                    # zeros_like keeps dtype/device; the original allocated
                    # an uninitialized CPU FloatTensor.
                    state['prev_grad'] = torch.zeros_like(grad)
                    if self.use_memory:
                        state['memory'] = MemoryStatic(self.clip_grad,
                                                       20,
                                                       0.1,
                                                       0.01,
                                                       group['lr'],
                                                       True)
                if self.use_memory:
                    memory = state['memory']
                    # Feed the previous gradient to the memory before
                    # querying it, except on the very first step.
                    if state['step'] > 0:
                        prev_grad = state['prev_grad']
                        memory.update_memory(prev_grad.view(-1), grad.view(-1))
                    lr = memory.compute_eta(grad.view(-1))
                    scaled_grad = lr.view(grad.size()) * grad
                    state['prev_grad'].copy_(grad)
                else:
                    lr = group['lr']
                    scaled_grad = lr * grad
                # Modern signature; the positional add_(-1.0, tensor)
                # overload is deprecated.
                p.data.add_(scaled_grad, alpha=-1.0)
                state['step'] += 1
        return loss
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.