| code (stringlengths 2-1.05M) | repo_name (stringlengths 5-104) | path (stringlengths 4-251) | language (stringclasses 1 value) | license (stringclasses 15 values) | size (int32 2-1.05M) |
|---|---|---|---|---|---|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# File: CAM-resnet.py
import cv2
import sys
import argparse
import numpy as np
import os
import multiprocessing
import tensorflow as tf
from tensorpack import *
from tensorpack.dataflow import dataset
from tensorpack.tfutils import optimizer, gradproc
from tensorpack.tfutils.symbolic_functions import *
from tensorpack.tfutils.summary import *
from tensorpack.utils.gpu import get_num_gpu
from tensorpack.utils import viz
from imagenet_utils import (
fbresnet_augmentor, ImageNetModel)
from resnet_model import (
preresnet_basicblock, preresnet_group)
TOTAL_BATCH_SIZE = 256
DEPTH = None
class Model(ImageNetModel):
def get_logits(self, image):
cfg = {
18: ([2, 2, 2, 2], preresnet_basicblock),
34: ([3, 4, 6, 3], preresnet_basicblock),
}
defs, block_func = cfg[DEPTH]
with argscope(Conv2D, use_bias=False,
kernel_initializer=tf.variance_scaling_initializer(scale=2.0, mode='fan_out')), \
argscope([Conv2D, MaxPooling, GlobalAvgPooling, BatchNorm], data_format='channels_first'):
convmaps = (LinearWrap(image)
.Conv2D('conv0', 64, 7, strides=2, activation=BNReLU)
.MaxPooling('pool0', 3, strides=2, padding='SAME')
.apply2(preresnet_group, 'group0', block_func, 64, defs[0], 1)
.apply2(preresnet_group, 'group1', block_func, 128, defs[1], 2)
.apply2(preresnet_group, 'group2', block_func, 256, defs[2], 2)
.apply2(preresnet_group, 'group3new', block_func, 512, defs[3], 1)())
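# Note added for clarity: 'group3new' uses stride 1 (instead of the usual 2), so the final
# conv feature map stays at 14x14 for 224x224 inputs, which presumably gives the
# higher-resolution CAM heatmaps reshaped to 14x14 in viz_cam below (an assumption
# inferred from the code, not stated in the original).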
print(convmaps)
convmaps = GlobalAvgPooling('gap', convmaps)
logits = FullyConnected('linearnew', convmaps, 1000)
return logits
def optimizer(self):
lr = tf.get_variable('learning_rate', initializer=0.1, trainable=False)
opt = tf.train.MomentumOptimizer(lr, 0.9, use_nesterov=True)
gradprocs = [gradproc.ScaleGradient(
[('conv0.*', 0.1), ('group[0-2].*', 0.1)])]
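# The processors above scale gradients of conv0 and group0-2 by 0.1, presumably because
# those stages are reused from a pre-trained ResNet while the freshly initialized
# 'group3new'/'linearnew' layers should adapt faster (an assumption, not stated in the original).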
return optimizer.apply_grad_processors(opt, gradprocs)
def get_data(train_or_test):
# completely copied from imagenet-resnet.py example
isTrain = train_or_test == 'train'
datadir = args.data
ds = dataset.ILSVRC12(datadir, train_or_test, shuffle=isTrain)
augmentors = fbresnet_augmentor(isTrain)
augmentors.append(imgaug.ToUint8())
ds = AugmentImageComponent(ds, augmentors, copy=False)
if isTrain:
ds = PrefetchDataZMQ(ds, min(25, multiprocessing.cpu_count()))
ds = BatchData(ds, BATCH_SIZE, remainder=not isTrain)
return ds
def get_config():
dataset_train = get_data('train')
dataset_val = get_data('val')
return TrainConfig(
model=Model(),
dataflow=dataset_train,
callbacks=[
ModelSaver(),
PeriodicTrigger(InferenceRunner(dataset_val, [
ClassificationError('wrong-top1', 'val-error-top1'),
ClassificationError('wrong-top5', 'val-error-top5')]),
every_k_epochs=2),
ScheduledHyperParamSetter('learning_rate',
[(30, 1e-2), (55, 1e-3), (75, 1e-4), (95, 1e-5)]),
],
steps_per_epoch=5000,
max_epoch=105,
)
def viz_cam(model_file, data_dir):
ds = get_data('val')
pred_config = PredictConfig(
model=Model(),
session_init=get_model_loader(model_file),
input_names=['input', 'label'],
output_names=['wrong-top1', 'group3new/bnlast/Relu', 'linearnew/W'],
return_input=True
)
meta = dataset.ILSVRCMeta().get_synset_words_1000()
pred = SimpleDatasetPredictor(pred_config, ds)
cnt = 0
for inp, outp in pred.get_result():
images, labels = inp
wrongs, convmaps, W = outp
batch = wrongs.shape[0]
for i in range(batch):
if wrongs[i]:
continue
weight = W[:, [labels[i]]].T # 1x512
convmap = convmaps[i, :, :, :] # 512xhxw
mergedmap = np.matmul(weight, convmap.reshape((512, -1))).reshape(14, 14)
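# The product above is (1, 512) x (512, h*w) -> (1, h*w), reshaped to the 14x14 spatial
# map (14x14 assumes 224x224 inputs with the stride-1 'group3new' stage).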
mergedmap = cv2.resize(mergedmap, (224, 224))
heatmap = viz.intensity_to_rgb(mergedmap, normalize=True)
blend = images[i] * 0.5 + heatmap * 0.5
concat = np.concatenate((images[i], heatmap, blend), axis=1)
classname = meta[labels[i]].split(',')[0]
cv2.imwrite('cam{}-{}.jpg'.format(cnt, classname), concat)
cnt += 1
if cnt == 500:
return
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--gpu', help='comma separated list of GPU(s) to use.')
parser.add_argument('--data', help='ILSVRC dataset dir')
parser.add_argument('--depth', type=int, default=18)
parser.add_argument('--load', help='load model')
parser.add_argument('--cam', action='store_true', help='run visualization')
args = parser.parse_args()
DEPTH = args.depth
if args.gpu:
os.environ['CUDA_VISIBLE_DEVICES'] = args.gpu
num_gpu = get_num_gpu()
BATCH_SIZE = TOTAL_BATCH_SIZE // num_gpu
if args.cam:
BATCH_SIZE = 128 # something that can run on one gpu
viz_cam(args.load, args.data)
sys.exit()
logger.auto_set_dir()
config = get_config()
if args.load:
config.session_init = get_model_loader(args.load)
launch_train_with_config(config, SyncMultiGPUTrainerParameterServer(num_gpu))
| eyaler/tensorpack | examples/Saliency/CAM-resnet.py | Python | apache-2.0 | 5,641 |
# coding=utf-8
# Copyright 2018 The DisentanglementLib Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for relational_layers.py."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from disentanglement_lib.evaluation.abstract_reasoning import relational_layers
import numpy as np
import tensorflow.compat.v1 as tf
def _create_positional_encoding_matrices():
"""Shared input/output pair for the positional encoding tests."""
input_array = np.arange(24, dtype=np.float64).reshape((1, 4, 3, 2))
output_array = np.eye(4)
output_array = np.repeat(np.expand_dims(output_array, -1), 2, axis=-1)
output_array = np.expand_dims(output_array, 0)
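# output_array is the 4x4 identity (one-hot position codes), repeated along the last
# (size-2) axis and given a leading batch dim -> shape (1, 4, 4, 2).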
return input_array, output_array
class RelationalLayersTest(tf.test.TestCase):
def test_repeat_for_tensor(self):
a = np.arange(24).reshape((1, 4, 3, 2))
shouldbe = np.concatenate([a] * 3, axis=-2)
result = self.evaluate(relational_layers.repeat(tf.constant(a), 3, axis=-2))
self.assertAllClose(shouldbe, result)
def test_pairwise_edge_embeddings_for_tensor(self):
a = np.array([[[1], [2]]])
shouldbe = np.array([[[[1, 1], [1, 2]], [[2, 1], [2, 2]]]])
layer = relational_layers.PairwiseEdgeEmbeddings()
result = self.evaluate(layer(tf.constant(a)))
self.assertAllClose(shouldbe, result)
def test_relational_layer_for_tensor(self):
a = np.array([[[1], [2]]])
shouldbe = np.array([[[2, 3], [4, 3]]])
layer = relational_layers.RelationalLayer(
tf.keras.layers.Lambda(lambda x: x),
tf.keras.layers.Lambda(lambda x: tf.reduce_sum(x, axis=-2)))
result = self.evaluate(layer(tf.constant(a)))
self.assertAllClose(shouldbe, result)
def test_positional_encoding_like_for_static_shape_tensor(self):
value, shouldbe = _create_positional_encoding_matrices()
a = tf.constant(value)
output_tensor = relational_layers.positional_encoding_like(a, -3, -2)
result = self.evaluate(output_tensor)
self.assertEqual((1, 4, 4, 2), result.shape)
self.assertAllClose(shouldbe, result)
def test_positional_encoding_like_for_dynamic_shape_tensor(self):
value, shouldbe = _create_positional_encoding_matrices()
a = tf.placeholder(tf.float32, shape=(None, 4, 3, 2))
output_tensor = relational_layers.positional_encoding_like(a, -3, -2)
# Check the static shape.
self.assertEqual([None, 4, 4, 2], output_tensor.get_shape().as_list())
# Check the solution.
with self.session() as sess:
result = sess.run(output_tensor, feed_dict={a: value})
self.assertAllClose(shouldbe, result)
def test_add_positional_encoding_layer_for_tensor(self):
value, shouldbe_positional = _create_positional_encoding_matrices()
shouldbe = np.concatenate([value, shouldbe_positional], axis=-2)
a = tf.constant(value)
output_tensor = relational_layers.AddPositionalEncoding(-3, -2)(a)
result = self.evaluate(output_tensor)
self.assertAllClose(shouldbe, result)
def test_stack_answers_for_tensors(self):
# Tensors used for testing.
context = np.arange(24).reshape((2, 3, 4))
answers = np.arange(24, 48).reshape((2, 3, 4))
# Compute the correct solutions.
results = []
for i in range(answers.shape[-1]):
results.append(
np.concatenate([context, answers[:, :, i:(i + 1)]], axis=-1))
shouldbe = np.stack(results, axis=-2)
# Compute the solution based on the layer.
layer = relational_layers.StackAnswers(answer_axis=-1, stack_axis=-2)
result = self.evaluate(layer([tf.constant(context), tf.constant(answers)]))
# Check that they are the same.
self.assertAllClose(shouldbe, result)
def test_multi_dim_batch_apply_for_tensors(self):
# Tensors used for testing.
input_tensor = np.arange(24).reshape((2, 3, 4))
kernel = np.arange(24, 36).reshape((4, 3))
# Compute the correct solutions.
shouldbe = np.matmul(input_tensor, kernel)
# Compute the solution based on the layer.
layer = relational_layers.MultiDimBatchApply(
tf.keras.layers.Lambda(lambda x: tf.matmul(x, tf.constant(kernel))),
num_dims_to_keep=1)
result = self.evaluate(layer(tf.constant(input_tensor)))
# Check that they are the same.
self.assertAllClose(shouldbe, result)
if __name__ == '__main__':
tf.test.main()
| google-research/disentanglement_lib | disentanglement_lib/evaluation/abstract_reasoning/relational_layers_test.py | Python | apache-2.0 | 4,868 |
#
# Copyright 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import logging
from ast import literal_eval
from threading import Thread
from ovirtscheduler import utils
class PythonMethodRunner(Thread):
def __init__(self, path, module, cls, method, args, request_id=''):
super(PythonMethodRunner, self).__init__(group=None)
logger = logging.getLogger()
self._log_adapter = utils.RequestAdapter(
logger,
{'method': 'PythonMethodRunner',
'request_id': request_id})
self._path = path
self._result = None
self._error = None
self._process = None
self._script = self.createScript(module, cls, method, args)
self.request_id = request_id
def run(self):
try:
self._log_adapter.debug(
'running %s in %s' % (self._script, self._path))
self._process = utils.createProcess(self._script, self._path)
(result, error) = self._process.communicate()
if not isinstance(result, str):
result = result.decode()
try:
self._result = literal_eval(result)
except Exception as ex:
if not error:
self._error = "Unable to parse result: %s" \
" got error : %s " % (result, ex)
if error:
self._error = error
except Exception as ex:
self._error = ex
if self._error:
self._log_adapter.error("script %s got error %s" %
(self._script, self._error))
def getResults(self):
return self._result
def getErrors(self):
return self._error
def getReturnCode(self):
return self._process.returncode
def stop(self):
return utils.killProcess(self._process)
def createScript(self, module, cls, method, args):
command_template = "import {m}; {m}.{c}().{method}{args}"
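# Illustrative expansion (hypothetical names): with module='plugin', cls='Filter', method='run'
# and args expanding to ('host1',), command_string becomes:
#   import plugin; plugin.Filter().run('host1',)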
command_string = command_template\
.format(m=module,
c=cls,
method=method,
args=repr(utils.createFunctionArgs(args)))
return ["python3", "-c", command_string]
| oVirt/ovirt-scheduler-proxy | src/ovirtscheduler/runner.py | Python | apache-2.0 | 2,775 |
__author__ = 'Chao'
import numpy as np
from sklearn import svm, cross_validation
from sklearn.ensemble import RandomForestClassifier
from sklearn.neighbors import KNeighborsClassifier
activity_label = {'1': 'WALKING',
'2': 'WALKING_UPSTAIRS',
'3': 'WALKING_DOWNSTAIRS',
'4': 'SITTING',
'5': 'STANDING',
'6': 'LAYING'}
# ############################# Open data set ###############################
X = []
y = []
X_fin = []
y_fin = []
print "Opening dataset..."
try:
with open("X_train.txt", 'rU') as f:
res = list(f)
for line in res:
line.strip("\n")
pair = line.split(" ")
while pair.__contains__(""):
pair.remove("")
for i in xrange(pair.__len__()):
pair[i] = float(pair[i])
X.append(pair)
f.close()
with open("y_train.txt", 'rU') as f:
res = list(f)
for line in res:
y.append(int(line.strip("\n")[0]))
f.close()
except:
print "Error in reading the train set file."
exit()
try:
with open("X_test.txt", 'rU') as f:
res = list(f)
for line in res:
line.strip("\n")
pair = line.split(" ")
while pair.__contains__(""):
pair.remove("")
for i in xrange(pair.__len__()):
pair[i] = float(pair[i])
X_fin.append(pair)
f.close()
with open("y_test.txt", 'rU') as f:
res = list(f)
for line in res:
y_fin.append(int(line.strip("\n")[0]))
f.close()
except:
print "Error in reading the train set file."
exit()
print "Dataset opened."
X = np.array(X)
y = np.array(y)
###### Separate data set into 70% training set and 30% test set
print "Separating data into 70% training set & 30% test set..."
X_train, X_test, y_train, y_test = cross_validation.train_test_split(X, y, test_size=0.3)
print "Dataset separated."
###### Get best parameters ######
############################### Kernel=Linear ###############################
print "######## SVM, Kernel = Linear #########"
#C_linear = [0.1, 1, 10, 100]
C_linear = [3]
result_linear = []
print "C value chosen from: ", C_linear
print "Calculating accuracy with K-fold..."
for C in C_linear:
svc_linear = svm.SVC(kernel='linear', C=C)
scores = cross_validation.cross_val_score(svc_linear, X_train, y_train, scoring='accuracy', cv=6)
result_linear.append(scores.mean())
print "result:", result_linear
# Results with different C values were equal, so C=3 is chosen directly as the best parameter.
best_param_linear = {"C": 3}
#linear_test_score = svm.SVC(kernel='linear', C=best_param_linear.get("C")).fit(X_test, y_test).score(X_test, y_test)
#rbf_test_score = svm.SVC(kernel='rbf', C=best_param_rbf.get("C"), gamma=best_param_rbf.get("gamma")).fit(X_test, y_test).score(X_test, y_test)
#poly_test_score = svm.SVC(kernel='poly', C=best_param_poly.get("C"), degree=best_param_poly.get("degree")).fit(X_test, y_test).score(X_test, y_test)
linear_test = svm.SVC(kernel='linear', C=best_param_linear.get("C")).fit(X, y)
count1 = 0
count2 = 0
for i in xrange(X_fin.__len__()):
count2 += 1
a = linear_test.predict(X_fin[i])
b = y_fin[i]
if a == [b]:
count1 += 1
print "Total cases: ", count2
print "Correct Prediction: ", count1
print "Correct Rate: ", float(count1) / count2
#print "Linear Kernel test score: ", linear_test_score
#print "RBF Kernel test score: ", rbf_test_score
#print "Poly Kernel test score: ", poly_test_score
################################### Random Forests ####################################
print "##### Random Forest ######"
n_estimators_list = range(1, 16, 1)
result_random_forests = []
max_score_rf = float("-inf")
best_param_rf = None
for n_estimators in n_estimators_list:
print "Testing n_estimators = ", n_estimators
rf_clf = RandomForestClassifier(n_estimators=n_estimators, max_depth=None, min_samples_split=1, random_state=0)
scores = cross_validation.cross_val_score(rf_clf, X_train, y_train, scoring="accuracy", cv=6)
result_random_forests.append(scores.mean())
if scores.mean() > max_score_rf:
max_score_rf = scores.mean()
best_param_rf = {"n_estimators": n_estimators}
print "number of trees: ", n_estimators_list
print "results: ", result_random_forests
print "best accuracy: ", max_score_rf
print "best parameter: ", best_param_rf
rf_clf_test_score = RandomForestClassifier(n_estimators=best_param_rf.get("n_estimators"), max_depth=None,
min_samples_split=1, random_state=0).fit(X_test, y_test).score(X_test,
y_test)
print "Test set accuracy: ", rf_clf_test_score
rf_clf = RandomForestClassifier(n_estimators=best_param_rf.get("n_estimators"), max_depth=None, min_samples_split=1,
random_state=0).fit(X, y)
count1 = 0
count2 = 0
for i in xrange(X_fin.__len__()):
count2 += 1
a = rf_clf.predict(X_fin[i])
b = y_fin[i]
print "+ ", a[0],
print "- ", b
if a == [b]:
count1 += 1
print "Total cases: ", count2
print "Correct Prediction: ", count1
print "Correct Rate: ", float(count1) / count2
################################### K Nearest Neighbors ####################################
print "##### K Nearest Neighbors ######"
n_neighbors_list = range(1, 6, 1)
result_n_neighbors = []
max_score_knn = float("-inf")
best_param_knn = None
for n_neighbors in n_neighbors_list:
print "Testing n_neighbors = ", n_neighbors
neigh = KNeighborsClassifier(n_neighbors=n_neighbors)
scores = cross_validation.cross_val_score(neigh, X_train, y_train, scoring="accuracy", cv=6)
result_n_neighbors.append(scores.mean())
if scores.mean() > max_score_knn:
max_score_knn = scores.mean()
best_param_knn = {"n_neighbors": n_neighbors}
print "number of neighbors: ", n_neighbors_list
print "results: ", result_n_neighbors
print "best accuracy: ", max_score_knn
print "best parameter: ", best_param_knn
neigh_test_score = KNeighborsClassifier(best_param_knn.get("n_neighbors")).fit(X_test, y_test).score(X_test, y_test)
print "Test set accuracy: ", neigh_test_score
neigh = KNeighborsClassifier(best_param_knn.get("n_neighbors")).fit(X, y)
count1 = 0
count2 = 0
for i in xrange(X_fin.__len__()):
count2 += 1
a = neigh.predict(X_fin[i])
b = y_fin[i]
if a == [b]:
count1 += 1
print "Total cases: ", count2
print "Correct Prediction: ", count1
print "Correct Rate: ", float(count1) / count2
| Sapphirine/Human-Activity-Monitoring-and-Prediction | analysis.py | Python | apache-2.0 | 6,718 |
#!/usr/bin/env python
#
# Copyright 2020 Espressif Systems (Shanghai) PTE LTD
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Check placements in this test app for main
# specified in main/linker.lf
import argparse
import subprocess
from pyparsing import LineEnd, LineStart, Literal, Optional, Word, alphanums, hexnums
argparser = argparse.ArgumentParser()
argparser.add_argument('objdump')
argparser.add_argument('elf')
args = argparser.parse_args()
contents = subprocess.check_output([args.objdump, '-t', args.elf]).decode()
def check_location(symbol, expected):
pattern = (LineStart() + Word(hexnums).setResultsName('address')
+ Optional(Word(alphanums, exact=1))
+ Optional(Word(alphanums,exact=1))
+ Word(alphanums + '._*').setResultsName('actual')
+ Word(hexnums)
+ Literal(symbol)
+ LineEnd())
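# The pattern above is meant to match symbol-table lines from `objdump -t`, e.g.
# (illustrative line, address and size made up):
#   4000d2e4 g     F .iram0.text 0000001b func1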
try:
results = pattern.searchString(contents)[0]
except IndexError:
raise Exception("check placement fail: '%s' was not found" % (symbol))
if results.actual != expected:
raise Exception("check placement fail: '%s' was placed in '%s', not in '%s'" % (symbol, results.actual, expected))
print("check placement pass: '%s' was successfully placed in '%s'" % (symbol, results.actual))
return int(results.address, 16)
# src1:func1 (noflash) - explicit mapping for func1 using 'noflash' scheme
func1 = check_location('func1', '.iram0.text')
sym1_start = check_location('_sym1_start', '*ABS*')
sym1_end = check_location('_sym1_end', '*ABS*')
assert func1 >= sym1_start, 'check placement fail: func1 comes before _sym1_start'
assert func1 < sym1_end, 'check placement fail: func1 comes after _sym1_end'
assert sym1_start % 9 == 0, '_sym1_start is not aligned as specified in linker fragment'
assert sym1_end % 12 == 0, '_sym1_end is not aligned as specified in linker fragment'
print('check placement pass: _sym1_start < func1 < _sym1_end and alignments checked')
# src1:func2 (rtc) - explicit mapping for func2 using 'rtc' scheme
check_location('func2', '.rtc.text')
# src1 (default) - only func3 in src1 remains that has not been
# mapped using a different scheme
check_location('func3', '.flash.text')
check_location('func4', '.iram0.text')
| espressif/esp-idf | tools/test_apps/build_system/ldgen_test/check_placements.py | Python | apache-2.0 | 2,847 |
"""
Define a set of scopes to be used by COS Internal OAuth implementation, specifically tailored to work with APIv2.
List of scopes, nomenclature, and rationale can be found in the relevant "Login as OSF- phase 2" proposal document
"""
from collections import namedtuple
from website import settings
# Public scopes are described with 3 pieces of information: list of constituent scopes, a description, and whether or
# not this scope is available to be requested by the general public
class scope(namedtuple('scope', ['parts_', 'description', 'is_public'])):
""" Patch to add `ALWAYS_PUBLIC` scope to every selectable scope,
ensuring that public endpoints are accessible with any token.
"""
@property
def parts(self):
return frozenset((CoreScopes.ALWAYS_PUBLIC, )).union(self.parts_)
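# Illustrative sketch (added for clarity, not part of the original module): with the patch above,
#   scope(parts_=frozenset({'users_read'}), description='demo', is_public=True).parts
# always contains 'always_public' in addition to 'users_read'.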
class CoreScopes(object):
"""
The smallest units of permission that can be granted- all other scopes are built out of these.
Each named constant is a single string."""
# IMPORTANT: All views should be based on the smallest number of Core scopes required to describe
# the data in that view
USERS_READ = 'users_read'
USERS_WRITE = 'users_write'
USERS_CREATE = 'users_create'
USER_SETTINGS_READ = 'user.settings_read'
USER_SETTINGS_WRITE = 'user.settings_write'
USER_EMAIL_READ = 'users.email_read'
USER_ADDON_READ = 'users.addon_read'
SUBSCRIPTIONS_READ = 'subscriptions_read'
SUBSCRIPTIONS_WRITE = 'subscriptions_write'
MEETINGS_READ = 'meetings.base_read'
NODE_BASE_READ = 'nodes.base_read'
NODE_BASE_WRITE = 'nodes.base_write'
NODE_CHILDREN_READ = 'nodes.children_read'
NODE_CHILDREN_WRITE = 'nodes.children_write'
NODE_FORKS_READ = 'nodes.forks_read'
NODE_FORKS_WRITE = 'nodes.forks_write'
NODE_CONTRIBUTORS_READ = 'nodes.contributors_read'
NODE_CONTRIBUTORS_WRITE = 'nodes.contributors_write'
PREPRINT_CONTRIBUTORS_READ = 'preprints.contributors_read'
PREPRINT_CONTRIBUTORS_WRITE = 'preprints.contributors_write'
NODE_FILE_READ = 'nodes.files_read'
NODE_FILE_WRITE = 'nodes.files_write'
PREPRINT_FILE_READ = 'preprints.files_read'
PREPRINT_FILE_WRITE = 'preprints.files_write'
NODE_ADDON_READ = 'nodes.addon_read'
NODE_ADDON_WRITE = 'nodes.addon_write'
NODE_LINKS_READ = 'nodes.links_read'
NODE_LINKS_WRITE = 'nodes.links_write'
NODE_VIEW_ONLY_LINKS_READ = 'node.view_only_links_read'
NODE_VIEW_ONLY_LINKS_WRITE = 'node.view_only_links_write'
NODE_PREPRINTS_READ = 'node.preprints_read'
NODE_PREPRINTS_WRITE = 'node.preprints_write'
PREPRINTS_READ = 'preprint.preprints_read'
PREPRINTS_WRITE = 'preprint.preprints_write'
REGISTRATION_VIEW_ONLY_LINKS_READ = 'registration.view_only_links_read'
REGISTRATION_VIEW_ONLY_LINKS_WRITE = 'registration.view_only_links_write'
SCHEMA_READ = 'schemas.read'
NODE_DRAFT_REGISTRATIONS_READ = 'nodes.draft_registrations_read'
NODE_DRAFT_REGISTRATIONS_WRITE = 'nodes.draft_registrations_write'
NODE_REGISTRATIONS_READ = 'nodes.registrations_read'
NODE_REGISTRATIONS_WRITE = 'nodes.registrations_write'
NODE_CITATIONS_READ = 'nodes.citations_read'
NODE_CITATIONS_WRITE = 'nodes.citations_write'
PREPRINT_CITATIONS_READ = 'preprints.citations_read'
PREPRINT_CITATIONS_WRITE = 'preprints.citations_write'
NODE_COMMENTS_READ = 'comments.data_read'
NODE_COMMENTS_WRITE = 'comments.data_write'
LICENSE_READ = 'license.data_read'
COMMENT_REPORTS_READ = 'comments.reports_read'
COMMENT_REPORTS_WRITE = 'comments.reports_write'
APPLICATIONS_READ = 'applications_read'
APPLICATIONS_WRITE = 'applications_write'
NODE_LOG_READ = 'nodes.logs_read'
TOKENS_READ = 'tokens_read'
TOKENS_WRITE = 'tokens_write'
ALERTS_READ = 'alerts_read'
ALERTS_WRITE = 'alerts_write'
INSTITUTION_READ = 'institutions_read'
SCOPES_READ = 'scopes_read'
SEARCH = 'search_read'
ACTIONS_READ = 'actions_read'
ACTIONS_WRITE = 'actions_write'
MODERATORS_READ = 'moderators_read'
MODERATORS_WRITE = 'moderators_write'
NODE_REQUESTS_READ = 'node_requests_read'
NODE_REQUESTS_WRITE = 'node_requests_write'
NODE_SETTINGS_READ = 'node_settings_read'
NODE_SETTINGS_WRITE = 'node_settings_write'
PREPRINT_REQUESTS_READ = 'preprint_requests_read'
PREPRINT_REQUESTS_WRITE = 'preprint_requests_write'
PROVIDERS_WRITE = 'providers_write'
CHRONOS_SUBMISSION_READ = 'chronos_submission_read'
CHRONOS_SUBMISSION_WRITE = 'chronos_submission_write'
WAFFLE_READ = 'waffle_read'
NULL = 'null'
# NOTE: Use with extreme caution.
# This should NEVER be assigned to endpoints:
# - with mutable data,
# - that might contain *anything* that could be personally-identifiable,
# - as a write scope
ALWAYS_PUBLIC = 'always_public'
ORGANIZER_COLLECTIONS_BASE_READ = 'collections.base_read'
ORGANIZER_COLLECTIONS_BASE_WRITE = 'collections.base_write'
COLLECTED_META_READ = 'collected_meta_read'
COLLECTED_META_WRITE = 'collected_meta_write'
GUIDS_READ = 'guids.base_read'
WIKI_BASE_READ = 'wikis.base_read'
WIKI_BASE_WRITE = 'wikis.base_write'
IDENTIFIERS_READ = 'identifiers.data_read'
IDENTIFIERS_WRITE = 'identifiers.data_write'
METRICS_BASIC = 'metrics_basic'
METRICS_RESTRICTED = 'metrics_restricted'
class ComposedScopes(object):
"""
Composed scopes, listed in increasing order of access (most restrictive first). Each named constant is a tuple.
"""
# IMPORTANT: Composed scopes exist only as an internal implementation detail.
# All views should be based on selections from CoreScopes, above
# Users collection
USERS_READ = (CoreScopes.USERS_READ, CoreScopes.SUBSCRIPTIONS_READ, CoreScopes.ALERTS_READ, CoreScopes.USER_SETTINGS_READ)
USERS_WRITE = USERS_READ + (CoreScopes.USERS_WRITE, CoreScopes.SUBSCRIPTIONS_WRITE, CoreScopes.ALERTS_WRITE, CoreScopes.USER_SETTINGS_WRITE)
USERS_CREATE = USERS_READ + (CoreScopes.USERS_CREATE, )
# User extensions
USER_EMAIL_READ = (CoreScopes.USER_EMAIL_READ, )
# Applications collection
APPLICATIONS_READ = (CoreScopes.APPLICATIONS_READ, )
APPLICATIONS_WRITE = APPLICATIONS_READ + (CoreScopes.APPLICATIONS_WRITE,)
# Tokens collection
TOKENS_READ = (CoreScopes.TOKENS_READ,)
TOKENS_WRITE = TOKENS_READ + (CoreScopes.TOKENS_WRITE,)
# Guid redirect view
GUIDS_READ = (CoreScopes.GUIDS_READ, )
# Metaschemas collection
METASCHEMAS_READ = (CoreScopes.SCHEMA_READ, )
# Draft registrations
DRAFT_READ = (CoreScopes.NODE_DRAFT_REGISTRATIONS_READ, )
DRAFT_WRITE = (CoreScopes.NODE_DRAFT_REGISTRATIONS_WRITE, )
# Identifier views
IDENTIFIERS_READ = (CoreScopes.IDENTIFIERS_READ, )
IDENTIFIERS_WRITE = (CoreScopes.IDENTIFIERS_WRITE, )
# Comment reports collection
COMMENT_REPORTS_READ = (CoreScopes.COMMENT_REPORTS_READ,)
COMMENT_REPORTS_WRITE = COMMENT_REPORTS_READ + (CoreScopes.COMMENT_REPORTS_WRITE,)
# Nodes collection.
# Base node data includes node metadata, links, children, and preprints.
NODE_METADATA_READ = (CoreScopes.NODE_BASE_READ, CoreScopes.NODE_CHILDREN_READ, CoreScopes.NODE_LINKS_READ,
CoreScopes.NODE_CITATIONS_READ, CoreScopes.NODE_COMMENTS_READ, CoreScopes.NODE_LOG_READ,
CoreScopes.NODE_FORKS_READ, CoreScopes.WIKI_BASE_READ, CoreScopes.LICENSE_READ,
CoreScopes.IDENTIFIERS_READ, CoreScopes.NODE_PREPRINTS_READ, CoreScopes.PREPRINT_REQUESTS_READ)
NODE_METADATA_WRITE = NODE_METADATA_READ + \
(CoreScopes.NODE_BASE_WRITE, CoreScopes.NODE_CHILDREN_WRITE, CoreScopes.NODE_LINKS_WRITE, CoreScopes.IDENTIFIERS_WRITE,
CoreScopes.NODE_CITATIONS_WRITE, CoreScopes.NODE_COMMENTS_WRITE, CoreScopes.NODE_FORKS_WRITE,
CoreScopes.NODE_PREPRINTS_WRITE, CoreScopes.PREPRINT_REQUESTS_WRITE, CoreScopes.WIKI_BASE_WRITE)
# Preprints collection
# TODO: Move Metrics scopes to their own restricted composed scope once the Admin app can manage scopes on tokens/apps
PREPRINT_METADATA_READ = (CoreScopes.PREPRINTS_READ, CoreScopes.PREPRINT_CITATIONS_READ, CoreScopes.IDENTIFIERS_READ, CoreScopes.METRICS_BASIC,)
PREPRINT_METADATA_WRITE = PREPRINT_METADATA_READ + (CoreScopes.PREPRINTS_WRITE, CoreScopes.PREPRINT_CITATIONS_WRITE, CoreScopes.METRICS_RESTRICTED,)
# Organizer Collections collection
# Using Organizer Collections and the node links they collect. Reads Node Metadata.
ORGANIZER_READ = (CoreScopes.ORGANIZER_COLLECTIONS_BASE_READ, CoreScopes.COLLECTED_META_READ,) + NODE_METADATA_READ
ORGANIZER_WRITE = ORGANIZER_READ + (CoreScopes.ORGANIZER_COLLECTIONS_BASE_WRITE, CoreScopes.NODE_LINKS_WRITE, CoreScopes.COLLECTED_META_WRITE)
# Privileges relating to editing content uploaded under that node
NODE_DATA_READ = (CoreScopes.NODE_FILE_READ, CoreScopes.WIKI_BASE_READ)
NODE_DATA_WRITE = NODE_DATA_READ + \
(CoreScopes.NODE_FILE_WRITE, CoreScopes.WIKI_BASE_WRITE)
# Privileges relating to editing content uploaded under that preprint
PREPRINT_DATA_READ = (CoreScopes.PREPRINT_FILE_READ,)
PREPRINT_DATA_WRITE = PREPRINT_DATA_READ + \
(CoreScopes.PREPRINT_FILE_WRITE,)
# Privileges relating to who can access a node (via contributors or registrations)
NODE_ACCESS_READ = (CoreScopes.NODE_CONTRIBUTORS_READ, CoreScopes.NODE_REGISTRATIONS_READ,
CoreScopes.NODE_VIEW_ONLY_LINKS_READ, CoreScopes.REGISTRATION_VIEW_ONLY_LINKS_READ,
CoreScopes.NODE_REQUESTS_READ, CoreScopes.NODE_SETTINGS_READ)
NODE_ACCESS_WRITE = NODE_ACCESS_READ + \
(CoreScopes.NODE_CONTRIBUTORS_WRITE, CoreScopes.NODE_REGISTRATIONS_WRITE,
CoreScopes.NODE_VIEW_ONLY_LINKS_WRITE, CoreScopes.REGISTRATION_VIEW_ONLY_LINKS_WRITE,
CoreScopes.NODE_REQUESTS_WRITE, CoreScopes.NODE_SETTINGS_WRITE)
# Privileges relating to who can access a preprint via contributors
PREPRINT_ACCESS_READ = (CoreScopes.PREPRINT_CONTRIBUTORS_READ,)
PREPRINT_ACCESS_WRITE = PREPRINT_ACCESS_READ + \
(CoreScopes.PREPRINT_CONTRIBUTORS_WRITE,)
# Combine all sets of node permissions into one convenience level
NODE_ALL_READ = NODE_METADATA_READ + NODE_DATA_READ + NODE_ACCESS_READ
NODE_ALL_WRITE = NODE_ALL_READ + NODE_METADATA_WRITE + NODE_DATA_WRITE + NODE_ACCESS_WRITE
# Combine preprint permissions
PREPRINT_ALL_READ = PREPRINT_METADATA_READ + PREPRINT_ACCESS_READ + PREPRINT_DATA_READ
PREPRINT_ALL_WRITE = PREPRINT_ALL_READ + PREPRINT_METADATA_WRITE + PREPRINT_ACCESS_WRITE + PREPRINT_DATA_WRITE
# Reviews
REVIEWS_READ = (CoreScopes.ACTIONS_READ, CoreScopes.MODERATORS_READ)
REVIEWS_WRITE = (CoreScopes.ACTIONS_WRITE, CoreScopes.MODERATORS_WRITE, CoreScopes.PROVIDERS_WRITE)
# Full permissions: all routes intended to be exposed to third party API users
FULL_READ = NODE_ALL_READ + USERS_READ + ORGANIZER_READ + GUIDS_READ + METASCHEMAS_READ + DRAFT_READ + REVIEWS_READ + PREPRINT_ALL_READ + (CoreScopes.MEETINGS_READ, CoreScopes.INSTITUTION_READ, CoreScopes.SEARCH, CoreScopes.SCOPES_READ)
FULL_WRITE = FULL_READ + NODE_ALL_WRITE + USERS_WRITE + ORGANIZER_WRITE + DRAFT_WRITE + REVIEWS_WRITE + PREPRINT_ALL_WRITE
# Admin permissions- includes functionality not intended for third-party use
ADMIN_LEVEL = FULL_WRITE + APPLICATIONS_WRITE + TOKENS_WRITE + COMMENT_REPORTS_WRITE + USERS_CREATE + REVIEWS_WRITE +\
(CoreScopes.USER_EMAIL_READ, CoreScopes.USER_ADDON_READ, CoreScopes.NODE_ADDON_READ, CoreScopes.NODE_ADDON_WRITE, CoreScopes.WAFFLE_READ, )
# List of all publicly documented scopes, mapped to composed scopes defined above.
# Return as sets to enable fast comparisons of provided scopes vs those required by a given node
# These are the ***only*** scopes that will be recognized from CAS
public_scopes = {
'osf.full_read': scope(parts_=frozenset(ComposedScopes.FULL_READ),
description='View all information associated with this account, including for '
'private projects.',
is_public=True),
'osf.full_write': scope(parts_=frozenset(ComposedScopes.FULL_WRITE),
description='View and edit all information associated with this account, including for '
'private projects.',
is_public=True),
'osf.users.profile_read': scope(parts_=frozenset(ComposedScopes.USERS_READ),
description='Read your profile data.',
is_public=True),
'osf.users.email_read': scope(parts_=frozenset(ComposedScopes.USER_EMAIL_READ),
description='Read your primary email address.',
is_public=True),
}
if settings.DEV_MODE:
public_scopes.update({
'osf.users.profile_write': scope(parts_=frozenset(ComposedScopes.USERS_WRITE),
description='Read and edit your profile data.',
is_public=True),
'osf.nodes.metadata_read': scope(parts_=frozenset(ComposedScopes.NODE_METADATA_READ),
description='Read a list of all public and private nodes accessible to this '
'account, and view associated metadata such as project descriptions '
'and titles.',
is_public=True),
'osf.nodes.metadata_write': scope(parts_=frozenset(ComposedScopes.NODE_METADATA_WRITE),
description='Read a list of all public and private nodes accessible to this '
'account, and view and edit associated metadata such as project '
'descriptions and titles.',
is_public=True),
'osf.nodes.data_read': scope(parts_=frozenset(ComposedScopes.NODE_DATA_READ),
description='List and view files associated with any public or private projects '
'accessible to this account.',
is_public=True),
'osf.nodes.data_write': scope(parts_=frozenset(ComposedScopes.NODE_DATA_WRITE),
description='List, view, and update files associated with any public or private '
'projects accessible to this account.',
is_public=True),
'osf.nodes.access_read': scope(parts_=frozenset(ComposedScopes.NODE_ACCESS_READ),
description='View the contributors list and any established registrations '
'associated with public or private projects.',
is_public=True),
'osf.nodes.access_write': scope(parts_=frozenset(ComposedScopes.NODE_ACCESS_WRITE),
description='View and edit the contributors list associated with public or '
'private projects accessible to this account. Also view and create '
'registrations.',
is_public=True), # TODO: Language: Does registrations endpoint allow creation of registrations? Is that planned?
'osf.nodes.full_read': scope(parts_=frozenset(ComposedScopes.NODE_ALL_READ),
description='View all metadata, files, and access rights associated with all public '
'and private projects accessible to this account.',
is_public=True),
'osf.nodes.full_write': scope(parts_=frozenset(ComposedScopes.NODE_ALL_WRITE),
description='View and edit all metadata, files, and access rights associated with '
'all public and private projects accessible to this account.',
is_public=True),
# Undocumented scopes that can not be requested by third parties (per CAS restriction)
'osf.users.create': scope(parts_=frozenset(ComposedScopes.USERS_CREATE),
description='This permission should only be granted to OSF collaborators. Allows a site to '
'programmatically create new users with this account.',
is_public=False),
'osf.admin': scope(parts_=frozenset(ComposedScopes.ADMIN_LEVEL),
description='This permission should only be granted to OSF administrators. Allows a site to '
'create, read, edit, and delete all information associated with this account.',
is_public=False),
})
def normalize_scopes(scopes):
"""
Given a list of public-facing scope names from a CAS token, return the list of internal scopes
This is useful for converting a single broad scope name (from CAS) into the small constituent parts
(as used by views)
:param list scopes: a list public facing scopes
"""
all_scopes = set()
for sc in scopes:
try:
scope_tuple = public_scopes[sc]
all_scopes |= scope_tuple.parts
except KeyError:
pass
return all_scopes
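# Example (illustrative, assuming the public scope names defined above):
#   normalize_scopes(['osf.users.email_read']) returns {'users.email_read', 'always_public'};
#   unrecognized scope names are silently ignored.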
if __name__ == '__main__':
# Print some data to console, to help audit what views/core scopes map to a given public/composed scope
# Although represented internally as a set, print as a sorted list for readability.
from pprint import pprint as pp
pp({k: sorted(v.parts)
for k, v in public_scopes.items()})
| pattisdr/osf.io | framework/auth/oauth_scopes.py | Python | apache-2.0 | 18,447 |
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import contextlib
import math
import numpy as np
import os
import time
import unittest
import paddle
import paddle.dataset.conll05 as conll05
import paddle.fluid as fluid
word_dict, verb_dict, label_dict = conll05.get_dict()
word_dict_len = len(word_dict)
label_dict_len = len(label_dict)
pred_dict_len = len(verb_dict)
mark_dict_len = 2
word_dim = 32
mark_dim = 5
hidden_dim = 512
depth = 8
mix_hidden_lr = 1e-3
IS_SPARSE = True
PASS_NUM = 10
BATCH_SIZE = 10
embedding_name = 'emb'
def load_parameter(file_name, h, w):
with open(file_name, 'rb') as f:
f.read(16) # skip header.
return np.fromfile(f, dtype=np.float32).reshape(h, w)
def db_lstm(word, predicate, ctx_n2, ctx_n1, ctx_0, ctx_p1, ctx_p2, mark,
**ignored):
# 8 features
predicate_embedding = fluid.layers.embedding(
input=predicate,
size=[pred_dict_len, word_dim],
dtype='float32',
is_sparse=IS_SPARSE,
param_attr='vemb')
mark_embedding = fluid.layers.embedding(
input=mark,
size=[mark_dict_len, mark_dim],
dtype='float32',
is_sparse=IS_SPARSE)
word_input = [word, ctx_n2, ctx_n1, ctx_0, ctx_p1, ctx_p2]
emb_layers = [
fluid.layers.embedding(
size=[word_dict_len, word_dim],
input=x,
param_attr=fluid.ParamAttr(
name=embedding_name, trainable=False)) for x in word_input
]
emb_layers.append(predicate_embedding)
emb_layers.append(mark_embedding)
hidden_0_layers = [
fluid.layers.fc(input=emb, size=hidden_dim, act='tanh')
for emb in emb_layers
]
hidden_0 = fluid.layers.sums(input=hidden_0_layers)
lstm_0 = fluid.layers.dynamic_lstm(
input=hidden_0,
size=hidden_dim,
candidate_activation='relu',
gate_activation='sigmoid',
cell_activation='sigmoid')
# stack L-LSTM and R-LSTM with direct edges
input_tmp = [hidden_0, lstm_0]
for i in range(1, depth):
mix_hidden = fluid.layers.sums(input=[
fluid.layers.fc(input=input_tmp[0], size=hidden_dim, act='tanh'),
fluid.layers.fc(input=input_tmp[1], size=hidden_dim, act='tanh')
])
lstm = fluid.layers.dynamic_lstm(
input=mix_hidden,
size=hidden_dim,
candidate_activation='relu',
gate_activation='sigmoid',
cell_activation='sigmoid',
is_reverse=((i % 2) == 1))
input_tmp = [mix_hidden, lstm]
feature_out = fluid.layers.sums(input=[
fluid.layers.fc(input=input_tmp[0], size=label_dict_len, act='tanh'),
fluid.layers.fc(input=input_tmp[1], size=label_dict_len, act='tanh')
])
return feature_out
def to_lodtensor(data, place):
seq_lens = [len(seq) for seq in data]
cur_len = 0
lod = [cur_len]
for l in seq_lens:
cur_len += l
lod.append(cur_len)
flattened_data = np.concatenate(data, axis=0).astype("int64")
flattened_data = flattened_data.reshape([len(flattened_data), 1])
res = fluid.LoDTensor()
res.set(flattened_data, place)
res.set_lod([lod])
return res
def create_random_lodtensor(lod, place, low, high):
data = np.random.random_integers(low, high, [lod[-1], 1]).astype("int64")
res = fluid.LoDTensor()
res.set(data, place)
res.set_lod([lod])
return res
def train(use_cuda, save_dirname=None, is_local=True):
# define network topology
word = fluid.layers.data(
name='word_data', shape=[1], dtype='int64', lod_level=1)
predicate = fluid.layers.data(
name='verb_data', shape=[1], dtype='int64', lod_level=1)
ctx_n2 = fluid.layers.data(
name='ctx_n2_data', shape=[1], dtype='int64', lod_level=1)
ctx_n1 = fluid.layers.data(
name='ctx_n1_data', shape=[1], dtype='int64', lod_level=1)
ctx_0 = fluid.layers.data(
name='ctx_0_data', shape=[1], dtype='int64', lod_level=1)
ctx_p1 = fluid.layers.data(
name='ctx_p1_data', shape=[1], dtype='int64', lod_level=1)
ctx_p2 = fluid.layers.data(
name='ctx_p2_data', shape=[1], dtype='int64', lod_level=1)
mark = fluid.layers.data(
name='mark_data', shape=[1], dtype='int64', lod_level=1)
feature_out = db_lstm(**locals())
target = fluid.layers.data(
name='target', shape=[1], dtype='int64', lod_level=1)
crf_cost = fluid.layers.linear_chain_crf(
input=feature_out,
label=target,
param_attr=fluid.ParamAttr(
name='crfw', learning_rate=mix_hidden_lr))
avg_cost = fluid.layers.mean(crf_cost)
# TODO(qiao)
# check other optimizers and check why out will be NAN
sgd_optimizer = fluid.optimizer.SGD(
learning_rate=fluid.layers.exponential_decay(
learning_rate=0.01,
decay_steps=100000,
decay_rate=0.5,
staircase=True))
sgd_optimizer.minimize(avg_cost)
# TODO(qiao)
# add dependency track and move this config before optimizer
crf_decode = fluid.layers.crf_decoding(
input=feature_out, param_attr=fluid.ParamAttr(name='crfw'))
train_data = paddle.batch(
paddle.reader.shuffle(
paddle.dataset.conll05.test(), buf_size=8192),
batch_size=BATCH_SIZE)
place = fluid.CUDAPlace(0) if use_cuda else fluid.CPUPlace()
feeder = fluid.DataFeeder(
feed_list=[
word, ctx_n2, ctx_n1, ctx_0, ctx_p1, ctx_p2, predicate, mark, target
],
place=place)
exe = fluid.Executor(place)
def train_loop(main_program):
exe.run(fluid.default_startup_program())
embedding_param = fluid.global_scope().find_var(
embedding_name).get_tensor()
embedding_param.set(
load_parameter(conll05.get_embedding(), word_dict_len, word_dim),
place)
start_time = time.time()
batch_id = 0
for pass_id in xrange(PASS_NUM):
for data in train_data():
cost = exe.run(main_program,
feed=feeder.feed(data),
fetch_list=[avg_cost])
cost = cost[0]
if batch_id % 10 == 0:
print("avg_cost:" + str(cost))
if batch_id != 0:
print("second per batch: " + str((time.time(
) - start_time) / batch_id))
# Set the threshold low to speed up the CI test
if float(cost) < 60.0:
if save_dirname is not None:
# TODO(liuyiqun): Change the target to crf_decode
fluid.io.save_inference_model(save_dirname, [
'word_data', 'verb_data', 'ctx_n2_data',
'ctx_n1_data', 'ctx_0_data', 'ctx_p1_data',
'ctx_p2_data', 'mark_data'
], [feature_out], exe)
return
batch_id = batch_id + 1
if is_local:
train_loop(fluid.default_main_program())
else:
port = os.getenv("PADDLE_INIT_PORT", "6174")
pserver_ips = os.getenv("PADDLE_INIT_PSERVERS") # ip,ip...
eplist = []
for ip in pserver_ips.split(","):
eplist.append(':'.join([ip, port]))
pserver_endpoints = ",".join(eplist) # ip:port,ip:port...
trainers = int(os.getenv("TRAINERS"))
current_endpoint = os.getenv("POD_IP") + ":" + port
trainer_id = int(os.getenv("PADDLE_INIT_TRAINER_ID"))
training_role = os.getenv("TRAINING_ROLE", "TRAINER")
t = fluid.DistributeTranspiler()
t.transpile(trainer_id, pservers=pserver_endpoints, trainers=trainers)
if training_role == "PSERVER":
pserver_prog = t.get_pserver_program(current_endpoint)
pserver_startup = t.get_startup_program(current_endpoint,
pserver_prog)
exe.run(pserver_startup)
exe.run(pserver_prog)
elif training_role == "TRAINER":
train_loop(t.get_trainer_program())
def infer(use_cuda, save_dirname=None):
if save_dirname is None:
return
place = fluid.CUDAPlace(0) if use_cuda else fluid.CPUPlace()
exe = fluid.Executor(place)
inference_scope = fluid.core.Scope()
with fluid.scope_guard(inference_scope):
# Use fluid.io.load_inference_model to obtain the inference program desc,
# the feed_target_names (the names of variables that will be fed
# data using feed operators), and the fetch_targets (variables that
# we want to obtain data from using fetch operators).
[inference_program, feed_target_names,
fetch_targets] = fluid.io.load_inference_model(save_dirname, exe)
lod = [0, 4, 10]
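# Note added for clarity: LoD [0, 4, 10] is an offset list describing two sequences of
# lengths 4 and 6 over the 10 rows generated below.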
word = create_random_lodtensor(
lod, place, low=0, high=word_dict_len - 1)
pred = create_random_lodtensor(
lod, place, low=0, high=pred_dict_len - 1)
ctx_n2 = create_random_lodtensor(
lod, place, low=0, high=word_dict_len - 1)
ctx_n1 = create_random_lodtensor(
lod, place, low=0, high=word_dict_len - 1)
ctx_0 = create_random_lodtensor(
lod, place, low=0, high=word_dict_len - 1)
ctx_p1 = create_random_lodtensor(
lod, place, low=0, high=word_dict_len - 1)
ctx_p2 = create_random_lodtensor(
lod, place, low=0, high=word_dict_len - 1)
mark = create_random_lodtensor(
lod, place, low=0, high=mark_dict_len - 1)
# Construct feed as a dictionary of {feed_target_name: feed_target_data}
# and results will contain a list of data corresponding to fetch_targets.
assert feed_target_names[0] == 'word_data'
assert feed_target_names[1] == 'verb_data'
assert feed_target_names[2] == 'ctx_n2_data'
assert feed_target_names[3] == 'ctx_n1_data'
assert feed_target_names[4] == 'ctx_0_data'
assert feed_target_names[5] == 'ctx_p1_data'
assert feed_target_names[6] == 'ctx_p2_data'
assert feed_target_names[7] == 'mark_data'
results = exe.run(inference_program,
feed={
feed_target_names[0]: word,
feed_target_names[1]: pred,
feed_target_names[2]: ctx_n2,
feed_target_names[3]: ctx_n1,
feed_target_names[4]: ctx_0,
feed_target_names[5]: ctx_p1,
feed_target_names[6]: ctx_p2,
feed_target_names[7]: mark
},
fetch_list=fetch_targets,
return_numpy=False)
print(results[0].lod())
np_data = np.array(results[0])
print("Inference Shape: ", np_data.shape)
def main(use_cuda, is_local=True):
if use_cuda and not fluid.core.is_compiled_with_cuda():
return
# Directory for saving the trained model
save_dirname = "label_semantic_roles.inference.model"
train(use_cuda, save_dirname, is_local)
infer(use_cuda, save_dirname)
class TestLabelSemanticRoles(unittest.TestCase):
def test_cuda(self):
with self.scope_prog_guard():
main(use_cuda=True)
def test_cpu(self):
with self.scope_prog_guard():
main(use_cuda=False)
@contextlib.contextmanager
def scope_prog_guard(self):
prog = fluid.Program()
startup_prog = fluid.Program()
scope = fluid.core.Scope()
with fluid.scope_guard(scope):
with fluid.program_guard(prog, startup_prog):
yield
if __name__ == '__main__':
unittest.main()
| pkuyym/Paddle | python/paddle/fluid/tests/book/test_label_semantic_roles.py | Python | apache-2.0 | 12,571 |
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from warehouse import tasks
from warehouse.integrations.github import utils
@tasks.task(ignore_result=True, acks_late=True)
def analyze_disclosure_task(request, disclosure_record, origin):
utils.analyze_disclosure(
request=request,
disclosure_record=disclosure_record,
origin=origin,
)
| pypa/warehouse | warehouse/integrations/github/tasks.py | Python | apache-2.0 | 862 |
import pytest
@pytest.mark.parametrize(
"method,path",
[
('get_html', '/'),
('get_html', '/parameters/GB020'),
('get_json', '/parameters/GB020.geojson'),
('get_json', '/parameters/GB020.geojson?domainelement=GB020-1'),
('get_html', '/languages/nene1249'),
('get_html', '/contributions'),
('get_html', '/contributors'),
('get_html', '/familys'),
('get_dt', '/contributions'),
('get_dt', '/familys'),
('get_dt', '/values'),
('get_html', '/contributors/ML'),
])
def test_pages(app, method, path):
getattr(app, method)(path)
| clld/grambank | tests/test_functional.py | Python | apache-2.0 | 635 |
"""MySensors platform that offers a Climate (MySensors-HVAC) component."""
from homeassistant.components import mysensors
from homeassistant.components.climate import ClimateDevice
from homeassistant.components.climate.const import (
ATTR_TARGET_TEMP_HIGH, ATTR_TARGET_TEMP_LOW, DOMAIN, HVAC_MODE_AUTO,
HVAC_MODE_COOL, HVAC_MODE_HEAT, SUPPORT_FAN_MODE,
SUPPORT_TARGET_TEMPERATURE,
SUPPORT_TARGET_TEMPERATURE_RANGE,
HVAC_MODE_OFF)
from homeassistant.const import (
ATTR_TEMPERATURE, TEMP_CELSIUS, TEMP_FAHRENHEIT)
DICT_HA_TO_MYS = {
HVAC_MODE_AUTO: 'AutoChangeOver',
HVAC_MODE_COOL: 'CoolOn',
HVAC_MODE_HEAT: 'HeatOn',
HVAC_MODE_OFF: 'Off',
}
DICT_MYS_TO_HA = {
'AutoChangeOver': HVAC_MODE_AUTO,
'CoolOn': HVAC_MODE_COOL,
'HeatOn': HVAC_MODE_HEAT,
'Off': HVAC_MODE_OFF,
}
FAN_LIST = ['Auto', 'Min', 'Normal', 'Max']
OPERATION_LIST = [HVAC_MODE_OFF, HVAC_MODE_AUTO, HVAC_MODE_COOL,
HVAC_MODE_HEAT]
async def async_setup_platform(
hass, config, async_add_entities, discovery_info=None):
"""Set up the mysensors climate."""
mysensors.setup_mysensors_platform(
hass, DOMAIN, discovery_info, MySensorsHVAC,
async_add_entities=async_add_entities)
class MySensorsHVAC(mysensors.device.MySensorsEntity, ClimateDevice):
"""Representation of a MySensors HVAC."""
@property
def supported_features(self):
"""Return the list of supported features."""
features = 0
set_req = self.gateway.const.SetReq
if set_req.V_HVAC_SPEED in self._values:
features = features | SUPPORT_FAN_MODE
if (set_req.V_HVAC_SETPOINT_COOL in self._values and
set_req.V_HVAC_SETPOINT_HEAT in self._values):
features = (
features | SUPPORT_TARGET_TEMPERATURE_RANGE)
else:
features = features | SUPPORT_TARGET_TEMPERATURE
return features
@property
def assumed_state(self):
"""Return True if unable to access real state of entity."""
return self.gateway.optimistic
@property
def temperature_unit(self):
"""Return the unit of measurement."""
return TEMP_CELSIUS if self.gateway.metric else TEMP_FAHRENHEIT
@property
def current_temperature(self):
"""Return the current temperature."""
value = self._values.get(self.gateway.const.SetReq.V_TEMP)
if value is not None:
value = float(value)
return value
@property
def target_temperature(self):
"""Return the temperature we try to reach."""
set_req = self.gateway.const.SetReq
if set_req.V_HVAC_SETPOINT_COOL in self._values and \
set_req.V_HVAC_SETPOINT_HEAT in self._values:
return None
temp = self._values.get(set_req.V_HVAC_SETPOINT_COOL)
if temp is None:
temp = self._values.get(set_req.V_HVAC_SETPOINT_HEAT)
return float(temp) if temp is not None else None
@property
def target_temperature_high(self):
"""Return the highbound target temperature we try to reach."""
set_req = self.gateway.const.SetReq
if set_req.V_HVAC_SETPOINT_HEAT in self._values:
temp = self._values.get(set_req.V_HVAC_SETPOINT_COOL)
return float(temp) if temp is not None else None
@property
def target_temperature_low(self):
"""Return the lowbound target temperature we try to reach."""
set_req = self.gateway.const.SetReq
if set_req.V_HVAC_SETPOINT_COOL in self._values:
temp = self._values.get(set_req.V_HVAC_SETPOINT_HEAT)
return float(temp) if temp is not None else None
@property
def hvac_mode(self):
"""Return current operation ie. heat, cool, idle."""
return self._values.get(self.value_type)
@property
def hvac_modes(self):
"""List of available operation modes."""
return OPERATION_LIST
@property
def fan_mode(self):
"""Return the fan setting."""
return self._values.get(self.gateway.const.SetReq.V_HVAC_SPEED)
@property
def fan_modes(self):
"""List of available fan modes."""
return FAN_LIST
async def async_set_temperature(self, **kwargs):
"""Set new target temperature."""
set_req = self.gateway.const.SetReq
temp = kwargs.get(ATTR_TEMPERATURE)
low = kwargs.get(ATTR_TARGET_TEMP_LOW)
high = kwargs.get(ATTR_TARGET_TEMP_HIGH)
heat = self._values.get(set_req.V_HVAC_SETPOINT_HEAT)
cool = self._values.get(set_req.V_HVAC_SETPOINT_COOL)
updates = []
if temp is not None:
if heat is not None:
# Set HEAT Target temperature
value_type = set_req.V_HVAC_SETPOINT_HEAT
elif cool is not None:
# Set COOL Target temperature
value_type = set_req.V_HVAC_SETPOINT_COOL
if heat is not None or cool is not None:
updates = [(value_type, temp)]
elif all(val is not None for val in (low, high, heat, cool)):
updates = [
(set_req.V_HVAC_SETPOINT_HEAT, low),
(set_req.V_HVAC_SETPOINT_COOL, high)]
for value_type, value in updates:
self.gateway.set_child_value(
self.node_id, self.child_id, value_type, value)
if self.gateway.optimistic:
# Optimistically assume that device has changed state
self._values[value_type] = value
self.async_schedule_update_ha_state()
async def async_set_fan_mode(self, fan_mode):
"""Set new target temperature."""
set_req = self.gateway.const.SetReq
self.gateway.set_child_value(
self.node_id, self.child_id, set_req.V_HVAC_SPEED, fan_mode)
if self.gateway.optimistic:
# Optimistically assume that device has changed state
self._values[set_req.V_HVAC_SPEED] = fan_mode
self.async_schedule_update_ha_state()
async def async_set_hvac_mode(self, hvac_mode):
"""Set new target temperature."""
self.gateway.set_child_value(
self.node_id, self.child_id, self.value_type,
DICT_HA_TO_MYS[hvac_mode])
if self.gateway.optimistic:
# Optimistically assume that device has changed state
self._values[self.value_type] = hvac_mode
self.async_schedule_update_ha_state()
async def async_update(self):
"""Update the controller with the latest value from a sensor."""
await super().async_update()
self._values[self.value_type] = DICT_MYS_TO_HA[
self._values[self.value_type]]
| jabesq/home-assistant | homeassistant/components/mysensors/climate.py | Python | apache-2.0 | 6,774 |
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from airflow.hooks.S3_hook import S3Hook
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class S3ToFileSystem(BaseOperator):
@apply_defaults
def __init__(
self,
s3_bucket,
s3_key,
download_file_location,
s3_conn_id='s3_default',
* args, **kwargs):
super(S3ToFileSystem, self).__init__(*args, **kwargs)
self.local_location = download_file_location
self.s3_bucket = s3_bucket
self.s3_key = s3_key
self.s3_conn_id = s3_conn_id
def execute(self, context):
self.s3 = S3Hook(s3_conn_id=self.s3_conn_id)
file_paths = []
for k in self.s3.list_keys(self.s3_bucket, prefix=self.s3_key):
kpath = os.path.join(self.local_location, os.path.basename(k))
# Download the file
self.s3.download_file(self.s3_bucket, k, kpath)
file_paths.append(kpath)
context['ti'].xcom_push(key=kpath, value="")
context['ti'].xcom_push(key="files_added", value=file_paths)
# read in chunks
# start reading from the file.
# insert in respective SQS operators
| brandsoulmates/incubator-airflow | airflow/operators/S3_to_FS.py | Python | apache-2.0 | 1,792 |
import logging
from concurrent.futures import CancelledError
import asyncio
from aiohttp import web
from aioredis import create_redis
from etc.ice_fetcher import get_current_song
from config.settings import STREAM_HOST, STREAM_PORT
server_logger = logging.getLogger('aiohttp.server')
async def push_current_track(request):
"""
View that handles SSE updates of the current track obtained from the Icecast server,
using a keep-alive text/event-stream response.
Args:
request: HTTP request (aiohttp.web_reqrep.Request)
"""
if request.headers['Accept'] != 'text/event-stream':
raise web.HTTPFound('/')
# Construct Stream Response for SSE
stream = web.StreamResponse()
stream.headers['Content-Type'] = 'text/event-stream'
stream.headers['Cache-Control'] = 'no-cache'
stream.headers['Connection'] = 'keep-alive'
await stream.prepare(request)
redis = await create_redis(('localhost', 6379))
channel, _ = await redis.subscribe('CHANNEL', '')
try:
current_song = await get_current_song(icecast_host=STREAM_HOST,
icecast_port=STREAM_PORT)
if current_song:
stream.write(b'event: track_update\r\n')
stream.write(b'data: ' + str.encode(current_song) + b'\r\n\r\n')
else:
# pass because no song available, will wait for next one from Redis
pass
except Exception as e:
server_logger.error('got error while getting current song {}'.format(e))
# going into loop to get updates from Redis
try:
try:
while True:
# check the channel queue size
if channel._queue.qsize() > 0:
for msg in range(channel._queue.qsize()):
message = await channel.get()
if message:
# it is possible that no song is playing, so we check for that;
# otherwise the client would hit the server every 3 seconds
# asking for a new song.
stream.write(b'event: track_update\r\n')
stream.write(b'data: ' + message + b'\r\n\r\n')
else:
stream.write(b'event: ping\r\n')
stream.write(b'data: ' + b'waiting...' + b'\r\n\r\n')
await asyncio.sleep(10, loop=request.app.loop)
except Exception as e:
import traceback
server_logger.error('Connection with redis broken? {}'.format(e))
traceback.print_exc()
except CancelledError as e:
server_logger.error('Future got canceled {}'.format(e))
# here we mark that response processing is finished
# After write_eof() call any manipulations with the response object are forbidden.
print('will call eof')
await stream.write_eof()
return stream
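# --- Illustrative usage (hedged sketch, not part of the original module) ---
# One way this view could be mounted in an aiohttp application; the route
# path and port are assumptions, not values taken from this project.
if __name__ == '__main__':
    app = web.Application()
    app.router.add_get('/track/updates', push_current_track)
    web.run_app(app, port=8080)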
|
wolendranh/movie_radio
|
radio/views/track_info_sse.py
|
Python
|
apache-2.0
| 2,957
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2017-10-04 21:23
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [("peering", "0003_auto_20170903_1235")]
operations = [
migrations.AlterField(
model_name="autonomoussystem",
name="ipv4_as_set",
field=models.CharField(blank=True, max_length=128, null=True),
),
migrations.AlterField(
model_name="autonomoussystem",
name="ipv4_max_prefixes",
field=models.PositiveIntegerField(blank=True, null=True),
),
migrations.AlterField(
model_name="autonomoussystem",
name="ipv6_as_set",
field=models.CharField(blank=True, max_length=128, null=True),
),
migrations.AlterField(
model_name="autonomoussystem",
name="ipv6_max_prefixes",
field=models.PositiveIntegerField(blank=True, null=True),
),
]
|
respawner/peering-manager
|
peering/migrations/0004_auto_20171004_2323.py
|
Python
|
apache-2.0
| 1,014
|
# -*- coding: utf-8 -*-
from subprocess import check_call
def test_shellstreaming_help():
check_call(["shellstreaming", "--help"])
|
laysakura/shellstreaming
|
test/master/test_master_functional.py
|
Python
|
apache-2.0
| 139
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""CTC (Connectionist Temporal Classification) Operations."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_ctc_ops
from tensorflow.python.ops.nn_grad import _BroadcastMul
from tensorflow.python.util.tf_export import tf_export
# pylint: disable=protected-access, invalid-name
@tf_export("nn.ctc_loss")
def ctc_loss(labels, inputs, sequence_length,
preprocess_collapse_repeated=False,
ctc_merge_repeated=True,
ignore_longer_outputs_than_inputs=False, time_major=True):
"""Computes the CTC (Connectionist Temporal Classification) Loss.
This op implements the CTC loss as presented in the article:
[A. Graves, S. Fernandez, F. Gomez, J. Schmidhuber.
Connectionist Temporal Classification: Labeling Unsegmented Sequence Data
with Recurrent Neural Networks. ICML 2006, Pittsburgh, USA,
pp. 369-376.](http://www.cs.toronto.edu/~graves/icml_2006.pdf)
Input requirements:
```
sequence_length(b) <= time for all b
max(labels.indices(labels.indices[:, 1] == b, 2))
<= sequence_length(b) for all b.
```
Notes:
This op performs the softmax operation for you, so `inputs` should be,
e.g., linear projections of the outputs of an LSTM.
The `inputs` Tensor's innermost dimension size, `num_classes`, represents
`num_labels + 1` classes, where num_labels is the number of true labels, and
the largest value `(num_classes - 1)` is reserved for the blank label.
For example, for a vocabulary containing 3 labels `[a, b, c]`,
`num_classes = 4` and the labels indexing is `{a: 0, b: 1, c: 2, blank: 3}`.
Regarding the arguments `preprocess_collapse_repeated` and
`ctc_merge_repeated`:
If `preprocess_collapse_repeated` is True, then a preprocessing step runs
before loss calculation, wherein repeated labels passed to the loss
are merged into single labels. This is useful if the training labels come
from, e.g., forced alignments and therefore have unnecessary repetitions.
If `ctc_merge_repeated` is set False, then deep within the CTC calculation,
repeated non-blank labels will not be merged and are interpreted
as individual labels. This is a simplified (non-standard) version of CTC.
Here is a table of the (roughly) expected first order behavior:
* `preprocess_collapse_repeated=False`, `ctc_merge_repeated=True`
Classical CTC behavior: Outputs true repeated classes with blanks in
between, and can also output repeated classes with no blanks in
between that need to be collapsed by the decoder.
* `preprocess_collapse_repeated=True`, `ctc_merge_repeated=False`
Never learns to output repeated classes, as they are collapsed
in the input labels before training.
* `preprocess_collapse_repeated=False`, `ctc_merge_repeated=False`
Outputs repeated classes with blanks in between, but generally does not
require the decoder to collapse/merge repeated classes.
* `preprocess_collapse_repeated=True`, `ctc_merge_repeated=True`
Untested. Very likely will not learn to output repeated classes.
The `ignore_longer_outputs_than_inputs` option lets you specify the behavior
of the CTCLoss when dealing with sequences that have longer outputs than
inputs. If true, the CTCLoss will simply return zero gradient for those
items, otherwise an InvalidArgument error is returned, stopping training.
Args:
labels: An `int32` `SparseTensor`.
`labels.indices[i, :] == [b, t]` means `labels.values[i]` stores
the id for (batch b, time t).
`labels.values[i]` must take on values in `[0, num_labels)`.
See `core/ops/ctc_ops.cc` for more details.
inputs: 3-D `float` `Tensor`.
If time_major == False, this will be a `Tensor` shaped:
`[batch_size, max_time, num_classes]`.
If time_major == True (default), this will be a `Tensor` shaped:
`[max_time, batch_size, num_classes]`.
The logits.
sequence_length: 1-D `int32` vector, size `[batch_size]`.
The sequence lengths.
preprocess_collapse_repeated: Boolean. Default: False.
If True, repeated labels are collapsed prior to the CTC calculation.
ctc_merge_repeated: Boolean. Default: True.
ignore_longer_outputs_than_inputs: Boolean. Default: False.
If True, sequences with longer outputs than inputs will be ignored.
time_major: The shape format of the `inputs` Tensors.
If True, these `Tensors` must be shaped `[max_time, batch_size,
num_classes]`.
If False, these `Tensors` must be shaped `[batch_size, max_time,
num_classes]`.
Using `time_major = True` (default) is a bit more efficient because it
avoids transposes at the beginning of the ctc_loss calculation. However,
most TensorFlow data is batch-major, so this function also accepts
inputs in batch-major form.
Returns:
A 1-D `float` `Tensor`, size `[batch]`, containing the negative log
probabilities.
Raises:
TypeError: if labels is not a `SparseTensor`.
"""
# The second, third, etc. output tensors contain the gradients. We use them in
# _CTCLossGrad() below.
if not isinstance(labels, sparse_tensor.SparseTensor):
raise TypeError("Expected labels (first argument) to be a SparseTensor")
# For internal calculations, we transpose to [time, batch, num_classes]
if not time_major:
inputs = array_ops.transpose(inputs, [1, 0, 2]) # (B,T,N) => (T,B,N)
loss, _ = gen_ctc_ops.ctc_loss(
inputs,
labels.indices,
labels.values,
sequence_length,
preprocess_collapse_repeated=preprocess_collapse_repeated,
ctc_merge_repeated=ctc_merge_repeated,
ignore_longer_outputs_than_inputs=ignore_longer_outputs_than_inputs)
return loss
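# --- Illustrative usage (hedged sketch, not part of the original file) ---
# A minimal call to the ctc_loss defined above, with time-major logits and a
# SparseTensor of labels; every shape and value below is made up.
#
#     import tensorflow as tf
#     batch_size, max_time, num_classes = 2, 5, 4   # 3 labels + 1 blank
#     logits = tf.random.uniform([max_time, batch_size, num_classes])
#     labels = tf.SparseTensor(indices=[[0, 0], [0, 1], [1, 0]],
#                              values=[0, 1, 2],
#                              dense_shape=[batch_size, 2])
#     seq_len = tf.constant([max_time, max_time], dtype=tf.int32)
#     loss = ctc_loss(labels, logits, seq_len)   # shape: [batch_size]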
# pylint: disable=unused-argument
@ops.RegisterGradient("CTCLoss")
def _CTCLossGrad(op, grad_loss, _):
"""The derivative provided by CTC Loss.
Args:
op: the CTCLoss op.
grad_loss: The backprop for cost.
Returns:
The CTC Loss gradient.
"""
# Outputs are: loss, grad
#
# Currently there is no way to take the second derivative of this op
# due to the fused implementation's interaction with tf.gradients(),
# so we make sure we prevent silently incorrect results by raising
# an error if the second derivative is requested via prevent_gradient.
grad_without_gradient = array_ops.prevent_gradient(
op.outputs[1], message="Currently there is no way to take the second "
" derivative of ctc_loss due to the fused implementation's interaction "
" with tf.gradients()")
# Return gradient for inputs and None for
# labels_indices, labels_values and sequence_length
return [_BroadcastMul(grad_loss, grad_without_gradient), None, None, None]
@tf_export("nn.ctc_greedy_decoder")
def ctc_greedy_decoder(inputs, sequence_length, merge_repeated=True):
"""Performs greedy decoding on the logits given in input (best path).
Note: Regardless of the value of merge_repeated, if the maximum index of a
given time and batch corresponds to the blank index `(num_classes - 1)`, no
new element is emitted.
If `merge_repeated` is `True`, merge repeated classes in output.
This means that if consecutive logits' maximum indices are the same,
only the first of these is emitted. The sequence `A B B * B * B` (where '*'
is the blank label) becomes
* `A B B B` if `merge_repeated=True`.
* `A B B B B` if `merge_repeated=False`.
Args:
inputs: 3-D `float` `Tensor` sized
`[max_time, batch_size, num_classes]`. The logits.
sequence_length: 1-D `int32` vector containing sequence lengths,
having size `[batch_size]`.
merge_repeated: Boolean. Default: True.
Returns:
A tuple `(decoded, neg_sum_logits)` where
decoded: A single-element list. `decoded[0]`
is a `SparseTensor` containing the decoded outputs s.t.:
`decoded.indices`: Indices matrix `(total_decoded_outputs, 2)`.
The rows store: `[batch, time]`.
`decoded.values`: Values vector, size `(total_decoded_outputs)`.
The vector stores the decoded classes.
`decoded.dense_shape`: Shape vector, size `(2)`.
The shape values are: `[batch_size, max_decoded_length]`
neg_sum_logits: A `float` matrix `(batch_size x 1)` containing, for the
sequence found, the negative of the sum of the greatest logit at each
timeframe.
"""
outputs = gen_ctc_ops.ctc_greedy_decoder(
inputs, sequence_length, merge_repeated=merge_repeated)
(decoded_ix, decoded_val, decoded_shape, log_probabilities) = outputs
return ([sparse_tensor.SparseTensor(decoded_ix, decoded_val, decoded_shape)],
log_probabilities)
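# --- Illustrative usage (hedged sketch, not part of the original file) ---
# Greedy decoding of time-major logits (same made-up shapes as in the
# ctc_loss sketch above); the dense conversion is only for readability.
#
#     decoded, neg_sum_logits = ctc_greedy_decoder(logits, seq_len)
#     dense = tf.sparse.to_dense(decoded[0], default_value=-1)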
@tf_export(v1=["nn.ctc_beam_search_decoder"])
def ctc_beam_search_decoder(inputs, sequence_length, beam_width=100,
top_paths=1, merge_repeated=True):
"""Performs beam search decoding on the logits given in input.
**Note** The `ctc_greedy_decoder` is a special case of the
`ctc_beam_search_decoder` with `top_paths=1` and `beam_width=1` (but
that decoder is faster for this special case).
If `merge_repeated` is `True`, merge repeated classes in the output beams.
This means that if consecutive entries in a beam are the same,
only the first of these is emitted. That is, when the sequence is
`A B B * B * B` (where '*' is the blank label), the return value is:
* `A B` if `merge_repeated = True`.
* `A B B B` if `merge_repeated = False`.
Args:
inputs: 3-D `float` `Tensor`, size
`[max_time x batch_size x num_classes]`. The logits.
sequence_length: 1-D `int32` vector containing sequence lengths,
having size `[batch_size]`.
beam_width: An int scalar >= 0 (beam search beam width).
top_paths: An int scalar >= 0, <= beam_width (controls output size).
merge_repeated: Boolean. Default: True.
Returns:
A tuple `(decoded, log_probabilities)` where
decoded: A list of length top_paths, where `decoded[j]`
is a `SparseTensor` containing the decoded outputs:
`decoded[j].indices`: Indices matrix `(total_decoded_outputs[j] x 2)`
The rows store: [batch, time].
`decoded[j].values`: Values vector, size `(total_decoded_outputs[j])`.
The vector stores the decoded classes for beam j.
`decoded[j].dense_shape`: Shape vector, size `(2)`.
The shape values are: `[batch_size, max_decoded_length[j]]`.
log_probability: A `float` matrix `(batch_size x top_paths)` containing
sequence log-probabilities.
"""
decoded_ixs, decoded_vals, decoded_shapes, log_probabilities = (
gen_ctc_ops.ctc_beam_search_decoder(
inputs, sequence_length, beam_width=beam_width, top_paths=top_paths,
merge_repeated=merge_repeated))
return (
[sparse_tensor.SparseTensor(ix, val, shape) for (ix, val, shape)
in zip(decoded_ixs, decoded_vals, decoded_shapes)],
log_probabilities)
@tf_export("nn.ctc_beam_search_decoder", v1=["nn.ctc_beam_search_decoder_v2"])
def ctc_beam_search_decoder_v2(inputs, sequence_length, beam_width=100,
top_paths=1):
"""Performs beam search decoding on the logits given in input.
**Note** The `ctc_greedy_decoder` is a special case of the
`ctc_beam_search_decoder` with `top_paths=1` and `beam_width=1` (but
that decoder is faster for this special case).
Args:
inputs: 3-D `float` `Tensor`, size
`[max_time, batch_size, num_classes]`. The logits.
sequence_length: 1-D `int32` vector containing sequence lengths,
having size `[batch_size]`.
beam_width: An int scalar >= 0 (beam search beam width).
top_paths: An int scalar >= 0, <= beam_width (controls output size).
Returns:
A tuple `(decoded, log_probabilities)` where
decoded: A list of length top_paths, where `decoded[j]`
is a `SparseTensor` containing the decoded outputs:
`decoded[j].indices`: Indices matrix `[total_decoded_outputs[j], 2]`;
The rows store: `[batch, time]`.
`decoded[j].values`: Values vector, size `[total_decoded_outputs[j]]`.
The vector stores the decoded classes for beam `j`.
`decoded[j].dense_shape`: Shape vector, size `(2)`.
The shape values are: `[batch_size, max_decoded_length[j]]`.
log_probability: A `float` matrix `[batch_size, top_paths]` containing
sequence log-probabilities.
"""
# Note, merge_repeated is an invalid optimization that is removed from the
# public API: it returns low probability paths.
return ctc_beam_search_decoder(inputs, sequence_length=sequence_length,
beam_width=beam_width, top_paths=top_paths,
merge_repeated=False)
ops.NotDifferentiable("CTCGreedyDecoder")
ops.NotDifferentiable("CTCBeamSearchDecoder")
|
girving/tensorflow
|
tensorflow/python/ops/ctc_ops.py
|
Python
|
apache-2.0
| 13,730
|
"""Fake Wings component"""
from threading import Thread
import time
# Use fake GPIO
import GPIOSim.RPi.in_mem as GPIO
from tuxeatpi.components.wings import Wings
from tuxeatpi.fake_components.base import push_switch
class FakeWings(Wings):
"""Fake wings class"""
def __init__(self, pins, event_queue, logger):
self.move_wings_thread = FakeWingsMover(pins.get('position'))
Wings.__init__(self, pins, event_queue, logger)
def move_start(self):
"""Override move_start function for fake one"""
self.move_wings_thread = FakeWingsMover(self.pins.get('position'))
self.move_wings_thread.start()
try:
super(FakeWings, self).move_start()
except Exception: # pylint: disable=W0703
pass
def move_stop(self):
"""Override move_stop function for fake one"""
self.move_wings_thread.stop()
super(FakeWings, self).move_stop()
def push_wing(self, side):
"""Simulation push switch function"""
push_switch(GPIO.GPIO_TO_PIN[self.pins[side + '_switch']])
class FakeWingsMover(Thread):
"""Thread which simulate wings movement"""
# TODO make it stoppable in hug with Ctrl-C signal
def __init__(self, position_pin):
Thread.__init__(self)
self.position_pin = position_pin
# start in a stopped state so stop() is safe even before run() is called
self.running = False
def stop(self):
"""Stop moving wings"""
self.running = False
def run(self):
"""Start moving wings"""
# Get pin_id from self.pins
pin_id = GPIO.GPIO_TO_PIN[self.position_pin]
self.running = True
while self.running:
if self.running:
GPIO.set_pin_value(pin_id, 1)
time.sleep(0.1)
if self.running:
GPIO.set_pin_value(pin_id, 0)
time.sleep(0.1)
if self.running:
GPIO.set_pin_value(pin_id, 1)
time.sleep(0.1)
if self.running:
GPIO.set_pin_value(pin_id, 0)
time.sleep(0.25)
|
TuxEatPi/tuxeatpi
|
tuxeatpi/fake_components/wings.py
|
Python
|
apache-2.0
| 2,035
|
import standard_play
import play
import behavior
import robocup
import tactics.line_up
import tactics.defense
import main
class DefendPenalty(play.Play):
def __init__(self):
super().__init__(continuous=True)
self.add_transition(behavior.Behavior.State.start,
behavior.Behavior.State.running, lambda: True,
'immediately')
# lineup
line = robocup.Segment(
robocup.Point(1.5, 1.3), robocup.Point(1.5, 2.5))
lineup = tactics.line_up.LineUp(line)
self.add_subbehavior(lineup, 'lineup')
@classmethod
def score(cls):
gs = main.game_state()
return 0 if gs.is_their_penalty() and gs.is_setup_state(
) and not gs.is_penalty_shootout() else float("inf")
@classmethod
def is_restart(cls):
return True
|
JNeiger/robocup-software
|
soccer/gameplay/plays/restarts/defend_penalty.py
|
Python
|
apache-2.0
| 863
|
import numpy as np
from typing import Any, List, Tuple
from ray.rllib.models.torch.misc import Reshape
from ray.rllib.models.torch.torch_modelv2 import TorchModelV2
from ray.rllib.utils.framework import try_import_torch
from ray.rllib.utils.framework import TensorType
torch, nn = try_import_torch()
if torch:
from torch import distributions as td
from ray.rllib.agents.dreamer.utils import Linear, Conv2d, \
ConvTranspose2d, GRUCell, TanhBijector
ActFunc = Any
# Encoder, part of PlaNET
class ConvEncoder(nn.Module):
"""Standard Convolutional Encoder for Dreamer. This encoder is used
to encode images from an environment into a latent state for the
RSSM model in PlaNET.
"""
def __init__(self,
depth: int = 32,
act: ActFunc = None,
shape: Tuple[int] = (3, 64, 64)):
"""Initializes Conv Encoder
Args:
depth (int): Number of channels in the first conv layer
act (Any): Activation for Encoder, default ReLU
shape (List): Shape of observation input
"""
super().__init__()
self.act = act
if not act:
self.act = nn.ReLU
self.depth = depth
self.shape = shape
init_channels = self.shape[0]
self.layers = [
Conv2d(init_channels, self.depth, 4, stride=2),
self.act(),
Conv2d(self.depth, 2 * self.depth, 4, stride=2),
self.act(),
Conv2d(2 * self.depth, 4 * self.depth, 4, stride=2),
self.act(),
Conv2d(4 * self.depth, 8 * self.depth, 4, stride=2),
self.act(),
]
self.model = nn.Sequential(*self.layers)
def forward(self, x):
# Flatten to [batch*horizon, 3, 64, 64] in loss function
orig_shape = list(x.size())
x = x.view(-1, *(orig_shape[-3:]))
x = self.model(x)
new_shape = orig_shape[:-3] + [32 * self.depth]
x = x.view(*new_shape)
return x
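# --- Illustrative shape check (hedged sketch, not part of the original file) ---
# The four stride-2 convolutions above take a (3, 64, 64) observation down to
# an (8 * depth, 2, 2) feature map, which forward() flattens to 32 * depth
# features per observation, e.g.:
#
#     enc = ConvEncoder(depth=32)
#     obs = torch.zeros(10, 3, 64, 64)
#     print(enc(obs).shape)   # expected: torch.Size([10, 1024])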
# Decoder, part of PlaNET
class ConvDecoder(nn.Module):
"""Standard Convolutional Decoder for Dreamer.
This decoder is used to decode images from the latent state generated
by the transition dynamics model. This is used in calculating loss and
logging gifs for imagined trajectories.
"""
def __init__(self,
input_size: int,
depth: int = 32,
act: ActFunc = None,
shape: Tuple[int] = (3, 64, 64)):
"""Initializes a ConvDecoder instance.
Args:
input_size (int): Input size, usually feature size output from
RSSM.
depth (int): Number of channels in the first conv layer
act (Any): Activation for Encoder, default ReLU
shape (List): Shape of observation input
"""
super().__init__()
self.act = act
if not act:
self.act = nn.ReLU
self.depth = depth
self.shape = shape
self.layers = [
Linear(input_size, 32 * self.depth),
Reshape([-1, 32 * self.depth, 1, 1]),
ConvTranspose2d(32 * self.depth, 4 * self.depth, 5, stride=2),
self.act(),
ConvTranspose2d(4 * self.depth, 2 * self.depth, 5, stride=2),
self.act(),
ConvTranspose2d(2 * self.depth, self.depth, 6, stride=2),
self.act(),
ConvTranspose2d(self.depth, self.shape[0], 6, stride=2),
]
self.model = nn.Sequential(*self.layers)
def forward(self, x):
# x is [batch, hor_length, input_size]
orig_shape = list(x.size())
x = self.model(x)
reshape_size = orig_shape[:-1] + self.shape
mean = x.view(*reshape_size)
# Equivalent to making a multivariate diag
return td.Independent(td.Normal(mean, 1), len(self.shape))
# Reward Model (PlaNET), and Value Function
class DenseDecoder(nn.Module):
"""FC network that outputs a distribution for calculating log_prob.
Used later in DreamerLoss.
"""
def __init__(self,
input_size: int,
output_size: int,
layers: int,
units: int,
dist: str = "normal",
act: ActFunc = None):
"""Initializes FC network
Args:
input_size (int): Input size to network
output_size (int): Output size to network
layers (int): Number of layers in network
units (int): Size of the hidden layers
dist (str): Output distribution, parameterized by FC output
logits.
act (Any): Activation function
"""
super().__init__()
self.layrs = layers
self.units = units
self.act = act
if not act:
self.act = nn.ELU
self.dist = dist
self.input_size = input_size
self.output_size = output_size
self.layers = []
cur_size = input_size
for _ in range(self.layrs):
self.layers.extend([Linear(cur_size, self.units), self.act()])
cur_size = units
self.layers.append(Linear(cur_size, output_size))
self.model = nn.Sequential(*self.layers)
def forward(self, x):
x = self.model(x)
if self.output_size == 1:
x = torch.squeeze(x)
if self.dist == "normal":
output_dist = td.Normal(x, 1)
elif self.dist == "binary":
output_dist = td.Bernoulli(logits=x)
else:
raise NotImplementedError("Distribution type not implemented!")
return td.Independent(output_dist, 0)
# Represents dreamer policy
class ActionDecoder(nn.Module):
"""ActionDecoder is the policy module in Dreamer.
It outputs a distribution parameterized by mean and std, later to be
transformed by a custom TanhBijector in utils.py for Dreamer.
"""
def __init__(self,
input_size: int,
action_size: int,
layers: int,
units: int,
dist: str = "tanh_normal",
act: ActFunc = None,
min_std: float = 1e-4,
init_std: float = 5.0,
mean_scale: float = 5.0):
"""Initializes Policy
Args:
input_size (int): Input size to network
action_size (int): Action space size
layers (int): Number of layers in network
units (int): Size of the hidden layers
dist (str): Output distribution, with tanh_normal implemented
act (Any): Activation function
min_std (float): Minimum std for output distribution
init_std (float): Initial std
mean_scale (float): Augmenting mean output from FC network
"""
super().__init__()
self.layrs = layers
self.units = units
self.dist = dist
self.act = act
if not act:
self.act = nn.ReLU
self.min_std = min_std
self.init_std = init_std
self.mean_scale = mean_scale
self.action_size = action_size
self.layers = []
self.softplus = nn.Softplus()
# MLP Construction
cur_size = input_size
for _ in range(self.layrs):
self.layers.extend([Linear(cur_size, self.units), self.act()])
cur_size = self.units
if self.dist == "tanh_normal":
self.layers.append(Linear(cur_size, 2 * action_size))
elif self.dist == "onehot":
self.layers.append(Linear(cur_size, action_size))
self.model = nn.Sequential(*self.layers)
# Returns distribution
def forward(self, x):
raw_init_std = np.log(np.exp(self.init_std) - 1)
x = self.model(x)
if self.dist == "tanh_normal":
mean, std = torch.chunk(x, 2, dim=-1)
mean = self.mean_scale * torch.tanh(mean / self.mean_scale)
std = self.softplus(std + raw_init_std) + self.min_std
dist = td.Normal(mean, std)
transforms = [TanhBijector()]
dist = td.transformed_distribution.TransformedDistribution(
dist, transforms)
dist = td.Independent(dist, 1)
elif self.dist == "onehot":
dist = td.OneHotCategorical(logits=x)
raise NotImplementedError("Atari not implemented yet!")
return dist
# Represents TD model in PlaNET
class RSSM(nn.Module):
"""RSSM is the core recurrent part of the PlaNET module. It consists of
two networks, one (obs) to calculate posterior beliefs and states and
the second (img) to calculate prior beliefs and states. The prior network
takes in the previous state and action, while the posterior network takes
in the previous state, action, and a latent embedding of the most recent
observation.
"""
def __init__(self,
action_size: int,
embed_size: int,
stoch: int = 30,
deter: int = 200,
hidden: int = 200,
act: ActFunc = None):
"""Initializes RSSM
Args:
action_size (int): Action space size
embed_size (int): Size of ConvEncoder embedding
stoch (int): Size of the distributional hidden state
deter (int): Size of the deterministic hidden state
hidden (int): General size of hidden layers
act (Any): Activation function
"""
super().__init__()
self.stoch_size = stoch
self.deter_size = deter
self.hidden_size = hidden
self.act = act
if act is None:
self.act = nn.ELU
self.obs1 = Linear(embed_size + deter, hidden)
self.obs2 = Linear(hidden, 2 * stoch)
self.cell = GRUCell(self.hidden_size, hidden_size=self.deter_size)
self.img1 = Linear(stoch + action_size, hidden)
self.img2 = Linear(deter, hidden)
self.img3 = Linear(hidden, 2 * stoch)
self.softplus = nn.Softplus
self.device = (torch.device("cuda")
if torch.cuda.is_available() else torch.device("cpu"))
def get_initial_state(self, batch_size: int) -> List[TensorType]:
"""Returns the inital state for the RSSM, which consists of mean,
std for the stochastic state, the sampled stochastic hidden state
(from mean, std), and the deterministic hidden state, which is
pushed through the GRUCell.
Args:
batch_size (int): Batch size for initial state
Returns:
List of tensors
"""
return [
torch.zeros(batch_size, self.stoch_size).to(self.device),
torch.zeros(batch_size, self.stoch_size).to(self.device),
torch.zeros(batch_size, self.stoch_size).to(self.device),
torch.zeros(batch_size, self.deter_size).to(self.device),
]
def observe(self,
embed: TensorType,
action: TensorType,
state: List[TensorType] = None
) -> Tuple[List[TensorType], List[TensorType]]:
"""Returns the corresponding states from the embedding from ConvEncoder
and actions. This is accomplished by rolling out the RNN from the
starting state through each index of embed and action, saving all
intermediate states in between.
Args:
embed (TensorType): ConvEncoder embedding
action (TensorType): Actions
state (List[TensorType]): Initial state before rollout
Returns:
Posterior states and prior states (both List[TensorType])
"""
if state is None:
state = self.get_initial_state(action.size()[0])
embed = embed.permute(1, 0, 2)
action = action.permute(1, 0, 2)
priors = [[] for i in range(len(state))]
posts = [[] for i in range(len(state))]
last = (state, state)
for index in range(len(action)):
# Tuple of post and prior
last = self.obs_step(last[0], action[index], embed[index])
[o.append(s) for s, o in zip(last[0], posts)]
[o.append(s) for s, o in zip(last[1], priors)]
prior = [torch.stack(x, dim=0) for x in priors]
post = [torch.stack(x, dim=0) for x in posts]
prior = [e.permute(1, 0, 2) for e in prior]
post = [e.permute(1, 0, 2) for e in post]
return post, prior
def imagine(self, action: TensorType,
state: List[TensorType] = None) -> List[TensorType]:
"""Imagines the trajectory starting from state through a list of actions.
Similar to observe(), requires rolling out the RNN for each timestep.
Args:
action (TensorType): Actions
state (List[TensorType]): Starting state before rollout
Returns:
Prior states
"""
if state is None:
state = self.get_initial_state(action.size()[0])
action = action.permute(1, 0, 2)
indices = range(len(action))
priors = [[] for _ in range(len(state))]
last = state
for index in indices:
last = self.img_step(last, action[index])
[o.append(s) for s, o in zip(last, priors)]
prior = [torch.stack(x, dim=0) for x in priors]
prior = [e.permute(1, 0, 2) for e in prior]
return prior
def obs_step(
self, prev_state: TensorType, prev_action: TensorType,
embed: TensorType) -> Tuple[List[TensorType], List[TensorType]]:
"""Runs through the posterior model and returns the posterior state
Args:
prev_state (TensorType): The previous state
prev_action (TensorType): The previous action
embed (TensorType): Embedding from ConvEncoder
Returns:
Post and Prior state
"""
prior = self.img_step(prev_state, prev_action)
x = torch.cat([prior[3], embed], dim=-1)
x = self.obs1(x)
x = self.act()(x)
x = self.obs2(x)
mean, std = torch.chunk(x, 2, dim=-1)
std = self.softplus()(std) + 0.1
stoch = self.get_dist(mean, std).rsample()
post = [mean, std, stoch, prior[3]]
return post, prior
def img_step(self, prev_state: TensorType,
prev_action: TensorType) -> List[TensorType]:
"""Runs through the prior model and returns the prior state
Args:
prev_state (TensorType): The previous state
prev_action (TensorType): The previous action
Returns:
Prior state
"""
x = torch.cat([prev_state[2], prev_action], dim=-1)
x = self.img1(x)
x = self.act()(x)
deter = self.cell(x, prev_state[3])
x = deter
x = self.img2(x)
x = self.act()(x)
x = self.img3(x)
mean, std = torch.chunk(x, 2, dim=-1)
std = self.softplus()(std) + 0.1
stoch = self.get_dist(mean, std).rsample()
return [mean, std, stoch, deter]
def get_feature(self, state: List[TensorType]) -> TensorType:
# Constructs feature for input to reward, decoder, actor, critic
return torch.cat([state[2], state[3]], dim=-1)
def get_dist(self, mean: TensorType, std: TensorType) -> TensorType:
return td.Normal(mean, std)
# Represents all models in Dreamer, unifies them all into a single interface
class DreamerModel(TorchModelV2, nn.Module):
def __init__(self, obs_space, action_space, num_outputs, model_config,
name):
super().__init__(obs_space, action_space, num_outputs, model_config,
name)
nn.Module.__init__(self)
self.depth = model_config["depth_size"]
self.deter_size = model_config["deter_size"]
self.stoch_size = model_config["stoch_size"]
self.hidden_size = model_config["hidden_size"]
self.action_size = action_space.shape[0]
self.encoder = ConvEncoder(self.depth)
self.decoder = ConvDecoder(
self.stoch_size + self.deter_size, depth=self.depth)
self.reward = DenseDecoder(self.stoch_size + self.deter_size, 1, 2,
self.hidden_size)
self.dynamics = RSSM(
self.action_size,
32 * self.depth,
stoch=self.stoch_size,
deter=self.deter_size)
self.actor = ActionDecoder(self.stoch_size + self.deter_size,
self.action_size, 4, self.hidden_size)
self.value = DenseDecoder(self.stoch_size + self.deter_size, 1, 3,
self.hidden_size)
self.state = None
self.device = (torch.device("cuda")
if torch.cuda.is_available() else torch.device("cpu"))
def policy(self, obs: TensorType, state: List[TensorType], explore=True
) -> Tuple[TensorType, List[float], List[TensorType]]:
"""Returns the action. Runs through the encoder, recurrent model,
and policy to obtain action.
"""
if state is None:
self.get_initial_state()
else:
self.state = state
post = self.state[:4]
action = self.state[4]
embed = self.encoder(obs)
post, _ = self.dynamics.obs_step(post, action, embed)
feat = self.dynamics.get_feature(post)
action_dist = self.actor(feat)
if explore:
action = action_dist.sample()
else:
action = action_dist.mean
logp = action_dist.log_prob(action)
self.state = post + [action]
return action, logp, self.state
def imagine_ahead(self, state: List[TensorType],
horizon: int) -> TensorType:
"""Given a batch of states, rolls out more state of length horizon.
"""
start = []
for s in state:
s = s.contiguous().detach()
shpe = [-1] + list(s.size())[2:]
start.append(s.view(*shpe))
def next_state(state):
feature = self.dynamics.get_feature(state).detach()
action = self.actor(feature).rsample()
next_state = self.dynamics.img_step(state, action)
return next_state
last = start
outputs = [[] for i in range(len(start))]
for _ in range(horizon):
last = next_state(last)
[o.append(s) for s, o in zip(last, outputs)]
outputs = [torch.stack(x, dim=0) for x in outputs]
imag_feat = self.dynamics.get_feature(outputs)
return imag_feat
def get_initial_state(self) -> List[TensorType]:
self.state = self.dynamics.get_initial_state(1) + [
torch.zeros(1, self.action_space.shape[0]).to(self.device)
]
return self.state
def value_function(self) -> TensorType:
return None
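# --- Illustrative configuration (hedged sketch, not part of the original file) ---
# DreamerModel reads these keys from model_config; the values shown here are
# only placeholders, not RLlib's actual defaults.
#
#     model_config = {
#         "depth_size": 32,
#         "deter_size": 200,
#         "stoch_size": 30,
#         "hidden_size": 400,
#     }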
|
pcmoritz/ray-1
|
rllib/agents/dreamer/dreamer_model.py
|
Python
|
apache-2.0
| 19,097
|
# coding=utf-8
#
# Copyright 2017 F5 Networks Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from f5.bigip.resource import UnnamedResource
from f5.sdk_exception import UnsupportedOperation
class Login_Enforcement(UnnamedResource):
"""BIG-IP® ASM Login Enforcement resource."""
def __init__(self, policy):
super(Login_Enforcement, self).__init__(policy)
self._meta_data['required_json_kind'] = 'tm:asm:policies:login-enforcement:login-enforcementstate'
self._meta_data['required_load_parameters'] = set()
self._meta_data['object_has_stats'] = False
self._meta_data['minimum_version'] = '11.6.0'
def update(self, **kwargs):
"""Update is not supported for Login Enforcement resource
:raises: UnsupportedOperation
"""
raise UnsupportedOperation(
"%s does not support the update method" % self.__class__.__name__
)
|
F5Networks/f5-common-python
|
f5/bigip/tm/asm/policies/login_enforcement.py
|
Python
|
apache-2.0
| 1,423
|
"""
============================
Base RPC Handler for Tornado
============================
This is a basic server implementation, designed for use within the
Tornado framework. The classes in this library should not be used
directly, but rather through the XML or JSON RPC implementations.
You can use the utility functions like 'private' and 'start_server'.
"""
from tornado.web import RequestHandler
import tornado.web
import tornado.ioloop
import tornado.httpserver
from tornado.concurrent import Future, TracebackFuture
from tornado import gen
from tornado.stack_context import ExceptionStackContext, run_with_stack_context
import types
import traceback
from tornadorpc_evok.utils import getcallargs
# Configuration element
class Config(object):
verbose = True
short_errors = True
config = Config()
class BaseRPCParser(object):
"""
This class is responsible for managing the request, dispatch,
and response formatting of the system. It is tied into the
_RPC_ attribute of the BaseRPCHandler (or subclasses) and
populated as necessary throughout the request. Use the
.faults attribute to take advantage of the built-in error
codes.
"""
content_type = 'text/plain'
def __init__(self, library, encode=None, decode=None):
# Attaches the RPC library and encode / decode functions.
self.library = library
if not encode:
encode = getattr(library, 'dumps')
if not decode:
decode = getattr(library, 'loads')
self.encode = encode
self.decode = decode
self.requests_in_progress = 0
self.responses = []
@property
def faults(self):
# Grabs the fault tree on request
return Faults(self)
def response(self, handler):
"""
This is the callback for a single finished dispatch.
Once all the dispatches have been run, it calls the
parser library to parse responses and then calls the
handler's async method.
"""
handler._requests -= 1
if handler._requests > 0:
return
# We are finished with requests, send response
if handler._RPC_finished:
# We've already sent the response
raise Exception("Error trying to send response twice.")
handler._RPC_finished = True
responses = tuple(handler._results)
response_text = self.parse_responses(responses)
if type(response_text) not in types.StringTypes:
# Likely a fault, or something messed up
response_text = self.encode(response_text)
# Calling the async callback
handler.on_result(response_text)
def traceback(self, method_name='REQUEST', params=[]):
err_lines = traceback.format_exc().splitlines()
err_title = "ERROR IN %s" % method_name
if len(params) > 0:
err_title = '%s - (PARAMS: %s)' % (err_title, repr(params))
err_sep = ('-'*len(err_title))[:79]
err_lines = [err_sep, err_title, err_sep]+err_lines
if config.verbose:
if len(err_lines) >= 7 and config.short_errors:
# Minimum number of lines to see what happened
# Plus title and separators
print '\n'.join(err_lines[0:4]+err_lines[-3:])
else:
print '\n'.join(err_lines)
# Log here
return
def parse_request(self, request_body):
"""
Extend this on the implementing protocol. If it
should error out, return the output of the
'self.faults.fault_name' response. Otherwise,
it MUST return a TUPLE of TUPLE. Each entry
tuple must have the following structure:
('method_name', params)
...where params is a list or dictionary of
arguments (positional or keyword, respectively.)
So, the result should look something like
the following:
( ('add', [5,4]), ('add', {'x':5, 'y':4}) )
"""
return ([], [])
def parse_responses(self, responses):
"""
Extend this on the implementing protocol. It must
return a response that can be returned as output to
the client.
"""
return self.encode(responses, methodresponse=True)
def check_method(self, attr_name, obj):
"""
Just checks to see whether an attribute is private
(by the decorator or by a leading underscore) and
returns boolean result.
"""
assert(not attr_name.startswith('_'))
attr = getattr(obj, attr_name)
assert( not getattr(attr, 'private', False))
return attr
class BaseRPCHandler(RequestHandler):
"""
This is the base handler to be subclassed by the actual
implementations and by the end user.
"""
_RPC_ = None
#_requests = 1
rpcrequests = None
_error = None
_RPC_finished = False
def prepare(self):
"""
Parse request_body, prepares self.rpcrequest
On error call finish or set self._error - to be serialized by export procedure
"""
try:
requests = self._RPC_.parse_request(self.request.body)
if not isinstance(requests, types.TupleType):
# SHOULD be the result of a fault call,
# according to the parse_request spec below.
if isinstance(requests, basestring):
# Should be the response text of a fault
# This will break in Python 3.x
self.finish(requests)
elif hasattr(requests, 'response'):
# Fault types should have a 'response' method
self.finish(requests.response())
elif hasattr(requests, 'faultCode'):
# XML-RPC fault types need to be properly dispatched. This
# should only happen if there was an error parsing the
self._error = requests
else:
# No idea, hopefully the handler knows what it is doing.
self.finish(requests)
return
self.rpcrequests = requests
except (AttributeError,Exception):
self._RPC_.traceback()
self._error = self._RPC_.faults.parse_error()
@tornado.web.asynchronous
@gen.coroutine
def post(self):
# Dispatches request methods
# rpcrequests are prepared in self.prepare()
if self._error:
responses = (self._error,)
else:
futures = [self._dispatch(method, args) for method,args in self.rpcrequests ]
if len(futures) == 1:
response = yield futures[0]
responses = (response,)
else:
responses = yield futures
responses = tuple(responses)
response_text = self._RPC_.parse_responses(responses)
self.set_header('Content-Type', self._RPC_.content_type)
self.finish(response_text)
#self._RPC_.run(self, request_body)
@gen.coroutine
def _dispatch(self, method_name, params):
"""
This method walks the attribute tree in the method
and passes the parameters, either in positional or
keyword form, into the appropriate method on the
Handler class. Currently supports only positional
or keyword arguments, not mixed.
"""
try:
assert(not hasattr(RequestHandler, method_name))
print method_name
method = self
method_list = dir(method)
method_list.sort()
attr_tree = method_name.split('.')
for attr_name in attr_tree:
method = self._RPC_.check_method(attr_name, method)
assert(callable(method))
assert(not method_name.startswith('_'))
assert(not getattr(method, 'private', False))
except Exception, e:
raise gen.Return(self._RPC_.faults.method_not_found())
args = []
kwargs = {}
try:
if isinstance(params, dict):
# The parameters are keyword-based
kwargs = params
elif type(params) in (list, tuple):
# The parameters are positional
args = params
else:
# Bad argument formatting?
raise Exception()
# Validating call arguments
final_kwargs, extra_args = getcallargs(method, *args, **kwargs)
except Exception:
raise gen.Return(self._RPC_.faults.invalid_params())
try:
if getattr(method, 'coroutine', False):
method=tornado.gen.coroutine(method)
response = yield method(*extra_args, **final_kwargs)
else:
response = method(*extra_args, **final_kwargs)
except Exception:
self._RPC_.traceback(method_name, params)
raise gen.Return(self._RPC_.faults.internal_error())
raise gen.Return(response)
class FaultMethod(object):
"""
This is the 'dynamic' fault method so that the message can
be changed on request from the parser.faults call.
"""
def __init__(self, fault, code, message):
self.fault = fault
self.code = code
self.message = message
def __call__(self, message=None):
if message:
self.message = message
return self.fault(self.code, self.message)
class Faults(object):
"""
This holds the codes and messages for the RPC implementation.
It is attached (dynamically) to the Parser when called via the
parser.faults query, and returns a FaultMethod to be called so
that the message can be changed. If the 'dynamic' attribute is
not a key in the codes list, then it will error.
USAGE:
parser.fault.parse_error('Error parsing content.')
If no message is passed in, it will check the messages dictionary
for the same key as the codes dict. Otherwise, it just prettifies
the code 'key' from the codes dict.
"""
codes = {
'parse_error': -32700,
'method_not_found': -32601,
'invalid_request': -32600,
'invalid_params': -32602,
'internal_error': -32603
}
messages = {}
def __init__(self, parser, fault=None):
self.library = parser.library
self.fault = fault
if not self.fault:
self.fault = getattr(self.library, 'Fault')
def __getattr__(self, attr):
message = 'Error'
if attr in self.messages.keys():
message = self.messages[attr]
else:
message = ' '.join(map(str.capitalize, attr.split('_')))
fault = FaultMethod(self.fault, self.codes[attr], message)
return fault
"""
Utility Functions
"""
def private(func):
"""
Use this to make a method private.
It is intended to be used as a decorator.
If you wish to make a method tree private, just
create and set the 'private' variable to True
on the tree object itself.
"""
func.private = True
return func
#def async(func):
# """
# Use this to make a method asynchronous
# It is intended to be used as a decorator.
# Make sure you call "self.result" on any
# async method. Also, trees do not currently
# support async methods.
# """
# func.async = True
# return func
def coroutine(func):
func.coroutine = True
return func
def start_server(handlers, route=r'/', port=8080):
"""
This is just a friendly wrapper around the default
Tornado instantiation calls. It simplifies the imports
and setup calls you'd make otherwise.
USAGE:
start_server(handler_class, route=r'/', port=8181)
"""
if type(handlers) not in (types.ListType, types.TupleType):
handler = handlers
handlers = [(route, handler)]
if route != '/RPC2':
# friendly addition for /RPC2 if it's the only one
handlers.append(('/RPC2', handler))
application = tornado.web.Application(handlers)
http_server = tornado.httpserver.HTTPServer(application)
http_server.listen(port)
loop_instance = tornado.ioloop.IOLoop.instance()
""" Setting the '_server' attribute if not set """
for (route, handler) in handlers:
try:
setattr(handler, '_server', loop_instance)
except AttributeError:
handler._server = loop_instance
loop_instance.start()
return loop_instance
"""
The following is a test implementation which should work
for both the XMLRPC and the JSONRPC clients.
"""
class TestMethodTree(object):
def power(self, x, y=2):
return pow(x, y)
@private
def private(self):
# Shouldn't be called
return False
class TestRPCHandler(BaseRPCHandler):
_RPC_ = None
def add(self, x, y):
return x+y
def ping(self, x):
return x
def noargs(self):
return 'Works!'
tree = TestMethodTree()
def _private(self):
# Shouldn't be called
return False
@private
def private(self):
# Also shouldn't be called
return False
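# --- Illustrative usage (hedged sketch, not part of the original file) ---
# The handlers above only become servable once a concrete subclass (XML-RPC
# or JSON-RPC) sets _RPC_ to a parser instance; with such a subclass in
# place, the server could be started roughly like this (route and port are
# assumptions):
#
#     start_server(TestRPCHandler, route=r'/RPC2', port=8181)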
|
UniPiTechnology/evok
|
tornadorpc_evok/base.py
|
Python
|
apache-2.0
| 13,270
|
# Python
import pytest
import mock
from dateutil.parser import parse
from dateutil.relativedelta import relativedelta
from crum import impersonate
import datetime
# Django rest framework
from rest_framework.exceptions import PermissionDenied
from django.utils import timezone
# AWX
from awx.api.versioning import reverse
from awx.api.views import RelatedJobsPreventDeleteMixin, UnifiedJobDeletionMixin
from awx.main.models import (
JobTemplate,
User,
Job,
AdHocCommand,
ProjectUpdate,
)
@pytest.mark.django_db
def test_extra_credentials(get, organization_factory, job_template_factory, credential):
objs = organization_factory("org", superusers=['admin'])
jt = job_template_factory("jt", organization=objs.organization,
inventory='test_inv', project='test_proj').job_template
jt.credentials.add(credential)
jt.save()
job = jt.create_unified_job()
url = reverse('api:job_extra_credentials_list', kwargs={'version': 'v2', 'pk': job.pk})
response = get(url, user=objs.superusers.admin)
assert response.data.get('count') == 1
@pytest.mark.django_db
def test_job_relaunch_permission_denied_response(
post, get, inventory, project, credential, net_credential, machine_credential):
jt = JobTemplate.objects.create(name='testjt', inventory=inventory, project=project)
jt.credentials.add(machine_credential)
jt_user = User.objects.create(username='jobtemplateuser')
jt.execute_role.members.add(jt_user)
with impersonate(jt_user):
job = jt.create_unified_job()
# User capability is shown for this
r = get(job.get_absolute_url(), jt_user, expect=200)
assert r.data['summary_fields']['user_capabilities']['start']
# Job has prompted extra_credential, launch denied w/ message
job.launch_config.credentials.add(net_credential)
r = post(reverse('api:job_relaunch', kwargs={'pk':job.pk}), {}, jt_user, expect=403)
assert 'launched with prompted fields' in r.data['detail']
assert 'do not have permission' in r.data['detail']
@pytest.mark.django_db
def test_job_relaunch_permission_denied_response_other_user(get, post, inventory, project, alice, bob):
'''
Asserts custom permission denied message corresponding to
awx/main/tests/functional/test_rbac_job.py::TestJobRelaunchAccess::test_other_user_prompts
'''
jt = JobTemplate.objects.create(
name='testjt', inventory=inventory, project=project,
ask_credential_on_launch=True,
ask_variables_on_launch=True)
jt.execute_role.members.add(alice, bob)
with impersonate(bob):
job = jt.create_unified_job(extra_vars={'job_var': 'foo2'})
# User capability is shown for this
r = get(job.get_absolute_url(), alice, expect=200)
assert r.data['summary_fields']['user_capabilities']['start']
# Job has prompted data, launch denied w/ message
r = post(reverse('api:job_relaunch', kwargs={'pk':job.pk}), {}, alice, expect=403)
assert 'Job was launched with prompts provided by another user' in r.data['detail']
@pytest.mark.django_db
def test_job_relaunch_without_creds(post, inventory, project, admin_user):
jt = JobTemplate.objects.create(
name='testjt', inventory=inventory,
project=project
)
job = jt.create_unified_job()
post(
url=reverse('api:job_relaunch', kwargs={'pk':job.pk}),
data={},
user=admin_user,
expect=201
)
@pytest.mark.django_db
@pytest.mark.parametrize("status,hosts", [
('all', 'host1,host2,host3'),
('failed', 'host3'),
])
def test_job_relaunch_on_failed_hosts(post, inventory, project, machine_credential, admin_user, status, hosts):
h1 = inventory.hosts.create(name='host1') # no-op
h2 = inventory.hosts.create(name='host2') # changed host
h3 = inventory.hosts.create(name='host3') # failed host
jt = JobTemplate.objects.create(
name='testjt', inventory=inventory,
project=project
)
jt.credentials.add(machine_credential)
job = jt.create_unified_job(_eager_fields={'status': 'failed'}, limit='host1,host2,host3')
job.job_events.create(event='playbook_on_stats')
job.job_host_summaries.create(host=h1, failed=False, ok=1, changed=0, failures=0, host_name=h1.name)
job.job_host_summaries.create(host=h2, failed=False, ok=0, changed=1, failures=0, host_name=h2.name)
job.job_host_summaries.create(host=h3, failed=False, ok=0, changed=0, failures=1, host_name=h3.name)
r = post(
url=reverse('api:job_relaunch', kwargs={'pk':job.pk}),
data={'hosts': status},
user=admin_user,
expect=201
)
assert r.data.get('limit') == hosts
@pytest.mark.django_db
def test_summary_fields_recent_jobs(job_template, admin_user, get):
jobs = []
for i in range(13):
jobs.append(Job.objects.create(
job_template=job_template,
status='failed',
created=timezone.make_aware(datetime.datetime(2017, 3, 21, 9, i)),
finished=timezone.make_aware(datetime.datetime(2017, 3, 21, 10, i))
))
r = get(
url=job_template.get_absolute_url(),
user=admin_user,
expect=200
)
recent_jobs = r.data['summary_fields']['recent_jobs']
assert len(recent_jobs) == 10
assert recent_jobs == [{
'id': job.id,
'status': 'failed',
'finished': job.finished,
'type': 'job'
} for job in jobs[-10:][::-1]]
@pytest.mark.django_db
def test_slice_jt_recent_jobs(slice_job_factory, admin_user, get):
workflow_job = slice_job_factory(3, spawn=True)
slice_jt = workflow_job.job_template
r = get(
url=slice_jt.get_absolute_url(),
user=admin_user,
expect=200
)
job_ids = [entry['id'] for entry in r.data['summary_fields']['recent_jobs']]
# decision is that workflow job should be shown in the related jobs
# joblets of the workflow job should NOT be shown
assert job_ids == [workflow_job.pk]
@pytest.mark.django_db
def test_block_unprocessed_events(delete, admin_user, mocker):
time_of_finish = parse("Thu Feb 28 09:10:20 2013 -0500")
job = Job.objects.create(
emitted_events=1,
status='finished',
finished=time_of_finish
)
request = mock.MagicMock()
class MockView(UnifiedJobDeletionMixin):
model = Job
def get_object(self):
return job
view = MockView()
time_of_request = time_of_finish + relativedelta(seconds=2)
with mock.patch('awx.api.views.mixin.now', lambda: time_of_request):
r = view.destroy(request)
assert r.status_code == 400
@pytest.mark.django_db
def test_block_related_unprocessed_events(mocker, organization, project, delete, admin_user):
job_template = JobTemplate.objects.create(
project=project,
playbook='helloworld.yml'
)
time_of_finish = parse("Thu Feb 23 14:17:24 2012 -0500")
Job.objects.create(
emitted_events=1,
status='finished',
finished=time_of_finish,
job_template=job_template,
project=project
)
view = RelatedJobsPreventDeleteMixin()
time_of_request = time_of_finish + relativedelta(seconds=2)
with mock.patch('awx.api.views.mixin.now', lambda: time_of_request):
with pytest.raises(PermissionDenied):
view.perform_destroy(organization)
@pytest.mark.django_db
def test_disallowed_http_update_methods(put, patch, post, inventory, project, admin_user):
jt = JobTemplate.objects.create(
name='test_disallowed_methods', inventory=inventory,
project=project
)
job = jt.create_unified_job()
post(
url=reverse('api:job_detail', kwargs={'pk': job.pk, 'version': 'v2'}),
data={},
user=admin_user,
expect=405
)
put(
url=reverse('api:job_detail', kwargs={'pk': job.pk, 'version': 'v2'}),
data={},
user=admin_user,
expect=405
)
patch(
url=reverse('api:job_detail', kwargs={'pk': job.pk, 'version': 'v2'}),
data={},
user=admin_user,
expect=405
)
class TestControllerNode():
@pytest.fixture
def project_update(self, project):
return ProjectUpdate.objects.create(project=project)
@pytest.fixture
def job(self):
return JobTemplate.objects.create().create_unified_job()
@pytest.fixture
def adhoc(self, inventory):
return AdHocCommand.objects.create(inventory=inventory)
@pytest.mark.django_db
def test_field_controller_node_exists(self, sqlite_copy_expert,
admin_user, job, project_update,
inventory_update, adhoc, get, system_job_factory):
system_job = system_job_factory()
r = get(reverse('api:unified_job_list') + '?id={}'.format(job.id), admin_user, expect=200)
assert 'controller_node' in r.data['results'][0]
r = get(job.get_absolute_url(), admin_user, expect=200)
assert 'controller_node' in r.data
r = get(reverse('api:ad_hoc_command_detail', kwargs={'pk': adhoc.pk}), admin_user, expect=200)
assert 'controller_node' in r.data
r = get(reverse('api:project_update_detail', kwargs={'pk': project_update.pk}), admin_user, expect=200)
assert 'controller_node' not in r.data
r = get(reverse('api:inventory_update_detail', kwargs={'pk': inventory_update.pk}), admin_user, expect=200)
assert 'controller_node' not in r.data
r = get(reverse('api:system_job_detail', kwargs={'pk': system_job.pk}), admin_user, expect=200)
assert 'controller_node' not in r.data
|
wwitzel3/awx
|
awx/main/tests/functional/api/test_job.py
|
Python
|
apache-2.0
| 9,725
|
from flask import Flask
app = Flask('keyhub')
|
ttycl/keyhub
|
keyhub/wsgi.py
|
Python
|
apache-2.0
| 46
|
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import os
from collections import defaultdict
from pants.backend.jvm.targets.java_library import JavaLibrary
from pants.backend.jvm.tasks.classpath_products import ClasspathProducts
from pants.backend.jvm.tasks.jvm_dependency_usage import JvmDependencyUsage
from pants.goal.products import MultipleRootedProducts
from pants.util.dirutil import safe_mkdir, touch
from pants_test.tasks.task_test_base import TaskTestBase
class TestJvmDependencyUsage(TaskTestBase):
@classmethod
def task_type(cls):
return JvmDependencyUsage
def _setup(self, target_classfiles):
"""Takes a dict mapping targets to lists of classfiles."""
context = self.context(target_roots=target_classfiles.keys())
# Create classfiles in a target-specific directory, and add it to the classpath for the target.
classpath_products = context.products.get_data('runtime_classpath', ClasspathProducts)
for target, classfiles in target_classfiles.items():
target_dir = os.path.join(self.test_workdir, target.id)
safe_mkdir(target_dir)
for classfile in classfiles:
touch(os.path.join(target_dir, classfile))
classpath_products.add_for_target(target, [('default', target_dir)])
product_deps_by_src = context.products.get_data('product_deps_by_src', dict)
return self.create_task(context), product_deps_by_src
def make_java_target(self, *args, **kwargs):
assert 'target_type' not in kwargs
return self.make_target(target_type=JavaLibrary, *args, **kwargs)
def _cover_output(self, graph):
# coverage of the output code
self.assertNotEqual(graph.to_json(), "")
self.assertNotEqual(graph.to_summary(), "")
def test_simple_dep_usage_graph(self):
t1 = self.make_java_target(spec=':t1', sources=['a.java', 'b.java'])
t2 = self.make_java_target(spec=':t2', sources=['c.java'], dependencies=[t1])
t3 = self.make_java_target(spec=':t3', sources=['d.java', 'e.java'], dependencies=[t1])
self.set_options(size_estimator='filecount')
dep_usage, product_deps_by_src = self._setup({
t1: ['a.class', 'b.class'],
t2: ['c.class'],
t3: ['d.class', 'e.class'],
})
product_deps_by_src[t1] = {}
product_deps_by_src[t2] = {'c.java': ['a.class']}
product_deps_by_src[t3] = {'d.java': ['a.class', 'b.class'],
'e.java': ['a.class', 'b.class']}
graph = dep_usage.create_dep_usage_graph([t1, t2, t3], '')
self.assertEqual(graph._nodes[t1].products_total, 2)
self.assertEqual(graph._nodes[t2].products_total, 1)
self.assertEqual(graph._nodes[t3].products_total, 2)
self.assertEqual(graph._nodes[t1].dep_edges, {})
self.assertEqual(len(graph._nodes[t2].dep_edges[t1].products_used), 1)
self.assertEqual(len(graph._nodes[t3].dep_edges[t1].products_used), 2)
self.assertEqual(graph._trans_cost(t1), 2)
self.assertEqual(graph._trans_cost(t2), 3)
self.assertEqual(graph._trans_cost(t3), 4)
self._cover_output(graph)
def test_dep_usage_graph_with_synthetic_targets(self):
t1 = self.make_java_target(spec=':t1', sources=['t1.thrift'])
t1_x = self.make_java_target(spec=':t1.x', derived_from=t1)
t1_y = self.make_java_target(spec=':t1.y', derived_from=t1)
t1_z = self.make_java_target(spec=':t1.z', derived_from=t1)
t2 = self.make_java_target(spec=':t2',
sources=['a.java', 'b.java'],
dependencies=[t1, t1_x, t1_y, t1_z])
self.set_options(size_estimator='nosize')
dep_usage, product_deps_by_src = self._setup({
t1_x: ['x1.class'],
t1_y: ['y1.class'],
t1_z: ['z1.class', 'z2.class', 'z3.class'],
t2: ['a.class', 'b.class'],
})
product_deps_by_src[t1] = {}
product_deps_by_src[t1_x] = {}
product_deps_by_src[t1_y] = {}
product_deps_by_src[t1_z] = {}
product_deps_by_src[t2] = {'a.java': ['x1.class'],
'b.java': ['z1.class', 'z2.class']}
graph = dep_usage.create_dep_usage_graph([t1, t1_x, t1_y, t1_z, t2], '')
self.assertEqual(graph._nodes[t1].products_total, 5)
self.assertEqual(len(graph._nodes[t2].dep_edges[t1].products_used), 3)
self._cover_output(graph)
|
slyphon/pants
|
tests/python/pants_test/backend/jvm/tasks/test_jvm_dependency_usage.py
|
Python
|
apache-2.0
| 4,527
|
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for ListPolicyTags
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-datacatalog
# [START datacatalog_v1_generated_PolicyTagManager_ListPolicyTags_sync]
from google.cloud import datacatalog_v1
def sample_list_policy_tags():
# Create a client
client = datacatalog_v1.PolicyTagManagerClient()
# Initialize request argument(s)
request = datacatalog_v1.ListPolicyTagsRequest(
parent="parent_value",
)
# Make the request
page_result = client.list_policy_tags(request=request)
# Handle the response
for response in page_result:
print(response)
# [END datacatalog_v1_generated_PolicyTagManager_ListPolicyTags_sync]
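# Illustrative variation (editorial; not part of the generated snippet): the
# pager returned by list_policy_tags can also be consumed page by page. The
# parent value and page_size below are placeholders; "client" is the client
# created in the sample above.
#
#   request = datacatalog_v1.ListPolicyTagsRequest(
#       parent="projects/p/locations/l/taxonomies/t",
#       page_size=50,
#   )
#   for page in client.list_policy_tags(request=request).pages:
#       for policy_tag in page.policy_tags:
#           print(policy_tag.name)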
|
googleapis/python-datacatalog
|
samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_list_policy_tags_sync.py
|
Python
|
apache-2.0
| 1,525
|
""" Cisco_IOS_XR_tunnel_l2tun_oper
This module contains a collection of YANG definitions
for Cisco IOS\-XR tunnel\-l2tun package operational data.
This module contains definitions
for the following management objects\:
l2tp\: L2TP operational data
l2tpv2\: l2tpv2
Copyright (c) 2013\-2016 by Cisco Systems, Inc.
All rights reserved.
"""
import re
import collections
from enum import Enum
from ydk.types import Empty, YList, YLeafList, DELETE, Decimal64, FixedBitsDict
from ydk.errors import YPYError, YPYModelError
class DigestHashEnum(Enum):
"""
DigestHashEnum
Digest hash types
.. data:: md5 = 0
MD5
.. data:: sha1 = 1
SHA1
"""
md5 = 0
sha1 = 1
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['DigestHashEnum']
class L2Tp(object):
"""
L2TP operational data
.. attribute:: classes
List of L2TP class names
**type**\: :py:class:`Classes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tp.Classes>`
.. attribute:: counter_hist_fail
Failure events leading to disconnection
**type**\: :py:class:`CounterHistFail <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tp.CounterHistFail>`
.. attribute:: counters
L2TP control messages counters
**type**\: :py:class:`Counters <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tp.Counters>`
.. attribute:: session
L2TP control messages counters
**type**\: :py:class:`Session <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tp.Session>`
.. attribute:: sessions
List of session IDs
**type**\: :py:class:`Sessions <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tp.Sessions>`
.. attribute:: tunnel_configurations
List of tunnel IDs
**type**\: :py:class:`TunnelConfigurations <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tp.TunnelConfigurations>`
.. attribute:: tunnels
List of tunnel IDs
**type**\: :py:class:`Tunnels <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tp.Tunnels>`
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.classes = L2Tp.Classes()
self.classes.parent = self
self.counter_hist_fail = L2Tp.CounterHistFail()
self.counter_hist_fail.parent = self
self.counters = L2Tp.Counters()
self.counters.parent = self
self.session = L2Tp.Session()
self.session.parent = self
self.sessions = L2Tp.Sessions()
self.sessions.parent = self
self.tunnel_configurations = L2Tp.TunnelConfigurations()
self.tunnel_configurations.parent = self
self.tunnels = L2Tp.Tunnels()
self.tunnels.parent = self
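# Illustrative usage sketch (editorial; not part of the generated bindings).
# The device address and credentials below are placeholders, and this assumes
# the old ydk-py 0.5-style API that this module appears to target:
#
#   from ydk.services import CRUDService
#   from ydk.providers import NetconfServiceProvider
#   from ydk.models.cisco_ios_xr import Cisco_IOS_XR_tunnel_l2tun_oper as l2tun_oper
#
#   provider = NetconfServiceProvider(address='192.0.2.1',
#                                     username='admin', password='admin')
#   crud = CRUDService()
#   l2tp = crud.read(provider, l2tun_oper.L2Tp())
#   print(l2tp.counters.control.tunnel_xr.global_.total_received)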
class Counters(object):
"""
L2TP control messages counters
.. attribute:: control
L2TP control messages counters
**type**\: :py:class:`Control <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tp.Counters.Control>`
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.control = L2Tp.Counters.Control()
self.control.parent = self
class Control(object):
"""
L2TP control messages counters
.. attribute:: tunnel_xr
L2TP control tunnel messages counters
**type**\: :py:class:`TunnelXr <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tp.Counters.Control.TunnelXr>`
.. attribute:: tunnels
Table of tunnel IDs of control message counters
**type**\: :py:class:`Tunnels <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tp.Counters.Control.Tunnels>`
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.tunnel_xr = L2Tp.Counters.Control.TunnelXr()
self.tunnel_xr.parent = self
self.tunnels = L2Tp.Counters.Control.Tunnels()
self.tunnels.parent = self
class TunnelXr(object):
"""
L2TP control tunnel messages counters
.. attribute:: authentication
Tunnel authentication counters
**type**\: :py:class:`Authentication <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tp.Counters.Control.TunnelXr.Authentication>`
.. attribute:: global_
Tunnel counters
**type**\: :py:class:`Global_ <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tp.Counters.Control.TunnelXr.Global_>`
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.authentication = L2Tp.Counters.Control.TunnelXr.Authentication()
self.authentication.parent = self
self.global_ = L2Tp.Counters.Control.TunnelXr.Global_()
self.global_.parent = self
class Authentication(object):
"""
Tunnel authentication counters
.. attribute:: challenge_avp
Challenge AVP statistics
**type**\: :py:class:`ChallengeAvp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tp.Counters.Control.TunnelXr.Authentication.ChallengeAvp>`
.. attribute:: challenge_reponse
Challenge response statistics
**type**\: :py:class:`ChallengeReponse <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tp.Counters.Control.TunnelXr.Authentication.ChallengeReponse>`
.. attribute:: common_digest
Common digest statistics
**type**\: :py:class:`CommonDigest <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tp.Counters.Control.TunnelXr.Authentication.CommonDigest>`
.. attribute:: integrity_check
Integrity check statistics
**type**\: :py:class:`IntegrityCheck <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tp.Counters.Control.TunnelXr.Authentication.IntegrityCheck>`
.. attribute:: local_secret
Local secret statistics
**type**\: :py:class:`LocalSecret <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tp.Counters.Control.TunnelXr.Authentication.LocalSecret>`
.. attribute:: nonce_avp
Nonce AVP statistics
**type**\: :py:class:`NonceAvp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tp.Counters.Control.TunnelXr.Authentication.NonceAvp>`
.. attribute:: overall_statistics
Overall statistics
**type**\: :py:class:`OverallStatistics <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tp.Counters.Control.TunnelXr.Authentication.OverallStatistics>`
.. attribute:: primary_digest
Primary digest statistics
**type**\: :py:class:`PrimaryDigest <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tp.Counters.Control.TunnelXr.Authentication.PrimaryDigest>`
.. attribute:: secondary_digest
Secondary digest statistics
**type**\: :py:class:`SecondaryDigest <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tp.Counters.Control.TunnelXr.Authentication.SecondaryDigest>`
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.challenge_avp = L2Tp.Counters.Control.TunnelXr.Authentication.ChallengeAvp()
self.challenge_avp.parent = self
self.challenge_reponse = L2Tp.Counters.Control.TunnelXr.Authentication.ChallengeReponse()
self.challenge_reponse.parent = self
self.common_digest = L2Tp.Counters.Control.TunnelXr.Authentication.CommonDigest()
self.common_digest.parent = self
self.integrity_check = L2Tp.Counters.Control.TunnelXr.Authentication.IntegrityCheck()
self.integrity_check.parent = self
self.local_secret = L2Tp.Counters.Control.TunnelXr.Authentication.LocalSecret()
self.local_secret.parent = self
self.nonce_avp = L2Tp.Counters.Control.TunnelXr.Authentication.NonceAvp()
self.nonce_avp.parent = self
self.overall_statistics = L2Tp.Counters.Control.TunnelXr.Authentication.OverallStatistics()
self.overall_statistics.parent = self
self.primary_digest = L2Tp.Counters.Control.TunnelXr.Authentication.PrimaryDigest()
self.primary_digest.parent = self
self.secondary_digest = L2Tp.Counters.Control.TunnelXr.Authentication.SecondaryDigest()
self.secondary_digest.parent = self
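# Note (editorial): every nested counter class that follows (NonceAvp,
# CommonDigest, PrimaryDigest, SecondaryDigest, IntegrityCheck, LocalSecret,
# ChallengeAvp, ChallengeReponse, OverallStatistics) repeats the same generated
# pattern: _common_path builds the node's YANG XPath, is_config() returns False
# because this is operational data, _has_data() reports whether any leaf has
# been populated, and _meta_info() looks up the class in the bundle's meta table.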
class NonceAvp(object):
"""
Nonce AVP statistics
.. attribute:: bad_hash
Bad hash
**type**\: int
**range:** 0..4294967295
.. attribute:: bad_length
Bad length
**type**\: int
**range:** 0..4294967295
.. attribute:: failed
Failed
**type**\: int
**range:** 0..4294967295
.. attribute:: generate_response_failures
Generate response fail
**type**\: int
**range:** 0..4294967295
.. attribute:: ignored
Ignored
**type**\: int
**range:** 0..4294967295
.. attribute:: missing
Missing
**type**\: int
**range:** 0..4294967295
.. attribute:: passed
Passed
**type**\: int
**range:** 0..4294967295
.. attribute:: skipped
Skipped
**type**\: int
**range:** 0..4294967295
.. attribute:: unexpected
Unexpected
**type**\: int
**range:** 0..4294967295
.. attribute:: unexpected_zlb
Unexpected ZLB
**type**\: int
**range:** 0..4294967295
.. attribute:: validate
Validate
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.bad_hash = None
self.bad_length = None
self.failed = None
self.generate_response_failures = None
self.ignored = None
self.missing = None
self.passed = None
self.skipped = None
self.unexpected = None
self.unexpected_zlb = None
self.validate = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tp/Cisco-IOS-XR-tunnel-l2tun-oper:counters/Cisco-IOS-XR-tunnel-l2tun-oper:control/Cisco-IOS-XR-tunnel-l2tun-oper:tunnel-xr/Cisco-IOS-XR-tunnel-l2tun-oper:authentication/Cisco-IOS-XR-tunnel-l2tun-oper:nonce-avp'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.bad_hash is not None:
return True
if self.bad_length is not None:
return True
if self.failed is not None:
return True
if self.generate_response_failures is not None:
return True
if self.ignored is not None:
return True
if self.missing is not None:
return True
if self.passed is not None:
return True
if self.skipped is not None:
return True
if self.unexpected is not None:
return True
if self.unexpected_zlb is not None:
return True
if self.validate is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tp.Counters.Control.TunnelXr.Authentication.NonceAvp']['meta_info']
class CommonDigest(object):
"""
Common digest statistics
.. attribute:: bad_hash
Bad hash
**type**\: int
**range:** 0..4294967295
.. attribute:: bad_length
Bad length
**type**\: int
**range:** 0..4294967295
.. attribute:: failed
Failed
**type**\: int
**range:** 0..4294967295
.. attribute:: generate_response_failures
Generate response fail
**type**\: int
**range:** 0..4294967295
.. attribute:: ignored
Ignored
**type**\: int
**range:** 0..4294967295
.. attribute:: missing
Missing
**type**\: int
**range:** 0..4294967295
.. attribute:: passed
Passed
**type**\: int
**range:** 0..4294967295
.. attribute:: skipped
Skipped
**type**\: int
**range:** 0..4294967295
.. attribute:: unexpected
Unexpected
**type**\: int
**range:** 0..4294967295
.. attribute:: unexpected_zlb
Unexpected ZLB
**type**\: int
**range:** 0..4294967295
.. attribute:: validate
Validate
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.bad_hash = None
self.bad_length = None
self.failed = None
self.generate_response_failures = None
self.ignored = None
self.missing = None
self.passed = None
self.skipped = None
self.unexpected = None
self.unexpected_zlb = None
self.validate = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tp/Cisco-IOS-XR-tunnel-l2tun-oper:counters/Cisco-IOS-XR-tunnel-l2tun-oper:control/Cisco-IOS-XR-tunnel-l2tun-oper:tunnel-xr/Cisco-IOS-XR-tunnel-l2tun-oper:authentication/Cisco-IOS-XR-tunnel-l2tun-oper:common-digest'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.bad_hash is not None:
return True
if self.bad_length is not None:
return True
if self.failed is not None:
return True
if self.generate_response_failures is not None:
return True
if self.ignored is not None:
return True
if self.missing is not None:
return True
if self.passed is not None:
return True
if self.skipped is not None:
return True
if self.unexpected is not None:
return True
if self.unexpected_zlb is not None:
return True
if self.validate is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tp.Counters.Control.TunnelXr.Authentication.CommonDigest']['meta_info']
class PrimaryDigest(object):
"""
Primary digest statistics
.. attribute:: bad_hash
Bad hash
**type**\: int
**range:** 0..4294967295
.. attribute:: bad_length
Bad length
**type**\: int
**range:** 0..4294967295
.. attribute:: failed
Failed
**type**\: int
**range:** 0..4294967295
.. attribute:: generate_response_failures
Generate response fail
**type**\: int
**range:** 0..4294967295
.. attribute:: ignored
Ignored
**type**\: int
**range:** 0..4294967295
.. attribute:: missing
Missing
**type**\: int
**range:** 0..4294967295
.. attribute:: passed
Passed
**type**\: int
**range:** 0..4294967295
.. attribute:: skipped
Skipped
**type**\: int
**range:** 0..4294967295
.. attribute:: unexpected
Unexpected
**type**\: int
**range:** 0..4294967295
.. attribute:: unexpected_zlb
Unexpected ZLB
**type**\: int
**range:** 0..4294967295
.. attribute:: validate
Validate
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.bad_hash = None
self.bad_length = None
self.failed = None
self.generate_response_failures = None
self.ignored = None
self.missing = None
self.passed = None
self.skipped = None
self.unexpected = None
self.unexpected_zlb = None
self.validate = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tp/Cisco-IOS-XR-tunnel-l2tun-oper:counters/Cisco-IOS-XR-tunnel-l2tun-oper:control/Cisco-IOS-XR-tunnel-l2tun-oper:tunnel-xr/Cisco-IOS-XR-tunnel-l2tun-oper:authentication/Cisco-IOS-XR-tunnel-l2tun-oper:primary-digest'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.bad_hash is not None:
return True
if self.bad_length is not None:
return True
if self.failed is not None:
return True
if self.generate_response_failures is not None:
return True
if self.ignored is not None:
return True
if self.missing is not None:
return True
if self.passed is not None:
return True
if self.skipped is not None:
return True
if self.unexpected is not None:
return True
if self.unexpected_zlb is not None:
return True
if self.validate is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tp.Counters.Control.TunnelXr.Authentication.PrimaryDigest']['meta_info']
class SecondaryDigest(object):
"""
Secondary digest statistics
.. attribute:: bad_hash
Bad hash
**type**\: int
**range:** 0..4294967295
.. attribute:: bad_length
Bad length
**type**\: int
**range:** 0..4294967295
.. attribute:: failed
Failed
**type**\: int
**range:** 0..4294967295
.. attribute:: generate_response_failures
Generate response fail
**type**\: int
**range:** 0..4294967295
.. attribute:: ignored
Ignored
**type**\: int
**range:** 0..4294967295
.. attribute:: missing
Missing
**type**\: int
**range:** 0..4294967295
.. attribute:: passed
Passed
**type**\: int
**range:** 0..4294967295
.. attribute:: skipped
Skipped
**type**\: int
**range:** 0..4294967295
.. attribute:: unexpected
Unexpected
**type**\: int
**range:** 0..4294967295
.. attribute:: unexpected_zlb
Unexpected ZLB
**type**\: int
**range:** 0..4294967295
.. attribute:: validate
Validate
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.bad_hash = None
self.bad_length = None
self.failed = None
self.generate_response_failures = None
self.ignored = None
self.missing = None
self.passed = None
self.skipped = None
self.unexpected = None
self.unexpected_zlb = None
self.validate = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tp/Cisco-IOS-XR-tunnel-l2tun-oper:counters/Cisco-IOS-XR-tunnel-l2tun-oper:control/Cisco-IOS-XR-tunnel-l2tun-oper:tunnel-xr/Cisco-IOS-XR-tunnel-l2tun-oper:authentication/Cisco-IOS-XR-tunnel-l2tun-oper:secondary-digest'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.bad_hash is not None:
return True
if self.bad_length is not None:
return True
if self.failed is not None:
return True
if self.generate_response_failures is not None:
return True
if self.ignored is not None:
return True
if self.missing is not None:
return True
if self.passed is not None:
return True
if self.skipped is not None:
return True
if self.unexpected is not None:
return True
if self.unexpected_zlb is not None:
return True
if self.validate is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tp.Counters.Control.TunnelXr.Authentication.SecondaryDigest']['meta_info']
class IntegrityCheck(object):
"""
Integrity check statistics
.. attribute:: bad_hash
Bad hash
**type**\: int
**range:** 0..4294967295
.. attribute:: bad_length
Bad length
**type**\: int
**range:** 0..4294967295
.. attribute:: failed
Failed
**type**\: int
**range:** 0..4294967295
.. attribute:: generate_response_failures
Generate response fail
**type**\: int
**range:** 0..4294967295
.. attribute:: ignored
Ignored
**type**\: int
**range:** 0..4294967295
.. attribute:: missing
Missing
**type**\: int
**range:** 0..4294967295
.. attribute:: passed
Passed
**type**\: int
**range:** 0..4294967295
.. attribute:: skipped
Skipped
**type**\: int
**range:** 0..4294967295
.. attribute:: unexpected
Unexpected
**type**\: int
**range:** 0..4294967295
.. attribute:: unexpected_zlb
Unexpected ZLB
**type**\: int
**range:** 0..4294967295
.. attribute:: validate
Validate
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.bad_hash = None
self.bad_length = None
self.failed = None
self.generate_response_failures = None
self.ignored = None
self.missing = None
self.passed = None
self.skipped = None
self.unexpected = None
self.unexpected_zlb = None
self.validate = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tp/Cisco-IOS-XR-tunnel-l2tun-oper:counters/Cisco-IOS-XR-tunnel-l2tun-oper:control/Cisco-IOS-XR-tunnel-l2tun-oper:tunnel-xr/Cisco-IOS-XR-tunnel-l2tun-oper:authentication/Cisco-IOS-XR-tunnel-l2tun-oper:integrity-check'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.bad_hash is not None:
return True
if self.bad_length is not None:
return True
if self.failed is not None:
return True
if self.generate_response_failures is not None:
return True
if self.ignored is not None:
return True
if self.missing is not None:
return True
if self.passed is not None:
return True
if self.skipped is not None:
return True
if self.unexpected is not None:
return True
if self.unexpected_zlb is not None:
return True
if self.validate is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tp.Counters.Control.TunnelXr.Authentication.IntegrityCheck']['meta_info']
class LocalSecret(object):
"""
Local secret statistics
.. attribute:: bad_hash
Bad hash
**type**\: int
**range:** 0..4294967295
.. attribute:: bad_length
Bad length
**type**\: int
**range:** 0..4294967295
.. attribute:: failed
Failed
**type**\: int
**range:** 0..4294967295
.. attribute:: generate_response_failures
Generate response fail
**type**\: int
**range:** 0..4294967295
.. attribute:: ignored
Ignored
**type**\: int
**range:** 0..4294967295
.. attribute:: missing
Missing
**type**\: int
**range:** 0..4294967295
.. attribute:: passed
Passed
**type**\: int
**range:** 0..4294967295
.. attribute:: skipped
Skipped
**type**\: int
**range:** 0..4294967295
.. attribute:: unexpected
Unexpected
**type**\: int
**range:** 0..4294967295
.. attribute:: unexpected_zlb
Unexpected ZLB
**type**\: int
**range:** 0..4294967295
.. attribute:: validate
Validate
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.bad_hash = None
self.bad_length = None
self.failed = None
self.generate_response_failures = None
self.ignored = None
self.missing = None
self.passed = None
self.skipped = None
self.unexpected = None
self.unexpected_zlb = None
self.validate = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tp/Cisco-IOS-XR-tunnel-l2tun-oper:counters/Cisco-IOS-XR-tunnel-l2tun-oper:control/Cisco-IOS-XR-tunnel-l2tun-oper:tunnel-xr/Cisco-IOS-XR-tunnel-l2tun-oper:authentication/Cisco-IOS-XR-tunnel-l2tun-oper:local-secret'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.bad_hash is not None:
return True
if self.bad_length is not None:
return True
if self.failed is not None:
return True
if self.generate_response_failures is not None:
return True
if self.ignored is not None:
return True
if self.missing is not None:
return True
if self.passed is not None:
return True
if self.skipped is not None:
return True
if self.unexpected is not None:
return True
if self.unexpected_zlb is not None:
return True
if self.validate is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tp.Counters.Control.TunnelXr.Authentication.LocalSecret']['meta_info']
class ChallengeAvp(object):
"""
Challenge AVP statistics
.. attribute:: bad_hash
Bad hash
**type**\: int
**range:** 0..4294967295
.. attribute:: bad_length
Bad length
**type**\: int
**range:** 0..4294967295
.. attribute:: failed
Failed
**type**\: int
**range:** 0..4294967295
.. attribute:: generate_response_failures
Generate response fail
**type**\: int
**range:** 0..4294967295
.. attribute:: ignored
Ignored
**type**\: int
**range:** 0..4294967295
.. attribute:: missing
Missing
**type**\: int
**range:** 0..4294967295
.. attribute:: passed
Passed
**type**\: int
**range:** 0..4294967295
.. attribute:: skipped
Skipped
**type**\: int
**range:** 0..4294967295
.. attribute:: unexpected
Unexpected
**type**\: int
**range:** 0..4294967295
.. attribute:: unexpected_zlb
Unexpected ZLB
**type**\: int
**range:** 0..4294967295
.. attribute:: validate
Validate
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.bad_hash = None
self.bad_length = None
self.failed = None
self.generate_response_failures = None
self.ignored = None
self.missing = None
self.passed = None
self.skipped = None
self.unexpected = None
self.unexpected_zlb = None
self.validate = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tp/Cisco-IOS-XR-tunnel-l2tun-oper:counters/Cisco-IOS-XR-tunnel-l2tun-oper:control/Cisco-IOS-XR-tunnel-l2tun-oper:tunnel-xr/Cisco-IOS-XR-tunnel-l2tun-oper:authentication/Cisco-IOS-XR-tunnel-l2tun-oper:challenge-avp'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.bad_hash is not None:
return True
if self.bad_length is not None:
return True
if self.failed is not None:
return True
if self.generate_response_failures is not None:
return True
if self.ignored is not None:
return True
if self.missing is not None:
return True
if self.passed is not None:
return True
if self.skipped is not None:
return True
if self.unexpected is not None:
return True
if self.unexpected_zlb is not None:
return True
if self.validate is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tp.Counters.Control.TunnelXr.Authentication.ChallengeAvp']['meta_info']
class ChallengeReponse(object):
"""
Challenge response statistics
.. attribute:: bad_hash
Bad hash
**type**\: int
**range:** 0..4294967295
.. attribute:: bad_length
Bad length
**type**\: int
**range:** 0..4294967295
.. attribute:: failed
Failed
**type**\: int
**range:** 0..4294967295
.. attribute:: generate_response_failures
Generate response fail
**type**\: int
**range:** 0..4294967295
.. attribute:: ignored
Ignored
**type**\: int
**range:** 0..4294967295
.. attribute:: missing
Missing
**type**\: int
**range:** 0..4294967295
.. attribute:: passed
Passed
**type**\: int
**range:** 0..4294967295
.. attribute:: skipped
Skipped
**type**\: int
**range:** 0..4294967295
.. attribute:: unexpected
Unexpected
**type**\: int
**range:** 0..4294967295
.. attribute:: unexpected_zlb
Unexpected ZLB
**type**\: int
**range:** 0..4294967295
.. attribute:: validate
Validate
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.bad_hash = None
self.bad_length = None
self.failed = None
self.generate_response_failures = None
self.ignored = None
self.missing = None
self.passed = None
self.skipped = None
self.unexpected = None
self.unexpected_zlb = None
self.validate = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tp/Cisco-IOS-XR-tunnel-l2tun-oper:counters/Cisco-IOS-XR-tunnel-l2tun-oper:control/Cisco-IOS-XR-tunnel-l2tun-oper:tunnel-xr/Cisco-IOS-XR-tunnel-l2tun-oper:authentication/Cisco-IOS-XR-tunnel-l2tun-oper:challenge-reponse'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.bad_hash is not None:
return True
if self.bad_length is not None:
return True
if self.failed is not None:
return True
if self.generate_response_failures is not None:
return True
if self.ignored is not None:
return True
if self.missing is not None:
return True
if self.passed is not None:
return True
if self.skipped is not None:
return True
if self.unexpected is not None:
return True
if self.unexpected_zlb is not None:
return True
if self.validate is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tp.Counters.Control.TunnelXr.Authentication.ChallengeReponse']['meta_info']
class OverallStatistics(object):
"""
Overall statistics
.. attribute:: bad_hash
Bad hash
**type**\: int
**range:** 0..4294967295
.. attribute:: bad_length
Bad length
**type**\: int
**range:** 0..4294967295
.. attribute:: failed
Failed
**type**\: int
**range:** 0..4294967295
.. attribute:: generate_response_failures
Generate response fail
**type**\: int
**range:** 0..4294967295
.. attribute:: ignored
Ignored
**type**\: int
**range:** 0..4294967295
.. attribute:: missing
Missing
**type**\: int
**range:** 0..4294967295
.. attribute:: passed
Passed
**type**\: int
**range:** 0..4294967295
.. attribute:: skipped
Skipped
**type**\: int
**range:** 0..4294967295
.. attribute:: unexpected
Unexpected
**type**\: int
**range:** 0..4294967295
.. attribute:: unexpected_zlb
Unexpected ZLB
**type**\: int
**range:** 0..4294967295
.. attribute:: validate
Validate
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.bad_hash = None
self.bad_length = None
self.failed = None
self.generate_response_failures = None
self.ignored = None
self.missing = None
self.passed = None
self.skipped = None
self.unexpected = None
self.unexpected_zlb = None
self.validate = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tp/Cisco-IOS-XR-tunnel-l2tun-oper:counters/Cisco-IOS-XR-tunnel-l2tun-oper:control/Cisco-IOS-XR-tunnel-l2tun-oper:tunnel-xr/Cisco-IOS-XR-tunnel-l2tun-oper:authentication/Cisco-IOS-XR-tunnel-l2tun-oper:overall-statistics'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.bad_hash is not None:
return True
if self.bad_length is not None:
return True
if self.failed is not None:
return True
if self.generate_response_failures is not None:
return True
if self.ignored is not None:
return True
if self.missing is not None:
return True
if self.passed is not None:
return True
if self.skipped is not None:
return True
if self.unexpected is not None:
return True
if self.unexpected_zlb is not None:
return True
if self.validate is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tp.Counters.Control.TunnelXr.Authentication.OverallStatistics']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tp/Cisco-IOS-XR-tunnel-l2tun-oper:counters/Cisco-IOS-XR-tunnel-l2tun-oper:control/Cisco-IOS-XR-tunnel-l2tun-oper:tunnel-xr/Cisco-IOS-XR-tunnel-l2tun-oper:authentication'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.challenge_avp is not None and self.challenge_avp._has_data():
return True
if self.challenge_reponse is not None and self.challenge_reponse._has_data():
return True
if self.common_digest is not None and self.common_digest._has_data():
return True
if self.integrity_check is not None and self.integrity_check._has_data():
return True
if self.local_secret is not None and self.local_secret._has_data():
return True
if self.nonce_avp is not None and self.nonce_avp._has_data():
return True
if self.overall_statistics is not None and self.overall_statistics._has_data():
return True
if self.primary_digest is not None and self.primary_digest._has_data():
return True
if self.secondary_digest is not None and self.secondary_digest._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tp.Counters.Control.TunnelXr.Authentication']['meta_info']
class Global_(object):
"""
Tunnel counters
.. attribute:: drop
Drop data
**type**\: :py:class:`Drop <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tp.Counters.Control.TunnelXr.Global_.Drop>`
.. attribute:: received
Received data
**type**\: :py:class:`Received <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tp.Counters.Control.TunnelXr.Global_.Received>`
.. attribute:: retransmit
Retransmit data
**type**\: :py:class:`Retransmit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tp.Counters.Control.TunnelXr.Global_.Retransmit>`
.. attribute:: total_drop
Total drop
**type**\: int
**range:** 0..4294967295
.. attribute:: total_received
Total received
**type**\: int
**range:** 0..4294967295
.. attribute:: total_retransmit
Total retransmit
**type**\: int
**range:** 0..4294967295
.. attribute:: total_transmit
Total transmit
**type**\: int
**range:** 0..4294967295
.. attribute:: transmit
Transmit data
**type**\: :py:class:`Transmit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tp.Counters.Control.TunnelXr.Global_.Transmit>`
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.drop = L2Tp.Counters.Control.TunnelXr.Global_.Drop()
self.drop.parent = self
self.received = L2Tp.Counters.Control.TunnelXr.Global_.Received()
self.received.parent = self
self.retransmit = L2Tp.Counters.Control.TunnelXr.Global_.Retransmit()
self.retransmit.parent = self
self.total_drop = None
self.total_received = None
self.total_retransmit = None
self.total_transmit = None
self.transmit = L2Tp.Counters.Control.TunnelXr.Global_.Transmit()
self.transmit.parent = self
class Transmit(object):
"""
Transmit data
.. attribute:: acknowledgement_packets
Packets acknowledgement
**type**\: int
**range:** 0..4294967295
.. attribute:: call_disconnect_notify_packets
Call disconnect notify packets
**type**\: int
**range:** 0..4294967295
.. attribute:: hello_packets
Keep alive messages
**type**\: int
**range:** 0..4294967295
.. attribute:: incoming_call_connected_packets
Incoming call connected packets
**type**\: int
**range:** 0..4294967295
.. attribute:: incoming_call_replies
Incoming call replies
**type**\: int
**range:** 0..4294967295
.. attribute:: incoming_call_requests
Incoming call requests
**type**\: int
**range:** 0..4294967295
.. attribute:: outgoing_call_connected_packets
Outgoing call connected packets
**type**\: int
**range:** 0..4294967295
.. attribute:: outgoing_call_replies
Outgoing call replies
**type**\: int
**range:** 0..4294967295
.. attribute:: outgoing_call_requests
Outgoing call requests
**type**\: int
**range:** 0..4294967295
.. attribute:: service_relay_replies
Service relay reply counts
**type**\: int
**range:** 0..4294967295
.. attribute:: service_relay_requests
Service relay request counts
**type**\: int
**range:** 0..4294967295
.. attribute:: set_link_info_packets
Set link info packets
**type**\: int
**range:** 0..4294967295
.. attribute:: start_control_connection_notifications
Start control connection notifications
**type**\: int
**range:** 0..4294967295
.. attribute:: start_control_connection_replies
Start control connection replies
**type**\: int
**range:** 0..4294967295
.. attribute:: start_control_connection_requests
Start control connection requests
**type**\: int
**range:** 0..4294967295
.. attribute:: stop_control_connection_notifications
Stop control connection notifications
**type**\: int
**range:** 0..4294967295
.. attribute:: unknown_packets
Unknown packets
**type**\: int
**range:** 0..4294967295
.. attribute:: wan_error_notify_packets
WAN error notify packets
**type**\: int
**range:** 0..4294967295
.. attribute:: zero_length_body_packets
Zero length body packets
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.acknowledgement_packets = None
self.call_disconnect_notify_packets = None
self.hello_packets = None
self.incoming_call_connected_packets = None
self.incoming_call_replies = None
self.incoming_call_requests = None
self.outgoing_call_connected_packets = None
self.outgoing_call_replies = None
self.outgoing_call_requests = None
self.service_relay_replies = None
self.service_relay_requests = None
self.set_link_info_packets = None
self.start_control_connection_notifications = None
self.start_control_connection_replies = None
self.start_control_connection_requests = None
self.stop_control_connection_notifications = None
self.unknown_packets = None
self.wan_error_notify_packets = None
self.zero_length_body_packets = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tp/Cisco-IOS-XR-tunnel-l2tun-oper:counters/Cisco-IOS-XR-tunnel-l2tun-oper:control/Cisco-IOS-XR-tunnel-l2tun-oper:tunnel-xr/Cisco-IOS-XR-tunnel-l2tun-oper:global/Cisco-IOS-XR-tunnel-l2tun-oper:transmit'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.acknowledgement_packets is not None:
return True
if self.call_disconnect_notify_packets is not None:
return True
if self.hello_packets is not None:
return True
if self.incoming_call_connected_packets is not None:
return True
if self.incoming_call_replies is not None:
return True
if self.incoming_call_requests is not None:
return True
if self.outgoing_call_connected_packets is not None:
return True
if self.outgoing_call_replies is not None:
return True
if self.outgoing_call_requests is not None:
return True
if self.service_relay_replies is not None:
return True
if self.service_relay_requests is not None:
return True
if self.set_link_info_packets is not None:
return True
if self.start_control_connection_notifications is not None:
return True
if self.start_control_connection_replies is not None:
return True
if self.start_control_connection_requests is not None:
return True
if self.stop_control_connection_notifications is not None:
return True
if self.unknown_packets is not None:
return True
if self.wan_error_notify_packets is not None:
return True
if self.zero_length_body_packets is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tp.Counters.Control.TunnelXr.Global_.Transmit']['meta_info']
class Retransmit(object):
"""
Retransmit data
.. attribute:: acknowledgement_packets
Packets acknowledgement
**type**\: int
**range:** 0..4294967295
.. attribute:: call_disconnect_notify_packets
Call disconnect notify packets
**type**\: int
**range:** 0..4294967295
.. attribute:: hello_packets
Keep alive messages
**type**\: int
**range:** 0..4294967295
.. attribute:: incoming_call_connected_packets
Incoming call connected packets
**type**\: int
**range:** 0..4294967295
.. attribute:: incoming_call_replies
Incoming call replies
**type**\: int
**range:** 0..4294967295
.. attribute:: incoming_call_requests
Incoming call requests
**type**\: int
**range:** 0..4294967295
.. attribute:: outgoing_call_connected_packets
Outgoing call connected packets
**type**\: int
**range:** 0..4294967295
.. attribute:: outgoing_call_replies
Outgoing call replies
**type**\: int
**range:** 0..4294967295
.. attribute:: outgoing_call_requests
Outgoing call requests
**type**\: int
**range:** 0..4294967295
.. attribute:: service_relay_replies
Service relay reply counts
**type**\: int
**range:** 0..4294967295
.. attribute:: service_relay_requests
Service relay request counts
**type**\: int
**range:** 0..4294967295
.. attribute:: set_link_info_packets
Set link info packets
**type**\: int
**range:** 0..4294967295
.. attribute:: start_control_connection_notifications
Start control connection notifications
**type**\: int
**range:** 0..4294967295
.. attribute:: start_control_connection_replies
Start control connection replies
**type**\: int
**range:** 0..4294967295
.. attribute:: start_control_connection_requests
Start control connection requests
**type**\: int
**range:** 0..4294967295
.. attribute:: stop_control_connection_notifications
Stop control connection notifications
**type**\: int
**range:** 0..4294967295
.. attribute:: unknown_packets
Unknown packets
**type**\: int
**range:** 0..4294967295
.. attribute:: wan_error_notify_packets
WAN error notify packets
**type**\: int
**range:** 0..4294967295
.. attribute:: zero_length_body_packets
Zero length body packets
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.acknowledgement_packets = None
self.call_disconnect_notify_packets = None
self.hello_packets = None
self.incoming_call_connected_packets = None
self.incoming_call_replies = None
self.incoming_call_requests = None
self.outgoing_call_connected_packets = None
self.outgoing_call_replies = None
self.outgoing_call_requests = None
self.service_relay_replies = None
self.service_relay_requests = None
self.set_link_info_packets = None
self.start_control_connection_notifications = None
self.start_control_connection_replies = None
self.start_control_connection_requests = None
self.stop_control_connection_notifications = None
self.unknown_packets = None
self.wan_error_notify_packets = None
self.zero_length_body_packets = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tp/Cisco-IOS-XR-tunnel-l2tun-oper:counters/Cisco-IOS-XR-tunnel-l2tun-oper:control/Cisco-IOS-XR-tunnel-l2tun-oper:tunnel-xr/Cisco-IOS-XR-tunnel-l2tun-oper:global/Cisco-IOS-XR-tunnel-l2tun-oper:retransmit'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.acknowledgement_packets is not None:
return True
if self.call_disconnect_notify_packets is not None:
return True
if self.hello_packets is not None:
return True
if self.incoming_call_connected_packets is not None:
return True
if self.incoming_call_replies is not None:
return True
if self.incoming_call_requests is not None:
return True
if self.outgoing_call_connected_packets is not None:
return True
if self.outgoing_call_replies is not None:
return True
if self.outgoing_call_requests is not None:
return True
if self.service_relay_replies is not None:
return True
if self.service_relay_requests is not None:
return True
if self.set_link_info_packets is not None:
return True
if self.start_control_connection_notifications is not None:
return True
if self.start_control_connection_replies is not None:
return True
if self.start_control_connection_requests is not None:
return True
if self.stop_control_connection_notifications is not None:
return True
if self.unknown_packets is not None:
return True
if self.wan_error_notify_packets is not None:
return True
if self.zero_length_body_packets is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tp.Counters.Control.TunnelXr.Global_.Retransmit']['meta_info']
class Received(object):
"""
Received data
.. attribute:: acknowledgement_packets
Packets acknowledgement
**type**\: int
**range:** 0..4294967295
.. attribute:: call_disconnect_notify_packets
Call disconnect notify packets
**type**\: int
**range:** 0..4294967295
.. attribute:: hello_packets
Keep alive messages
**type**\: int
**range:** 0..4294967295
.. attribute:: incoming_call_connected_packets
Incoming call connected packets
**type**\: int
**range:** 0..4294967295
.. attribute:: incoming_call_replies
Incoming call replies
**type**\: int
**range:** 0..4294967295
.. attribute:: incoming_call_requests
Incoming call requests
**type**\: int
**range:** 0..4294967295
.. attribute:: outgoing_call_connected_packets
Outgoing call connected packets
**type**\: int
**range:** 0..4294967295
.. attribute:: outgoing_call_replies
Outgoing call replies
**type**\: int
**range:** 0..4294967295
.. attribute:: outgoing_call_requests
Outgoing call requests
**type**\: int
**range:** 0..4294967295
.. attribute:: service_relay_replies
Service relay reply counts
**type**\: int
**range:** 0..4294967295
.. attribute:: service_relay_requests
Service relay request counts
**type**\: int
**range:** 0..4294967295
.. attribute:: set_link_info_packets
Set link info packets
**type**\: int
**range:** 0..4294967295
.. attribute:: start_control_connection_notifications
Start control connection notifications
**type**\: int
**range:** 0..4294967295
.. attribute:: start_control_connection_replies
Start control connection replies
**type**\: int
**range:** 0..4294967295
.. attribute:: start_control_connection_requests
Start control connection requests
**type**\: int
**range:** 0..4294967295
.. attribute:: stop_control_connection_notifications
Stop control connection notifications
**type**\: int
**range:** 0..4294967295
.. attribute:: unknown_packets
Unknown packets
**type**\: int
**range:** 0..4294967295
.. attribute:: wan_error_notify_packets
WAN error notify packets
**type**\: int
**range:** 0..4294967295
.. attribute:: zero_length_body_packets
Zero length body packets
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.acknowledgement_packets = None
self.call_disconnect_notify_packets = None
self.hello_packets = None
self.incoming_call_connected_packets = None
self.incoming_call_replies = None
self.incoming_call_requests = None
self.outgoing_call_connected_packets = None
self.outgoing_call_replies = None
self.outgoing_call_requests = None
self.service_relay_replies = None
self.service_relay_requests = None
self.set_link_info_packets = None
self.start_control_connection_notifications = None
self.start_control_connection_replies = None
self.start_control_connection_requests = None
self.stop_control_connection_notifications = None
self.unknown_packets = None
self.wan_error_notify_packets = None
self.zero_length_body_packets = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tp/Cisco-IOS-XR-tunnel-l2tun-oper:counters/Cisco-IOS-XR-tunnel-l2tun-oper:control/Cisco-IOS-XR-tunnel-l2tun-oper:tunnel-xr/Cisco-IOS-XR-tunnel-l2tun-oper:global/Cisco-IOS-XR-tunnel-l2tun-oper:received'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.acknowledgement_packets is not None:
return True
if self.call_disconnect_notify_packets is not None:
return True
if self.hello_packets is not None:
return True
if self.incoming_call_connected_packets is not None:
return True
if self.incoming_call_replies is not None:
return True
if self.incoming_call_requests is not None:
return True
if self.outgoing_call_connected_packets is not None:
return True
if self.outgoing_call_replies is not None:
return True
if self.outgoing_call_requests is not None:
return True
if self.service_relay_replies is not None:
return True
if self.service_relay_requests is not None:
return True
if self.set_link_info_packets is not None:
return True
if self.start_control_connection_notifications is not None:
return True
if self.start_control_connection_replies is not None:
return True
if self.start_control_connection_requests is not None:
return True
if self.stop_control_connection_notifications is not None:
return True
if self.unknown_packets is not None:
return True
if self.wan_error_notify_packets is not None:
return True
if self.zero_length_body_packets is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tp.Counters.Control.TunnelXr.Global_.Received']['meta_info']
class Drop(object):
"""
Drop data
.. attribute:: acknowledgement_packets
Packets acknowledgement
**type**\: int
**range:** 0..4294967295
.. attribute:: call_disconnect_notify_packets
Call disconnect notify packets
**type**\: int
**range:** 0..4294967295
.. attribute:: hello_packets
Keep alive messages
**type**\: int
**range:** 0..4294967295
.. attribute:: incoming_call_connected_packets
Incoming call connected packets
**type**\: int
**range:** 0..4294967295
.. attribute:: incoming_call_replies
Incoming call replies
**type**\: int
**range:** 0..4294967295
.. attribute:: incoming_call_requests
Incoming call requests
**type**\: int
**range:** 0..4294967295
.. attribute:: outgoing_call_connected_packets
Outgoing call connected packets
**type**\: int
**range:** 0..4294967295
.. attribute:: outgoing_call_replies
Outgoing call replies
**type**\: int
**range:** 0..4294967295
.. attribute:: outgoing_call_requests
Outgoing call requests
**type**\: int
**range:** 0..4294967295
.. attribute:: service_relay_replies
Service relay reply counts
**type**\: int
**range:** 0..4294967295
.. attribute:: service_relay_requests
Service relay request counts
**type**\: int
**range:** 0..4294967295
.. attribute:: set_link_info_packets
Set link info packets
**type**\: int
**range:** 0..4294967295
.. attribute:: start_control_connection_notifications
Start control connection notifications
**type**\: int
**range:** 0..4294967295
.. attribute:: start_control_connection_replies
Start control connection replies
**type**\: int
**range:** 0..4294967295
.. attribute:: start_control_connection_requests
Start control connection requests
**type**\: int
**range:** 0..4294967295
.. attribute:: stop_control_connection_notifications
Stop control connection notifications
**type**\: int
**range:** 0..4294967295
.. attribute:: unknown_packets
Unknown packets
**type**\: int
**range:** 0..4294967295
.. attribute:: wan_error_notify_packets
WAN error notify packets
**type**\: int
**range:** 0..4294967295
.. attribute:: zero_length_body_packets
Zero length body packets
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.acknowledgement_packets = None
self.call_disconnect_notify_packets = None
self.hello_packets = None
self.incoming_call_connected_packets = None
self.incoming_call_replies = None
self.incoming_call_requests = None
self.outgoing_call_connected_packets = None
self.outgoing_call_replies = None
self.outgoing_call_requests = None
self.service_relay_replies = None
self.service_relay_requests = None
self.set_link_info_packets = None
self.start_control_connection_notifications = None
self.start_control_connection_replies = None
self.start_control_connection_requests = None
self.stop_control_connection_notifications = None
self.unknown_packets = None
self.wan_error_notify_packets = None
self.zero_length_body_packets = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tp/Cisco-IOS-XR-tunnel-l2tun-oper:counters/Cisco-IOS-XR-tunnel-l2tun-oper:control/Cisco-IOS-XR-tunnel-l2tun-oper:tunnel-xr/Cisco-IOS-XR-tunnel-l2tun-oper:global/Cisco-IOS-XR-tunnel-l2tun-oper:drop'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.acknowledgement_packets is not None:
return True
if self.call_disconnect_notify_packets is not None:
return True
if self.hello_packets is not None:
return True
if self.incoming_call_connected_packets is not None:
return True
if self.incoming_call_replies is not None:
return True
if self.incoming_call_requests is not None:
return True
if self.outgoing_call_connected_packets is not None:
return True
if self.outgoing_call_replies is not None:
return True
if self.outgoing_call_requests is not None:
return True
if self.service_relay_replies is not None:
return True
if self.service_relay_requests is not None:
return True
if self.set_link_info_packets is not None:
return True
if self.start_control_connection_notifications is not None:
return True
if self.start_control_connection_replies is not None:
return True
if self.start_control_connection_requests is not None:
return True
if self.stop_control_connection_notifications is not None:
return True
if self.unknown_packets is not None:
return True
if self.wan_error_notify_packets is not None:
return True
if self.zero_length_body_packets is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tp.Counters.Control.TunnelXr.Global_.Drop']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tp/Cisco-IOS-XR-tunnel-l2tun-oper:counters/Cisco-IOS-XR-tunnel-l2tun-oper:control/Cisco-IOS-XR-tunnel-l2tun-oper:tunnel-xr/Cisco-IOS-XR-tunnel-l2tun-oper:global'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.drop is not None and self.drop._has_data():
return True
if self.received is not None and self.received._has_data():
return True
if self.retransmit is not None and self.retransmit._has_data():
return True
if self.total_drop is not None:
return True
if self.total_received is not None:
return True
if self.total_retransmit is not None:
return True
if self.total_transmit is not None:
return True
if self.transmit is not None and self.transmit._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tp.Counters.Control.TunnelXr.Global_']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tp/Cisco-IOS-XR-tunnel-l2tun-oper:counters/Cisco-IOS-XR-tunnel-l2tun-oper:control/Cisco-IOS-XR-tunnel-l2tun-oper:tunnel-xr'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.authentication is not None and self.authentication._has_data():
return True
if self.global_ is not None and self.global_._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tp.Counters.Control.TunnelXr']['meta_info']
class Tunnels(object):
"""
Table of tunnel IDs of control message counters
.. attribute:: tunnel
L2TP tunnel control message counters
**type**\: list of :py:class:`Tunnel <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tp.Counters.Control.Tunnels.Tunnel>`
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.tunnel = YList()
self.tunnel.parent = self
self.tunnel.name = 'tunnel'
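# Illustrative sketch (editorial; not part of the generated bindings): 'tunnel'
# is a keyed YList, and entries are plain Tunnel objects appended to it, keyed
# by tunnel_id. The id value 1234 below is a placeholder.
#
#   tunnels = L2Tp.Counters.Control.Tunnels()
#   entry = L2Tp.Counters.Control.Tunnels.Tunnel()
#   entry.tunnel_id = 1234
#   tunnels.tunnel.append(entry)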
class Tunnel(object):
"""
L2TP tunnel control message counters
.. attribute:: tunnel_id <key>
L2TP tunnel ID
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: brief
L2TP control message local and remote addresses
**type**\: :py:class:`Brief <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tp.Counters.Control.Tunnels.Tunnel.Brief>`
.. attribute:: global_
Global data
**type**\: :py:class:`Global_ <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tp.Counters.Control.Tunnels.Tunnel.Global_>`
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.tunnel_id = None
self.brief = L2Tp.Counters.Control.Tunnels.Tunnel.Brief()
self.brief.parent = self
self.global_ = L2Tp.Counters.Control.Tunnels.Tunnel.Global_()
self.global_.parent = self
class Brief(object):
"""
L2TP control message local and remote addresses
.. attribute:: local_address
Local IP address
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: remote_address
Remote IP address
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: remote_tunnel_id
Remote tunnel ID
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.local_address = None
self.remote_address = None
self.remote_tunnel_id = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tunnel-l2tun-oper:brief'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.local_address is not None:
return True
if self.remote_address is not None:
return True
if self.remote_tunnel_id is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tp.Counters.Control.Tunnels.Tunnel.Brief']['meta_info']
class Global_(object):
"""
Global data
.. attribute:: drop
Drop data
**type**\: :py:class:`Drop <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tp.Counters.Control.Tunnels.Tunnel.Global_.Drop>`
.. attribute:: received
Received data
**type**\: :py:class:`Received <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tp.Counters.Control.Tunnels.Tunnel.Global_.Received>`
.. attribute:: retransmit
Retransmit data
**type**\: :py:class:`Retransmit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tp.Counters.Control.Tunnels.Tunnel.Global_.Retransmit>`
.. attribute:: total_drop
Total drop
**type**\: int
**range:** 0..4294967295
.. attribute:: total_received
Total received
**type**\: int
**range:** 0..4294967295
.. attribute:: total_retransmit
Total retransmit
**type**\: int
**range:** 0..4294967295
.. attribute:: total_transmit
Total transmit
**type**\: int
**range:** 0..4294967295
.. attribute:: transmit
Transmit data
**type**\: :py:class:`Transmit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tp.Counters.Control.Tunnels.Tunnel.Global_.Transmit>`
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.drop = L2Tp.Counters.Control.Tunnels.Tunnel.Global_.Drop()
self.drop.parent = self
self.received = L2Tp.Counters.Control.Tunnels.Tunnel.Global_.Received()
self.received.parent = self
self.retransmit = L2Tp.Counters.Control.Tunnels.Tunnel.Global_.Retransmit()
self.retransmit.parent = self
self.total_drop = None
self.total_received = None
self.total_retransmit = None
self.total_transmit = None
self.transmit = L2Tp.Counters.Control.Tunnels.Tunnel.Global_.Transmit()
self.transmit.parent = self
class Transmit(object):
"""
Transmit data
.. attribute:: acknowledgement_packets
Acknowledgement packets
**type**\: int
**range:** 0..4294967295
.. attribute:: call_disconnect_notify_packets
Call disconnect notify packets
**type**\: int
**range:** 0..4294967295
.. attribute:: hello_packets
Keep alive messages
**type**\: int
**range:** 0..4294967295
.. attribute:: incoming_call_connected_packets
Incoming call connected packets
**type**\: int
**range:** 0..4294967295
.. attribute:: incoming_call_replies
Incoming call replies
**type**\: int
**range:** 0..4294967295
.. attribute:: incoming_call_requests
Incoming call requests
**type**\: int
**range:** 0..4294967295
.. attribute:: outgoing_call_connected_packets
Outgoing call connected packets
**type**\: int
**range:** 0..4294967295
.. attribute:: outgoing_call_replies
Outgoing call replies
**type**\: int
**range:** 0..4294967295
.. attribute:: outgoing_call_requests
Outgoing call requests
**type**\: int
**range:** 0..4294967295
.. attribute:: service_relay_replies
Service relay reply counts
**type**\: int
**range:** 0..4294967295
.. attribute:: service_relay_requests
Service relay request counts
**type**\: int
**range:** 0..4294967295
.. attribute:: set_link_info_packets
Set link info packets
**type**\: int
**range:** 0..4294967295
.. attribute:: start_control_connection_notifications
Start control connection notifications
**type**\: int
**range:** 0..4294967295
.. attribute:: start_control_connection_replies
Start control connection replies
**type**\: int
**range:** 0..4294967295
.. attribute:: start_control_connection_requests
Start control connection requests
**type**\: int
**range:** 0..4294967295
.. attribute:: stop_control_connection_notifications
Stop control connection notifications
**type**\: int
**range:** 0..4294967295
.. attribute:: unknown_packets
Unknown packets
**type**\: int
**range:** 0..4294967295
.. attribute:: wan_error_notify_packets
WAN error notify packets
**type**\: int
**range:** 0..4294967295
.. attribute:: zero_length_body_packets
Zero length body packets
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.acknowledgement_packets = None
self.call_disconnect_notify_packets = None
self.hello_packets = None
self.incoming_call_connected_packets = None
self.incoming_call_replies = None
self.incoming_call_requests = None
self.outgoing_call_connected_packets = None
self.outgoing_call_replies = None
self.outgoing_call_requests = None
self.service_relay_replies = None
self.service_relay_requests = None
self.set_link_info_packets = None
self.start_control_connection_notifications = None
self.start_control_connection_replies = None
self.start_control_connection_requests = None
self.stop_control_connection_notifications = None
self.unknown_packets = None
self.wan_error_notify_packets = None
self.zero_length_body_packets = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tunnel-l2tun-oper:transmit'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.acknowledgement_packets is not None:
return True
if self.call_disconnect_notify_packets is not None:
return True
if self.hello_packets is not None:
return True
if self.incoming_call_connected_packets is not None:
return True
if self.incoming_call_replies is not None:
return True
if self.incoming_call_requests is not None:
return True
if self.outgoing_call_connected_packets is not None:
return True
if self.outgoing_call_replies is not None:
return True
if self.outgoing_call_requests is not None:
return True
if self.service_relay_replies is not None:
return True
if self.service_relay_requests is not None:
return True
if self.set_link_info_packets is not None:
return True
if self.start_control_connection_notifications is not None:
return True
if self.start_control_connection_replies is not None:
return True
if self.start_control_connection_requests is not None:
return True
if self.stop_control_connection_notifications is not None:
return True
if self.unknown_packets is not None:
return True
if self.wan_error_notify_packets is not None:
return True
if self.zero_length_body_packets is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tp.Counters.Control.Tunnels.Tunnel.Global_.Transmit']['meta_info']
class Retransmit(object):
"""
Retransmit data
.. attribute:: acknowledgement_packets
Acknowledgement packets
**type**\: int
**range:** 0..4294967295
.. attribute:: call_disconnect_notify_packets
Call disconnect notify packets
**type**\: int
**range:** 0..4294967295
.. attribute:: hello_packets
Keep alive messages
**type**\: int
**range:** 0..4294967295
.. attribute:: incoming_call_connected_packets
Incoming call connected packets
**type**\: int
**range:** 0..4294967295
.. attribute:: incoming_call_replies
Incoming call replies
**type**\: int
**range:** 0..4294967295
.. attribute:: incoming_call_requests
Incoming call requests
**type**\: int
**range:** 0..4294967295
.. attribute:: outgoing_call_connected_packets
Outgoing call connected packets
**type**\: int
**range:** 0..4294967295
.. attribute:: outgoing_call_replies
Outgoing call replies
**type**\: int
**range:** 0..4294967295
.. attribute:: outgoing_call_requests
Outgoing call requests
**type**\: int
**range:** 0..4294967295
.. attribute:: service_relay_replies
Service relay reply counts
**type**\: int
**range:** 0..4294967295
.. attribute:: service_relay_requests
Service relay request counts
**type**\: int
**range:** 0..4294967295
.. attribute:: set_link_info_packets
Set link info packets
**type**\: int
**range:** 0..4294967295
.. attribute:: start_control_connection_notifications
Start control connection notifications
**type**\: int
**range:** 0..4294967295
.. attribute:: start_control_connection_replies
Start control connection replies
**type**\: int
**range:** 0..4294967295
.. attribute:: start_control_connection_requests
Start control connection requests
**type**\: int
**range:** 0..4294967295
.. attribute:: stop_control_connection_notifications
Stop control connection notifications
**type**\: int
**range:** 0..4294967295
.. attribute:: unknown_packets
Unknown packets
**type**\: int
**range:** 0..4294967295
.. attribute:: wan_error_notify_packets
WAN error notify packets
**type**\: int
**range:** 0..4294967295
.. attribute:: zero_length_body_packets
Zero length body packets
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.acknowledgement_packets = None
self.call_disconnect_notify_packets = None
self.hello_packets = None
self.incoming_call_connected_packets = None
self.incoming_call_replies = None
self.incoming_call_requests = None
self.outgoing_call_connected_packets = None
self.outgoing_call_replies = None
self.outgoing_call_requests = None
self.service_relay_replies = None
self.service_relay_requests = None
self.set_link_info_packets = None
self.start_control_connection_notifications = None
self.start_control_connection_replies = None
self.start_control_connection_requests = None
self.stop_control_connection_notifications = None
self.unknown_packets = None
self.wan_error_notify_packets = None
self.zero_length_body_packets = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tunnel-l2tun-oper:retransmit'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.acknowledgement_packets is not None:
return True
if self.call_disconnect_notify_packets is not None:
return True
if self.hello_packets is not None:
return True
if self.incoming_call_connected_packets is not None:
return True
if self.incoming_call_replies is not None:
return True
if self.incoming_call_requests is not None:
return True
if self.outgoing_call_connected_packets is not None:
return True
if self.outgoing_call_replies is not None:
return True
if self.outgoing_call_requests is not None:
return True
if self.service_relay_replies is not None:
return True
if self.service_relay_requests is not None:
return True
if self.set_link_info_packets is not None:
return True
if self.start_control_connection_notifications is not None:
return True
if self.start_control_connection_replies is not None:
return True
if self.start_control_connection_requests is not None:
return True
if self.stop_control_connection_notifications is not None:
return True
if self.unknown_packets is not None:
return True
if self.wan_error_notify_packets is not None:
return True
if self.zero_length_body_packets is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tp.Counters.Control.Tunnels.Tunnel.Global_.Retransmit']['meta_info']
class Received(object):
"""
Received data
.. attribute:: acknowledgement_packets
Acknowledgement packets
**type**\: int
**range:** 0..4294967295
.. attribute:: call_disconnect_notify_packets
Call disconnect notify packets
**type**\: int
**range:** 0..4294967295
.. attribute:: hello_packets
Keep alive messages
**type**\: int
**range:** 0..4294967295
.. attribute:: incoming_call_connected_packets
Incoming call connected packets
**type**\: int
**range:** 0..4294967295
.. attribute:: incoming_call_replies
Incoming call replies
**type**\: int
**range:** 0..4294967295
.. attribute:: incoming_call_requests
Incoming call requests
**type**\: int
**range:** 0..4294967295
.. attribute:: outgoing_call_connected_packets
Outgoing call connected packets
**type**\: int
**range:** 0..4294967295
.. attribute:: outgoing_call_replies
Outgoing call replies
**type**\: int
**range:** 0..4294967295
.. attribute:: outgoing_call_requests
Outgoing call requests
**type**\: int
**range:** 0..4294967295
.. attribute:: service_relay_replies
Service relay reply counts
**type**\: int
**range:** 0..4294967295
.. attribute:: service_relay_requests
Service relay request counts
**type**\: int
**range:** 0..4294967295
.. attribute:: set_link_info_packets
Set link info packets
**type**\: int
**range:** 0..4294967295
.. attribute:: start_control_connection_notifications
Start control connection notifications
**type**\: int
**range:** 0..4294967295
.. attribute:: start_control_connection_replies
Start control connection replies
**type**\: int
**range:** 0..4294967295
.. attribute:: start_control_connection_requests
Start control connection requests
**type**\: int
**range:** 0..4294967295
.. attribute:: stop_control_connection_notifications
Stop control connection notifications
**type**\: int
**range:** 0..4294967295
.. attribute:: unknown_packets
Unknown packets
**type**\: int
**range:** 0..4294967295
.. attribute:: wan_error_notify_packets
WAN error notify packets
**type**\: int
**range:** 0..4294967295
.. attribute:: zero_length_body_packets
Zero length body packets
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.acknowledgement_packets = None
self.call_disconnect_notify_packets = None
self.hello_packets = None
self.incoming_call_connected_packets = None
self.incoming_call_replies = None
self.incoming_call_requests = None
self.outgoing_call_connected_packets = None
self.outgoing_call_replies = None
self.outgoing_call_requests = None
self.service_relay_replies = None
self.service_relay_requests = None
self.set_link_info_packets = None
self.start_control_connection_notifications = None
self.start_control_connection_replies = None
self.start_control_connection_requests = None
self.stop_control_connection_notifications = None
self.unknown_packets = None
self.wan_error_notify_packets = None
self.zero_length_body_packets = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tunnel-l2tun-oper:received'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.acknowledgement_packets is not None:
return True
if self.call_disconnect_notify_packets is not None:
return True
if self.hello_packets is not None:
return True
if self.incoming_call_connected_packets is not None:
return True
if self.incoming_call_replies is not None:
return True
if self.incoming_call_requests is not None:
return True
if self.outgoing_call_connected_packets is not None:
return True
if self.outgoing_call_replies is not None:
return True
if self.outgoing_call_requests is not None:
return True
if self.service_relay_replies is not None:
return True
if self.service_relay_requests is not None:
return True
if self.set_link_info_packets is not None:
return True
if self.start_control_connection_notifications is not None:
return True
if self.start_control_connection_replies is not None:
return True
if self.start_control_connection_requests is not None:
return True
if self.stop_control_connection_notifications is not None:
return True
if self.unknown_packets is not None:
return True
if self.wan_error_notify_packets is not None:
return True
if self.zero_length_body_packets is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tp.Counters.Control.Tunnels.Tunnel.Global_.Received']['meta_info']
class Drop(object):
"""
Drop data
.. attribute:: acknowledgement_packets
Acknowledgement packets
**type**\: int
**range:** 0..4294967295
.. attribute:: call_disconnect_notify_packets
Call disconnect notify packets
**type**\: int
**range:** 0..4294967295
.. attribute:: hello_packets
Keep alive messages
**type**\: int
**range:** 0..4294967295
.. attribute:: incoming_call_connected_packets
Incoming call connected packets
**type**\: int
**range:** 0..4294967295
.. attribute:: incoming_call_replies
Incoming call replies
**type**\: int
**range:** 0..4294967295
.. attribute:: incoming_call_requests
Incoming call requests
**type**\: int
**range:** 0..4294967295
.. attribute:: outgoing_call_connected_packets
Outgoing call connected packets
**type**\: int
**range:** 0..4294967295
.. attribute:: outgoing_call_replies
Outgoing call replies
**type**\: int
**range:** 0..4294967295
.. attribute:: outgoing_call_requests
Outgoing call requests
**type**\: int
**range:** 0..4294967295
.. attribute:: service_relay_replies
Service relay reply counts
**type**\: int
**range:** 0..4294967295
.. attribute:: service_relay_requests
Service relay request counts
**type**\: int
**range:** 0..4294967295
.. attribute:: set_link_info_packets
Set link info packets
**type**\: int
**range:** 0..4294967295
.. attribute:: start_control_connection_notifications
Start control connection notifications
**type**\: int
**range:** 0..4294967295
.. attribute:: start_control_connection_replies
Start control connection replies
**type**\: int
**range:** 0..4294967295
.. attribute:: start_control_connection_requests
Start control connection requests
**type**\: int
**range:** 0..4294967295
.. attribute:: stop_control_connection_notifications
Stop control connection notifications
**type**\: int
**range:** 0..4294967295
.. attribute:: unknown_packets
Unknown packets
**type**\: int
**range:** 0..4294967295
.. attribute:: wan_error_notify_packets
WAN error notify packets
**type**\: int
**range:** 0..4294967295
.. attribute:: zero_length_body_packets
Zero length body packets
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.acknowledgement_packets = None
self.call_disconnect_notify_packets = None
self.hello_packets = None
self.incoming_call_connected_packets = None
self.incoming_call_replies = None
self.incoming_call_requests = None
self.outgoing_call_connected_packets = None
self.outgoing_call_replies = None
self.outgoing_call_requests = None
self.service_relay_replies = None
self.service_relay_requests = None
self.set_link_info_packets = None
self.start_control_connection_notifications = None
self.start_control_connection_replies = None
self.start_control_connection_requests = None
self.stop_control_connection_notifications = None
self.unknown_packets = None
self.wan_error_notify_packets = None
self.zero_length_body_packets = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tunnel-l2tun-oper:drop'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.acknowledgement_packets is not None:
return True
if self.call_disconnect_notify_packets is not None:
return True
if self.hello_packets is not None:
return True
if self.incoming_call_connected_packets is not None:
return True
if self.incoming_call_replies is not None:
return True
if self.incoming_call_requests is not None:
return True
if self.outgoing_call_connected_packets is not None:
return True
if self.outgoing_call_replies is not None:
return True
if self.outgoing_call_requests is not None:
return True
if self.service_relay_replies is not None:
return True
if self.service_relay_requests is not None:
return True
if self.set_link_info_packets is not None:
return True
if self.start_control_connection_notifications is not None:
return True
if self.start_control_connection_replies is not None:
return True
if self.start_control_connection_requests is not None:
return True
if self.stop_control_connection_notifications is not None:
return True
if self.unknown_packets is not None:
return True
if self.wan_error_notify_packets is not None:
return True
if self.zero_length_body_packets is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tp.Counters.Control.Tunnels.Tunnel.Global_.Drop']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tunnel-l2tun-oper:global'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.drop is not None and self.drop._has_data():
return True
if self.received is not None and self.received._has_data():
return True
if self.retransmit is not None and self.retransmit._has_data():
return True
if self.total_drop is not None:
return True
if self.total_received is not None:
return True
if self.total_retransmit is not None:
return True
if self.total_transmit is not None:
return True
if self.transmit is not None and self.transmit._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tp.Counters.Control.Tunnels.Tunnel.Global_']['meta_info']
@property
def _common_path(self):
if self.tunnel_id is None:
raise YPYModelError('Key property tunnel_id is None')
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tp/Cisco-IOS-XR-tunnel-l2tun-oper:counters/Cisco-IOS-XR-tunnel-l2tun-oper:control/Cisco-IOS-XR-tunnel-l2tun-oper:tunnels/Cisco-IOS-XR-tunnel-l2tun-oper:tunnel[Cisco-IOS-XR-tunnel-l2tun-oper:tunnel-id = ' + str(self.tunnel_id) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.tunnel_id is not None:
return True
if self.brief is not None and self.brief._has_data():
return True
if self.global_ is not None and self.global_._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tp.Counters.Control.Tunnels.Tunnel']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tp/Cisco-IOS-XR-tunnel-l2tun-oper:counters/Cisco-IOS-XR-tunnel-l2tun-oper:control/Cisco-IOS-XR-tunnel-l2tun-oper:tunnels'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.tunnel is not None:
for child_ref in self.tunnel:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tp.Counters.Control.Tunnels']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tp/Cisco-IOS-XR-tunnel-l2tun-oper:counters/Cisco-IOS-XR-tunnel-l2tun-oper:control'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.tunnel_xr is not None and self.tunnel_xr._has_data():
return True
if self.tunnels is not None and self.tunnels._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tp.Counters.Control']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tp/Cisco-IOS-XR-tunnel-l2tun-oper:counters'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.control is not None and self.control._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tp.Counters']['meta_info']
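# --- Usage sketch (not part of the generated bindings) ----------------------
# The Counters.Control.Tunnels table above holds read-only control-message
# counters keyed by tunnel-id. The commented example below is a minimal sketch
# of reading it, assuming the ydk-py CRUDService / NetconfServiceProvider
# interfaces and a hypothetical device at 192.0.2.1 with made-up credentials.
# It is left as a comment so importing this module stays free of side effects.
#
#   from ydk.services import CRUDService
#   from ydk.providers import NetconfServiceProvider
#   from ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper import L2Tp
#
#   provider = NetconfServiceProvider(address='192.0.2.1', port=830,
#                                     username='admin', password='admin',
#                                     protocol='ssh')
#   crud = CRUDService()
#   l2tp = crud.read(provider, L2Tp())        # fetch the operational tree
#   for tunnel in l2tp.counters.control.tunnels.tunnel:
#       print(tunnel.tunnel_id,
#             tunnel.global_.total_received,  # aggregate counter
#             tunnel.global_.received.hello_packets)
#   provider.close()
# -----------------------------------------------------------------------------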
class TunnelConfigurations(object):
"""
List of tunnel IDs
.. attribute:: tunnel_configuration
L2TP tunnel information
**type**\: list of :py:class:`TunnelConfiguration <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tp.TunnelConfigurations.TunnelConfiguration>`
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.tunnel_configuration = YList()
self.tunnel_configuration.parent = self
self.tunnel_configuration.name = 'tunnel_configuration'
class TunnelConfiguration(object):
"""
L2TP tunnel information
.. attribute:: local_tunnel_id <key>
Local tunnel ID
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: l2tp_class
L2Tp class data
**type**\: :py:class:`L2TpClass <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tp.TunnelConfigurations.TunnelConfiguration.L2TpClass>`
.. attribute:: remote_tunnel_id
Remote tunnel ID
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.local_tunnel_id = None
self.l2tp_class = L2Tp.TunnelConfigurations.TunnelConfiguration.L2TpClass()
self.l2tp_class.parent = self
self.remote_tunnel_id = None
class L2TpClass(object):
"""
L2Tp class data
.. attribute:: accounting_method_list
Accounting List
**type**\: str
**length:** 0..256
.. attribute:: class_name_xr
Class name
**type**\: str
**length:** 0..256
.. attribute:: digest_hash
Hash configured as MD5 or SHA1
**type**\: :py:class:`DigestHashEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.DigestHashEnum>`
.. attribute:: encoded_password
Encoded password
**type**\: str
**length:** 0..256
.. attribute:: hello_timeout
Hello timeout value in seconds
**type**\: int
**range:** 0..4294967295
**units**\: second
.. attribute:: host_name
Host name
**type**\: str
**length:** 0..256
.. attribute:: initial_retransmit_maximum_timeout
Initial timeout maximum in seconds
**type**\: int
**range:** 0..4294967295
**units**\: second
.. attribute:: initial_retransmit_minimum_timeout
Initial timeout minimum in seconds
**type**\: int
**range:** 0..4294967295
**units**\: second
.. attribute:: initial_retransmit_retries
Initial retransmit retries
**type**\: int
**range:** 0..4294967295
.. attribute:: ip_tos
IP TOS
**type**\: int
**range:** 0..255
.. attribute:: is_authentication_enabled
True if authentication is enabled
**type**\: bool
.. attribute:: is_congestion_control_enabled
True if congestion control is enabled
**type**\: bool
.. attribute:: is_digest_check_enabled
True if digest check is enabled
**type**\: bool
.. attribute:: is_digest_enabled
True if digest authentication is enabled
**type**\: bool
.. attribute:: is_hidden
True if class is hidden
**type**\: bool
.. attribute:: is_peer_address_checked
True if peer address is checked
**type**\: bool
.. attribute:: password
Password
**type**\: str
**length:** 0..25
.. attribute:: receive_window_size
Receive window size
**type**\: int
**range:** 0..65535
.. attribute:: retransmit_maximum_timeout
Retransmit maximum timeout in seconds
**type**\: int
**range:** 0..4294967295
**units**\: second
.. attribute:: retransmit_minimum_timeout
Retransmit minimum timeout in seconds
**type**\: int
**range:** 0..4294967295
**units**\: second
.. attribute:: retransmit_retries
Retransmit retries
**type**\: int
**range:** 0..4294967295
.. attribute:: setup_timeout
Timeout setup value in seconds
**type**\: int
**range:** 0..4294967295
**units**\: second
.. attribute:: timeout_no_user
Timeout no user
**type**\: int
**range:** 0..4294967295
.. attribute:: vrf_name
VRF name
**type**\: str
**length:** 0..256
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.accounting_method_list = None
self.class_name_xr = None
self.digest_hash = None
self.encoded_password = None
self.hello_timeout = None
self.host_name = None
self.initial_retransmit_maximum_timeout = None
self.initial_retransmit_minimum_timeout = None
self.initial_retransmit_retries = None
self.ip_tos = None
self.is_authentication_enabled = None
self.is_congestion_control_enabled = None
self.is_digest_check_enabled = None
self.is_digest_enabled = None
self.is_hidden = None
self.is_peer_address_checked = None
self.password = None
self.receive_window_size = None
self.retransmit_maximum_timeout = None
self.retransmit_minimum_timeout = None
self.retransmit_retries = None
self.setup_timeout = None
self.timeout_no_user = None
self.vrf_name = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tunnel-l2tun-oper:l2tp-class'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.accounting_method_list is not None:
return True
if self.class_name_xr is not None:
return True
if self.digest_hash is not None:
return True
if self.encoded_password is not None:
return True
if self.hello_timeout is not None:
return True
if self.host_name is not None:
return True
if self.initial_retransmit_maximum_timeout is not None:
return True
if self.initial_retransmit_minimum_timeout is not None:
return True
if self.initial_retransmit_retries is not None:
return True
if self.ip_tos is not None:
return True
if self.is_authentication_enabled is not None:
return True
if self.is_congestion_control_enabled is not None:
return True
if self.is_digest_check_enabled is not None:
return True
if self.is_digest_enabled is not None:
return True
if self.is_hidden is not None:
return True
if self.is_peer_address_checked is not None:
return True
if self.password is not None:
return True
if self.receive_window_size is not None:
return True
if self.retransmit_maximum_timeout is not None:
return True
if self.retransmit_minimum_timeout is not None:
return True
if self.retransmit_retries is not None:
return True
if self.setup_timeout is not None:
return True
if self.timeout_no_user is not None:
return True
if self.vrf_name is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tp.TunnelConfigurations.TunnelConfiguration.L2TpClass']['meta_info']
@property
def _common_path(self):
if self.local_tunnel_id is None:
raise YPYModelError('Key property local_tunnel_id is None')
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tp/Cisco-IOS-XR-tunnel-l2tun-oper:tunnel-configurations/Cisco-IOS-XR-tunnel-l2tun-oper:tunnel-configuration[Cisco-IOS-XR-tunnel-l2tun-oper:local-tunnel-id = ' + str(self.local_tunnel_id) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.local_tunnel_id is not None:
return True
if self.l2tp_class is not None and self.l2tp_class._has_data():
return True
if self.remote_tunnel_id is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tp.TunnelConfigurations.TunnelConfiguration']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tp/Cisco-IOS-XR-tunnel-l2tun-oper:tunnel-configurations'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.tunnel_configuration is not None:
for child_ref in self.tunnel_configuration:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tp.TunnelConfigurations']['meta_info']
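# Usage sketch: TunnelConfigurations is keyed by local-tunnel-id and carries
# the resolved l2tp-class parameters for each tunnel. Assuming an `l2tp`
# object already populated as in the sketch above (attribute names follow the
# docstrings; nothing here is executed):
#
#   for cfg in l2tp.tunnel_configurations.tunnel_configuration:
#       print(cfg.local_tunnel_id, cfg.remote_tunnel_id,
#             cfg.l2tp_class.class_name_xr, cfg.l2tp_class.hello_timeout)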
class CounterHistFail(object):
"""
Failure events leading to disconnection
.. attribute:: pkt_timeout
Timeout events by packet
**type**\: list of int
**range:** 0..4294967295
.. attribute:: rx_counters
Receive side counters
**type**\: str
**pattern:** ([0\-9a\-fA\-F]{2}(\:[0\-9a\-fA\-F]{2})\*)?
.. attribute:: sess_down_tmout
Sessions affected due to timeout
**type**\: int
**range:** 0..4294967295
.. attribute:: tx_counters
Send side counters
**type**\: str
**pattern:** ([0\-9a\-fA\-F]{2}(\:[0\-9a\-fA\-F]{2})\*)?
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.pkt_timeout = YLeafList()
self.pkt_timeout.parent = self
self.pkt_timeout.name = 'pkt_timeout'
self.rx_counters = None
self.sess_down_tmout = None
self.tx_counters = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tp/Cisco-IOS-XR-tunnel-l2tun-oper:counter-hist-fail'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.pkt_timeout is not None:
for child in self.pkt_timeout:
if child is not None:
return True
if self.rx_counters is not None:
return True
if self.sess_down_tmout is not None:
return True
if self.tx_counters is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tp.CounterHistFail']['meta_info']
class Classes(object):
"""
List of L2TP class names
.. attribute:: class_
L2TP class name
**type**\: list of :py:class:`Class_ <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tp.Classes.Class_>`
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.class_ = YList()
self.class_.parent = self
self.class_.name = 'class_'
class Class_(object):
"""
L2TP class name
.. attribute:: class_name <key>
L2TP class name
**type**\: str
**length:** 1..31
.. attribute:: accounting_method_list
Accounting List
**type**\: str
**length:** 0..256
.. attribute:: class_name_xr
Class name
**type**\: str
**length:** 0..256
.. attribute:: digest_hash
Hash configured as MD5 or SHA1
**type**\: :py:class:`DigestHashEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.DigestHashEnum>`
.. attribute:: encoded_password
Encoded password
**type**\: str
**length:** 0..256
.. attribute:: hello_timeout
Hello timeout value in seconds
**type**\: int
**range:** 0..4294967295
**units**\: second
.. attribute:: host_name
Host name
**type**\: str
**length:** 0..256
.. attribute:: initial_retransmit_maximum_timeout
Initial timeout maximum in seconds
**type**\: int
**range:** 0..4294967295
**units**\: second
.. attribute:: initial_retransmit_minimum_timeout
Initial timeout minimum in seconds
**type**\: int
**range:** 0..4294967295
**units**\: second
.. attribute:: initial_retransmit_retries
Initial retransmit retries
**type**\: int
**range:** 0..4294967295
.. attribute:: ip_tos
IP TOS
**type**\: int
**range:** 0..255
.. attribute:: is_authentication_enabled
True if authentication is enabled
**type**\: bool
.. attribute:: is_congestion_control_enabled
True if congestion control is enabled
**type**\: bool
.. attribute:: is_digest_check_enabled
True if digest check is enabled
**type**\: bool
.. attribute:: is_digest_enabled
True if digest authentication is enabled
**type**\: bool
.. attribute:: is_hidden
True if class is hidden
**type**\: bool
.. attribute:: is_peer_address_checked
True if peer address is checked
**type**\: bool
.. attribute:: password
Password
**type**\: str
**length:** 0..25
.. attribute:: receive_window_size
Receive window size
**type**\: int
**range:** 0..65535
.. attribute:: retransmit_maximum_timeout
Retransmit maximum timeout in seconds
**type**\: int
**range:** 0..4294967295
**units**\: second
.. attribute:: retransmit_minimum_timeout
Retransmit minimum timeout in seconds
**type**\: int
**range:** 0..4294967295
**units**\: second
.. attribute:: retransmit_retries
Retransmit retries
**type**\: int
**range:** 0..4294967295
.. attribute:: setup_timeout
Timeout setup value in seconds
**type**\: int
**range:** 0..4294967295
**units**\: second
.. attribute:: timeout_no_user
Timeout no user
**type**\: int
**range:** 0..4294967295
.. attribute:: vrf_name
VRF name
**type**\: str
**length:** 0..256
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.class_name = None
self.accounting_method_list = None
self.class_name_xr = None
self.digest_hash = None
self.encoded_password = None
self.hello_timeout = None
self.host_name = None
self.initial_retransmit_maximum_timeout = None
self.initial_retransmit_minimum_timeout = None
self.initial_retransmit_retries = None
self.ip_tos = None
self.is_authentication_enabled = None
self.is_congestion_control_enabled = None
self.is_digest_check_enabled = None
self.is_digest_enabled = None
self.is_hidden = None
self.is_peer_address_checked = None
self.password = None
self.receive_window_size = None
self.retransmit_maximum_timeout = None
self.retransmit_minimum_timeout = None
self.retransmit_retries = None
self.setup_timeout = None
self.timeout_no_user = None
self.vrf_name = None
@property
def _common_path(self):
if self.class_name is None:
raise YPYModelError('Key property class_name is None')
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tp/Cisco-IOS-XR-tunnel-l2tun-oper:classes/Cisco-IOS-XR-tunnel-l2tun-oper:class[Cisco-IOS-XR-tunnel-l2tun-oper:class-name = ' + str(self.class_name) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.class_name is not None:
return True
if self.accounting_method_list is not None:
return True
if self.class_name_xr is not None:
return True
if self.digest_hash is not None:
return True
if self.encoded_password is not None:
return True
if self.hello_timeout is not None:
return True
if self.host_name is not None:
return True
if self.initial_retransmit_maximum_timeout is not None:
return True
if self.initial_retransmit_minimum_timeout is not None:
return True
if self.initial_retransmit_retries is not None:
return True
if self.ip_tos is not None:
return True
if self.is_authentication_enabled is not None:
return True
if self.is_congestion_control_enabled is not None:
return True
if self.is_digest_check_enabled is not None:
return True
if self.is_digest_enabled is not None:
return True
if self.is_hidden is not None:
return True
if self.is_peer_address_checked is not None:
return True
if self.password is not None:
return True
if self.receive_window_size is not None:
return True
if self.retransmit_maximum_timeout is not None:
return True
if self.retransmit_minimum_timeout is not None:
return True
if self.retransmit_retries is not None:
return True
if self.setup_timeout is not None:
return True
if self.timeout_no_user is not None:
return True
if self.vrf_name is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tp.Classes.Class_']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tp/Cisco-IOS-XR-tunnel-l2tun-oper:classes'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.class_ is not None:
for child_ref in self.class_:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tp.Classes']['meta_info']
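# Usage sketch: the Classes list mirrors configured l2tp-class templates, one
# entry per class_name; timeouts are reported in seconds and boolean leaves are
# None when absent. Assuming a populated `l2tp` object as above:
#
#   for cls in l2tp.classes.class_:
#       print(cls.class_name, cls.retransmit_retries, cls.is_digest_enabled)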
class Tunnels(object):
"""
List of tunnel IDs
.. attribute:: tunnel
L2TP tunnel information
**type**\: list of :py:class:`Tunnel <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tp.Tunnels.Tunnel>`
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.tunnel = YList()
self.tunnel.parent = self
self.tunnel.name = 'tunnel'
class Tunnel(object):
"""
L2TP tunnel information
.. attribute:: local_tunnel_id <key>
Local tunnel ID
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: active_sessions
Number of active sessions
**type**\: int
**range:** 0..4294967295
.. attribute:: class_name
L2TP class name
**type**\: str
**length:** 0..256
.. attribute:: digest_secrets
Control message authentication with digest secrets
**type**\: int
**range:** 0..65535
.. attribute:: is_congestion_control_enabled
True if congestion control is enabled
**type**\: bool
.. attribute:: is_pmtu_enabled
True if tunnel PMTU checking is enabled
**type**\: bool
.. attribute:: is_tunnel_up
True if tunnel is up
**type**\: bool
.. attribute:: local_address
Local tunnel address
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: local_port
Local port
**type**\: int
**range:** 0..65535
.. attribute:: local_tunnel_name
Local tunnel name
**type**\: str
**length:** 0..256
.. attribute:: local_window_size
Local window size
**type**\: int
**range:** 0..65535
.. attribute:: maximum_retransmission_time
Maximum retransmission time in seconds
**type**\: int
**range:** 0..65535
**units**\: second
.. attribute:: order_queue_size
Order queue size
**type**\: int
**range:** 0..65535
.. attribute:: packet_queue_check
Current number session packet queue check
**type**\: int
**range:** 0..65535
.. attribute:: protocol
Protocol
**type**\: int
**range:** 0..255
.. attribute:: remote_address
Remote tunnel address
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: remote_port
Remote port
**type**\: int
**range:** 0..65535
.. attribute:: remote_tunnel_id
Remote tunnel ID
**type**\: int
**range:** 0..4294967295
.. attribute:: remote_tunnel_name
Remote tunnel name
**type**\: str
**length:** 0..256
.. attribute:: remote_window_size
Remote window size
**type**\: int
**range:** 0..65535
.. attribute:: resend_maximum_queue_size
Resend maximum queue size
**type**\: int
**range:** 0..65535
.. attribute:: resend_queue_size
Resend queue size
**type**\: int
**range:** 0..65535
.. attribute:: resends
Total resends
**type**\: int
**range:** 0..4294967295
.. attribute:: retransmission_time
Retransmission time in seconds
**type**\: int
**range:** 0..65535
**units**\: second
.. attribute:: retransmit_time
Retransmit time distribution in seconds
**type**\: list of int
**range:** 0..65535
**units**\: second
.. attribute:: sequence_nr
Sequence NR
**type**\: int
**range:** 0..65535
.. attribute:: sequence_ns
Sequence NS
**type**\: int
**range:** 0..65535
.. attribute:: total_out_of_order_drop_packets
Total out of order dropped packets
**type**\: int
**range:** 0..4294967295
.. attribute:: total_out_of_order_reorder_packets
Total out of order reorder packets
**type**\: int
**range:** 0..4294967295
.. attribute:: total_peer_authentication_failures
Number of peer authentication failures
**type**\: int
**range:** 0..4294967295
.. attribute:: unsent_maximum_queue_size
Unsent maximum queue size
**type**\: int
**range:** 0..65535
.. attribute:: unsent_queue_size
Unsent queue size
**type**\: int
**range:** 0..65535
.. attribute:: zero_length_body_acknowledgement_sent
Total zero length body acknowledgements sent
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.local_tunnel_id = None
self.active_sessions = None
self.class_name = None
self.digest_secrets = None
self.is_congestion_control_enabled = None
self.is_pmtu_enabled = None
self.is_tunnel_up = None
self.local_address = None
self.local_port = None
self.local_tunnel_name = None
self.local_window_size = None
self.maximum_retransmission_time = None
self.order_queue_size = None
self.packet_queue_check = None
self.protocol = None
self.remote_address = None
self.remote_port = None
self.remote_tunnel_id = None
self.remote_tunnel_name = None
self.remote_window_size = None
self.resend_maximum_queue_size = None
self.resend_queue_size = None
self.resends = None
self.retransmission_time = None
self.retransmit_time = YLeafList()
self.retransmit_time.parent = self
self.retransmit_time.name = 'retransmit_time'
self.sequence_nr = None
self.sequence_ns = None
self.total_out_of_order_drop_packets = None
self.total_out_of_order_reorder_packets = None
self.total_peer_authentication_failures = None
self.unsent_maximum_queue_size = None
self.unsent_queue_size = None
self.zero_length_body_acknowledgement_sent = None
@property
def _common_path(self):
if self.local_tunnel_id is None:
raise YPYModelError('Key property local_tunnel_id is None')
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tp/Cisco-IOS-XR-tunnel-l2tun-oper:tunnels/Cisco-IOS-XR-tunnel-l2tun-oper:tunnel[Cisco-IOS-XR-tunnel-l2tun-oper:local-tunnel-id = ' + str(self.local_tunnel_id) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.local_tunnel_id is not None:
return True
if self.active_sessions is not None:
return True
if self.class_name is not None:
return True
if self.digest_secrets is not None:
return True
if self.is_congestion_control_enabled is not None:
return True
if self.is_pmtu_enabled is not None:
return True
if self.is_tunnel_up is not None:
return True
if self.local_address is not None:
return True
if self.local_port is not None:
return True
if self.local_tunnel_name is not None:
return True
if self.local_window_size is not None:
return True
if self.maximum_retransmission_time is not None:
return True
if self.order_queue_size is not None:
return True
if self.packet_queue_check is not None:
return True
if self.protocol is not None:
return True
if self.remote_address is not None:
return True
if self.remote_port is not None:
return True
if self.remote_tunnel_id is not None:
return True
if self.remote_tunnel_name is not None:
return True
if self.remote_window_size is not None:
return True
if self.resend_maximum_queue_size is not None:
return True
if self.resend_queue_size is not None:
return True
if self.resends is not None:
return True
if self.retransmission_time is not None:
return True
if self.retransmit_time is not None:
for child in self.retransmit_time:
if child is not None:
return True
if self.sequence_nr is not None:
return True
if self.sequence_ns is not None:
return True
if self.total_out_of_order_drop_packets is not None:
return True
if self.total_out_of_order_reorder_packets is not None:
return True
if self.total_peer_authentication_failures is not None:
return True
if self.unsent_maximum_queue_size is not None:
return True
if self.unsent_queue_size is not None:
return True
if self.zero_length_body_acknowledgement_sent is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tp.Tunnels.Tunnel']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tp/Cisco-IOS-XR-tunnel-l2tun-oper:tunnels'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.tunnel is not None:
for child_ref in self.tunnel:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tp.Tunnels']['meta_info']
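# Usage sketch: Tunnels.Tunnel reports per-tunnel control-plane state (window
# sizes, queues, retransmit distribution). Assuming a populated `l2tp` object
# as above:
#
#   for t in l2tp.tunnels.tunnel:
#       if t.is_tunnel_up:
#           print(t.local_tunnel_id, t.local_address, '->', t.remote_address,
#                 'sessions:', t.active_sessions)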
class Sessions(object):
"""
List of session IDs
.. attribute:: session
L2TP information for a particular session
**type**\: list of :py:class:`Session <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tp.Sessions.Session>`
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.session = YList()
self.session.parent = self
self.session.name = 'session'
class Session(object):
"""
L2TP information for a particular session
.. attribute:: local_tunnel_id <key>
Local tunnel ID
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: local_session_id <key>
Local session ID
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: call_serial_number
Call serial number
**type**\: int
**range:** 0..4294967295
.. attribute:: interface_name
Interface name
**type**\: str
**length:** 0..256
.. attribute:: is_conditional_debug_enabled
True if conditional debugging is enabled
**type**\: bool
.. attribute:: is_sequencing_on
True if session sequence is on
**type**\: bool
.. attribute:: is_session_locally_initiated
True if session is initiated locally
**type**\: bool
.. attribute:: is_session_manual
True if session is manual
**type**\: bool
.. attribute:: is_session_state_established
True if session state is established
**type**\: bool
.. attribute:: is_session_up
True if session is up
**type**\: bool
.. attribute:: is_udp_checksum_enabled
True if UDP checksum enabled
**type**\: bool
.. attribute:: l2tp_sh_sess_tie_breaker
l2tp sh sess tie breaker
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: l2tp_sh_sess_tie_breaker_enabled
l2tp sh sess tie breaker enabled
**type**\: int
**range:** 0..255
.. attribute:: l2tp_sh_sess_udp_lport
l2tp sh sess udp lport
**type**\: int
**range:** 0..65535
.. attribute:: l2tp_sh_sess_udp_rport
l2tp sh sess udp rport
**type**\: int
**range:** 0..65535
.. attribute:: local_ip_address
Local session IP address
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: local_tunnel_name
Local tunnel name
**type**\: str
**length:** 0..256
.. attribute:: protocol
Protocol
**type**\: int
**range:** 0..255
.. attribute:: remote_ip_address
Remote session IP address
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: remote_session_id
Remote session ID
**type**\: int
**range:** 0..4294967295
.. attribute:: remote_tunnel_id
Remote tunnel ID
**type**\: int
**range:** 0..4294967295
.. attribute:: remote_tunnel_name
Remote tunnel name
**type**\: str
**length:** 0..256
.. attribute:: session_application_data
Session application data
**type**\: :py:class:`SessionApplicationData <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tp.Sessions.Session.SessionApplicationData>`
.. attribute:: unique_id
Unique ID
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.local_tunnel_id = None
self.local_session_id = None
self.call_serial_number = None
self.interface_name = None
self.is_conditional_debug_enabled = None
self.is_sequencing_on = None
self.is_session_locally_initiated = None
self.is_session_manual = None
self.is_session_state_established = None
self.is_session_up = None
self.is_udp_checksum_enabled = None
self.l2tp_sh_sess_tie_breaker = None
self.l2tp_sh_sess_tie_breaker_enabled = None
self.l2tp_sh_sess_udp_lport = None
self.l2tp_sh_sess_udp_rport = None
self.local_ip_address = None
self.local_tunnel_name = None
self.protocol = None
self.remote_ip_address = None
self.remote_session_id = None
self.remote_tunnel_id = None
self.remote_tunnel_name = None
self.session_application_data = L2Tp.Sessions.Session.SessionApplicationData()
self.session_application_data.parent = self
self.unique_id = None
class SessionApplicationData(object):
"""
Session application data
.. attribute:: l2tp_sh_sess_app_type
l2tp sh sess app type
**type**\: int
**range:** 0..4294967295
.. attribute:: vpdn
VPDN data
**type**\: :py:class:`Vpdn <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tp.Sessions.Session.SessionApplicationData.Vpdn>`
.. attribute:: xconnect
Xconnect data
**type**\: :py:class:`Xconnect <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tp.Sessions.Session.SessionApplicationData.Xconnect>`
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.l2tp_sh_sess_app_type = None
self.vpdn = L2Tp.Sessions.Session.SessionApplicationData.Vpdn()
self.vpdn.parent = self
self.xconnect = L2Tp.Sessions.Session.SessionApplicationData.Xconnect()
self.xconnect.parent = self
class Xconnect(object):
"""
Xconnect data
.. attribute:: circuit_name
Circuit name
**type**\: str
.. attribute:: ipv6_protocol_tunneling
IPv6 protocol tunneling
**type**\: bool
.. attribute:: is_circuit_state_up
True if circuit state is up
**type**\: bool
.. attribute:: is_local_circuit_state_up
True if local circuit state is up
**type**\: bool
.. attribute:: is_remote_circuit_state_up
True if remote circuit state is up
**type**\: bool
.. attribute:: sessionvc_id
Session VC ID
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.circuit_name = None
self.ipv6_protocol_tunneling = None
self.is_circuit_state_up = None
self.is_local_circuit_state_up = None
self.is_remote_circuit_state_up = None
self.sessionvc_id = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tunnel-l2tun-oper:xconnect'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.circuit_name is not None:
return True
if self.ipv6_protocol_tunneling is not None:
return True
if self.is_circuit_state_up is not None:
return True
if self.is_local_circuit_state_up is not None:
return True
if self.is_remote_circuit_state_up is not None:
return True
if self.sessionvc_id is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tp.Sessions.Session.SessionApplicationData.Xconnect']['meta_info']
class Vpdn(object):
"""
VPDN data
.. attribute:: interface_name
Interface name
**type**\: str
**pattern:** (([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+\\.\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]\*\\d+))\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]+))\|([a\-zA\-Z0\-9\_\-]\*\\d+)\|([a\-zA\-Z0\-9\_\-]\*\\d+\\.\\d+)\|(mpls)\|(dwdm)
.. attribute:: username
Session username
**type**\: str
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.interface_name = None
self.username = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tunnel-l2tun-oper:vpdn'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.interface_name is not None:
return True
if self.username is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tp.Sessions.Session.SessionApplicationData.Vpdn']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tunnel-l2tun-oper:session-application-data'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.l2tp_sh_sess_app_type is not None:
return True
if self.vpdn is not None and self.vpdn._has_data():
return True
if self.xconnect is not None and self.xconnect._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tp.Sessions.Session.SessionApplicationData']['meta_info']
@property
def _common_path(self):
if self.local_tunnel_id is None:
raise YPYModelError('Key property local_tunnel_id is None')
if self.local_session_id is None:
raise YPYModelError('Key property local_session_id is None')
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tp/Cisco-IOS-XR-tunnel-l2tun-oper:sessions/Cisco-IOS-XR-tunnel-l2tun-oper:session[Cisco-IOS-XR-tunnel-l2tun-oper:local-tunnel-id = ' + str(self.local_tunnel_id) + '][Cisco-IOS-XR-tunnel-l2tun-oper:local-session-id = ' + str(self.local_session_id) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.local_tunnel_id is not None:
return True
if self.local_session_id is not None:
return True
if self.call_serial_number is not None:
return True
if self.interface_name is not None:
return True
if self.is_conditional_debug_enabled is not None:
return True
if self.is_sequencing_on is not None:
return True
if self.is_session_locally_initiated is not None:
return True
if self.is_session_manual is not None:
return True
if self.is_session_state_established is not None:
return True
if self.is_session_up is not None:
return True
if self.is_udp_checksum_enabled is not None:
return True
if self.l2tp_sh_sess_tie_breaker is not None:
return True
if self.l2tp_sh_sess_tie_breaker_enabled is not None:
return True
if self.l2tp_sh_sess_udp_lport is not None:
return True
if self.l2tp_sh_sess_udp_rport is not None:
return True
if self.local_ip_address is not None:
return True
if self.local_tunnel_name is not None:
return True
if self.protocol is not None:
return True
if self.remote_ip_address is not None:
return True
if self.remote_session_id is not None:
return True
if self.remote_tunnel_id is not None:
return True
if self.remote_tunnel_name is not None:
return True
if self.session_application_data is not None and self.session_application_data._has_data():
return True
if self.unique_id is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tp.Sessions.Session']['meta_info']
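# Usage note (not part of the generated bindings): each Sessions.Session entry is keyed
# by local_tunnel_id and local_session_id, and the _common_path property above builds the
# keyed XPath from those two values. A minimal, illustrative sketch of walking the list
# after a read -- the name `l2tp_oper` is a placeholder for a previously read L2Tp object:
#
#     for sess in l2tp_oper.sessions.session:
#         print(sess.local_tunnel_id, sess.local_session_id, sess.interface_name)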
@property
def _common_path(self):
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tp/Cisco-IOS-XR-tunnel-l2tun-oper:sessions'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.session is not None:
for child_ref in self.session:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tp.Sessions']['meta_info']
class Session(object):
"""
L2TP control messages counters
.. attribute:: unavailable
L2TP session unavailable information
**type**\: :py:class:`Unavailable <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tp.Session.Unavailable>`
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.unavailable = L2Tp.Session.Unavailable()
self.unavailable.parent = self
class Unavailable(object):
"""
L2TP session unavailable information
.. attribute:: sessions_on_hold
Number of session IDs in the hold database
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.sessions_on_hold = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tp/Cisco-IOS-XR-tunnel-l2tun-oper:session/Cisco-IOS-XR-tunnel-l2tun-oper:unavailable'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.sessions_on_hold is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tp.Session.Unavailable']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tp/Cisco-IOS-XR-tunnel-l2tun-oper:session'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.unavailable is not None and self.unavailable._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tp.Session']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tp'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.classes is not None and self.classes._has_data():
return True
if self.counter_hist_fail is not None and self.counter_hist_fail._has_data():
return True
if self.counters is not None and self.counters._has_data():
return True
if self.session is not None and self.session._has_data():
return True
if self.sessions is not None and self.sessions._has_data():
return True
if self.tunnel_configurations is not None and self.tunnel_configurations._has_data():
return True
if self.tunnels is not None and self.tunnels._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tp']['meta_info']
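# Usage sketch (an assumption, not generated code): L2Tp above is a top-level operational
# container, so it is normally populated by reading it from a device with ydk-py's
# CRUDService over a NETCONF session. Address and credentials below are placeholders.
#
#     from ydk.services import CRUDService
#     from ydk.providers import NetconfServiceProvider
#     from ydk.models.cisco_ios_xr import Cisco_IOS_XR_tunnel_l2tun_oper as l2tun_oper
#
#     provider = NetconfServiceProvider(address='198.51.100.1', port=830,
#                                       username='admin', password='admin',
#                                       protocol='ssh')
#     crud = CRUDService()
#     l2tp_oper = crud.read(provider, l2tun_oper.L2Tp())   # read-only operational data
#     provider.close()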
class L2Tpv2(object):
"""
l2tpv2
.. attribute:: classes
List of L2TP class names
**type**\: :py:class:`Classes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tpv2.Classes>`
.. attribute:: counter_hist_fail
Failure events leading to disconnection
**type**\: :py:class:`CounterHistFail <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tpv2.CounterHistFail>`
.. attribute:: counters
L2TP control messages counters
**type**\: :py:class:`Counters <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tpv2.Counters>`
.. attribute:: session
L2TP control messages counters
**type**\: :py:class:`Session <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tpv2.Session>`
.. attribute:: sessions
List of session IDs
**type**\: :py:class:`Sessions <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tpv2.Sessions>`
.. attribute:: statistics
L2TP v2 statistics information
**type**\: :py:class:`Statistics <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tpv2.Statistics>`
.. attribute:: tunnel
L2TPv2 tunnel
**type**\: :py:class:`Tunnel <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tpv2.Tunnel>`
.. attribute:: tunnel_configurations
List of tunnel IDs
**type**\: :py:class:`TunnelConfigurations <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tpv2.TunnelConfigurations>`
.. attribute:: tunnels
List of tunnel IDs
**type**\: :py:class:`Tunnels <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tpv2.Tunnels>`
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.classes = L2Tpv2.Classes()
self.classes.parent = self
self.counter_hist_fail = L2Tpv2.CounterHistFail()
self.counter_hist_fail.parent = self
self.counters = L2Tpv2.Counters()
self.counters.parent = self
self.session = L2Tpv2.Session()
self.session.parent = self
self.sessions = L2Tpv2.Sessions()
self.sessions.parent = self
self.statistics = L2Tpv2.Statistics()
self.statistics.parent = self
self.tunnel = L2Tpv2.Tunnel()
self.tunnel.parent = self
self.tunnel_configurations = L2Tpv2.TunnelConfigurations()
self.tunnel_configurations.parent = self
self.tunnels = L2Tpv2.Tunnels()
self.tunnels.parent = self
class Counters(object):
"""
L2TP control messages counters
.. attribute:: control
L2TP control messages counters
**type**\: :py:class:`Control <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tpv2.Counters.Control>`
.. attribute:: forwarding
L2TP forwarding messages counters
**type**\: :py:class:`Forwarding <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tpv2.Counters.Forwarding>`
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.control = L2Tpv2.Counters.Control()
self.control.parent = self
self.forwarding = L2Tpv2.Counters.Forwarding()
self.forwarding.parent = self
class Forwarding(object):
"""
L2TP forwarding messages counters
.. attribute:: sessions
List of class and session IDs
**type**\: :py:class:`Sessions <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tpv2.Counters.Forwarding.Sessions>`
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.sessions = L2Tpv2.Counters.Forwarding.Sessions()
self.sessions.parent = self
class Sessions(object):
"""
List of class and session IDs
.. attribute:: session
L2TP information for a particular session
**type**\: list of :py:class:`Session <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tpv2.Counters.Forwarding.Sessions.Session>`
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.session = YList()
self.session.parent = self
self.session.name = 'session'
class Session(object):
"""
L2TP information for a particular session
.. attribute:: tunnel_id <key>
Local tunnel ID
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: session_id <key>
Local session ID
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: in_bytes
Number of bytes sent in
**type**\: int
**range:** 0..18446744073709551615
**units**\: byte
.. attribute:: in_packets
Number of packets sent in
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: out_bytes
Number of bytes sent out
**type**\: int
**range:** 0..18446744073709551615
**units**\: byte
.. attribute:: out_packets
Number of packets sent out
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: remote_session_id
Remote session ID
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.tunnel_id = None
self.session_id = None
self.in_bytes = None
self.in_packets = None
self.out_bytes = None
self.out_packets = None
self.remote_session_id = None
@property
def _common_path(self):
if self.tunnel_id is None:
raise YPYModelError('Key property tunnel_id is None')
if self.session_id is None:
raise YPYModelError('Key property session_id is None')
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tpv2/Cisco-IOS-XR-tunnel-l2tun-oper:counters/Cisco-IOS-XR-tunnel-l2tun-oper:forwarding/Cisco-IOS-XR-tunnel-l2tun-oper:sessions/Cisco-IOS-XR-tunnel-l2tun-oper:session[Cisco-IOS-XR-tunnel-l2tun-oper:tunnel-id = ' + str(self.tunnel_id) + '][Cisco-IOS-XR-tunnel-l2tun-oper:session-id = ' + str(self.session_id) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.tunnel_id is not None:
return True
if self.session_id is not None:
return True
if self.in_bytes is not None:
return True
if self.in_packets is not None:
return True
if self.out_bytes is not None:
return True
if self.out_packets is not None:
return True
if self.remote_session_id is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tpv2.Counters.Forwarding.Sessions.Session']['meta_info']
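# Usage note (illustrative only): each forwarding Sessions.Session row carries per-session
# byte and packet counters and is keyed by tunnel_id and session_id. Assuming `l2tpv2_oper`
# holds the result of a CRUD read of L2Tpv2 (placeholder name):
#
#     for s in l2tpv2_oper.counters.forwarding.sessions.session:
#         print(s.tunnel_id, s.session_id, s.in_bytes, s.out_bytes)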
@property
def _common_path(self):
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tpv2/Cisco-IOS-XR-tunnel-l2tun-oper:counters/Cisco-IOS-XR-tunnel-l2tun-oper:forwarding/Cisco-IOS-XR-tunnel-l2tun-oper:sessions'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.session is not None:
for child_ref in self.session:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tpv2.Counters.Forwarding.Sessions']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tpv2/Cisco-IOS-XR-tunnel-l2tun-oper:counters/Cisco-IOS-XR-tunnel-l2tun-oper:forwarding'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.sessions is not None and self.sessions._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tpv2.Counters.Forwarding']['meta_info']
class Control(object):
"""
L2TP control messages counters
.. attribute:: tunnel_xr
L2TP control tunnel messages counters
**type**\: :py:class:`TunnelXr <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tpv2.Counters.Control.TunnelXr>`
.. attribute:: tunnels
Table of tunnel IDs of control message counters
**type**\: :py:class:`Tunnels <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tpv2.Counters.Control.Tunnels>`
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.tunnel_xr = L2Tpv2.Counters.Control.TunnelXr()
self.tunnel_xr.parent = self
self.tunnels = L2Tpv2.Counters.Control.Tunnels()
self.tunnels.parent = self
class TunnelXr(object):
"""
L2TP control tunnel messages counters
.. attribute:: authentication
Tunnel authentication counters
**type**\: :py:class:`Authentication <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tpv2.Counters.Control.TunnelXr.Authentication>`
.. attribute:: global_
Tunnel counters
**type**\: :py:class:`Global_ <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tpv2.Counters.Control.TunnelXr.Global_>`
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.authentication = L2Tpv2.Counters.Control.TunnelXr.Authentication()
self.authentication.parent = self
self.global_ = L2Tpv2.Counters.Control.TunnelXr.Global_()
self.global_.parent = self
class Authentication(object):
"""
Tunnel authentication counters
.. attribute:: challenge_avp
Challenge AVP statistics
**type**\: :py:class:`ChallengeAvp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tpv2.Counters.Control.TunnelXr.Authentication.ChallengeAvp>`
.. attribute:: challenge_reponse
Challenge response statistics
**type**\: :py:class:`ChallengeReponse <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tpv2.Counters.Control.TunnelXr.Authentication.ChallengeReponse>`
.. attribute:: common_digest
Common digest statistics
**type**\: :py:class:`CommonDigest <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tpv2.Counters.Control.TunnelXr.Authentication.CommonDigest>`
.. attribute:: integrity_check
Integrity check statistics
**type**\: :py:class:`IntegrityCheck <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tpv2.Counters.Control.TunnelXr.Authentication.IntegrityCheck>`
.. attribute:: local_secret
Local secret statistics
**type**\: :py:class:`LocalSecret <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tpv2.Counters.Control.TunnelXr.Authentication.LocalSecret>`
.. attribute:: nonce_avp
Nonce AVP statistics
**type**\: :py:class:`NonceAvp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tpv2.Counters.Control.TunnelXr.Authentication.NonceAvp>`
.. attribute:: overall_statistics
Overall statistics
**type**\: :py:class:`OverallStatistics <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tpv2.Counters.Control.TunnelXr.Authentication.OverallStatistics>`
.. attribute:: primary_digest
Primary digest statistics
**type**\: :py:class:`PrimaryDigest <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tpv2.Counters.Control.TunnelXr.Authentication.PrimaryDigest>`
.. attribute:: secondary_digest
Secondary digest statistics
**type**\: :py:class:`SecondaryDigest <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tpv2.Counters.Control.TunnelXr.Authentication.SecondaryDigest>`
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.challenge_avp = L2Tpv2.Counters.Control.TunnelXr.Authentication.ChallengeAvp()
self.challenge_avp.parent = self
self.challenge_reponse = L2Tpv2.Counters.Control.TunnelXr.Authentication.ChallengeReponse()
self.challenge_reponse.parent = self
self.common_digest = L2Tpv2.Counters.Control.TunnelXr.Authentication.CommonDigest()
self.common_digest.parent = self
self.integrity_check = L2Tpv2.Counters.Control.TunnelXr.Authentication.IntegrityCheck()
self.integrity_check.parent = self
self.local_secret = L2Tpv2.Counters.Control.TunnelXr.Authentication.LocalSecret()
self.local_secret.parent = self
self.nonce_avp = L2Tpv2.Counters.Control.TunnelXr.Authentication.NonceAvp()
self.nonce_avp.parent = self
self.overall_statistics = L2Tpv2.Counters.Control.TunnelXr.Authentication.OverallStatistics()
self.overall_statistics.parent = self
self.primary_digest = L2Tpv2.Counters.Control.TunnelXr.Authentication.PrimaryDigest()
self.primary_digest.parent = self
self.secondary_digest = L2Tpv2.Counters.Control.TunnelXr.Authentication.SecondaryDigest()
self.secondary_digest.parent = self
class NonceAvp(object):
"""
Nonce AVP statistics
.. attribute:: bad_hash
Bad hash
**type**\: int
**range:** 0..4294967295
.. attribute:: bad_length
Bad length
**type**\: int
**range:** 0..4294967295
.. attribute:: failed
Failed
**type**\: int
**range:** 0..4294967295
.. attribute:: generate_response_failures
Generate response fail
**type**\: int
**range:** 0..4294967295
.. attribute:: ignored
Ignored
**type**\: int
**range:** 0..4294967295
.. attribute:: missing
Missing
**type**\: int
**range:** 0..4294967295
.. attribute:: passed
Passed
**type**\: int
**range:** 0..4294967295
.. attribute:: skipped
Skipped
**type**\: int
**range:** 0..4294967295
.. attribute:: unexpected
Unexpected
**type**\: int
**range:** 0..4294967295
.. attribute:: unexpected_zlb
Unexpected ZLB
**type**\: int
**range:** 0..4294967295
.. attribute:: validate
Validate
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.bad_hash = None
self.bad_length = None
self.failed = None
self.generate_response_failures = None
self.ignored = None
self.missing = None
self.passed = None
self.skipped = None
self.unexpected = None
self.unexpected_zlb = None
self.validate = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tpv2/Cisco-IOS-XR-tunnel-l2tun-oper:counters/Cisco-IOS-XR-tunnel-l2tun-oper:control/Cisco-IOS-XR-tunnel-l2tun-oper:tunnel-xr/Cisco-IOS-XR-tunnel-l2tun-oper:authentication/Cisco-IOS-XR-tunnel-l2tun-oper:nonce-avp'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.bad_hash is not None:
return True
if self.bad_length is not None:
return True
if self.failed is not None:
return True
if self.generate_response_failures is not None:
return True
if self.ignored is not None:
return True
if self.missing is not None:
return True
if self.passed is not None:
return True
if self.skipped is not None:
return True
if self.unexpected is not None:
return True
if self.unexpected_zlb is not None:
return True
if self.validate is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tpv2.Counters.Control.TunnelXr.Authentication.NonceAvp']['meta_info']
class CommonDigest(object):
"""
Common digest statistics
.. attribute:: bad_hash
Bad hash
**type**\: int
**range:** 0..4294967295
.. attribute:: bad_length
Bad length
**type**\: int
**range:** 0..4294967295
.. attribute:: failed
Failed
**type**\: int
**range:** 0..4294967295
.. attribute:: generate_response_failures
Generate response fail
**type**\: int
**range:** 0..4294967295
.. attribute:: ignored
Ignored
**type**\: int
**range:** 0..4294967295
.. attribute:: missing
Missing
**type**\: int
**range:** 0..4294967295
.. attribute:: passed
Passed
**type**\: int
**range:** 0..4294967295
.. attribute:: skipped
Skipped
**type**\: int
**range:** 0..4294967295
.. attribute:: unexpected
Unexpected
**type**\: int
**range:** 0..4294967295
.. attribute:: unexpected_zlb
Unexpected ZLB
**type**\: int
**range:** 0..4294967295
.. attribute:: validate
Validate
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.bad_hash = None
self.bad_length = None
self.failed = None
self.generate_response_failures = None
self.ignored = None
self.missing = None
self.passed = None
self.skipped = None
self.unexpected = None
self.unexpected_zlb = None
self.validate = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tpv2/Cisco-IOS-XR-tunnel-l2tun-oper:counters/Cisco-IOS-XR-tunnel-l2tun-oper:control/Cisco-IOS-XR-tunnel-l2tun-oper:tunnel-xr/Cisco-IOS-XR-tunnel-l2tun-oper:authentication/Cisco-IOS-XR-tunnel-l2tun-oper:common-digest'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.bad_hash is not None:
return True
if self.bad_length is not None:
return True
if self.failed is not None:
return True
if self.generate_response_failures is not None:
return True
if self.ignored is not None:
return True
if self.missing is not None:
return True
if self.passed is not None:
return True
if self.skipped is not None:
return True
if self.unexpected is not None:
return True
if self.unexpected_zlb is not None:
return True
if self.validate is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tpv2.Counters.Control.TunnelXr.Authentication.CommonDigest']['meta_info']
class PrimaryDigest(object):
"""
Primary digest statistics
.. attribute:: bad_hash
Bad hash
**type**\: int
**range:** 0..4294967295
.. attribute:: bad_length
Bad length
**type**\: int
**range:** 0..4294967295
.. attribute:: failed
Failed
**type**\: int
**range:** 0..4294967295
.. attribute:: generate_response_failures
Generate response fail
**type**\: int
**range:** 0..4294967295
.. attribute:: ignored
Ignored
**type**\: int
**range:** 0..4294967295
.. attribute:: missing
Missing
**type**\: int
**range:** 0..4294967295
.. attribute:: passed
Passed
**type**\: int
**range:** 0..4294967295
.. attribute:: skipped
Skipped
**type**\: int
**range:** 0..4294967295
.. attribute:: unexpected
Unexpected
**type**\: int
**range:** 0..4294967295
.. attribute:: unexpected_zlb
Unexpected ZLB
**type**\: int
**range:** 0..4294967295
.. attribute:: validate
Validate
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.bad_hash = None
self.bad_length = None
self.failed = None
self.generate_response_failures = None
self.ignored = None
self.missing = None
self.passed = None
self.skipped = None
self.unexpected = None
self.unexpected_zlb = None
self.validate = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tpv2/Cisco-IOS-XR-tunnel-l2tun-oper:counters/Cisco-IOS-XR-tunnel-l2tun-oper:control/Cisco-IOS-XR-tunnel-l2tun-oper:tunnel-xr/Cisco-IOS-XR-tunnel-l2tun-oper:authentication/Cisco-IOS-XR-tunnel-l2tun-oper:primary-digest'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.bad_hash is not None:
return True
if self.bad_length is not None:
return True
if self.failed is not None:
return True
if self.generate_response_failures is not None:
return True
if self.ignored is not None:
return True
if self.missing is not None:
return True
if self.passed is not None:
return True
if self.skipped is not None:
return True
if self.unexpected is not None:
return True
if self.unexpected_zlb is not None:
return True
if self.validate is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tpv2.Counters.Control.TunnelXr.Authentication.PrimaryDigest']['meta_info']
class SecondaryDigest(object):
"""
Secondary digest statistics
.. attribute:: bad_hash
Bad hash
**type**\: int
**range:** 0..4294967295
.. attribute:: bad_length
Bad length
**type**\: int
**range:** 0..4294967295
.. attribute:: failed
Failed
**type**\: int
**range:** 0..4294967295
.. attribute:: generate_response_failures
Generate response fail
**type**\: int
**range:** 0..4294967295
.. attribute:: ignored
Ignored
**type**\: int
**range:** 0..4294967295
.. attribute:: missing
Missing
**type**\: int
**range:** 0..4294967295
.. attribute:: passed
Passed
**type**\: int
**range:** 0..4294967295
.. attribute:: skipped
Skipped
**type**\: int
**range:** 0..4294967295
.. attribute:: unexpected
Unexpected
**type**\: int
**range:** 0..4294967295
.. attribute:: unexpected_zlb
Unexpected ZLB
**type**\: int
**range:** 0..4294967295
.. attribute:: validate
Validate
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.bad_hash = None
self.bad_length = None
self.failed = None
self.generate_response_failures = None
self.ignored = None
self.missing = None
self.passed = None
self.skipped = None
self.unexpected = None
self.unexpected_zlb = None
self.validate = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tpv2/Cisco-IOS-XR-tunnel-l2tun-oper:counters/Cisco-IOS-XR-tunnel-l2tun-oper:control/Cisco-IOS-XR-tunnel-l2tun-oper:tunnel-xr/Cisco-IOS-XR-tunnel-l2tun-oper:authentication/Cisco-IOS-XR-tunnel-l2tun-oper:secondary-digest'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.bad_hash is not None:
return True
if self.bad_length is not None:
return True
if self.failed is not None:
return True
if self.generate_response_failures is not None:
return True
if self.ignored is not None:
return True
if self.missing is not None:
return True
if self.passed is not None:
return True
if self.skipped is not None:
return True
if self.unexpected is not None:
return True
if self.unexpected_zlb is not None:
return True
if self.validate is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tpv2.Counters.Control.TunnelXr.Authentication.SecondaryDigest']['meta_info']
class IntegrityCheck(object):
"""
Integrity check statistics
.. attribute:: bad_hash
Bad hash
**type**\: int
**range:** 0..4294967295
.. attribute:: bad_length
Bad length
**type**\: int
**range:** 0..4294967295
.. attribute:: failed
Failed
**type**\: int
**range:** 0..4294967295
.. attribute:: generate_response_failures
Generate response fail
**type**\: int
**range:** 0..4294967295
.. attribute:: ignored
Ignored
**type**\: int
**range:** 0..4294967295
.. attribute:: missing
Missing
**type**\: int
**range:** 0..4294967295
.. attribute:: passed
Passed
**type**\: int
**range:** 0..4294967295
.. attribute:: skipped
Skipped
**type**\: int
**range:** 0..4294967295
.. attribute:: unexpected
Unexpected
**type**\: int
**range:** 0..4294967295
.. attribute:: unexpected_zlb
Unexpected ZLB
**type**\: int
**range:** 0..4294967295
.. attribute:: validate
Validate
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.bad_hash = None
self.bad_length = None
self.failed = None
self.generate_response_failures = None
self.ignored = None
self.missing = None
self.passed = None
self.skipped = None
self.unexpected = None
self.unexpected_zlb = None
self.validate = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tpv2/Cisco-IOS-XR-tunnel-l2tun-oper:counters/Cisco-IOS-XR-tunnel-l2tun-oper:control/Cisco-IOS-XR-tunnel-l2tun-oper:tunnel-xr/Cisco-IOS-XR-tunnel-l2tun-oper:authentication/Cisco-IOS-XR-tunnel-l2tun-oper:integrity-check'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.bad_hash is not None:
return True
if self.bad_length is not None:
return True
if self.failed is not None:
return True
if self.generate_response_failures is not None:
return True
if self.ignored is not None:
return True
if self.missing is not None:
return True
if self.passed is not None:
return True
if self.skipped is not None:
return True
if self.unexpected is not None:
return True
if self.unexpected_zlb is not None:
return True
if self.validate is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tpv2.Counters.Control.TunnelXr.Authentication.IntegrityCheck']['meta_info']
class LocalSecret(object):
"""
Local secret statistics
.. attribute:: bad_hash
Bad hash
**type**\: int
**range:** 0..4294967295
.. attribute:: bad_length
Bad length
**type**\: int
**range:** 0..4294967295
.. attribute:: failed
Failed
**type**\: int
**range:** 0..4294967295
.. attribute:: generate_response_failures
Generate response fail
**type**\: int
**range:** 0..4294967295
.. attribute:: ignored
Ignored
**type**\: int
**range:** 0..4294967295
.. attribute:: missing
Missing
**type**\: int
**range:** 0..4294967295
.. attribute:: passed
Passed
**type**\: int
**range:** 0..4294967295
.. attribute:: skipped
Skipped
**type**\: int
**range:** 0..4294967295
.. attribute:: unexpected
Unexpected
**type**\: int
**range:** 0..4294967295
.. attribute:: unexpected_zlb
Unexpected ZLB
**type**\: int
**range:** 0..4294967295
.. attribute:: validate
Validate
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.bad_hash = None
self.bad_length = None
self.failed = None
self.generate_response_failures = None
self.ignored = None
self.missing = None
self.passed = None
self.skipped = None
self.unexpected = None
self.unexpected_zlb = None
self.validate = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tpv2/Cisco-IOS-XR-tunnel-l2tun-oper:counters/Cisco-IOS-XR-tunnel-l2tun-oper:control/Cisco-IOS-XR-tunnel-l2tun-oper:tunnel-xr/Cisco-IOS-XR-tunnel-l2tun-oper:authentication/Cisco-IOS-XR-tunnel-l2tun-oper:local-secret'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.bad_hash is not None:
return True
if self.bad_length is not None:
return True
if self.failed is not None:
return True
if self.generate_response_failures is not None:
return True
if self.ignored is not None:
return True
if self.missing is not None:
return True
if self.passed is not None:
return True
if self.skipped is not None:
return True
if self.unexpected is not None:
return True
if self.unexpected_zlb is not None:
return True
if self.validate is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tpv2.Counters.Control.TunnelXr.Authentication.LocalSecret']['meta_info']
class ChallengeAvp(object):
"""
Challenge AVP statistics
.. attribute:: bad_hash
Bad hash
**type**\: int
**range:** 0..4294967295
.. attribute:: bad_length
Bad length
**type**\: int
**range:** 0..4294967295
.. attribute:: failed
Failed
**type**\: int
**range:** 0..4294967295
.. attribute:: generate_response_failures
Generate response fail
**type**\: int
**range:** 0..4294967295
.. attribute:: ignored
Ignored
**type**\: int
**range:** 0..4294967295
.. attribute:: missing
Missing
**type**\: int
**range:** 0..4294967295
.. attribute:: passed
Passed
**type**\: int
**range:** 0..4294967295
.. attribute:: skipped
Skipped
**type**\: int
**range:** 0..4294967295
.. attribute:: unexpected
Unexpected
**type**\: int
**range:** 0..4294967295
.. attribute:: unexpected_zlb
Unexpected ZLB
**type**\: int
**range:** 0..4294967295
.. attribute:: validate
Validate
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.bad_hash = None
self.bad_length = None
self.failed = None
self.generate_response_failures = None
self.ignored = None
self.missing = None
self.passed = None
self.skipped = None
self.unexpected = None
self.unexpected_zlb = None
self.validate = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tpv2/Cisco-IOS-XR-tunnel-l2tun-oper:counters/Cisco-IOS-XR-tunnel-l2tun-oper:control/Cisco-IOS-XR-tunnel-l2tun-oper:tunnel-xr/Cisco-IOS-XR-tunnel-l2tun-oper:authentication/Cisco-IOS-XR-tunnel-l2tun-oper:challenge-avp'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.bad_hash is not None:
return True
if self.bad_length is not None:
return True
if self.failed is not None:
return True
if self.generate_response_failures is not None:
return True
if self.ignored is not None:
return True
if self.missing is not None:
return True
if self.passed is not None:
return True
if self.skipped is not None:
return True
if self.unexpected is not None:
return True
if self.unexpected_zlb is not None:
return True
if self.validate is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tpv2.Counters.Control.TunnelXr.Authentication.ChallengeAvp']['meta_info']
class ChallengeReponse(object):
"""
Challenge response statistics
.. attribute:: bad_hash
Bad hash
**type**\: int
**range:** 0..4294967295
.. attribute:: bad_length
Bad length
**type**\: int
**range:** 0..4294967295
.. attribute:: failed
Failed
**type**\: int
**range:** 0..4294967295
.. attribute:: generate_response_failures
Generate response fail
**type**\: int
**range:** 0..4294967295
.. attribute:: ignored
Ignored
**type**\: int
**range:** 0..4294967295
.. attribute:: missing
Missing
**type**\: int
**range:** 0..4294967295
.. attribute:: passed
Passed
**type**\: int
**range:** 0..4294967295
.. attribute:: skipped
Skipped
**type**\: int
**range:** 0..4294967295
.. attribute:: unexpected
Unexpected
**type**\: int
**range:** 0..4294967295
.. attribute:: unexpected_zlb
Unexpected ZLB
**type**\: int
**range:** 0..4294967295
.. attribute:: validate
Validate
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.bad_hash = None
self.bad_length = None
self.failed = None
self.generate_response_failures = None
self.ignored = None
self.missing = None
self.passed = None
self.skipped = None
self.unexpected = None
self.unexpected_zlb = None
self.validate = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tpv2/Cisco-IOS-XR-tunnel-l2tun-oper:counters/Cisco-IOS-XR-tunnel-l2tun-oper:control/Cisco-IOS-XR-tunnel-l2tun-oper:tunnel-xr/Cisco-IOS-XR-tunnel-l2tun-oper:authentication/Cisco-IOS-XR-tunnel-l2tun-oper:challenge-reponse'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.bad_hash is not None:
return True
if self.bad_length is not None:
return True
if self.failed is not None:
return True
if self.generate_response_failures is not None:
return True
if self.ignored is not None:
return True
if self.missing is not None:
return True
if self.passed is not None:
return True
if self.skipped is not None:
return True
if self.unexpected is not None:
return True
if self.unexpected_zlb is not None:
return True
if self.validate is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tpv2.Counters.Control.TunnelXr.Authentication.ChallengeReponse']['meta_info']
class OverallStatistics(object):
"""
Overall statistics
.. attribute:: bad_hash
Bad hash
**type**\: int
**range:** 0..4294967295
.. attribute:: bad_length
Bad length
**type**\: int
**range:** 0..4294967295
.. attribute:: failed
Failed
**type**\: int
**range:** 0..4294967295
.. attribute:: generate_response_failures
Generate response fail
**type**\: int
**range:** 0..4294967295
.. attribute:: ignored
Ignored
**type**\: int
**range:** 0..4294967295
.. attribute:: missing
Missing
**type**\: int
**range:** 0..4294967295
.. attribute:: passed
Passed
**type**\: int
**range:** 0..4294967295
.. attribute:: skipped
Skipped
**type**\: int
**range:** 0..4294967295
.. attribute:: unexpected
Unexpected
**type**\: int
**range:** 0..4294967295
.. attribute:: unexpected_zlb
Unexpected ZLB
**type**\: int
**range:** 0..4294967295
.. attribute:: validate
Validate
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.bad_hash = None
self.bad_length = None
self.failed = None
self.generate_response_failures = None
self.ignored = None
self.missing = None
self.passed = None
self.skipped = None
self.unexpected = None
self.unexpected_zlb = None
self.validate = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tpv2/Cisco-IOS-XR-tunnel-l2tun-oper:counters/Cisco-IOS-XR-tunnel-l2tun-oper:control/Cisco-IOS-XR-tunnel-l2tun-oper:tunnel-xr/Cisco-IOS-XR-tunnel-l2tun-oper:authentication/Cisco-IOS-XR-tunnel-l2tun-oper:overall-statistics'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.bad_hash is not None:
return True
if self.bad_length is not None:
return True
if self.failed is not None:
return True
if self.generate_response_failures is not None:
return True
if self.ignored is not None:
return True
if self.missing is not None:
return True
if self.passed is not None:
return True
if self.skipped is not None:
return True
if self.unexpected is not None:
return True
if self.unexpected_zlb is not None:
return True
if self.validate is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tpv2.Counters.Control.TunnelXr.Authentication.OverallStatistics']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tpv2/Cisco-IOS-XR-tunnel-l2tun-oper:counters/Cisco-IOS-XR-tunnel-l2tun-oper:control/Cisco-IOS-XR-tunnel-l2tun-oper:tunnel-xr/Cisco-IOS-XR-tunnel-l2tun-oper:authentication'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.challenge_avp is not None and self.challenge_avp._has_data():
return True
if self.challenge_reponse is not None and self.challenge_reponse._has_data():
return True
if self.common_digest is not None and self.common_digest._has_data():
return True
if self.integrity_check is not None and self.integrity_check._has_data():
return True
if self.local_secret is not None and self.local_secret._has_data():
return True
if self.nonce_avp is not None and self.nonce_avp._has_data():
return True
if self.overall_statistics is not None and self.overall_statistics._has_data():
return True
if self.primary_digest is not None and self.primary_digest._has_data():
return True
if self.secondary_digest is not None and self.secondary_digest._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tpv2.Counters.Control.TunnelXr.Authentication']['meta_info']
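# Usage note (illustrative only): the Authentication container groups one counter block per
# authentication stage (challenge AVP, challenge response, common/primary/secondary digest,
# integrity check, local secret, nonce AVP, overall), all sharing the same
# passed/failed/ignored/... leaves. Assuming `l2tpv2_oper` is a previously read L2Tpv2
# instance (placeholder name):
#
#     auth = l2tpv2_oper.counters.control.tunnel_xr.authentication
#     print(auth.overall_statistics.passed, auth.overall_statistics.failed)
#     print(auth.nonce_avp.bad_hash, auth.challenge_avp.missing)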
class Global_(object):
"""
Tunnel counters
.. attribute:: drop
Drop data
**type**\: :py:class:`Drop <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tpv2.Counters.Control.TunnelXr.Global_.Drop>`
.. attribute:: received
Received data
**type**\: :py:class:`Received <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tpv2.Counters.Control.TunnelXr.Global_.Received>`
.. attribute:: retransmit
Retransmit data
**type**\: :py:class:`Retransmit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tpv2.Counters.Control.TunnelXr.Global_.Retransmit>`
.. attribute:: total_drop
Total drop
**type**\: int
**range:** 0..4294967295
.. attribute:: total_received
Total received
**type**\: int
**range:** 0..4294967295
.. attribute:: total_retransmit
Total retransmit
**type**\: int
**range:** 0..4294967295
.. attribute:: total_transmit
Total transmit
**type**\: int
**range:** 0..4294967295
.. attribute:: transmit
Transmit data
**type**\: :py:class:`Transmit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tpv2.Counters.Control.TunnelXr.Global_.Transmit>`
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.drop = L2Tpv2.Counters.Control.TunnelXr.Global_.Drop()
self.drop.parent = self
self.received = L2Tpv2.Counters.Control.TunnelXr.Global_.Received()
self.received.parent = self
self.retransmit = L2Tpv2.Counters.Control.TunnelXr.Global_.Retransmit()
self.retransmit.parent = self
self.total_drop = None
self.total_received = None
self.total_retransmit = None
self.total_transmit = None
self.transmit = L2Tpv2.Counters.Control.TunnelXr.Global_.Transmit()
self.transmit.parent = self
class Transmit(object):
"""
Transmit data
.. attribute:: acknowledgement_packets
Packets acknowledgement
**type**\: int
**range:** 0..4294967295
.. attribute:: call_disconnect_notify_packets
Call disconnect notify packets
**type**\: int
**range:** 0..4294967295
.. attribute:: hello_packets
Keep alive messages
**type**\: int
**range:** 0..4294967295
.. attribute:: incoming_call_connected_packets
Incoming call connected packets
**type**\: int
**range:** 0..4294967295
.. attribute:: incoming_call_replies
Incoming call replies
**type**\: int
**range:** 0..4294967295
.. attribute:: incoming_call_requests
Incoming call requests
**type**\: int
**range:** 0..4294967295
.. attribute:: outgoing_call_connected_packets
Outgoing call connected packets
**type**\: int
**range:** 0..4294967295
.. attribute:: outgoing_call_replies
Outgoing call replies
**type**\: int
**range:** 0..4294967295
.. attribute:: outgoing_call_requests
Outgoing call requests
**type**\: int
**range:** 0..4294967295
.. attribute:: service_relay_replies
Service relay reply counts
**type**\: int
**range:** 0..4294967295
.. attribute:: service_relay_requests
Service relay request counts
**type**\: int
**range:** 0..4294967295
.. attribute:: set_link_info_packets
Set link info packets
**type**\: int
**range:** 0..4294967295
.. attribute:: start_control_connection_notifications
Start control connection notifications
**type**\: int
**range:** 0..4294967295
.. attribute:: start_control_connection_replies
Start control connection replies
**type**\: int
**range:** 0..4294967295
.. attribute:: start_control_connection_requests
Start control connection requests
**type**\: int
**range:** 0..4294967295
.. attribute:: stop_control_connection_notifications
Stop control connection notifications
**type**\: int
**range:** 0..4294967295
.. attribute:: unknown_packets
Unknown packets
**type**\: int
**range:** 0..4294967295
.. attribute:: wan_error_notify_packets
WAN error notify packets
**type**\: int
**range:** 0..4294967295
.. attribute:: zero_length_body_packets
Zero length body packets
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.acknowledgement_packets = None
self.call_disconnect_notify_packets = None
self.hello_packets = None
self.incoming_call_connected_packets = None
self.incoming_call_replies = None
self.incoming_call_requests = None
self.outgoing_call_connected_packets = None
self.outgoing_call_replies = None
self.outgoing_call_requests = None
self.service_relay_replies = None
self.service_relay_requests = None
self.set_link_info_packets = None
self.start_control_connection_notifications = None
self.start_control_connection_replies = None
self.start_control_connection_requests = None
self.stop_control_connection_notifications = None
self.unknown_packets = None
self.wan_error_notify_packets = None
self.zero_length_body_packets = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tpv2/Cisco-IOS-XR-tunnel-l2tun-oper:counters/Cisco-IOS-XR-tunnel-l2tun-oper:control/Cisco-IOS-XR-tunnel-l2tun-oper:tunnel-xr/Cisco-IOS-XR-tunnel-l2tun-oper:global/Cisco-IOS-XR-tunnel-l2tun-oper:transmit'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.acknowledgement_packets is not None:
return True
if self.call_disconnect_notify_packets is not None:
return True
if self.hello_packets is not None:
return True
if self.incoming_call_connected_packets is not None:
return True
if self.incoming_call_replies is not None:
return True
if self.incoming_call_requests is not None:
return True
if self.outgoing_call_connected_packets is not None:
return True
if self.outgoing_call_replies is not None:
return True
if self.outgoing_call_requests is not None:
return True
if self.service_relay_replies is not None:
return True
if self.service_relay_requests is not None:
return True
if self.set_link_info_packets is not None:
return True
if self.start_control_connection_notifications is not None:
return True
if self.start_control_connection_replies is not None:
return True
if self.start_control_connection_requests is not None:
return True
if self.stop_control_connection_notifications is not None:
return True
if self.unknown_packets is not None:
return True
if self.wan_error_notify_packets is not None:
return True
if self.zero_length_body_packets is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tpv2.Counters.Control.TunnelXr.Global_.Transmit']['meta_info']
class Retransmit(object):
"""
Retransmit data
.. attribute:: acknowledgement_packets
Packets acknowledgement
**type**\: int
**range:** 0..4294967295
.. attribute:: call_disconnect_notify_packets
Call disconnect notify packets
**type**\: int
**range:** 0..4294967295
.. attribute:: hello_packets
Keep alive messages
**type**\: int
**range:** 0..4294967295
.. attribute:: incoming_call_connected_packets
Incoming call connected packets
**type**\: int
**range:** 0..4294967295
.. attribute:: incoming_call_replies
Incoming call replies
**type**\: int
**range:** 0..4294967295
.. attribute:: incoming_call_requests
Incoming call requests
**type**\: int
**range:** 0..4294967295
.. attribute:: outgoing_call_connected_packets
Outgoing call connected packets
**type**\: int
**range:** 0..4294967295
.. attribute:: outgoing_call_replies
Outgoing call replies
**type**\: int
**range:** 0..4294967295
.. attribute:: outgoing_call_requests
Outgoing call requests
**type**\: int
**range:** 0..4294967295
.. attribute:: service_relay_replies
Service relay reply counts
**type**\: int
**range:** 0..4294967295
.. attribute:: service_relay_requests
Service relay request counts
**type**\: int
**range:** 0..4294967295
.. attribute:: set_link_info_packets
Set link info packets
**type**\: int
**range:** 0..4294967295
.. attribute:: start_control_connection_notifications
Start control connection notifications
**type**\: int
**range:** 0..4294967295
.. attribute:: start_control_connection_replies
Start control connection replies
**type**\: int
**range:** 0..4294967295
.. attribute:: start_control_connection_requests
Start control connection requests
**type**\: int
**range:** 0..4294967295
.. attribute:: stop_control_connection_notifications
Stop control connection notifications
**type**\: int
**range:** 0..4294967295
.. attribute:: unknown_packets
Unknown packets
**type**\: int
**range:** 0..4294967295
.. attribute:: wan_error_notify_packets
WAN error notify packets
**type**\: int
**range:** 0..4294967295
.. attribute:: zero_length_body_packets
Zero length body packets
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.acknowledgement_packets = None
self.call_disconnect_notify_packets = None
self.hello_packets = None
self.incoming_call_connected_packets = None
self.incoming_call_replies = None
self.incoming_call_requests = None
self.outgoing_call_connected_packets = None
self.outgoing_call_replies = None
self.outgoing_call_requests = None
self.service_relay_replies = None
self.service_relay_requests = None
self.set_link_info_packets = None
self.start_control_connection_notifications = None
self.start_control_connection_replies = None
self.start_control_connection_requests = None
self.stop_control_connection_notifications = None
self.unknown_packets = None
self.wan_error_notify_packets = None
self.zero_length_body_packets = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tpv2/Cisco-IOS-XR-tunnel-l2tun-oper:counters/Cisco-IOS-XR-tunnel-l2tun-oper:control/Cisco-IOS-XR-tunnel-l2tun-oper:tunnel-xr/Cisco-IOS-XR-tunnel-l2tun-oper:global/Cisco-IOS-XR-tunnel-l2tun-oper:retransmit'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.acknowledgement_packets is not None:
return True
if self.call_disconnect_notify_packets is not None:
return True
if self.hello_packets is not None:
return True
if self.incoming_call_connected_packets is not None:
return True
if self.incoming_call_replies is not None:
return True
if self.incoming_call_requests is not None:
return True
if self.outgoing_call_connected_packets is not None:
return True
if self.outgoing_call_replies is not None:
return True
if self.outgoing_call_requests is not None:
return True
if self.service_relay_replies is not None:
return True
if self.service_relay_requests is not None:
return True
if self.set_link_info_packets is not None:
return True
if self.start_control_connection_notifications is not None:
return True
if self.start_control_connection_replies is not None:
return True
if self.start_control_connection_requests is not None:
return True
if self.stop_control_connection_notifications is not None:
return True
if self.unknown_packets is not None:
return True
if self.wan_error_notify_packets is not None:
return True
if self.zero_length_body_packets is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tpv2.Counters.Control.TunnelXr.Global_.Retransmit']['meta_info']
class Received(object):
"""
Received data
.. attribute:: acknowledgement_packets
Packets acknowledgement
**type**\: int
**range:** 0..4294967295
.. attribute:: call_disconnect_notify_packets
Call disconnect notify packets
**type**\: int
**range:** 0..4294967295
.. attribute:: hello_packets
Keep alive messages
**type**\: int
**range:** 0..4294967295
.. attribute:: incoming_call_connected_packets
Incoming call connected packets
**type**\: int
**range:** 0..4294967295
.. attribute:: incoming_call_replies
Incoming call replies
**type**\: int
**range:** 0..4294967295
.. attribute:: incoming_call_requests
Incoming call requests
**type**\: int
**range:** 0..4294967295
.. attribute:: outgoing_call_connected_packets
Outgoing call connected packets
**type**\: int
**range:** 0..4294967295
.. attribute:: outgoing_call_replies
Outgoing call replies
**type**\: int
**range:** 0..4294967295
.. attribute:: outgoing_call_requests
Outgoing call requests
**type**\: int
**range:** 0..4294967295
.. attribute:: service_relay_replies
Service relay reply counts
**type**\: int
**range:** 0..4294967295
.. attribute:: service_relay_requests
Service relay request counts
**type**\: int
**range:** 0..4294967295
.. attribute:: set_link_info_packets
Set link info packets
**type**\: int
**range:** 0..4294967295
.. attribute:: start_control_connection_notifications
Start control connection notifications
**type**\: int
**range:** 0..4294967295
.. attribute:: start_control_connection_replies
Start control connection replies
**type**\: int
**range:** 0..4294967295
.. attribute:: start_control_connection_requests
Start control connection requests
**type**\: int
**range:** 0..4294967295
.. attribute:: stop_control_connection_notifications
Stop control connection notifications
**type**\: int
**range:** 0..4294967295
.. attribute:: unknown_packets
Unknown packets
**type**\: int
**range:** 0..4294967295
.. attribute:: wan_error_notify_packets
WAN error notify packets
**type**\: int
**range:** 0..4294967295
.. attribute:: zero_length_body_packets
Zero length body packets
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.acknowledgement_packets = None
self.call_disconnect_notify_packets = None
self.hello_packets = None
self.incoming_call_connected_packets = None
self.incoming_call_replies = None
self.incoming_call_requests = None
self.outgoing_call_connected_packets = None
self.outgoing_call_replies = None
self.outgoing_call_requests = None
self.service_relay_replies = None
self.service_relay_requests = None
self.set_link_info_packets = None
self.start_control_connection_notifications = None
self.start_control_connection_replies = None
self.start_control_connection_requests = None
self.stop_control_connection_notifications = None
self.unknown_packets = None
self.wan_error_notify_packets = None
self.zero_length_body_packets = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tpv2/Cisco-IOS-XR-tunnel-l2tun-oper:counters/Cisco-IOS-XR-tunnel-l2tun-oper:control/Cisco-IOS-XR-tunnel-l2tun-oper:tunnel-xr/Cisco-IOS-XR-tunnel-l2tun-oper:global/Cisco-IOS-XR-tunnel-l2tun-oper:received'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.acknowledgement_packets is not None:
return True
if self.call_disconnect_notify_packets is not None:
return True
if self.hello_packets is not None:
return True
if self.incoming_call_connected_packets is not None:
return True
if self.incoming_call_replies is not None:
return True
if self.incoming_call_requests is not None:
return True
if self.outgoing_call_connected_packets is not None:
return True
if self.outgoing_call_replies is not None:
return True
if self.outgoing_call_requests is not None:
return True
if self.service_relay_replies is not None:
return True
if self.service_relay_requests is not None:
return True
if self.set_link_info_packets is not None:
return True
if self.start_control_connection_notifications is not None:
return True
if self.start_control_connection_replies is not None:
return True
if self.start_control_connection_requests is not None:
return True
if self.stop_control_connection_notifications is not None:
return True
if self.unknown_packets is not None:
return True
if self.wan_error_notify_packets is not None:
return True
if self.zero_length_body_packets is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tpv2.Counters.Control.TunnelXr.Global_.Received']['meta_info']
class Drop(object):
"""
Drop data
.. attribute:: acknowledgement_packets
Packets acknowledgement
**type**\: int
**range:** 0..4294967295
.. attribute:: call_disconnect_notify_packets
Call disconnect notify packets
**type**\: int
**range:** 0..4294967295
.. attribute:: hello_packets
Keep alive messages
**type**\: int
**range:** 0..4294967295
.. attribute:: incoming_call_connected_packets
Incoming call connected packets
**type**\: int
**range:** 0..4294967295
.. attribute:: incoming_call_replies
Incoming call replies
**type**\: int
**range:** 0..4294967295
.. attribute:: incoming_call_requests
Incoming call requests
**type**\: int
**range:** 0..4294967295
.. attribute:: outgoing_call_connected_packets
Outgoing call connected packets
**type**\: int
**range:** 0..4294967295
.. attribute:: outgoing_call_replies
Outgoing call replies
**type**\: int
**range:** 0..4294967295
.. attribute:: outgoing_call_requests
Outgoing call requests
**type**\: int
**range:** 0..4294967295
.. attribute:: service_relay_replies
Service relay reply counts
**type**\: int
**range:** 0..4294967295
.. attribute:: service_relay_requests
Service relay request counts
**type**\: int
**range:** 0..4294967295
.. attribute:: set_link_info_packets
Set link info packets
**type**\: int
**range:** 0..4294967295
.. attribute:: start_control_connection_notifications
Start control connection notifications
**type**\: int
**range:** 0..4294967295
.. attribute:: start_control_connection_replies
Start control connection replies
**type**\: int
**range:** 0..4294967295
.. attribute:: start_control_connection_requests
Start control connection requests
**type**\: int
**range:** 0..4294967295
.. attribute:: stop_control_connection_notifications
Stop control connection notifications
**type**\: int
**range:** 0..4294967295
.. attribute:: unknown_packets
Unknown packets
**type**\: int
**range:** 0..4294967295
.. attribute:: wan_error_notify_packets
WAN error notify packets
**type**\: int
**range:** 0..4294967295
.. attribute:: zero_length_body_packets
Zero length body packets
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.acknowledgement_packets = None
self.call_disconnect_notify_packets = None
self.hello_packets = None
self.incoming_call_connected_packets = None
self.incoming_call_replies = None
self.incoming_call_requests = None
self.outgoing_call_connected_packets = None
self.outgoing_call_replies = None
self.outgoing_call_requests = None
self.service_relay_replies = None
self.service_relay_requests = None
self.set_link_info_packets = None
self.start_control_connection_notifications = None
self.start_control_connection_replies = None
self.start_control_connection_requests = None
self.stop_control_connection_notifications = None
self.unknown_packets = None
self.wan_error_notify_packets = None
self.zero_length_body_packets = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tpv2/Cisco-IOS-XR-tunnel-l2tun-oper:counters/Cisco-IOS-XR-tunnel-l2tun-oper:control/Cisco-IOS-XR-tunnel-l2tun-oper:tunnel-xr/Cisco-IOS-XR-tunnel-l2tun-oper:global/Cisco-IOS-XR-tunnel-l2tun-oper:drop'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.acknowledgement_packets is not None:
return True
if self.call_disconnect_notify_packets is not None:
return True
if self.hello_packets is not None:
return True
if self.incoming_call_connected_packets is not None:
return True
if self.incoming_call_replies is not None:
return True
if self.incoming_call_requests is not None:
return True
if self.outgoing_call_connected_packets is not None:
return True
if self.outgoing_call_replies is not None:
return True
if self.outgoing_call_requests is not None:
return True
if self.service_relay_replies is not None:
return True
if self.service_relay_requests is not None:
return True
if self.set_link_info_packets is not None:
return True
if self.start_control_connection_notifications is not None:
return True
if self.start_control_connection_replies is not None:
return True
if self.start_control_connection_requests is not None:
return True
if self.stop_control_connection_notifications is not None:
return True
if self.unknown_packets is not None:
return True
if self.wan_error_notify_packets is not None:
return True
if self.zero_length_body_packets is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tpv2.Counters.Control.TunnelXr.Global_.Drop']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tpv2/Cisco-IOS-XR-tunnel-l2tun-oper:counters/Cisco-IOS-XR-tunnel-l2tun-oper:control/Cisco-IOS-XR-tunnel-l2tun-oper:tunnel-xr/Cisco-IOS-XR-tunnel-l2tun-oper:global'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.drop is not None and self.drop._has_data():
return True
if self.received is not None and self.received._has_data():
return True
if self.retransmit is not None and self.retransmit._has_data():
return True
if self.total_drop is not None:
return True
if self.total_received is not None:
return True
if self.total_retransmit is not None:
return True
if self.total_transmit is not None:
return True
if self.transmit is not None and self.transmit._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tpv2.Counters.Control.TunnelXr.Global_']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tpv2/Cisco-IOS-XR-tunnel-l2tun-oper:counters/Cisco-IOS-XR-tunnel-l2tun-oper:control/Cisco-IOS-XR-tunnel-l2tun-oper:tunnel-xr'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.authentication is not None and self.authentication._has_data():
return True
if self.global_ is not None and self.global_._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tpv2.Counters.Control.TunnelXr']['meta_info']
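# --- Usage sketch (editorial note, not part of the generated bindings) ------
# A minimal, hypothetical read of the aggregated control-message counters
# defined above, using ydk-py's CRUDService over NETCONF.  Device address and
# credentials are placeholders, and keyword names may vary slightly between
# ydk releases; the attribute chain mirrors the classes in this module.
#
#   from ydk.services import CRUDService
#   from ydk.providers import NetconfServiceProvider
#
#   provider = NetconfServiceProvider(address='192.0.2.1', port=830,
#                                     username='admin', password='admin')
#   crud = CRUDService()
#   l2tpv2 = crud.read(provider, L2Tpv2())                 # fetch oper data
#   glob = l2tpv2.counters.control.tunnel_xr.global_       # Global_ container
#   print('received SCCRQ:', glob.received.start_control_connection_requests)
#   print('dropped ZLB   :', glob.drop.zero_length_body_packets)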
class Tunnels(object):
"""
Table of control message counters, keyed by tunnel ID
.. attribute:: tunnel
L2TP tunnel control message counters
**type**\: list of :py:class:`Tunnel <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tpv2.Counters.Control.Tunnels.Tunnel>`
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.tunnel = YList()
self.tunnel.parent = self
self.tunnel.name = 'tunnel'
class Tunnel(object):
"""
L2TP tunnel control message counters
.. attribute:: tunnel_id <key>
L2TP tunnel ID
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: brief
L2TP control message local and remote addresses
**type**\: :py:class:`Brief <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tpv2.Counters.Control.Tunnels.Tunnel.Brief>`
.. attribute:: global_
Global data
**type**\: :py:class:`Global_ <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tpv2.Counters.Control.Tunnels.Tunnel.Global_>`
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.tunnel_id = None
self.brief = L2Tpv2.Counters.Control.Tunnels.Tunnel.Brief()
self.brief.parent = self
self.global_ = L2Tpv2.Counters.Control.Tunnels.Tunnel.Global_()
self.global_.parent = self
class Brief(object):
"""
L2TP control message local and remote addresses
.. attribute:: local_address
Local IP address
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: remote_address
Remote IP address
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: remote_tunnel_id
Remote tunnel ID
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.local_address = None
self.remote_address = None
self.remote_tunnel_id = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tunnel-l2tun-oper:brief'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.local_address is not None:
return True
if self.remote_address is not None:
return True
if self.remote_tunnel_id is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tpv2.Counters.Control.Tunnels.Tunnel.Brief']['meta_info']
class Global_(object):
"""
Global data
.. attribute:: drop
Drop data
**type**\: :py:class:`Drop <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tpv2.Counters.Control.Tunnels.Tunnel.Global_.Drop>`
.. attribute:: received
Received data
**type**\: :py:class:`Received <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tpv2.Counters.Control.Tunnels.Tunnel.Global_.Received>`
.. attribute:: retransmit
Retransmit data
**type**\: :py:class:`Retransmit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tpv2.Counters.Control.Tunnels.Tunnel.Global_.Retransmit>`
.. attribute:: total_drop
Total drop
**type**\: int
**range:** 0..4294967295
.. attribute:: total_received
Total received
**type**\: int
**range:** 0..4294967295
.. attribute:: total_retransmit
Total retransmit
**type**\: int
**range:** 0..4294967295
.. attribute:: total_transmit
Total transmit
**type**\: int
**range:** 0..4294967295
.. attribute:: transmit
Transmit data
**type**\: :py:class:`Transmit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tpv2.Counters.Control.Tunnels.Tunnel.Global_.Transmit>`
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.drop = L2Tpv2.Counters.Control.Tunnels.Tunnel.Global_.Drop()
self.drop.parent = self
self.received = L2Tpv2.Counters.Control.Tunnels.Tunnel.Global_.Received()
self.received.parent = self
self.retransmit = L2Tpv2.Counters.Control.Tunnels.Tunnel.Global_.Retransmit()
self.retransmit.parent = self
self.total_drop = None
self.total_received = None
self.total_retransmit = None
self.total_transmit = None
self.transmit = L2Tpv2.Counters.Control.Tunnels.Tunnel.Global_.Transmit()
self.transmit.parent = self
class Transmit(object):
"""
Transmit data
.. attribute:: acknowledgement_packets
Acknowledgement packets
**type**\: int
**range:** 0..4294967295
.. attribute:: call_disconnect_notify_packets
Call disconnect notify packets
**type**\: int
**range:** 0..4294967295
.. attribute:: hello_packets
Keep alive messages
**type**\: int
**range:** 0..4294967295
.. attribute:: incoming_call_connected_packets
Incoming call connected packets
**type**\: int
**range:** 0..4294967295
.. attribute:: incoming_call_replies
Incoming call replies
**type**\: int
**range:** 0..4294967295
.. attribute:: incoming_call_requests
Incoming call requests
**type**\: int
**range:** 0..4294967295
.. attribute:: outgoing_call_connected_packets
Outgoing call connected packets
**type**\: int
**range:** 0..4294967295
.. attribute:: outgoing_call_replies
Outgoing call replies
**type**\: int
**range:** 0..4294967295
.. attribute:: outgoing_call_requests
Outgoing call requests
**type**\: int
**range:** 0..4294967295
.. attribute:: service_relay_replies
Service relay reply counts
**type**\: int
**range:** 0..4294967295
.. attribute:: service_relay_requests
Service relay request counts
**type**\: int
**range:** 0..4294967295
.. attribute:: set_link_info_packets
Set link info packets
**type**\: int
**range:** 0..4294967295
.. attribute:: start_control_connection_notifications
Start control connection notifications
**type**\: int
**range:** 0..4294967295
.. attribute:: start_control_connection_replies
Start control connection replies
**type**\: int
**range:** 0..4294967295
.. attribute:: start_control_connection_requests
Start control connection requests
**type**\: int
**range:** 0..4294967295
.. attribute:: stop_control_connection_notifications
Stop control connection notifications
**type**\: int
**range:** 0..4294967295
.. attribute:: unknown_packets
Unknown packets
**type**\: int
**range:** 0..4294967295
.. attribute:: wan_error_notify_packets
WAN error notify packets
**type**\: int
**range:** 0..4294967295
.. attribute:: zero_length_body_packets
Zero length body packets
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.acknowledgement_packets = None
self.call_disconnect_notify_packets = None
self.hello_packets = None
self.incoming_call_connected_packets = None
self.incoming_call_replies = None
self.incoming_call_requests = None
self.outgoing_call_connected_packets = None
self.outgoing_call_replies = None
self.outgoing_call_requests = None
self.service_relay_replies = None
self.service_relay_requests = None
self.set_link_info_packets = None
self.start_control_connection_notifications = None
self.start_control_connection_replies = None
self.start_control_connection_requests = None
self.stop_control_connection_notifications = None
self.unknown_packets = None
self.wan_error_notify_packets = None
self.zero_length_body_packets = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tunnel-l2tun-oper:transmit'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.acknowledgement_packets is not None:
return True
if self.call_disconnect_notify_packets is not None:
return True
if self.hello_packets is not None:
return True
if self.incoming_call_connected_packets is not None:
return True
if self.incoming_call_replies is not None:
return True
if self.incoming_call_requests is not None:
return True
if self.outgoing_call_connected_packets is not None:
return True
if self.outgoing_call_replies is not None:
return True
if self.outgoing_call_requests is not None:
return True
if self.service_relay_replies is not None:
return True
if self.service_relay_requests is not None:
return True
if self.set_link_info_packets is not None:
return True
if self.start_control_connection_notifications is not None:
return True
if self.start_control_connection_replies is not None:
return True
if self.start_control_connection_requests is not None:
return True
if self.stop_control_connection_notifications is not None:
return True
if self.unknown_packets is not None:
return True
if self.wan_error_notify_packets is not None:
return True
if self.zero_length_body_packets is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tpv2.Counters.Control.Tunnels.Tunnel.Global_.Transmit']['meta_info']
class Retransmit(object):
"""
Retransmit data
.. attribute:: acknowledgement_packets
Acknowledgement packets
**type**\: int
**range:** 0..4294967295
.. attribute:: call_disconnect_notify_packets
Call disconnect notify packets
**type**\: int
**range:** 0..4294967295
.. attribute:: hello_packets
Keep alive messages
**type**\: int
**range:** 0..4294967295
.. attribute:: incoming_call_connected_packets
Incoming call connected packets
**type**\: int
**range:** 0..4294967295
.. attribute:: incoming_call_replies
Incoming call replies
**type**\: int
**range:** 0..4294967295
.. attribute:: incoming_call_requests
Incoming call requests
**type**\: int
**range:** 0..4294967295
.. attribute:: outgoing_call_connected_packets
Outgoing call connected packets
**type**\: int
**range:** 0..4294967295
.. attribute:: outgoing_call_replies
Outgoing call replies
**type**\: int
**range:** 0..4294967295
.. attribute:: outgoing_call_requests
Outgoing call requests
**type**\: int
**range:** 0..4294967295
.. attribute:: service_relay_replies
Service relay reply counts
**type**\: int
**range:** 0..4294967295
.. attribute:: service_relay_requests
Service relay request counts
**type**\: int
**range:** 0..4294967295
.. attribute:: set_link_info_packets
Set link info packets
**type**\: int
**range:** 0..4294967295
.. attribute:: start_control_connection_notifications
Start control connection notifications
**type**\: int
**range:** 0..4294967295
.. attribute:: start_control_connection_replies
Start control connection replies
**type**\: int
**range:** 0..4294967295
.. attribute:: start_control_connection_requests
Start control connection requests
**type**\: int
**range:** 0..4294967295
.. attribute:: stop_control_connection_notifications
Stop control connection notifications
**type**\: int
**range:** 0..4294967295
.. attribute:: unknown_packets
Unknown packets
**type**\: int
**range:** 0..4294967295
.. attribute:: wan_error_notify_packets
WAN error notify packets
**type**\: int
**range:** 0..4294967295
.. attribute:: zero_length_body_packets
Zero length body packets
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.acknowledgement_packets = None
self.call_disconnect_notify_packets = None
self.hello_packets = None
self.incoming_call_connected_packets = None
self.incoming_call_replies = None
self.incoming_call_requests = None
self.outgoing_call_connected_packets = None
self.outgoing_call_replies = None
self.outgoing_call_requests = None
self.service_relay_replies = None
self.service_relay_requests = None
self.set_link_info_packets = None
self.start_control_connection_notifications = None
self.start_control_connection_replies = None
self.start_control_connection_requests = None
self.stop_control_connection_notifications = None
self.unknown_packets = None
self.wan_error_notify_packets = None
self.zero_length_body_packets = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tunnel-l2tun-oper:retransmit'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.acknowledgement_packets is not None:
return True
if self.call_disconnect_notify_packets is not None:
return True
if self.hello_packets is not None:
return True
if self.incoming_call_connected_packets is not None:
return True
if self.incoming_call_replies is not None:
return True
if self.incoming_call_requests is not None:
return True
if self.outgoing_call_connected_packets is not None:
return True
if self.outgoing_call_replies is not None:
return True
if self.outgoing_call_requests is not None:
return True
if self.service_relay_replies is not None:
return True
if self.service_relay_requests is not None:
return True
if self.set_link_info_packets is not None:
return True
if self.start_control_connection_notifications is not None:
return True
if self.start_control_connection_replies is not None:
return True
if self.start_control_connection_requests is not None:
return True
if self.stop_control_connection_notifications is not None:
return True
if self.unknown_packets is not None:
return True
if self.wan_error_notify_packets is not None:
return True
if self.zero_length_body_packets is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tpv2.Counters.Control.Tunnels.Tunnel.Global_.Retransmit']['meta_info']
class Received(object):
"""
Received data
.. attribute:: acknowledgement_packets
Acknowledgement packets
**type**\: int
**range:** 0..4294967295
.. attribute:: call_disconnect_notify_packets
Call disconnect notify packets
**type**\: int
**range:** 0..4294967295
.. attribute:: hello_packets
Keep alive messages
**type**\: int
**range:** 0..4294967295
.. attribute:: incoming_call_connected_packets
Incoming call connected packets
**type**\: int
**range:** 0..4294967295
.. attribute:: incoming_call_replies
Incoming call replies
**type**\: int
**range:** 0..4294967295
.. attribute:: incoming_call_requests
Incoming call requests
**type**\: int
**range:** 0..4294967295
.. attribute:: outgoing_call_connected_packets
Outgoing call connected packets
**type**\: int
**range:** 0..4294967295
.. attribute:: outgoing_call_replies
Outgoing call replies
**type**\: int
**range:** 0..4294967295
.. attribute:: outgoing_call_requests
Outgoing call requests
**type**\: int
**range:** 0..4294967295
.. attribute:: service_relay_replies
Service relay reply counts
**type**\: int
**range:** 0..4294967295
.. attribute:: service_relay_requests
Service relay request counts
**type**\: int
**range:** 0..4294967295
.. attribute:: set_link_info_packets
Set link info packets
**type**\: int
**range:** 0..4294967295
.. attribute:: start_control_connection_notifications
Start control connection notifications
**type**\: int
**range:** 0..4294967295
.. attribute:: start_control_connection_replies
Start control connection replies
**type**\: int
**range:** 0..4294967295
.. attribute:: start_control_connection_requests
Start control connection requests
**type**\: int
**range:** 0..4294967295
.. attribute:: stop_control_connection_notifications
Stop control connection notifications
**type**\: int
**range:** 0..4294967295
.. attribute:: unknown_packets
Unknown packets
**type**\: int
**range:** 0..4294967295
.. attribute:: wan_error_notify_packets
WAN error notify packets
**type**\: int
**range:** 0..4294967295
.. attribute:: zero_length_body_packets
Zero length body packets
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.acknowledgement_packets = None
self.call_disconnect_notify_packets = None
self.hello_packets = None
self.incoming_call_connected_packets = None
self.incoming_call_replies = None
self.incoming_call_requests = None
self.outgoing_call_connected_packets = None
self.outgoing_call_replies = None
self.outgoing_call_requests = None
self.service_relay_replies = None
self.service_relay_requests = None
self.set_link_info_packets = None
self.start_control_connection_notifications = None
self.start_control_connection_replies = None
self.start_control_connection_requests = None
self.stop_control_connection_notifications = None
self.unknown_packets = None
self.wan_error_notify_packets = None
self.zero_length_body_packets = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tunnel-l2tun-oper:received'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.acknowledgement_packets is not None:
return True
if self.call_disconnect_notify_packets is not None:
return True
if self.hello_packets is not None:
return True
if self.incoming_call_connected_packets is not None:
return True
if self.incoming_call_replies is not None:
return True
if self.incoming_call_requests is not None:
return True
if self.outgoing_call_connected_packets is not None:
return True
if self.outgoing_call_replies is not None:
return True
if self.outgoing_call_requests is not None:
return True
if self.service_relay_replies is not None:
return True
if self.service_relay_requests is not None:
return True
if self.set_link_info_packets is not None:
return True
if self.start_control_connection_notifications is not None:
return True
if self.start_control_connection_replies is not None:
return True
if self.start_control_connection_requests is not None:
return True
if self.stop_control_connection_notifications is not None:
return True
if self.unknown_packets is not None:
return True
if self.wan_error_notify_packets is not None:
return True
if self.zero_length_body_packets is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tpv2.Counters.Control.Tunnels.Tunnel.Global_.Received']['meta_info']
class Drop(object):
"""
Drop data
.. attribute:: acknowledgement_packets
Acknowledgement packets
**type**\: int
**range:** 0..4294967295
.. attribute:: call_disconnect_notify_packets
Call disconnect notify packets
**type**\: int
**range:** 0..4294967295
.. attribute:: hello_packets
Keep alive messages
**type**\: int
**range:** 0..4294967295
.. attribute:: incoming_call_connected_packets
Incoming call connected packets
**type**\: int
**range:** 0..4294967295
.. attribute:: incoming_call_replies
Incoming call replies
**type**\: int
**range:** 0..4294967295
.. attribute:: incoming_call_requests
Incoming call requests
**type**\: int
**range:** 0..4294967295
.. attribute:: outgoing_call_connected_packets
Outgoing call connected packets
**type**\: int
**range:** 0..4294967295
.. attribute:: outgoing_call_replies
Outgoing call replies
**type**\: int
**range:** 0..4294967295
.. attribute:: outgoing_call_requests
Outgoing call requests
**type**\: int
**range:** 0..4294967295
.. attribute:: service_relay_replies
Service relay reply counts
**type**\: int
**range:** 0..4294967295
.. attribute:: service_relay_requests
Service relay request counts
**type**\: int
**range:** 0..4294967295
.. attribute:: set_link_info_packets
Set link info packets
**type**\: int
**range:** 0..4294967295
.. attribute:: start_control_connection_notifications
Start control connection notifications
**type**\: int
**range:** 0..4294967295
.. attribute:: start_control_connection_replies
Start control connection replies
**type**\: int
**range:** 0..4294967295
.. attribute:: start_control_connection_requests
Start control connection requests
**type**\: int
**range:** 0..4294967295
.. attribute:: stop_control_connection_notifications
Stop control connection notifications
**type**\: int
**range:** 0..4294967295
.. attribute:: unknown_packets
Unknown packets
**type**\: int
**range:** 0..4294967295
.. attribute:: wan_error_notify_packets
WAN error notify packets
**type**\: int
**range:** 0..4294967295
.. attribute:: zero_length_body_packets
Zero length body packets
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.acknowledgement_packets = None
self.call_disconnect_notify_packets = None
self.hello_packets = None
self.incoming_call_connected_packets = None
self.incoming_call_replies = None
self.incoming_call_requests = None
self.outgoing_call_connected_packets = None
self.outgoing_call_replies = None
self.outgoing_call_requests = None
self.service_relay_replies = None
self.service_relay_requests = None
self.set_link_info_packets = None
self.start_control_connection_notifications = None
self.start_control_connection_replies = None
self.start_control_connection_requests = None
self.stop_control_connection_notifications = None
self.unknown_packets = None
self.wan_error_notify_packets = None
self.zero_length_body_packets = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tunnel-l2tun-oper:drop'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.acknowledgement_packets is not None:
return True
if self.call_disconnect_notify_packets is not None:
return True
if self.hello_packets is not None:
return True
if self.incoming_call_connected_packets is not None:
return True
if self.incoming_call_replies is not None:
return True
if self.incoming_call_requests is not None:
return True
if self.outgoing_call_connected_packets is not None:
return True
if self.outgoing_call_replies is not None:
return True
if self.outgoing_call_requests is not None:
return True
if self.service_relay_replies is not None:
return True
if self.service_relay_requests is not None:
return True
if self.set_link_info_packets is not None:
return True
if self.start_control_connection_notifications is not None:
return True
if self.start_control_connection_replies is not None:
return True
if self.start_control_connection_requests is not None:
return True
if self.stop_control_connection_notifications is not None:
return True
if self.unknown_packets is not None:
return True
if self.wan_error_notify_packets is not None:
return True
if self.zero_length_body_packets is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tpv2.Counters.Control.Tunnels.Tunnel.Global_.Drop']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tunnel-l2tun-oper:global'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.drop is not None and self.drop._has_data():
return True
if self.received is not None and self.received._has_data():
return True
if self.retransmit is not None and self.retransmit._has_data():
return True
if self.total_drop is not None:
return True
if self.total_received is not None:
return True
if self.total_retransmit is not None:
return True
if self.total_transmit is not None:
return True
if self.transmit is not None and self.transmit._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tpv2.Counters.Control.Tunnels.Tunnel.Global_']['meta_info']
@property
def _common_path(self):
if self.tunnel_id is None:
raise YPYModelError('Key property tunnel_id is None')
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tpv2/Cisco-IOS-XR-tunnel-l2tun-oper:counters/Cisco-IOS-XR-tunnel-l2tun-oper:control/Cisco-IOS-XR-tunnel-l2tun-oper:tunnels/Cisco-IOS-XR-tunnel-l2tun-oper:tunnel[Cisco-IOS-XR-tunnel-l2tun-oper:tunnel-id = ' + str(self.tunnel_id) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.tunnel_id is not None:
return True
if self.brief is not None and self.brief._has_data():
return True
if self.global_ is not None and self.global_._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tpv2.Counters.Control.Tunnels.Tunnel']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tpv2/Cisco-IOS-XR-tunnel-l2tun-oper:counters/Cisco-IOS-XR-tunnel-l2tun-oper:control/Cisco-IOS-XR-tunnel-l2tun-oper:tunnels'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.tunnel is not None:
for child_ref in self.tunnel:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tpv2.Counters.Control.Tunnels']['meta_info']
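# --- Usage sketch (editorial note, not part of the generated bindings) ------
# Hypothetical iteration over the per-tunnel control-message counters held in
# the Tunnels list above; assumes `l2tpv2` was populated as in the earlier
# CRUDService sketch.
#
#   for tun in l2tpv2.counters.control.tunnels.tunnel:     # keyed by tunnel_id
#       brief = tun.brief
#       print(tun.tunnel_id, brief.local_address, '->', brief.remote_address,
#             'tx total:', tun.global_.total_transmit,
#             'rx total:', tun.global_.total_received)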
@property
def _common_path(self):
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tpv2/Cisco-IOS-XR-tunnel-l2tun-oper:counters/Cisco-IOS-XR-tunnel-l2tun-oper:control'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.tunnel_xr is not None and self.tunnel_xr._has_data():
return True
if self.tunnels is not None and self.tunnels._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tpv2.Counters.Control']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tpv2/Cisco-IOS-XR-tunnel-l2tun-oper:counters'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.control is not None and self.control._has_data():
return True
if self.forwarding is not None and self.forwarding._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tpv2.Counters']['meta_info']
class Statistics(object):
"""
L2TP v2 statistics information
.. attribute:: average_packet_processing_time
Average processing time for received packets (in microseconds)
**type**\: int
**range:** 0..4294967295
**units**\: microsecond
.. attribute:: buffered_packets
Buffered packets
**type**\: int
**range:** 0..4294967295
.. attribute:: incoming_dropped_packets
Incoming packets dropped
**type**\: int
**range:** 0..4294967295
.. attribute:: netio_packets
Packets RX in netio
**type**\: int
**range:** 0..4294967295
.. attribute:: received_out_of_order_packets
Out of order packets received
**type**\: int
**range:** 0..4294967295
.. attribute:: received_packets
Number of packets received
**type**\: int
**range:** 0..4294967295
.. attribute:: reorder_deviation_packets
Reorder deviation
**type**\: int
**range:** 0..4294967295
.. attribute:: reorder_packets
Reorder packets
**type**\: int
**range:** 0..4294967295
.. attribute:: sent_packets
Number of packets sent
**type**\: int
**range:** 0..4294967295
.. attribute:: sessions
Number of sessions
**type**\: int
**range:** 0..4294967295
.. attribute:: tunnels
Number of tunnels
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.average_packet_processing_time = None
self.buffered_packets = None
self.incoming_dropped_packets = None
self.netio_packets = None
self.received_out_of_order_packets = None
self.received_packets = None
self.reorder_deviation_packets = None
self.reorder_packets = None
self.sent_packets = None
self.sessions = None
self.tunnels = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tpv2/Cisco-IOS-XR-tunnel-l2tun-oper:statistics'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.average_packet_processing_time is not None:
return True
if self.buffered_packets is not None:
return True
if self.incoming_dropped_packets is not None:
return True
if self.netio_packets is not None:
return True
if self.received_out_of_order_packets is not None:
return True
if self.received_packets is not None:
return True
if self.reorder_deviation_packets is not None:
return True
if self.reorder_packets is not None:
return True
if self.sent_packets is not None:
return True
if self.sessions is not None:
return True
if self.tunnels is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tpv2.Statistics']['meta_info']
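# --- Usage sketch (editorial note, not part of the generated bindings) ------
# Hypothetical use of the Statistics container above, e.g. deriving a drop
# ratio from the raw counters (guarding against a zero/empty read); assumes
# `l2tpv2` from the earlier CRUDService sketch.
#
#   stats = l2tpv2.statistics
#   if stats.received_packets:
#       drop_ratio = float(stats.incoming_dropped_packets) / stats.received_packets
#       print('tunnels:', stats.tunnels, 'sessions:', stats.sessions,
#             'drop ratio: %.4f' % drop_ratio)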
class Tunnel(object):
"""
L2TPv2 tunnel
.. attribute:: accounting
Tunnel accounting counters
**type**\: :py:class:`Accounting <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tpv2.Tunnel.Accounting>`
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.accounting = L2Tpv2.Tunnel.Accounting()
self.accounting.parent = self
class Accounting(object):
"""
Tunnel accounting counters
.. attribute:: statistics
Tunnel accounting statistics
**type**\: :py:class:`Statistics <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tpv2.Tunnel.Accounting.Statistics>`
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.statistics = L2Tpv2.Tunnel.Accounting.Statistics()
self.statistics.parent = self
class Statistics(object):
"""
Tunnel accounting statistics
.. attribute:: current_size
Current checkpoint size
**type**\: int
**range:** 0..4294967295
.. attribute:: memory_failures
Memory failures
**type**\: int
**range:** 0..4294967295
.. attribute:: negative_acknowledgement
Negative acknowledgement
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: positive_acknowledgement
Positive acknowledgement
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: queue_statistics_size
Queue statistics size
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: records_checkpointed
Total records checkpointed
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: records_fail_to_recover
Records failed to recover
**type**\: int
**range:** 0..4294967295
.. attribute:: records_failed_to_checkpoint
Records failed to checkpoint
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: records_recovered_from_checkpoint
Records recovered from checkpoint
**type**\: int
**range:** 0..4294967295
.. attribute:: records_sent_from_queue
Records sent from queue
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: records_sent_successfully
Accounting records sent successfully
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: reject
Accounting reject
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: start
Accounting start
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: stop
Accounting stop
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: transport_failures
Transport failures
**type**\: int
**range:** 0..18446744073709551615
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.current_size = None
self.memory_failures = None
self.negative_acknowledgement = None
self.positive_acknowledgement = None
self.queue_statistics_size = None
self.records_checkpointed = None
self.records_fail_to_recover = None
self.records_failed_to_checkpoint = None
self.records_recovered_from_checkpoint = None
self.records_sent_from_queue = None
self.records_sent_successfully = None
self.reject = None
self.start = None
self.stop = None
self.transport_failures = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tpv2/Cisco-IOS-XR-tunnel-l2tun-oper:tunnel/Cisco-IOS-XR-tunnel-l2tun-oper:accounting/Cisco-IOS-XR-tunnel-l2tun-oper:statistics'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.current_size is not None:
return True
if self.memory_failures is not None:
return True
if self.negative_acknowledgement is not None:
return True
if self.positive_acknowledgement is not None:
return True
if self.queue_statistics_size is not None:
return True
if self.records_checkpointed is not None:
return True
if self.records_fail_to_recover is not None:
return True
if self.records_failed_to_checkpoint is not None:
return True
if self.records_recovered_from_checkpoint is not None:
return True
if self.records_sent_from_queue is not None:
return True
if self.records_sent_successfully is not None:
return True
if self.reject is not None:
return True
if self.start is not None:
return True
if self.stop is not None:
return True
if self.transport_failures is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tpv2.Tunnel.Accounting.Statistics']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tpv2/Cisco-IOS-XR-tunnel-l2tun-oper:tunnel/Cisco-IOS-XR-tunnel-l2tun-oper:accounting'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.statistics is not None and self.statistics._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tpv2.Tunnel.Accounting']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tpv2/Cisco-IOS-XR-tunnel-l2tun-oper:tunnel'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.accounting is not None and self.accounting._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tpv2.Tunnel']['meta_info']
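# --- Usage sketch (editorial note, not part of the generated bindings) ------
# Hypothetical look at the tunnel accounting statistics above, comparing
# records checkpointed against records that failed to checkpoint; assumes
# `l2tpv2` from the earlier CRUDService sketch (unread leaves are None).
#
#   acct = l2tpv2.tunnel.accounting.statistics
#   attempted = (acct.records_checkpointed or 0) + (acct.records_failed_to_checkpoint or 0)
#   if attempted:
#       print('checkpoint success: %.1f%%' %
#             (100.0 * (acct.records_checkpointed or 0) / attempted))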
class TunnelConfigurations(object):
"""
List of tunnel IDs
.. attribute:: tunnel_configuration
L2TP tunnel information
**type**\: list of :py:class:`TunnelConfiguration <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tpv2.TunnelConfigurations.TunnelConfiguration>`
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.tunnel_configuration = YList()
self.tunnel_configuration.parent = self
self.tunnel_configuration.name = 'tunnel_configuration'
class TunnelConfiguration(object):
"""
L2TP tunnel information
.. attribute:: local_tunnel_id <key>
Local tunnel ID
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: l2tp_class
L2Tp class data
**type**\: :py:class:`L2TpClass <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tpv2.TunnelConfigurations.TunnelConfiguration.L2TpClass>`
.. attribute:: remote_tunnel_id
Remote tunnel ID
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.local_tunnel_id = None
self.l2tp_class = L2Tpv2.TunnelConfigurations.TunnelConfiguration.L2TpClass()
self.l2tp_class.parent = self
self.remote_tunnel_id = None
class L2TpClass(object):
"""
L2Tp class data
.. attribute:: accounting_method_list
Accounting method list
**type**\: str
**length:** 0..256
.. attribute:: class_name_xr
Class name
**type**\: str
**length:** 0..256
.. attribute:: digest_hash
Hash configured as MD5 or SHA1
**type**\: :py:class:`DigestHashEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.DigestHashEnum>`
.. attribute:: encoded_password
Encoded password
**type**\: str
**length:** 0..256
.. attribute:: hello_timeout
Hello timeout value in seconds
**type**\: int
**range:** 0..4294967295
**units**\: second
.. attribute:: host_name
Host name
**type**\: str
**length:** 0..256
.. attribute:: initial_retransmit_maximum_timeout
Initial timeout maximum in seconds
**type**\: int
**range:** 0..4294967295
**units**\: second
.. attribute:: initial_retransmit_minimum_timeout
Initial timeout minimum in seconds
**type**\: int
**range:** 0..4294967295
**units**\: second
.. attribute:: initial_retransmit_retries
Initial retransmit retries
**type**\: int
**range:** 0..4294967295
.. attribute:: ip_tos
IP TOS
**type**\: int
**range:** 0..255
.. attribute:: is_authentication_enabled
True if authentication is enabled
**type**\: bool
.. attribute:: is_congestion_control_enabled
True if congestion control is enabled
**type**\: bool
.. attribute:: is_digest_check_enabled
True if digest check is enabled
**type**\: bool
.. attribute:: is_digest_enabled
True if digest authentication is enabled
**type**\: bool
.. attribute:: is_hidden
True if class is hidden
**type**\: bool
.. attribute:: is_peer_address_checked
True if peer address is checked
**type**\: bool
.. attribute:: password
Password
**type**\: str
**length:** 0..25
.. attribute:: receive_window_size
Receive window size
**type**\: int
**range:** 0..65535
.. attribute:: retransmit_maximum_timeout
Retransmit maximum timeout in seconds
**type**\: int
**range:** 0..4294967295
**units**\: second
.. attribute:: retransmit_minimum_timeout
Retransmit minimum timeout in seconds
**type**\: int
**range:** 0..4294967295
**units**\: second
.. attribute:: retransmit_retries
Retransmit retries
**type**\: int
**range:** 0..4294967295
.. attribute:: setup_timeout
Timeout setup value in seconds
**type**\: int
**range:** 0..4294967295
**units**\: second
.. attribute:: timeout_no_user
Timeout no user
**type**\: int
**range:** 0..4294967295
.. attribute:: vrf_name
VRF name
**type**\: str
**length:** 0..256
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.accounting_method_list = None
self.class_name_xr = None
self.digest_hash = None
self.encoded_password = None
self.hello_timeout = None
self.host_name = None
self.initial_retransmit_maximum_timeout = None
self.initial_retransmit_minimum_timeout = None
self.initial_retransmit_retries = None
self.ip_tos = None
self.is_authentication_enabled = None
self.is_congestion_control_enabled = None
self.is_digest_check_enabled = None
self.is_digest_enabled = None
self.is_hidden = None
self.is_peer_address_checked = None
self.password = None
self.receive_window_size = None
self.retransmit_maximum_timeout = None
self.retransmit_minimum_timeout = None
self.retransmit_retries = None
self.setup_timeout = None
self.timeout_no_user = None
self.vrf_name = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tunnel-l2tun-oper:l2tp-class'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.accounting_method_list is not None:
return True
if self.class_name_xr is not None:
return True
if self.digest_hash is not None:
return True
if self.encoded_password is not None:
return True
if self.hello_timeout is not None:
return True
if self.host_name is not None:
return True
if self.initial_retransmit_maximum_timeout is not None:
return True
if self.initial_retransmit_minimum_timeout is not None:
return True
if self.initial_retransmit_retries is not None:
return True
if self.ip_tos is not None:
return True
if self.is_authentication_enabled is not None:
return True
if self.is_congestion_control_enabled is not None:
return True
if self.is_digest_check_enabled is not None:
return True
if self.is_digest_enabled is not None:
return True
if self.is_hidden is not None:
return True
if self.is_peer_address_checked is not None:
return True
if self.password is not None:
return True
if self.receive_window_size is not None:
return True
if self.retransmit_maximum_timeout is not None:
return True
if self.retransmit_minimum_timeout is not None:
return True
if self.retransmit_retries is not None:
return True
if self.setup_timeout is not None:
return True
if self.timeout_no_user is not None:
return True
if self.vrf_name is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tpv2.TunnelConfigurations.TunnelConfiguration.L2TpClass']['meta_info']
@property
def _common_path(self):
if self.local_tunnel_id is None:
raise YPYModelError('Key property local_tunnel_id is None')
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tpv2/Cisco-IOS-XR-tunnel-l2tun-oper:tunnel-configurations/Cisco-IOS-XR-tunnel-l2tun-oper:tunnel-configuration[Cisco-IOS-XR-tunnel-l2tun-oper:local-tunnel-id = ' + str(self.local_tunnel_id) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.local_tunnel_id is not None:
return True
if self.l2tp_class is not None and self.l2tp_class._has_data():
return True
if self.remote_tunnel_id is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tpv2.TunnelConfigurations.TunnelConfiguration']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tpv2/Cisco-IOS-XR-tunnel-l2tun-oper:tunnel-configurations'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.tunnel_configuration is not None:
for child_ref in self.tunnel_configuration:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tpv2.TunnelConfigurations']['meta_info']
class CounterHistFail(object):
"""
Failure events leading to disconnection
.. attribute:: pkt_timeout
Timeout events by packet
**type**\: list of int
**range:** 0..4294967295
.. attribute:: rx_counters
Receive side counters
**type**\: str
**pattern:** ([0\-9a\-fA\-F]{2}(\:[0\-9a\-fA\-F]{2})\*)?
.. attribute:: sess_down_tmout
Sessions affected due to timeout
**type**\: int
**range:** 0..4294967295
.. attribute:: tx_counters
Send side counters
**type**\: str
**pattern:** ([0\-9a\-fA\-F]{2}(\:[0\-9a\-fA\-F]{2})\*)?
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.pkt_timeout = YLeafList()
self.pkt_timeout.parent = self
self.pkt_timeout.name = 'pkt_timeout'
self.rx_counters = None
self.sess_down_tmout = None
self.tx_counters = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tpv2/Cisco-IOS-XR-tunnel-l2tun-oper:counter-hist-fail'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.pkt_timeout is not None:
for child in self.pkt_timeout:
if child is not None:
return True
if self.rx_counters is not None:
return True
if self.sess_down_tmout is not None:
return True
if self.tx_counters is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tpv2.CounterHistFail']['meta_info']
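# --- Usage sketch (editorial note, not part of the generated bindings) ------
# The rx_counters/tx_counters leaves above are colon-separated hex octet
# strings (per the pattern in their docstrings).  A hypothetical helper to
# turn one into raw bytes; the top-level attribute name `counter_hist_fail`
# is assumed from the class name.
#
#   import binascii
#
#   def hex_string_to_bytes(value):
#       # '0a:1b:2c' -> b'\x0a\x1b\x2c'; None or '' -> b''
#       return binascii.unhexlify(value.replace(':', '')) if value else b''
#
#   fail = l2tpv2.counter_hist_fail
#   raw_rx = hex_string_to_bytes(fail.rx_counters)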
class Classes(object):
"""
List of L2TP class names
.. attribute:: class_
L2TP class name
**type**\: list of :py:class:`Class_ <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tpv2.Classes.Class_>`
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.class_ = YList()
self.class_.parent = self
self.class_.name = 'class_'
class Class_(object):
"""
L2TP class name
.. attribute:: class_name <key>
L2TP class name
**type**\: str
**length:** 1..31
.. attribute:: accounting_method_list
Accounting method list
**type**\: str
**length:** 0..256
.. attribute:: class_name_xr
Class name
**type**\: str
**length:** 0..256
.. attribute:: digest_hash
Hash configured as MD5 or SHA1
**type**\: :py:class:`DigestHashEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.DigestHashEnum>`
.. attribute:: encoded_password
Encoded password
**type**\: str
**length:** 0..256
.. attribute:: hello_timeout
Hello timeout value in seconds
**type**\: int
**range:** 0..4294967295
**units**\: second
.. attribute:: host_name
Host name
**type**\: str
**length:** 0..256
.. attribute:: initial_retransmit_maximum_timeout
Initial timeout maximum in seconds
**type**\: int
**range:** 0..4294967295
**units**\: second
.. attribute:: initial_retransmit_minimum_timeout
Initial timeout minimum in seconds
**type**\: int
**range:** 0..4294967295
**units**\: second
.. attribute:: initial_retransmit_retries
Initial retransmit retries
**type**\: int
**range:** 0..4294967295
.. attribute:: ip_tos
IP TOS
**type**\: int
**range:** 0..255
.. attribute:: is_authentication_enabled
True if authentication is enabled
**type**\: bool
.. attribute:: is_congestion_control_enabled
True if congestion control is enabled
**type**\: bool
.. attribute:: is_digest_check_enabled
True if digest check is enabled
**type**\: bool
.. attribute:: is_digest_enabled
True if digest authentication is enabled
**type**\: bool
.. attribute:: is_hidden
True if class is hidden
**type**\: bool
.. attribute:: is_peer_address_checked
True if peer address is checked
**type**\: bool
.. attribute:: password
Password
**type**\: str
**length:** 0..25
.. attribute:: receive_window_size
Receive window size
**type**\: int
**range:** 0..65535
.. attribute:: retransmit_maximum_timeout
Retransmit maximum timeout in seconds
**type**\: int
**range:** 0..4294967295
**units**\: second
.. attribute:: retransmit_minimum_timeout
Retransmit minimum timeout in seconds
**type**\: int
**range:** 0..4294967295
**units**\: second
.. attribute:: retransmit_retries
Retransmit retries
**type**\: int
**range:** 0..4294967295
.. attribute:: setup_timeout
Timeout setup value in seconds
**type**\: int
**range:** 0..4294967295
**units**\: second
.. attribute:: timeout_no_user
Timeout no user
**type**\: int
**range:** 0..4294967295
.. attribute:: vrf_name
VRF name
**type**\: str
**length:** 0..256
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.class_name = None
self.accounting_method_list = None
self.class_name_xr = None
self.digest_hash = None
self.encoded_password = None
self.hello_timeout = None
self.host_name = None
self.initial_retransmit_maximum_timeout = None
self.initial_retransmit_minimum_timeout = None
self.initial_retransmit_retries = None
self.ip_tos = None
self.is_authentication_enabled = None
self.is_congestion_control_enabled = None
self.is_digest_check_enabled = None
self.is_digest_enabled = None
self.is_hidden = None
self.is_peer_address_checked = None
self.password = None
self.receive_window_size = None
self.retransmit_maximum_timeout = None
self.retransmit_minimum_timeout = None
self.retransmit_retries = None
self.setup_timeout = None
self.timeout_no_user = None
self.vrf_name = None
@property
def _common_path(self):
if self.class_name is None:
raise YPYModelError('Key property class_name is None')
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tpv2/Cisco-IOS-XR-tunnel-l2tun-oper:classes/Cisco-IOS-XR-tunnel-l2tun-oper:class[Cisco-IOS-XR-tunnel-l2tun-oper:class-name = ' + str(self.class_name) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.class_name is not None:
return True
if self.accounting_method_list is not None:
return True
if self.class_name_xr is not None:
return True
if self.digest_hash is not None:
return True
if self.encoded_password is not None:
return True
if self.hello_timeout is not None:
return True
if self.host_name is not None:
return True
if self.initial_retransmit_maximum_timeout is not None:
return True
if self.initial_retransmit_minimum_timeout is not None:
return True
if self.initial_retransmit_retries is not None:
return True
if self.ip_tos is not None:
return True
if self.is_authentication_enabled is not None:
return True
if self.is_congestion_control_enabled is not None:
return True
if self.is_digest_check_enabled is not None:
return True
if self.is_digest_enabled is not None:
return True
if self.is_hidden is not None:
return True
if self.is_peer_address_checked is not None:
return True
if self.password is not None:
return True
if self.receive_window_size is not None:
return True
if self.retransmit_maximum_timeout is not None:
return True
if self.retransmit_minimum_timeout is not None:
return True
if self.retransmit_retries is not None:
return True
if self.setup_timeout is not None:
return True
if self.timeout_no_user is not None:
return True
if self.vrf_name is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tpv2.Classes.Class_']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tpv2/Cisco-IOS-XR-tunnel-l2tun-oper:classes'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.class_ is not None:
for child_ref in self.class_:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tpv2.Classes']['meta_info']
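# --- Usage sketch (editorial note, not part of the generated bindings) ------
# Hypothetical targeted read of a single L2TP class entry by its key, instead
# of pulling the whole Classes list; 'MY_CLASS' is a placeholder name and
# `crud`/`provider` come from the earlier CRUDService sketch.
#
#   cls_filter = L2Tpv2.Classes.Class_()
#   cls_filter.class_name = 'MY_CLASS'
#   cls_data = crud.read(provider, cls_filter)
#   print(cls_data.hello_timeout, cls_data.retransmit_retries)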
class Tunnels(object):
"""
List of tunnel IDs
.. attribute:: tunnel
L2TP tunnel information
**type**\: list of :py:class:`Tunnel <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tpv2.Tunnels.Tunnel>`
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.tunnel = YList()
self.tunnel.parent = self
self.tunnel.name = 'tunnel'
class Tunnel(object):
"""
L2TP tunnel information
.. attribute:: local_tunnel_id <key>
Local tunnel ID
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: active_sessions
Number of active sessions
**type**\: int
**range:** 0..4294967295
.. attribute:: class_name
L2TP class name
**type**\: str
**length:** 0..256
.. attribute:: digest_secrets
Control message authentication with digest secrets
**type**\: int
**range:** 0..65535
.. attribute:: is_congestion_control_enabled
True if congestion control is enabled else false
**type**\: bool
.. attribute:: is_pmtu_enabled
True if tunnel PMTU checking is enabled
**type**\: bool
.. attribute:: is_tunnel_up
True if tunnel is up
**type**\: bool
.. attribute:: local_address
Local tunnel address
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: local_port
Local port
**type**\: int
**range:** 0..65535
.. attribute:: local_tunnel_name
Local tunnel name
**type**\: str
**length:** 0..256
.. attribute:: local_window_size
Local window size
**type**\: int
**range:** 0..65535
.. attribute:: maximum_retransmission_time
Maximum retransmission time in seconds
**type**\: int
**range:** 0..65535
**units**\: second
.. attribute:: order_queue_size
Order queue size
**type**\: int
**range:** 0..65535
.. attribute:: packet_queue_check
Current number of session packet queue checks
**type**\: int
**range:** 0..65535
.. attribute:: protocol
Protocol
**type**\: int
**range:** 0..255
.. attribute:: remote_address
Remote tunnel address
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: remote_port
Remote port
**type**\: int
**range:** 0..65535
.. attribute:: remote_tunnel_id
Remote tunnel ID
**type**\: int
**range:** 0..4294967295
.. attribute:: remote_tunnel_name
Remote tunnel name
**type**\: str
**length:** 0..256
.. attribute:: remote_window_size
Remote window size
**type**\: int
**range:** 0..65535
.. attribute:: resend_maximum_queue_size
Resend maximum queue size
**type**\: int
**range:** 0..65535
.. attribute:: resend_queue_size
Resend queue size
**type**\: int
**range:** 0..65535
.. attribute:: resends
Total resends
**type**\: int
**range:** 0..4294967295
.. attribute:: retransmission_time
Retransmission time in seconds
**type**\: int
**range:** 0..65535
**units**\: second
.. attribute:: retransmit_time
Retransmit time distribution in seconds
**type**\: list of int
**range:** 0..65535
**units**\: second
.. attribute:: sequence_nr
Sequence NR
**type**\: int
**range:** 0..65535
.. attribute:: sequence_ns
Sequence NS
**type**\: int
**range:** 0..65535
.. attribute:: total_out_of_order_drop_packets
Total out of order dropped packets
**type**\: int
**range:** 0..4294967295
.. attribute:: total_out_of_order_reorder_packets
Total out of order reorder packets
**type**\: int
**range:** 0..4294967295
.. attribute:: total_peer_authentication_failures
Number of peer authentication failures
**type**\: int
**range:** 0..4294967295
.. attribute:: unsent_maximum_queue_size
Unsent maximum queue size
**type**\: int
**range:** 0..65535
.. attribute:: unsent_queue_size
Unsent queue size
**type**\: int
**range:** 0..65535
.. attribute:: zero_length_body_acknowledgement_sent
Total zero length body acknowledgement
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.local_tunnel_id = None
self.active_sessions = None
self.class_name = None
self.digest_secrets = None
self.is_congestion_control_enabled = None
self.is_pmtu_enabled = None
self.is_tunnel_up = None
self.local_address = None
self.local_port = None
self.local_tunnel_name = None
self.local_window_size = None
self.maximum_retransmission_time = None
self.order_queue_size = None
self.packet_queue_check = None
self.protocol = None
self.remote_address = None
self.remote_port = None
self.remote_tunnel_id = None
self.remote_tunnel_name = None
self.remote_window_size = None
self.resend_maximum_queue_size = None
self.resend_queue_size = None
self.resends = None
self.retransmission_time = None
self.retransmit_time = YLeafList()
self.retransmit_time.parent = self
self.retransmit_time.name = 'retransmit_time'
self.sequence_nr = None
self.sequence_ns = None
self.total_out_of_order_drop_packets = None
self.total_out_of_order_reorder_packets = None
self.total_peer_authentication_failures = None
self.unsent_maximum_queue_size = None
self.unsent_queue_size = None
self.zero_length_body_acknowledgement_sent = None
@property
def _common_path(self):
if self.local_tunnel_id is None:
raise YPYModelError('Key property local_tunnel_id is None')
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tpv2/Cisco-IOS-XR-tunnel-l2tun-oper:tunnels/Cisco-IOS-XR-tunnel-l2tun-oper:tunnel[Cisco-IOS-XR-tunnel-l2tun-oper:local-tunnel-id = ' + str(self.local_tunnel_id) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.local_tunnel_id is not None:
return True
if self.active_sessions is not None:
return True
if self.class_name is not None:
return True
if self.digest_secrets is not None:
return True
if self.is_congestion_control_enabled is not None:
return True
if self.is_pmtu_enabled is not None:
return True
if self.is_tunnel_up is not None:
return True
if self.local_address is not None:
return True
if self.local_port is not None:
return True
if self.local_tunnel_name is not None:
return True
if self.local_window_size is not None:
return True
if self.maximum_retransmission_time is not None:
return True
if self.order_queue_size is not None:
return True
if self.packet_queue_check is not None:
return True
if self.protocol is not None:
return True
if self.remote_address is not None:
return True
if self.remote_port is not None:
return True
if self.remote_tunnel_id is not None:
return True
if self.remote_tunnel_name is not None:
return True
if self.remote_window_size is not None:
return True
if self.resend_maximum_queue_size is not None:
return True
if self.resend_queue_size is not None:
return True
if self.resends is not None:
return True
if self.retransmission_time is not None:
return True
if self.retransmit_time is not None:
for child in self.retransmit_time:
if child is not None:
return True
if self.sequence_nr is not None:
return True
if self.sequence_ns is not None:
return True
if self.total_out_of_order_drop_packets is not None:
return True
if self.total_out_of_order_reorder_packets is not None:
return True
if self.total_peer_authentication_failures is not None:
return True
if self.unsent_maximum_queue_size is not None:
return True
if self.unsent_queue_size is not None:
return True
if self.zero_length_body_acknowledgement_sent is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tpv2.Tunnels.Tunnel']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tpv2/Cisco-IOS-XR-tunnel-l2tun-oper:tunnels'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.tunnel is not None:
for child_ref in self.tunnel:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tpv2.Tunnels']['meta_info']
class Sessions(object):
"""
List of session IDs
.. attribute:: session
L2TP information for a particular session
**type**\: list of :py:class:`Session <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tpv2.Sessions.Session>`
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.session = YList()
self.session.parent = self
self.session.name = 'session'
class Session(object):
"""
L2TP information for a particular session
.. attribute:: local_tunnel_id <key>
Local tunnel ID
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: local_session_id <key>
Local session ID
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: call_serial_number
Call serial number
**type**\: int
**range:** 0..4294967295
.. attribute:: interface_name
Interface name
**type**\: str
**length:** 0..256
.. attribute:: is_conditional_debug_enabled
True if conditional debugging is enabled
**type**\: bool
.. attribute:: is_sequencing_on
True if session sequence is on
**type**\: bool
.. attribute:: is_session_locally_initiated
True if session initiated locally
**type**\: bool
.. attribute:: is_session_manual
True if session is manual
**type**\: bool
.. attribute:: is_session_state_established
True if session state is established
**type**\: bool
.. attribute:: is_session_up
True if session is up
**type**\: bool
.. attribute:: is_udp_checksum_enabled
True if UDP checksum enabled
**type**\: bool
.. attribute:: l2tp_sh_sess_tie_breaker
l2tp sh sess tie breaker
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: l2tp_sh_sess_tie_breaker_enabled
l2tp sh sess tie breaker enabled
**type**\: int
**range:** 0..255
.. attribute:: l2tp_sh_sess_udp_lport
l2tp sh sess udp lport
**type**\: int
**range:** 0..65535
.. attribute:: l2tp_sh_sess_udp_rport
l2tp sh sess udp rport
**type**\: int
**range:** 0..65535
.. attribute:: local_ip_address
Local session IP address
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: local_tunnel_name
Local tunnel name
**type**\: str
**length:** 0..256
.. attribute:: protocol
Protocol
**type**\: int
**range:** 0..255
.. attribute:: remote_ip_address
Remote session IP address
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: remote_session_id
Remote session ID
**type**\: int
**range:** 0..4294967295
.. attribute:: remote_tunnel_id
Remote tunnel ID
**type**\: int
**range:** 0..4294967295
.. attribute:: remote_tunnel_name
Remote tunnel name
**type**\: str
**length:** 0..256
.. attribute:: session_application_data
Session application data
**type**\: :py:class:`SessionApplicationData <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tpv2.Sessions.Session.SessionApplicationData>`
.. attribute:: unique_id
Unique ID
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.local_tunnel_id = None
self.local_session_id = None
self.call_serial_number = None
self.interface_name = None
self.is_conditional_debug_enabled = None
self.is_sequencing_on = None
self.is_session_locally_initiated = None
self.is_session_manual = None
self.is_session_state_established = None
self.is_session_up = None
self.is_udp_checksum_enabled = None
self.l2tp_sh_sess_tie_breaker = None
self.l2tp_sh_sess_tie_breaker_enabled = None
self.l2tp_sh_sess_udp_lport = None
self.l2tp_sh_sess_udp_rport = None
self.local_ip_address = None
self.local_tunnel_name = None
self.protocol = None
self.remote_ip_address = None
self.remote_session_id = None
self.remote_tunnel_id = None
self.remote_tunnel_name = None
self.session_application_data = L2Tpv2.Sessions.Session.SessionApplicationData()
self.session_application_data.parent = self
self.unique_id = None
class SessionApplicationData(object):
"""
Session application data
.. attribute:: l2tp_sh_sess_app_type
l2tp sh sess app type
**type**\: int
**range:** 0..4294967295
.. attribute:: vpdn
VPDN data
**type**\: :py:class:`Vpdn <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tpv2.Sessions.Session.SessionApplicationData.Vpdn>`
.. attribute:: xconnect
Xconnect data
**type**\: :py:class:`Xconnect <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tpv2.Sessions.Session.SessionApplicationData.Xconnect>`
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.l2tp_sh_sess_app_type = None
self.vpdn = L2Tpv2.Sessions.Session.SessionApplicationData.Vpdn()
self.vpdn.parent = self
self.xconnect = L2Tpv2.Sessions.Session.SessionApplicationData.Xconnect()
self.xconnect.parent = self
class Xconnect(object):
"""
Xconnect data
.. attribute:: circuit_name
Circuit name
**type**\: str
.. attribute:: ipv6_protocol_tunneling
IPv6ProtocolTunneling
**type**\: bool
.. attribute:: is_circuit_state_up
True if circuit state is up
**type**\: bool
.. attribute:: is_local_circuit_state_up
True if local circuit state is up
**type**\: bool
.. attribute:: is_remote_circuit_state_up
True if remote circuit state is up
**type**\: bool
.. attribute:: sessionvc_id
Session VC ID
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.circuit_name = None
self.ipv6_protocol_tunneling = None
self.is_circuit_state_up = None
self.is_local_circuit_state_up = None
self.is_remote_circuit_state_up = None
self.sessionvc_id = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tunnel-l2tun-oper:xconnect'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.circuit_name is not None:
return True
if self.ipv6_protocol_tunneling is not None:
return True
if self.is_circuit_state_up is not None:
return True
if self.is_local_circuit_state_up is not None:
return True
if self.is_remote_circuit_state_up is not None:
return True
if self.sessionvc_id is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tpv2.Sessions.Session.SessionApplicationData.Xconnect']['meta_info']
class Vpdn(object):
"""
VPDN data
.. attribute:: interface_name
Interface name
**type**\: str
**pattern:** (([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+\\.\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]\*\\d+))\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]+))\|([a\-zA\-Z0\-9\_\-]\*\\d+)\|([a\-zA\-Z0\-9\_\-]\*\\d+\\.\\d+)\|(mpls)\|(dwdm)
.. attribute:: username
Session username
**type**\: str
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.interface_name = None
self.username = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tunnel-l2tun-oper:vpdn'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.interface_name is not None:
return True
if self.username is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tpv2.Sessions.Session.SessionApplicationData.Vpdn']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-tunnel-l2tun-oper:session-application-data'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.l2tp_sh_sess_app_type is not None:
return True
if self.vpdn is not None and self.vpdn._has_data():
return True
if self.xconnect is not None and self.xconnect._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tpv2.Sessions.Session.SessionApplicationData']['meta_info']
@property
def _common_path(self):
if self.local_tunnel_id is None:
raise YPYModelError('Key property local_tunnel_id is None')
if self.local_session_id is None:
raise YPYModelError('Key property local_session_id is None')
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tpv2/Cisco-IOS-XR-tunnel-l2tun-oper:sessions/Cisco-IOS-XR-tunnel-l2tun-oper:session[Cisco-IOS-XR-tunnel-l2tun-oper:local-tunnel-id = ' + str(self.local_tunnel_id) + '][Cisco-IOS-XR-tunnel-l2tun-oper:local-session-id = ' + str(self.local_session_id) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.local_tunnel_id is not None:
return True
if self.local_session_id is not None:
return True
if self.call_serial_number is not None:
return True
if self.interface_name is not None:
return True
if self.is_conditional_debug_enabled is not None:
return True
if self.is_sequencing_on is not None:
return True
if self.is_session_locally_initiated is not None:
return True
if self.is_session_manual is not None:
return True
if self.is_session_state_established is not None:
return True
if self.is_session_up is not None:
return True
if self.is_udp_checksum_enabled is not None:
return True
if self.l2tp_sh_sess_tie_breaker is not None:
return True
if self.l2tp_sh_sess_tie_breaker_enabled is not None:
return True
if self.l2tp_sh_sess_udp_lport is not None:
return True
if self.l2tp_sh_sess_udp_rport is not None:
return True
if self.local_ip_address is not None:
return True
if self.local_tunnel_name is not None:
return True
if self.protocol is not None:
return True
if self.remote_ip_address is not None:
return True
if self.remote_session_id is not None:
return True
if self.remote_tunnel_id is not None:
return True
if self.remote_tunnel_name is not None:
return True
if self.session_application_data is not None and self.session_application_data._has_data():
return True
if self.unique_id is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tpv2.Sessions.Session']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tpv2/Cisco-IOS-XR-tunnel-l2tun-oper:sessions'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.session is not None:
for child_ref in self.session:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tpv2.Sessions']['meta_info']
class Session(object):
"""
L2TP control messages counters
.. attribute:: unavailable
L2TP session unavailable information
**type**\: :py:class:`Unavailable <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tpv2.Session.Unavailable>`
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.unavailable = L2Tpv2.Session.Unavailable()
self.unavailable.parent = self
class Unavailable(object):
"""
L2TP session unavailable information
.. attribute:: sessions_on_hold
Number of session ID in hold database
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.sessions_on_hold = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tpv2/Cisco-IOS-XR-tunnel-l2tun-oper:session/Cisco-IOS-XR-tunnel-l2tun-oper:unavailable'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.sessions_on_hold is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tpv2.Session.Unavailable']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tpv2/Cisco-IOS-XR-tunnel-l2tun-oper:session'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.unavailable is not None and self.unavailable._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tpv2.Session']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-tunnel-l2tun-oper:l2tpv2'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.classes is not None and self.classes._has_data():
return True
if self.counter_hist_fail is not None and self.counter_hist_fail._has_data():
return True
if self.counters is not None and self.counters._has_data():
return True
if self.session is not None and self.session._has_data():
return True
if self.sessions is not None and self.sessions._has_data():
return True
if self.statistics is not None and self.statistics._has_data():
return True
if self.tunnel is not None and self.tunnel._has_data():
return True
if self.tunnel_configurations is not None and self.tunnel_configurations._has_data():
return True
if self.tunnels is not None and self.tunnels._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_tunnel_l2tun_oper as meta
return meta._meta_table['L2Tpv2']['meta_info']
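# Illustrative usage sketch (not part of the generated bindings): reading the
# operational L2TPv2 data modelled above with ydk-py's CRUD service over
# NETCONF. The device address and credentials are placeholders for a real
# IOS-XR router.
#
#   from ydk.services import CRUDService
#   from ydk.providers import NetconfServiceProvider
#   from ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper import L2Tpv2
#
#   provider = NetconfServiceProvider(address='192.0.2.1', port=830,
#                                     username='admin', password='admin')
#   crud = CRUDService()
#   l2tpv2 = crud.read(provider, L2Tpv2())        # fetch operational data
#   for tunnel in l2tpv2.tunnels.tunnel:          # list keyed by local-tunnel-id
#       print(tunnel.local_tunnel_id, tunnel.remote_address, tunnel.is_tunnel_up)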
|
111pontes/ydk-py
|
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_tunnel_l2tun_oper.py
|
Python
|
apache-2.0
| 488,131
|
# -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2008,2009,2010,2011,2012,2013,2014,2015,2016 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains the logic for `aq update machine`."""
import re
from aquilon.exceptions_ import ArgumentError
from aquilon.aqdb.model import (Chassis, ChassisSlot, Model, Machine,
Resource, BundleResource, Share, Filesystem)
from aquilon.aqdb.types import CpuType
from aquilon.worker.broker import BrokerCommand
from aquilon.worker.dbwrappers.hardware_entity import update_primary_ip
from aquilon.worker.dbwrappers.interface import set_port_group, generate_ip
from aquilon.worker.dbwrappers.location import get_location
from aquilon.worker.dbwrappers.resources import (find_resource,
get_resource_holder)
from aquilon.worker.templates import (PlenaryHostData,
PlenaryServiceInstanceToplevel)
from aquilon.worker.processes import DSDBRunner
_disk_map_re = re.compile(r'^([^/]+)/(?:([^/]+)/)?([^/]+):([^/]+)/(?:([^/]+)/)?([^/]+)$')
def parse_remap_disk(old_vmholder, new_vmholder, remap_disk):
result = {}
if not remap_disk:
return result
maps = remap_disk.split(",")
for map in maps:
res = _disk_map_re.match(map)
if not res:
raise ArgumentError("Invalid disk backend remapping "
"specification: '%s'" % map)
src_type, src_rg, src_name, dst_type, dst_rg, dst_name = res.groups()
src_cls = Resource.polymorphic_subclass(src_type,
"Invalid resource type")
dst_cls = Resource.polymorphic_subclass(dst_type,
"Invalid resource type")
if dst_cls not in (Share, Filesystem):
raise ArgumentError("%s is not a valid virtual disk backend "
"resource type." % dst_type)
src_backend = find_resource(src_cls, old_vmholder, src_rg, src_name)
dst_backend = find_resource(dst_cls, new_vmholder, dst_rg, dst_name)
result[src_backend] = dst_backend
return result
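# Illustrative note on the mapping syntax accepted above (derived from
# _disk_map_re; the concrete names are only examples): remap_disk is a
# comma-separated list of "src_type/[src_rg/]src_name:dst_type/[dst_rg/]dst_name"
# entries, where the optional middle component is a resourcegroup. For instance
# "share/nas1:filesystem/rg1/fs1" would remap disks backed by the Share "nas1"
# to the Filesystem "fs1" held in resourcegroup "rg1".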
def get_metacluster(holder):
if hasattr(holder, "metacluster"):
return holder.metacluster
# vmhost
if hasattr(holder, "cluster") and holder.cluster:
return holder.cluster.metacluster
else:
# TODO vlocal still has clusters, so this case not tested yet.
return None
def update_disk_backing_stores(dbmachine, old_holder, new_holder, remap_disk):
if not old_holder:
old_holder = dbmachine.vm_container.holder.holder_object
if not new_holder:
new_holder = old_holder
disk_mapping = parse_remap_disk(old_holder, new_holder, remap_disk)
for dbdisk in dbmachine.disks:
old_bstore = dbdisk.backing_store
if isinstance(old_bstore.holder, BundleResource):
resourcegroup = old_bstore.holder.resourcegroup.name
else:
resourcegroup = None
if old_bstore in disk_mapping:
new_bstore = disk_mapping[old_bstore]
else:
new_bstore = find_resource(old_bstore.__class__, new_holder,
resourcegroup, old_bstore.name,
error=ArgumentError)
dbdisk.backing_store = new_bstore
def update_interface_bindings(session, logger, dbmachine, autoip):
for dbinterface in dbmachine.interfaces:
old_pg = dbinterface.port_group
if not old_pg:
continue
old_net = old_pg.network
# Suppress the warning about PG mismatch - we'll update the addresses
# later
set_port_group(session, logger, dbinterface, old_pg.name,
check_pg_consistency=False)
logger.info("Updated {0:l} to use {1:l}.".format(dbinterface,
dbinterface.port_group))
new_net = dbinterface.port_group.network
if new_net == old_net or not autoip:
dbinterface.check_pg_consistency(logger=logger)
continue
for addr in dbinterface.assignments:
if addr.network != old_net:
continue
new_ip = generate_ip(session, logger, dbinterface, autoip=True,
network_environment=old_net.network_environment)
for dbdns_rec in addr.dns_records:
dbdns_rec.network = new_net
dbdns_rec.ip = new_ip
old_ip = addr.ip
addr.ip = new_ip
addr.network = new_net
logger.info("Changed {0:l} IP address from {1!s} to {2!s}."
.format(dbinterface, old_ip, new_ip))
dbinterface.check_pg_consistency(logger=logger)
def move_vm(session, logger, dbmachine, resholder, remap_disk,
allow_metacluster_change, autoip, plenaries):
old_holder = dbmachine.vm_container.holder.holder_object
if resholder:
new_holder = resholder.holder_object
else:
new_holder = old_holder
if new_holder != old_holder:
old_mc = get_metacluster(old_holder)
new_mc = get_metacluster(new_holder)
if old_mc != new_mc and not allow_metacluster_change:
raise ArgumentError("Moving VMs between metaclusters is "
"disabled by default. Use the "
"--allow_metacluster_change option to "
"override.")
plenaries.add(old_holder)
plenaries.add(new_holder)
dbmachine.vm_container.holder = resholder
if new_holder != old_holder or remap_disk:
update_disk_backing_stores(dbmachine, old_holder, new_holder, remap_disk)
if new_holder != old_holder or autoip:
update_interface_bindings(session, logger, dbmachine, autoip)
if hasattr(new_holder, 'location_constraint'):
dbmachine.location = new_holder.location_constraint
else:
dbmachine.location = new_holder.hardware_entity.location
class CommandUpdateMachine(BrokerCommand):
requires_plenaries = True
required_parameters = ["machine"]
def render(self, session, logger, plenaries, machine, model, vendor, serial, uuid,
clear_uuid, chassis, slot, clearchassis, multislot, vmhost,
cluster, metacluster, allow_metacluster_change, cpuname,
cpuvendor, cpucount, memory, ip, autoip, uri, remap_disk,
comments, **arguments):
dbmachine = Machine.get_unique(session, machine, compel=True)
oldinfo = DSDBRunner.snapshot_hw(dbmachine)
old_location = dbmachine.location
plenaries.add(dbmachine)
if dbmachine.vm_container:
plenaries.add(dbmachine.vm_container)
if dbmachine.host:
# Using PlenaryHostData directly, to avoid warnings if the host has
# not been configured yet
plenaries.add(dbmachine.host, cls=PlenaryHostData)
if clearchassis:
del dbmachine.chassis_slot[:]
if chassis:
dbchassis = Chassis.get_unique(session, chassis, compel=True)
dbmachine.location = dbchassis.location
if slot is None:
raise ArgumentError("Option --chassis requires --slot "
"information.")
self.adjust_slot(session, logger,
dbmachine, dbchassis, slot, multislot)
elif slot is not None:
dbchassis = None
for dbslot in dbmachine.chassis_slot:
if dbchassis and dbslot.chassis != dbchassis:
raise ArgumentError("Machine in multiple chassis, please "
"use --chassis argument.")
dbchassis = dbslot.chassis
if not dbchassis:
raise ArgumentError("Option --slot requires --chassis "
"information.")
self.adjust_slot(session, logger,
dbmachine, dbchassis, slot, multislot)
dblocation = get_location(session, **arguments)
if dblocation:
loc_clear_chassis = False
for dbslot in dbmachine.chassis_slot:
dbcl = dbslot.chassis.location
if dbcl != dblocation:
if chassis or slot is not None:
raise ArgumentError("{0} conflicts with chassis {1!s} "
"location {2}."
.format(dblocation, dbslot.chassis,
dbcl))
else:
loc_clear_chassis = True
if loc_clear_chassis:
del dbmachine.chassis_slot[:]
dbmachine.location = dblocation
if model:
# If overriding model, should probably overwrite default
# machine specs as well.
dbmodel = Model.get_unique(session, name=model, vendor=vendor,
compel=True)
if not dbmodel.model_type.isMachineType():
raise ArgumentError("The update_machine command cannot update "
"machines of type %s." %
dbmodel.model_type)
# We probably could do this by forcing either cluster or
# location data to be available as appropriate, but really?
# Failing seems reasonable.
if dbmodel.model_type != dbmachine.model.model_type and \
(dbmodel.model_type.isVirtualMachineType() or
dbmachine.model.model_type.isVirtualMachineType()):
raise ArgumentError("Cannot change machine from %s to %s." %
(dbmachine.model.model_type,
dbmodel.model_type))
old_nic_model = dbmachine.model.nic_model
new_nic_model = dbmodel.nic_model
if old_nic_model != new_nic_model:
for iface in dbmachine.interfaces:
if iface.model == old_nic_model:
iface.model = new_nic_model
dbmachine.model = dbmodel
if cpuname or cpuvendor:
dbcpu = Model.get_unique(session, name=cpuname, vendor=cpuvendor,
model_type=CpuType.Cpu, compel=True)
dbmachine.cpu_model = dbcpu
if cpucount is not None:
dbmachine.cpu_quantity = cpucount
if memory is not None:
dbmachine.memory = memory
if serial is not None:
dbmachine.serial_no = serial
if comments is not None:
dbmachine.comments = comments
if uuid:
q = session.query(Machine)
q = q.filter_by(uuid=uuid)
existing = q.first()
if existing:
raise ArgumentError("{0} is already using UUID {1!s}."
.format(existing, uuid))
dbmachine.uuid = uuid
elif clear_uuid:
dbmachine.uuid = None
if uri and not dbmachine.model.model_type.isVirtualMachineType():
raise ArgumentError("URI can be specified only for virtual "
"machines and the model's type is %s" %
dbmachine.model.model_type)
if uri is not None:
dbmachine.uri = uri
# FIXME: For now, if a machine has its interface(s) in a portgroup
# this command will need to be followed by an update_interface to
# re-evaluate the portgroup for overflow.
# It would be better to have --pg and --autopg options to let it
# happen at this point.
if cluster or vmhost or metacluster:
if not dbmachine.vm_container:
raise ArgumentError("Cannot convert a physical machine to "
"virtual.")
resholder = get_resource_holder(session, logger, hostname=vmhost,
cluster=cluster,
metacluster=metacluster,
compel=False)
move_vm(session, logger, dbmachine, resholder, remap_disk,
allow_metacluster_change, autoip, plenaries)
elif remap_disk:
update_disk_backing_stores(dbmachine, None, None, remap_disk)
if ip:
if dbmachine.host:
for srv in dbmachine.host.services_provided:
si = srv.service_instance
plenaries.add(si, cls=PlenaryServiceInstanceToplevel)
update_primary_ip(session, logger, dbmachine, ip)
if dbmachine.location != old_location and dbmachine.host:
for vm in dbmachine.host.virtual_machines:
plenaries.add(vm)
vm.location = dbmachine.location
session.flush()
# Check if the changed parameters still meet cluster capacity
        # requirements
if dbmachine.cluster:
dbmachine.cluster.validate()
if allow_metacluster_change and dbmachine.cluster.metacluster:
dbmachine.cluster.metacluster.validate()
if dbmachine.host and dbmachine.host.cluster:
dbmachine.host.cluster.validate()
for dbinterface in dbmachine.interfaces:
dbinterface.check_pg_consistency(logger=logger)
# The check to make sure a plenary file is not written out for
# dummy aurora hardware is within the call to write(). This way
# it is consistent without altering (and forgetting to alter)
# all the calls to the method.
with plenaries.transaction():
dsdb_runner = DSDBRunner(logger=logger)
dsdb_runner.update_host(dbmachine, oldinfo)
dsdb_runner.commit_or_rollback("Could not update machine in DSDB")
return
def adjust_slot(self, session, logger,
dbmachine, dbchassis, slot, multislot):
for dbslot in dbmachine.chassis_slot:
# This update is a noop, ignore.
# Technically, this could be a request to trim the list down
# to just this one slot - in that case --clearchassis will be
# required.
if dbslot.chassis == dbchassis and dbslot.slot_number == slot:
return
if len(dbmachine.chassis_slot) > 1 and not multislot:
raise ArgumentError("Use --multislot to support a machine in more "
"than one slot, or --clearchassis to remove "
"current chassis slot information.")
if not multislot:
slots = ", ".join(str(dbslot.slot_number) for dbslot in
dbmachine.chassis_slot)
logger.info("Clearing {0:l} out of {1:l} slot(s) "
"{2}".format(dbmachine, dbchassis, slots))
del dbmachine.chassis_slot[:]
q = session.query(ChassisSlot)
q = q.filter_by(chassis=dbchassis, slot_number=slot)
dbslot = q.first()
if dbslot:
if dbslot.machine:
raise ArgumentError("{0} slot {1} already has machine "
"{2}.".format(dbchassis, slot,
dbslot.machine.label))
else:
dbslot = ChassisSlot(chassis=dbchassis, slot_number=slot)
dbmachine.chassis_slot.append(dbslot)
return
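# Illustrative invocation sketch (the exact CLI flags come from the broker's
# option definitions, not this file, so treat these as examples only):
#
#   aq update machine --machine evm1 --vmhost nyhost1 \
#       --remap_disk "share/nas1:share/nas2" --autoip
#
# would move the virtual machine "evm1" onto the VM host "nyhost1", remap its
# disk backing stores, and regenerate IP addresses for interfaces whose port
# group landed on a different network.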
|
guillaume-philippon/aquilon
|
lib/aquilon/worker/commands/update_machine.py
|
Python
|
apache-2.0
| 16,386
|
#!/usr/bin/python
#coding: utf-8
#auth: asher
#date: 20171027
#purpose: get useful info from the json files
import ConfigParser
import time
import datetime
import requests
import fileinput
import sys
import os
import codecs
import json
import getWarranty
reload(sys)
sys.setdefaultencoding( "utf-8" )
def getConfig():
"""
    Read the common configuration data in a single function so it does not have to be repeated in every function.
"""
global cmdbpath
global idccontactinfoJson,iprangesJson,itemsJson,serverJson,dellserverjson
fileName = os.path.abspath(__file__)
binPath = os.path.dirname(os.path.realpath(__file__))
basePath = os.path.dirname(binPath)
confPath = basePath + '/config/'
# print confPath
conf = ConfigParser.ConfigParser()
conf.read("%s/cmdb.ini" % confPath)
#####
cmdbpath = conf.get('getcmdbinfo','cmdbpath')
# JsonFilesPath = basePath + '/files/'
if not os.path.isdir(cmdbpath):
os.mkdir(cmdbpath)
#idccontactinfo = idccontactinfo.json
idccontactinfoJson = cmdbpath + conf.get('getcmdbinfo','idccontactinfo')
iprangesJson = cmdbpath + conf.get('getcmdbinfo','ipranges')
itemsJson = cmdbpath + conf.get('getcmdbinfo','items')
serverJson = cmdbpath + conf.get('getcmdbinfo','serverinfosforidcmaintain')
dellserverjson = cmdbpath + conf.get('getcmdbinfo','dellserverjson')
def cmdbServer(stg):
    ## Given the service tag (stg), return the server-related information together with its IDC information
newdict = {}
getConfig()
with open(serverJson,'r') as f:
serverinfor = json.loads(f.read())
if serverinfor.has_key(stg):
dicts = serverinfor[stg]
newdict['item_id'] = dicts['item_id']
#hostname:HN-dl8
newdict['hostname'] = dicts['hostname']
#status:项目专属
newdict['status'] = dicts['status']
#idc_id:海宁
newdict['idc_id'] = dicts['idc_id']
#floor:3
newdict['floor'] = dicts['floor']
#cabinet:K08
newdict['cabinet'] = dicts['cabinet']
#cabinet_pos:10
newdict['cabinet_pos'] = dicts['cabinet_pos']
return newdict
def idcContact(stg):
        ## Return the IDC (data centre) contact information for the given service tag (stg)
        ## Usage:
#iddc = idcContact(stg1)
#for k,v in iddc.items():
# print k,v
idcnew = {}
getConfig()
stg1 = stg
try:
dicts = cmdbServer(stg1)
idcid = u'%s' % dicts['idc_id'].encode('UTF-8')
with open(idccontactinfoJson,'r') as f:
#idcInf = json.loads(f.read(),encoding='utf-8')
idcInf = json.loads(f.read())
if idcInf.has_key(idcid):
idcnew['tel'] = idcInf[idcid]['tel']
idcnew['address'] = idcInf[idcid]['address']
idcnew['name'] = idcInf[idcid]['name']
#return idcInf[idcid]
return idcnew
except:
pass
def dellServerInfo(stg):
"""
        Look up the warranty/expiry status of a server in the local store of data previously downloaded from Dell's website.
"""
dells = {}
getConfig()
stg1 = stg
with open(dellserverjson,'r') as f:
dellInf = json.loads(f.read())
if dellInf.has_key(stg1):
dells['MachineDescription'] = dellInf[stg1]['MachineDescription']
dells['StartDate'] = dellInf[stg1]['StartDate']
dells['EndDate'] = dellInf[stg1]['EndDate']
expiretime = dells['EndDate']
nowtime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
expire1 = datetime.datetime.strptime(expiretime,"%Y-%m-%d %H:%M:%S")
nowtime1 = datetime.datetime.strptime(nowtime,"%Y-%m-%d %H:%M:%S")
remaintime = str(expire1 - nowtime1).split('days')[0]
dells['RemainDays'] = remaintime
dells['ServiceLevelDescription'] = dellInf[stg1]['ServiceLevelDescription']
return dells
else:
try:
newinfos = getWarranty.getDellExpires(stg)
dells['MachineDescription'] = newinfos['MachineDescription']
dells['StartDate'] = newinfos['StartDate']
dells['EndDate'] = newinfos['EndDate']
expiretime = dells['EndDate']
nowtime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
expire1 = datetime.datetime.strptime(expiretime,"%Y-%m-%d %H:%M:%S")
nowtime1 = datetime.datetime.strptime(nowtime,"%Y-%m-%d %H:%M:%S")
remaintime = str(expire1 - nowtime1).split('days')[0]
dells['RemainDays'] = remaintime
dells['ServiceLevelDescription'] = newinfos['ServiceLevelDescription']
bigdicts = {}
bigdicts[stg1] = dells
getWarranty.writedict2json(bigdicts,dellserverjson)
return dells
            except TypeError:
                # getWarranty.getDellExpires() may return None, and subscripting
                # that result raises TypeError; ignore it and fall through
                pass
#import getWarranty
if __name__ == '__main__':
#stg1 = 'H1LMKY1'
stg1 = 'JRQMKY1'
# stg1 = '6298JY1'
dic = cmdbServer(stg1)
#print dicts
if dic:
for k,v in dic.items():
print k,v
iddc = idcContact(stg1)
if iddc:
for k,v in iddc.items():
print k,v
dellcs = dellServerInfo(stg1)
if dellcs:
for k,v in dellcs.items():
print k,v
|
lichengshuang/createvhost
|
python/asher/getcmdbinfo/bin/getcmdbinfo.py
|
Python
|
apache-2.0
| 5,242
|
# Copyright 2021 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import annotations
from typing import DefaultDict, Sequence
from unittest import mock
import pytest
from pants.engine.fs import EMPTY_DIGEST
from pants.jvm.resolve.common import Coordinate, Coordinates
from pants.jvm.resolve.coursier_fetch import CoursierLockfileEntry, CoursierResolvedLockfile
from pants.jvm.resolve.key import CoursierResolveKey
coord1 = Coordinate("test", "art1", "1.0.0")
coord2 = Coordinate("test", "art2", "1.0.0")
coord3 = Coordinate("test", "art3", "1.0.0")
coord4 = Coordinate("test", "art4", "1.0.0")
coord5 = Coordinate("test", "art5", "1.0.0")
# No dependencies (coord1)
# 1 direct dependency, more transitive dependencies (coord2)
# 1 where direct dependencies provide no transitive dependencies (coord4)
# 1 where direct dependencies provide repeated dependencies (coord5)
direct: dict[Coordinate, set[Coordinate]] = {
coord1: set(),
coord2: {
coord3,
}, # 1, 2, 3, 4, 5
coord3: {coord1, coord4, coord5}, # 1, 3, 4, 5
coord4: {
coord1,
}, # 1, 4
coord5: {coord1, coord4}, # 1, 4, 5
}
@pytest.fixture
def lockfile() -> CoursierResolvedLockfile:
# Calculate transitive deps
transitive_ = {(i, k) for i, j in direct.items() for k in j}
while True:
old_len = len(transitive_)
transitive_ |= {(i, k) for i, j in transitive_ for k in direct[j]}
if old_len == len(transitive_):
break
transitive = DefaultDict(set)
for (i, j) in transitive_:
transitive[i].add(j)
entries = (
CoursierLockfileEntry(
coord=coord,
file_name=f"{coord.artifact}.jar",
direct_dependencies=Coordinates(direct[coord]),
dependencies=Coordinates(transitive[coord]),
file_digest=mock.Mock(),
)
for coord in direct
)
return CoursierResolvedLockfile(entries=tuple(entries))
def test_no_deps(lockfile: CoursierResolvedLockfile) -> None:
filtered = filter(coord1, lockfile, False)
assert filtered == [coord1]
def test_filter_non_transitive_includes_direct_deps(lockfile: CoursierResolvedLockfile) -> None:
filtered = filter(coord2, lockfile, False)
assert filtered == [coord2, coord3]
def test_filter_transitive_includes_transitive_deps(lockfile: CoursierResolvedLockfile) -> None:
filtered = filter(coord2, lockfile, True)
assert set(filtered) == {coord1, coord2, coord3, coord4, coord5}
# Entries should only appear once.
assert len(filtered) == 5
def filter(coordinate, lockfile, transitive) -> Sequence[Coordinate]:
key = CoursierResolveKey("example", "example.json", EMPTY_DIGEST)
root, deps = (
lockfile.dependencies(key, coordinate)
if transitive
else lockfile.direct_dependencies(key, coordinate)
)
return [i.coord for i in (root, *deps)]
|
pantsbuild/pants
|
src/python/pants/jvm/resolve/coursier_fetch_filter_test.py
|
Python
|
apache-2.0
| 2,986
|
# Copyright 2008 German Aerospace Center (DLR)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from webdav.acp.Acl import ACL
from webdav.acp.Ace import ACE
from webdav.acp.GrantDeny import GrantDeny
from webdav.acp.Privilege import Privilege
from webdav.acp.Principal import Principal
__version__ = "$LastChangedRevision: 2 $"
|
antont/tundra
|
src/Application/PythonScriptModule/pymodules_old/lib/webdav/acp/__init__.py
|
Python
|
apache-2.0
| 829
|
import pygame
import time
import scripts
"""
Score class
Handles all the score area
package: ianna
"""
class IannaScore():
def __init__ (self, buffer, screen, game_entities):
self.score_image = pygame.image.load('artwork/marcador.png').convert()
self.font = pygame.image.load('artwork/font.png').convert()
self.chars = []
self.buffer = buffer
self.screen = screen
self.game_entities = game_entities
self.weapons = []
self.weapons.append(pygame.image.load('artwork/marcador_armas_sword.png').convert())
self.weapons.append(pygame.image.load('artwork/marcador_armas_eclipse.png').convert())
self.weapons.append(pygame.image.load('artwork/marcador_armas_axe.png').convert())
self.weapons.append(pygame.image.load('artwork/marcador_armas_blade.png').convert())
self.first_object_in_inventory = 0
# We have 64 chars, in ASCII order starting by BLANK (32)
# There are some special chars, look at the font!
for tile_x in range (0,32):
rect = (tile_x*8, 0, 8, 8)
self.chars.append(self.font.subsurface(rect))
for tile_x in range (0,32):
rect = (tile_x*8, 8, 8, 8)
self.chars.append(self.font.subsurface(rect))
def clean_text_area(self):
for y in range(0,3):
for x in range(0,30):
self.buffer.blit(self.chars[0],(8+x*8,168+y*8))
def print_string(self,string):
fpsClock = pygame.time.Clock()
y=0
x=0
i=0
while i < len(string):
word = ""
# Find the word
while string[i] != ',' and string[i] != '.' and string[i] != ' ':
word = word + string[i]
i = i + 1
# Add the punctuation character
word = word + string[i]
i = i + 1
# Now print it
if x + len(word) > 30:
y = y + 1
x = 0
if y == 3: # We need to wait until the player presses any key
self.buffer.blit(self.chars[32],(240,184))
pygame.transform.scale(self.buffer,(256*3,192*3),self.screen)
pygame.display.flip()
self.wait_for_keypress()
y = 0
self.clean_text_area()
j = 0
while j < len(word):
char = ord(word[j]) - 32
self.buffer.blit(self.chars[char],(8+x*8,168+y*8))
x = x + 1
j = j + 1
pygame.transform.scale(self.buffer,(256*3,192*3),self.screen)
pygame.display.flip()
            fpsClock.tick(25) # run at 25 fps
self.buffer.blit(self.chars[32],(240,184))
pygame.transform.scale(self.buffer,(256*3,192*3),self.screen)
pygame.display.flip()
self.wait_for_keypress()
def print_char(self,char,x,y):
char = ord(str(char)) - 32
self.buffer.blit(self.chars[char],(x,y))
def wait_for_keypress(self):
'''
Silly function, just wait for a keypress to happen
In the Spectrum version, it should be way better
'''
keypressed = False
keyreleased = False
key = None
while (not keypressed) and (not keyreleased):
events = pygame.event.get()
for event in events:
if event.type == pygame.KEYDOWN: # keypressed, wait until it is released
key = event.key
keypressed = True
if event.type == pygame.KEYUP: # keypressed, wait until it is released
if key == event.key:
keyreleased = True
def print_meter(self,x,value, color):
'''
Display an entity health, on X
'''
y=191
value = value*23/100
rect = [x+2,y-value,5,value]
pygame.draw.rect(self.buffer,color,rect)
def print_inventory(self,player):
'''
Display the inventory
'''
currentx = 24
x = 0
if player.current_object > self.first_object_in_inventory + 2:
self.first_object_in_inventory = self.first_object_in_inventory + 1
elif player.current_object < self.first_object_in_inventory:
self.first_object_in_inventory = self.first_object_in_inventory - 1
for item in player.inventory[self.first_object_in_inventory:]:
if x == 3:
break
self.buffer.blit(player.map.tile_table[self.tiles_per_pickable_object[item]], (currentx,168))
currentx = currentx + 24
x = x + 1
# Use a marker for the current selected object
self.buffer.blit(self.chars[63],(24+(player.current_object-self.first_object_in_inventory)*24,184))
def draw(self):
self.buffer.set_clip(pygame.Rect(0,160,256,192)) # set clipping area for game, should then set clipping for score area
self.buffer.blit(self.score_image,(0,160))
# Print barbarian energy
self.print_meter(168,(self.game_entities[0].energy*100) / self.game_entities[0].get_entity_max_energy(),(255,0,0))
# Print barbarian level
self.print_meter(176,(self.game_entities[0].experience*100) / self.game_entities[0].get_player_max_exp(),(0,255,255))
# Print current weapon
self.buffer.blit(self.weapons[self.game_entities[0].weapon-1],(112,168))
if self.game_entities[1] and self.game_entities[1].enemy_type != "OBJECT_ENEMY_ROCK":
entity = self.game_entities[1]
energy = (entity.energy*100) / entity.enemy_energy[entity.enemy_type][entity.level]
self.print_meter(192,energy,(0,255,0))
# Print energy in numbers
if entity.energy > 99:
print "WARNING: enemy energy is > 100"
else:
self.print_char(entity.energy/10,200,176)
self.print_char(entity.energy%10,208,176)
self.print_char(entity.level,208,184)
if self.game_entities[2] and self.game_entities[2].enemy_type not in ('OBJECT_ENEMY_ROCK','OBJECT_ENEMY_SECONDARY'):
entity = self.game_entities[2]
energy = (entity.energy*100) / entity.enemy_energy[entity.enemy_type][entity.level]
self.print_meter(216,energy,(0,255,0))
if entity.energy > 99:
print "WARNING: enemy energy is > 100"
else:
self.print_char(entity.energy/10,224,176)
self.print_char(entity.energy%10,232,176)
self.print_char(entity.level,232,184)
self.print_inventory(self.game_entities[0])
# Remember to copy this from scripts.py when new objects are created
tiles_per_pickable_object = { "OBJECT_KEY_GREEN": 217,
"OBJECT_KEY_BLUE": 218,
"OBJECT_KEY_YELLOW": 219,
"OBJECT_BREAD": 220,
"OBJECT_MEAT": 221,
"OBJECT_HEALTH": 222,
"OBJECT_KEY_RED": 223,
"OBJECT_KEY_WHITE": 224,
"OBJECT_KEY_PURPLE": 225,
}
|
fjpena/sword-of-ianna-zx
|
python_src/ianna_score.py
|
Python
|
apache-2.0
| 6,072
|
# -*- coding: utf-8 -*-
# Copyright 2012 Yoshihisa Tanaka
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from setuptools import find_packages, setup
name = 'pyfluent'
version = '0.2.1'
readme = os.path.join(os.path.dirname(__file__), 'README.rst')
long_description = open(readme).read()
classifiers = [
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: System :: Logging',
'Topic :: System :: Networking'
]
setup(name=name,
version=version,
author='Yoshihisa Tanaka',
author_email='yt.hisa@gmail.com',
license='MIT',
url='https://github.com/yosisa/pyfluent',
description='A python client library for Fluentd',
long_description=long_description,
classifiers=classifiers,
keywords=['logging', 'fluentd', 'json'],
install_requires=['msgpack-python>=0.3.0'],
tests_require=['pytest', 'mock'],
packages=find_packages(exclude=['tests'])
)
|
yosisa/pyfluent
|
setup.py
|
Python
|
apache-2.0
| 1,712
|
# -*- coding: utf-8 -*-
#!/usr/bin/env python
#
# Copyright 2015-2021 BigML
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
""" Testing predictions with missing splits
"""
from bigmler.tests.world import (world, common_setup_module,
common_teardown_module, teardown_class)
import bigmler.tests.basic_tst_prediction_steps as test_pred
def setup_module():
"""Setup for the module
"""
common_setup_module()
def teardown_module():
"""Teardown for the module
"""
common_teardown_module()
class TestMissingSplits(object):
def teardown(self):
"""Calling generic teardown for every method
"""
print("\nEnd of tests in: %s\n-------------------\n" % __name__)
teardown_class()
def setup(self):
"""
Debug information
"""
print("\n-------------------\nTests in: %s\n" % __name__)
def test_scenario1(self):
"""
Scenario: Successfully building test predictions with missing-splits model:
Given I create BigML resources uploading train "<data>" file to test "<test>" with a missing-splits model and log predictions in "<output>"
And I check that the source has been created
And I check that the dataset has been created
And I check that the model has been created
And I check that the predictions are ready
Then the local prediction file is like "<predictions_file>"
Examples:
                | data | test | output | predictions_file |
| ../data/iris_missing.csv | ../data/test_iris_missing.csv | ./scenario_mspl_1/predictions.csv | ./check_files/predictions_iris_missing.csv |
"""
print(self.test_scenario1.__doc__)
examples = [
['data/iris_missing.csv', 'data/test_iris_missing.csv', 'scenario_mspl_1/predictions.csv', 'check_files/predictions_iris_missing.csv']]
for example in examples:
print("\nTesting with:\n", example)
test_pred.i_create_all_resources_missing_splits(self, data=example[0], test=example[1], output=example[2])
test_pred.i_check_create_source(self)
test_pred.i_check_create_dataset(self, suffix=None)
test_pred.i_check_create_model(self)
test_pred.i_check_create_predictions(self)
test_pred.i_check_predictions(self, example[3])
def test_scenario2(self):
"""
Scenario: Successfully building test predictions from scratch:
Given I create BigML resources uploading train "<data>" file to test "<test>" remotely with a missing-splits model and log predictions in "<output>"
And I check that the source has been created
And I check that the dataset has been created
And I check that the model has been created
And I check that the source has been created from the test file
And I check that the dataset has been created from the test file
And I check that the batch prediction has been created
And I check that the predictions are ready
Then the local prediction file is like "<predictions_file>"
Examples:
                | data | test | output | predictions_file |
                | ../data/iris_missing.csv | ../data/test_iris_missing.csv | ./scenario_mspl_2/predictions.csv | ./check_files/predictions_iris_missing.csv |
"""
print(self.test_scenario2.__doc__)
examples = [
['data/iris_missing.csv', 'data/test_iris_missing.csv', 'scenario_mspl_2/predictions.csv', 'check_files/predictions_iris_missing.csv']]
for example in examples:
print("\nTesting with:\n", example)
test_pred.i_create_all_resources_remote_missing_splits(self, data=example[0], test=example[1], output=example[2])
test_pred.i_check_create_source(self)
test_pred.i_check_create_dataset(self, suffix=None)
test_pred.i_check_create_model(self)
test_pred.i_check_create_test_source(self)
test_pred.i_check_create_test_dataset(self)
test_pred.i_check_create_batch_prediction(self)
test_pred.i_check_create_predictions(self)
test_pred.i_check_predictions(self, example[3])
|
bigmlcom/bigmler
|
bigmler/tests/test_06_missing_splits.py
|
Python
|
apache-2.0
| 5,076
|
from django.contrib import admin
# Register your models here.
from learning_logs.models import Topic, Entry
admin.site.register(Topic)
admin.site.register(Entry)
|
wsqhubapp/learning_log
|
learning_logs/admin.py
|
Python
|
apache-2.0
| 163
|
# (c) Copyright 2008-2015 Synapse Wireless, Inc.
"""System Info IDs - used in 'getInfo()' and 'getStat()' calls"""
# Types
SI_TYPE_VENDOR = 0
SI_TYPE_RADIO = 1
SI_TYPE_CPU = 2
SI_TYPE_PLATFORM = 3
SI_TYPE_BUILD = 4
SI_TYPE_VERSION_MAJOR = 5
SI_TYPE_VERSION_MINOR = 6
SI_TYPE_VERSION_BUILD = 7
SI_ENCRYPTION_INFO = 8
# SNAP 2.4 Additions
SI_RPC_PACKET_SENT_ID = 9
SI_RPC_IS_MULTICAST_ID = 10
SI_RPC_IS_MULTICAST = SI_RPC_IS_MULTICAST_ID # (just an alias)
SI_MULTI_PKT_TTL_ID = 11
SI_MULTI_PKT_TTL = SI_MULTI_PKT_TTL_ID # (just an alias)
SI_SMALL_STRS_REMAINING = 12 # Embedded nodes only
SI_MEDIUM_STRS_REMAINING = 13 # Embedded nodes only
SI_ROUTE_TABLE_SIZE = 14
SI_ROUTES_IN_TABLE = 15
SI_BANK_FREE_SPACE = 16 # Embedded nodes only
# SNAP 2.5 Additions
SI_RF200A_FLAG = 17 # Embedded nodes only
SI_STDIN_HOOK_STATUS = 18 # Embedded nodes only
# SNAP 2.6 Additions
SI_TINY_STRS_REMAINING = 19 # Embedded nodes only
SI_LARGE_STRS_REMAINING = 20 # Embedded nodes only
SI_SCRIPT_FIRST_RUN_STATUS = 21 # Embedded nodes only
SI_SCRIPT_BASE_ADDR = 22 # Embedded nodes only
SI_SCRIPT_BASE_BANK = 23 # Embedded nodes only
SI_RPC_IS_DIRECTED_MULTICAST = 24
SI_DELAY_FACTOR = 25 # Directed Multicast only
SI_ADDRESS_INDEX = 26 # Directed Multicast only
SI_MULTI_PKT_GROUP = 27 # Multicast or Directed Multicast only
SI_MULTI_PKT_ORIGINAL_TTL = 28 # Directed Multicast only
# Vendors
SI_VENDOR_SYNAPSE = 0
SI_VENDOR_FREESCALE = 2 # value = 1 skipped
SI_VENDOR_CEL = 3
SI_VENDOR_ATMEL = 4
SI_VENDOR_SILICON_LABS = 5
# Radios
SI_RADIO_802_15_4 = 0
SI_RADIO_NONE = 1
SI_RADIO_900 = 2
# CPUs
SI_CPU_MC9S08GT60A = 0
SI_CPU_8051 = 1
SI_CPU_MC9S08QE = 2
SI_CPU_COLDFIRE = 3
SI_CPU_ARM7 = 4
SI_CPU_ATMEGA = 5
SI_CPU_SI1000 = 6
SI_CPU_X86 = 7
SI_CPU_UNKNOWN = 8
SI_CPU_SPARC_LEON = 9
SI_CPU_ARM_CORTEX_M3 = 10
SI_CPU_ATMEGA128RFR2 = 12 # Deprecated
SI_CPU_ATMEGA1284RFR2 = 13 # Deprecated
# Platforms
SI_PLATFORM_RF_ENGINE = 0
SI_PLATFORM_CEL_ZIC2410 = 3 # values [1, 2, 4] skipped
SI_PLATFORM_MC1321X = 5
SI_PLATFORM_ATMEGA128RFA1 = 6
SI_PLATFORM_SNAPCOM = 7
SI_PLATFORM_SI1000 = 8
SI_PLATFORM_MC1322X = 9
SI100X_FHSS = 11 # value [10, 12] skipped
SI_PLATFORM_SI100X_KADEX = 11
SI_PLATFORM_RF300 = 13
SI_PLATFORM_RF200_PFM = 14
SI_PLATFORM_SM300 = 15
SI_PLATFORM_SM301 = 16
SI_PLATFORM_SM200_PFM = 17
SI_PLATFORM_RN_G2C547 = 18
SI_PLATFORM_RF266_PFM = 19
SI_PLATFORM_STM32W108xB = 20
SI_PLATFORM_SM222_PFM = 25 # value [21, 22, 23, 24] skipped
SI_PLATFORM_ATmega128RFR2_PFM = 26
SI_PLATFORM_SM220UF1_PFM = 27
SI_PLATFORM_ATmega1284RFR2_PFM = 28
# Builds
SI_BUILD_DEBUG = 0
SI_BUILD_RELEASE = 1
# Encryptions
SI_NO_ENCRYPTION = 0
SI_AES128_ENCRYPTION = 1
SI_SNAP_ENCRYPTION = 2
# getStat() Enumerations
STAT_DS_NULL_TX_BUFFERS = 0
STAT_DS_UART0_RX_BUFFERS = 1
STAT_DS_UART0_TX_BUFFERS = 2
STAT_DS_UART1_RX_BUFFERS = 3
STAT_DS_UART_TX_BUFFERS = 4
STAT_DS_TRANSPARENT_RX_BUFFERS = 5
STAT_DS_TRANSPARENT_TX_BUFFERS = 6
STAT_DS_PACKET_SERIAL_RX_BUFFERS = 7
STAT_DS_PACKET_SERIAL_TX_BUFFERS = 8
STAT_DS_RADIO_RX_BUFFERS = 9
STAT_DS_RADIO_TX_BUFFERS = 10
STAT_RADIO_FORWARDED_UNICASTS = 11
STAT_PACKET_SERIAL_FORWARDED_UNICASTS = 12
STAT_RADIO_FORWARDED_XCASTS = 13
STAT_PACKET_SERIAL_FORWARDED_XCASTS = 14
STAT_PACKET_SERIAL_RETRIES = 15 # Debug Builds Only
STAT_PACKET_SERIAL_FAILURES = 16 # Debug Builds Only
STAT_PACKET_SERIAL_RX_ERRORS = 17 # Debug Builds Only
STAT_PACKET_SERIAL_RX_BAD_CKSUM = 18 # Debug Builds Only
STAT_PACKET_SERIAL_NUM_RX_ACKS = 19 # Debug Builds Only
STAT_PACKET_SERIAL_NUM_RX_DUPS = 20 # Debug Builds Only
STAT_PACKET_SERIAL_NO_ROOMS = 21 # Debug Builds Only
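# Illustrative usage sketch (not part of the original module): in a SNAPpy
# script these constants are passed to the node's built-in getInfo()/getStat()
# calls, roughly like:
#
#   platform = getInfo(SI_TYPE_PLATFORM)        # e.g. SI_PLATFORM_RF200_PFM
#   tx_bufs = getStat(STAT_DS_RADIO_TX_BUFFERS)
#
# getInfo()/getStat() are embedded built-ins, not functions defined here, and
# their return values depend on the node's firmware build.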
|
synapse-wireless/bulk-reprogramming
|
snappyImages/synapse/sysInfo.py
|
Python
|
apache-2.0
| 4,335
|
import sys
if sys.version_info >= (3, 8):
from functools import singledispatchmethod
else:
from functools import singledispatch, update_wrapper
def singledispatchmethod(func):
dispatcher = singledispatch(func)
def wrapper(*args, **kw):
return dispatcher.dispatch(args[1].__class__)(*args, **kw)
wrapper.register = dispatcher.register
update_wrapper(wrapper, func)
return wrapper
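# Illustrative usage sketch (not from the original module; the class below is
# made up purely to show the dispatch behaviour). The shim mirrors
# functools.singledispatchmethod: dispatch keys off the type of the first
# argument after ``self``.
#
#   class Renderer:
#       @singledispatchmethod
#       def render(self, value):
#           return str(value)
#
#       @render.register(int)
#       def _(self, value):
#           return "int:%d" % value
#
#   Renderer().render(3)     # -> "int:3"
#   Renderer().render("hi")  # -> "hi"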
|
adamcharnock/lightbus
|
lightbus/utilities/singledispatch.py
|
Python
|
apache-2.0
| 447
|
class DestinationNotFoundException(Exception):
pass
class InvalidDateFormat(Exception):
pass
|
kapucko/bus-train-search
|
btsearch/exceptions.py
|
Python
|
apache-2.0
| 101
|
# Copyright 2021 DeepMind Technologies Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for `mvn_from_bijector.py`."""
from typing import Tuple
from absl.testing import absltest
from absl.testing import parameterized
import chex
from distrax._src.bijectors import bijector
from distrax._src.bijectors.diag_affine import DiagAffine
from distrax._src.bijectors.triangular_affine import TriangularAffine
from distrax._src.distributions.mvn_from_bijector import MultivariateNormalFromBijector
import haiku as hk
import jax
import jax.numpy as jnp
import numpy as np
from tensorflow_probability.substrates import jax as tfp
tfd = tfp.distributions
Array = chex.Array
class DummyBijector(bijector.Bijector):
"""A dummy bijector."""
def forward_and_log_det(self, x: Array) -> Tuple[Array, Array]:
"""Computes y = f(x) and log|det J(f)(x)|."""
return x, jnp.zeros_like(x)[:-1]
class MultivariateNormalFromBijectorTest(parameterized.TestCase):
@parameterized.named_parameters(
('wrong event_ndims_in', 2, 1, True, np.zeros((4,))),
('wrong event_ndims_out', 1, 2, True, np.zeros((4,))),
('not constant Jacobian', 1, 1, False, np.zeros((4,))),
('loc is 0d', 1, 1, True, np.zeros(shape=())),
('loc has more dims than batch_shape', 1, 1, True,
np.zeros(shape=(2, 4))),
)
def test_raises_on_wrong_inputs(
self, event_ndims_in, event_ndims_out, is_constant_jacobian, loc):
bij = DummyBijector(event_ndims_in, event_ndims_out, is_constant_jacobian)
with self.assertRaises(ValueError):
MultivariateNormalFromBijector(loc, bij, batch_shape=())
@parameterized.named_parameters(
('no broadcast', np.ones((4,)), np.zeros((4,)), (4,)),
('broadcasted loc', np.ones((3, 4)), np.zeros((4,)), (3, 4)),
('broadcasted diag', np.ones((4,)), np.zeros((3, 4)), (3, 4)),
)
def test_loc_scale_and_shapes(self, diag, loc, expected_shape):
scale = DiagAffine(diag=diag, bias=jnp.zeros_like(diag))
batch_shape = jnp.broadcast_shapes(diag.shape, loc.shape)[:-1]
dist = MultivariateNormalFromBijector(loc, scale, batch_shape)
np.testing.assert_allclose(dist.loc, np.zeros(expected_shape))
self.assertTrue(scale.same_as(dist.scale))
self.assertEqual(dist.event_shape, (4,))
self.assertEqual(dist.batch_shape, batch_shape)
@chex.all_variants
def test_sample(self):
prng = hk.PRNGSequence(jax.random.PRNGKey(42))
diag = 0.5 + jax.random.uniform(next(prng), (4,))
loc = jax.random.normal(next(prng), (4,))
scale = DiagAffine(diag=diag, bias=jnp.zeros_like(diag))
dist = MultivariateNormalFromBijector(loc, scale, batch_shape=())
num_samples = 100_000
sample_fn = lambda seed: dist.sample(seed=seed, sample_shape=num_samples)
samples = self.variant(sample_fn)(jax.random.PRNGKey(2000))
self.assertEqual(samples.shape, (num_samples, 4))
np.testing.assert_allclose(jnp.mean(samples, axis=0), loc, rtol=0.1)
np.testing.assert_allclose(jnp.std(samples, axis=0), diag, rtol=0.1)
@chex.all_variants
def test_log_prob(self):
prng = hk.PRNGSequence(jax.random.PRNGKey(42))
diag = 0.5 + jax.random.uniform(next(prng), (4,))
loc = jax.random.normal(next(prng), (4,))
scale = DiagAffine(diag=diag, bias=jnp.zeros_like(diag))
dist = MultivariateNormalFromBijector(loc, scale, batch_shape=())
values = jax.random.normal(next(prng), (5, 4))
tfp_dist = tfd.MultivariateNormalDiag(loc=loc, scale_diag=diag)
np.testing.assert_allclose(
self.variant(dist.log_prob)(values), tfp_dist.log_prob(values))
@chex.all_variants(with_pmap=False)
@parameterized.named_parameters(
('no broadcast', (4,), (4,)),
('broadcasted loc', (3, 4), (4,)),
('broadcasted diag', (4,), (3, 4)),
)
def test_mean_median_mode(self, diag_shape, loc_shape):
prng = hk.PRNGSequence(jax.random.PRNGKey(42))
diag = jax.random.normal(next(prng), diag_shape)
loc = jax.random.normal(next(prng), loc_shape)
scale = DiagAffine(diag=diag, bias=jnp.zeros_like(diag))
batch_shape = jnp.broadcast_shapes(diag_shape, loc_shape)[:-1]
dist = MultivariateNormalFromBijector(loc, scale, batch_shape)
for method in ['mean', 'median', 'mode']:
with self.subTest(method=method):
fn = self.variant(getattr(dist, method))
np.testing.assert_allclose(
fn(), jnp.broadcast_to(loc, batch_shape + loc.shape[-1:]))
@chex.all_variants(with_pmap=False)
@parameterized.named_parameters(
('no broadcast', (4,), (4,)),
('broadcasted loc', (3, 4), (4,)),
('broadcasted diag', (4,), (3, 4)),
)
def test_variance_stddev_covariance_diag(self, scale_shape, loc_shape):
prng = hk.PRNGSequence(jax.random.PRNGKey(42))
scale_diag = jax.random.normal(next(prng), scale_shape)
loc = jax.random.normal(next(prng), loc_shape)
scale = DiagAffine(diag=scale_diag, bias=jnp.zeros_like(scale_diag))
batch_shape = jnp.broadcast_shapes(scale_shape[:-1], loc_shape[:-1])
dist = MultivariateNormalFromBijector(loc, scale, batch_shape)
for method in ['variance', 'stddev', 'covariance']:
with self.subTest(method=method):
fn = self.variant(getattr(dist, method))
if method == 'variance':
expected_result = jnp.broadcast_to(
jnp.square(scale_diag), batch_shape + loc.shape[-1:])
elif method == 'stddev':
expected_result = jnp.broadcast_to(
jnp.abs(scale_diag), batch_shape + loc.shape[-1:])
elif method == 'covariance':
expected_result = jnp.broadcast_to(
jnp.vectorize(jnp.diag, signature='(k)->(k,k)')(
jnp.square(scale_diag)),
batch_shape + loc.shape[-1:] + loc.shape[-1:])
np.testing.assert_allclose(fn(), expected_result, rtol=5e-3)
@chex.all_variants(with_pmap=False)
@parameterized.named_parameters(
('no broadcast', (4, 4), (4,)),
('broadcasted loc', (3, 4, 4), (4,)),
('broadcasted diag', (4, 4), (3, 4)),
)
def test_variance_stddev_covariance_no_diag(self, scale_shape, loc_shape):
prng = hk.PRNGSequence(jax.random.PRNGKey(42))
scale_tril = jnp.tril(jax.random.normal(next(prng), scale_shape))
loc = jax.random.normal(next(prng), loc_shape)
scale = TriangularAffine(
matrix=scale_tril,
bias=jnp.zeros_like(scale_tril[..., 0]),
is_lower=True)
batch_shape = jnp.broadcast_shapes(scale_shape[:-2], loc_shape[:-1])
dist = MultivariateNormalFromBijector(loc, scale, batch_shape)
for method in ['variance', 'stddev', 'covariance']:
with self.subTest(method=method):
fn = self.variant(getattr(dist, method))
scale_tril_t = jnp.vectorize(
jnp.transpose, signature='(k,k)->(k,k)')(scale_tril)
scale_times_scale_t = jnp.matmul(scale_tril, scale_tril_t)
if method == 'variance':
expected_result = jnp.vectorize(jnp.diag, signature='(k,k)->(k)')(
scale_times_scale_t)
expected_result = jnp.broadcast_to(
expected_result, batch_shape + loc.shape[-1:])
elif method == 'stddev':
expected_result = jnp.vectorize(jnp.diag, signature='(k,k)->(k)')(
jnp.sqrt(scale_times_scale_t))
expected_result = jnp.broadcast_to(
expected_result, batch_shape + loc.shape[-1:])
elif method == 'covariance':
expected_result = jnp.broadcast_to(
scale_times_scale_t, batch_shape + scale_tril.shape[-2:])
np.testing.assert_allclose(fn(), expected_result, rtol=5e-3)
@chex.all_variants(with_pmap=False)
def test_kl_divergence_diag_distributions(self):
prng = hk.PRNGSequence(jax.random.PRNGKey(42))
scale_diag1 = 0.1 + jax.random.uniform(next(prng), (3, 4))
loc1 = jax.random.normal(next(prng), (1, 4))
dist1_distrax = MultivariateNormalFromBijector(
loc=loc1,
scale=DiagAffine(diag=scale_diag1, bias=jnp.zeros((4,))),
batch_shape=(3,),
)
dist1_tfp = tfd.MultivariateNormalDiag(
loc=loc1, scale_diag=scale_diag1)
scale_diag2 = 0.1 + jax.random.uniform(next(prng), (4,))
loc2 = jax.random.normal(next(prng), (4,))
dist2_distrax = MultivariateNormalFromBijector(
loc=loc2,
scale=DiagAffine(diag=scale_diag2, bias=jnp.zeros((4,))),
batch_shape=(),
)
dist2_tfp = tfd.MultivariateNormalDiag(
loc=loc2, scale_diag=scale_diag2)
expected_result1 = dist1_tfp.kl_divergence(dist2_tfp)
expected_result2 = dist2_tfp.kl_divergence(dist1_tfp)
for mode in ['distrax_to_distrax', 'distrax_to_tfp', 'tfp_to_distrax']:
with self.subTest(mode=mode):
if mode == 'distrax_to_distrax':
result1 = self.variant(dist1_distrax.kl_divergence)(dist2_distrax)
result2 = self.variant(dist2_distrax.kl_divergence)(dist1_distrax)
elif mode == 'distrax_to_tfp':
result1 = self.variant(dist1_distrax.kl_divergence)(dist2_tfp)
result2 = self.variant(dist2_distrax.kl_divergence)(dist1_tfp)
elif mode == 'tfp_to_distrax':
result1 = self.variant(dist1_tfp.kl_divergence)(dist2_distrax)
result2 = self.variant(dist2_tfp.kl_divergence)(dist1_distrax)
np.testing.assert_allclose(result1, expected_result1, rtol=1e-3)
np.testing.assert_allclose(result2, expected_result2, rtol=1e-3)
@chex.all_variants(with_pmap=False)
def test_kl_divergence_non_diag_distributions(self):
prng = hk.PRNGSequence(jax.random.PRNGKey(42))
scale_tril1 = jnp.tril(jax.random.normal(next(prng), (3, 4, 4)))
loc1 = jax.random.normal(next(prng), (1, 4))
dist1_distrax = MultivariateNormalFromBijector(
loc=loc1,
scale=TriangularAffine(matrix=scale_tril1, bias=jnp.zeros((4,))),
batch_shape=(3,),
)
dist1_tfp = tfd.MultivariateNormalTriL(loc=loc1, scale_tril=scale_tril1)
scale_tril2 = jnp.tril(jax.random.normal(next(prng), (4, 4)))
loc2 = jax.random.normal(next(prng), (4,))
dist2_distrax = MultivariateNormalFromBijector(
loc=loc2,
scale=TriangularAffine(matrix=scale_tril2, bias=jnp.zeros((4,))),
batch_shape=(),
)
dist2_tfp = tfd.MultivariateNormalTriL(loc=loc2, scale_tril=scale_tril2)
expected_result1 = dist1_tfp.kl_divergence(dist2_tfp)
expected_result2 = dist2_tfp.kl_divergence(dist1_tfp)
for mode in ['distrax_to_distrax', 'distrax_to_tfp', 'tfp_to_distrax']:
with self.subTest(mode=mode):
if mode == 'distrax_to_distrax':
result1 = self.variant(dist1_distrax.kl_divergence)(dist2_distrax)
result2 = self.variant(dist2_distrax.kl_divergence)(dist1_distrax)
elif mode == 'distrax_to_tfp':
result1 = self.variant(dist1_distrax.kl_divergence)(dist2_tfp)
result2 = self.variant(dist2_distrax.kl_divergence)(dist1_tfp)
elif mode == 'tfp_to_distrax':
result1 = self.variant(dist1_tfp.kl_divergence)(dist2_distrax)
result2 = self.variant(dist2_tfp.kl_divergence)(dist1_distrax)
np.testing.assert_allclose(result1, expected_result1, rtol=1e-3)
np.testing.assert_allclose(result2, expected_result2, rtol=1e-3)
def test_kl_divergence_raises_on_incompatible_distributions(self):
dim = 4
dist1 = MultivariateNormalFromBijector(
loc=jnp.zeros((dim,)),
scale=DiagAffine(diag=jnp.ones((dim,)), bias=jnp.zeros((dim,))),
batch_shape=(),
)
dim = 5
dist2 = MultivariateNormalFromBijector(
loc=jnp.zeros((dim,)),
scale=DiagAffine(diag=jnp.ones((dim,)), bias=jnp.zeros((dim,))),
batch_shape=(),
)
with self.assertRaises(ValueError):
dist1.kl_divergence(dist2)
if __name__ == '__main__':
absltest.main()
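# A distilled construction sketch mirroring the tests above (values are
# illustrative, not part of the test suite):
#
#   loc = jnp.zeros((4,))
#   scale = DiagAffine(diag=jnp.ones((4,)), bias=jnp.zeros((4,)))
#   dist = MultivariateNormalFromBijector(loc, scale, batch_shape=())
#   samples = dist.sample(seed=jax.random.PRNGKey(0), sample_shape=8)  # shape (8, 4)
#   log_p = dist.log_prob(samples)                                     # shape (8,)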
|
deepmind/distrax
|
distrax/_src/distributions/mvn_from_bijector_test.py
|
Python
|
apache-2.0
| 12,417
|
import base64
import os
import re
import bpy
import gpu
LAMP_TYPES = [
gpu.GPU_DYNAMIC_LAMP_DYNVEC,
gpu.GPU_DYNAMIC_LAMP_DYNCO,
gpu.GPU_DYNAMIC_LAMP_DYNIMAT,
gpu.GPU_DYNAMIC_LAMP_DYNPERSMAT,
gpu.GPU_DYNAMIC_LAMP_DYNENERGY,
gpu.GPU_DYNAMIC_LAMP_DYNCOL,
gpu.GPU_DYNAMIC_LAMP_DISTANCE,
gpu.GPU_DYNAMIC_LAMP_ATT1,
gpu.GPU_DYNAMIC_LAMP_ATT2,
gpu.GPU_DYNAMIC_LAMP_SPOTSIZE,
gpu.GPU_DYNAMIC_LAMP_SPOTBLEND,
]
MIST_TYPES = [
gpu.GPU_DYNAMIC_MIST_ENABLE,
gpu.GPU_DYNAMIC_MIST_START,
gpu.GPU_DYNAMIC_MIST_DISTANCE,
gpu.GPU_DYNAMIC_MIST_INTENSITY,
gpu.GPU_DYNAMIC_MIST_TYPE,
gpu.GPU_DYNAMIC_MIST_COLOR,
]
WORLD_TYPES = [
gpu.GPU_DYNAMIC_HORIZON_COLOR,
gpu.GPU_DYNAMIC_AMBIENT_COLOR,
]
MATERIAL_TYPES = [
gpu.GPU_DYNAMIC_MAT_DIFFRGB,
gpu.GPU_DYNAMIC_MAT_REF,
gpu.GPU_DYNAMIC_MAT_SPECRGB,
gpu.GPU_DYNAMIC_MAT_SPEC,
gpu.GPU_DYNAMIC_MAT_HARD,
gpu.GPU_DYNAMIC_MAT_EMIT,
gpu.GPU_DYNAMIC_MAT_AMB,
gpu.GPU_DYNAMIC_MAT_ALPHA,
]
TYPE_TO_NAME = {
gpu.GPU_DYNAMIC_OBJECT_VIEWMAT: 'view_mat',
gpu.GPU_DYNAMIC_OBJECT_MAT: 'model_mat',
gpu.GPU_DYNAMIC_OBJECT_VIEWIMAT: 'inv_view_mat',
gpu.GPU_DYNAMIC_OBJECT_IMAT: 'inv_model_mat',
gpu.GPU_DYNAMIC_OBJECT_COLOR: 'color',
gpu.GPU_DYNAMIC_OBJECT_AUTOBUMPSCALE: 'auto_bump_scale',
gpu.GPU_DYNAMIC_MIST_ENABLE: 'use_mist',
gpu.GPU_DYNAMIC_MIST_START: 'start',
gpu.GPU_DYNAMIC_MIST_DISTANCE: 'depth',
gpu.GPU_DYNAMIC_MIST_INTENSITY: 'intensity',
gpu.GPU_DYNAMIC_MIST_TYPE: 'falloff',
gpu.GPU_DYNAMIC_MIST_COLOR: 'color',
gpu.GPU_DYNAMIC_HORIZON_COLOR: 'horizon_color',
gpu.GPU_DYNAMIC_AMBIENT_COLOR: 'ambient_color',
gpu.GPU_DYNAMIC_LAMP_DYNVEC: 'dynvec',
gpu.GPU_DYNAMIC_LAMP_DYNCO: 'dynco',
gpu.GPU_DYNAMIC_LAMP_DYNIMAT: 'dynimat',
gpu.GPU_DYNAMIC_LAMP_DYNPERSMAT: 'dynpersmat',
gpu.GPU_DYNAMIC_LAMP_DYNENERGY: 'energy',
gpu.GPU_DYNAMIC_LAMP_DYNCOL: 'color',
gpu.GPU_DYNAMIC_LAMP_DISTANCE: 'distance',
gpu.GPU_DYNAMIC_LAMP_ATT1: 'linear_attenuation',
gpu.GPU_DYNAMIC_LAMP_ATT2: 'quadratic_attenuation',
gpu.GPU_DYNAMIC_LAMP_SPOTSIZE: 'spot_size',
gpu.GPU_DYNAMIC_LAMP_SPOTBLEND: 'spot_blend',
gpu.GPU_DYNAMIC_MAT_DIFFRGB: 'diffuse_color',
gpu.GPU_DYNAMIC_MAT_REF: 'diffuse_intensity',
gpu.GPU_DYNAMIC_MAT_SPECRGB: 'specular_color',
gpu.GPU_DYNAMIC_MAT_SPEC: 'specular_intensity',
gpu.GPU_DYNAMIC_MAT_HARD: 'specular_hardness',
gpu.GPU_DYNAMIC_MAT_EMIT: 'emit',
gpu.GPU_DYNAMIC_MAT_AMB: 'ambient',
gpu.GPU_DYNAMIC_MAT_ALPHA: 'alpha',
}
TYPE_TO_SEMANTIC = {
gpu.GPU_DYNAMIC_LAMP_DYNVEC: 'BL_DYNVEC',
gpu.GPU_DYNAMIC_LAMP_DYNCO: 'MODELVIEW', # dynco gets extracted from the matrix
gpu.GPU_DYNAMIC_LAMP_DYNIMAT: 'BL_DYNIMAT',
gpu.GPU_DYNAMIC_LAMP_DYNPERSMAT: 'BL_DYNPERSMAT',
gpu.CD_ORCO: 'POSITION',
gpu.CD_MTFACE: 'TEXCOORD_0',
-1: 'NORMAL' # Hack until the gpu module has something for normals
}
DATATYPE_TO_CONVERTER = {
gpu.GPU_DATA_1I: lambda x: x,
gpu.GPU_DATA_1F: lambda x: x,
gpu.GPU_DATA_2F: list,
gpu.GPU_DATA_3F: list,
gpu.GPU_DATA_4F: list,
}
DATATYPE_TO_GLTF_TYPE = {
gpu.GPU_DATA_1I: 5124, # INT
gpu.GPU_DATA_1F: 5126, # FLOAT
gpu.GPU_DATA_2F: 35664, # FLOAT_VEC2
gpu.GPU_DATA_3F: 35665, # FLOAT_VEC3
gpu.GPU_DATA_4F: 35666, # FLOAT_VEC4
gpu.GPU_DATA_9F: 35675, # FLOAT_MAT3
gpu.GPU_DATA_16F: 35676, # FLOAT_MAT4
}
def vs_to_130(data):
data['attributes'].append({
'varname': 'bl_Vertex',
'type': gpu.CD_ORCO,
'datatype': gpu.GPU_DATA_4F
})
data['attributes'].append({
'varname': 'bl_Normal',
'type': -1,
'datatype': gpu.GPU_DATA_3F
})
data['uniforms'].append({
'varname': 'bl_ModelViewMatrix',
'type': 0,
'datatype': gpu.GPU_DATA_16F,
})
data['uniforms'].append({
'varname': 'bl_ProjectionMatrix',
'type': 0,
'datatype': gpu.GPU_DATA_16F,
})
data['uniforms'].append({
'varname': 'bl_NormalMatrix',
'type': 0,
'datatype': gpu.GPU_DATA_9F,
})
src = '#version 130\n'
src += 'in vec4 bl_Vertex;\n'
src += 'in vec3 bl_Normal;\n'
src += 'uniform mat4 bl_ModelViewMatrix;\n'
src += 'uniform mat4 bl_ProjectionMatrix;\n'
src += 'uniform mat3 bl_NormalMatrix;\n'
src += data['vertex']
src = re.sub(r'#ifdef USE_OPENSUBDIV([^#]*)#endif', '', src)
src = re.sub(r'#ifndef USE_OPENSUBDIV([^#]*)#endif', r'\1', src)
src = re.sub(r'#ifdef CLIP_WORKAROUND(.*?)#endif', '', src, 0, re.DOTALL)
src = re.sub(r'\bvarying\b', 'out', src)
src = re.sub(r'\bgl_(?!Position)(.*?)\b', r'bl_\1', src)
data['vertex'] = src
def fs_to_130(data):
src = '#version 130\n'
src += 'out vec4 frag_color;\n'
src += 'uniform mat4 bl_ProjectionMatrix;\n'
src += 'uniform mat4 bl_ModelViewMatrix;\n'
src += 'uniform mat4 bl_ModelViewMatrixInverse;\n'
src += 'uniform mat3 bl_NormalMatrix;\n'
src += 'uniform mat4 bl_ProjectionMatrixInverse;\n'
src += data['fragment']
src = re.sub(r'\bvarying\b', 'in', src)
src = re.sub(r'\bgl_FragColor\b', 'frag_color', src)
src = re.sub(r'\bgl_(?!FrontFacing)(.*?)\b', r'bl_\1', src)
# Cannot support node_bsdf functions without resolving use of gl_Light
src = re.sub(r'void node_((bsdf)|(subsurface))_.*?^}', '', src, 0, re.DOTALL | re.MULTILINE)
# Need to gather light data from more general uniforms
light_count = 0
light_map = {}
decl_start_str = 'void main()\n{\n'
for uniform in data['uniforms']:
if uniform['type'] == gpu.GPU_DYNAMIC_LAMP_DYNCO:
lamp_name = uniform['lamp'].name
if lamp_name not in light_map:
light_map[lamp_name] = light_count
light_count += 1
light_index = light_map[lamp_name]
varname = 'light{}_transform'.format(light_index)
uniform['datatype'] = gpu.GPU_DATA_16F
src = src.replace(
'uniform vec3 {};'.format(uniform['varname']),
'uniform mat4 {};'.format(varname)
)
var_decl_start = src.find(decl_start_str) + len(decl_start_str)
decl_str = '\tvec3 {} = {}[3].xyz;\n'.format(uniform['varname'], varname)
src = src[:var_decl_start] + decl_str + src[var_decl_start:]
uniform['varname'] = varname
data['fragment'] = src.replace('\r\r\n', '')
def vs_to_web(data):
src = data['vertex']
precision_block = '\n'
for data_type in ('float', 'int'):
precision_block += 'precision mediump {};\n'.format(data_type)
src = src.replace('#version 130', '#version 100\n' + precision_block)
src = re.sub(r'\bin\b', 'attribute', src)
src = re.sub(r'\bout\b', 'varying', src)
data['vertex'] = src
def fs_to_web(data):
src = data['fragment']
precision_block = '\n'
for data_type in ('float', 'int'):
precision_block += 'precision mediump {};\n'.format(data_type)
header = '#version 100\n'
header += '#extension GL_OES_standard_derivatives: enable\n'
header += precision_block
src = src.replace('#version 130', header)
src = re.sub(r'\bin\b', 'varying', src)
src = src.replace('out vec4 frag_color;\n', '')
src = re.sub(r'\bfrag_color\b', 'gl_FragColor', src)
# TODO: This should be fixed in Blender
src = src.replace('blend = (normalize(vec).z + 1)', 'blend = (normalize(vec).z + 1.0)')
# TODO: This likely breaks shadows
src = src.replace('sampler2DShadow', 'sampler2D')
src = src.replace('shadow2DProj', 'texture2DProj')
data['fragment'] = src
def to_130(data):
vs_to_130(data)
fs_to_130(data)
def to_web(data):
to_130(data)
vs_to_web(data)
fs_to_web(data)
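# Quick illustration of the regex rewriting used by the converters above:
# GLSL 1.20 keywords and gl_ built-ins are renamed with word-boundary
# substitutions. The shader string is a made-up fragment shader body.
if __name__ == "__main__":
    _demo = 'varying vec3 normal;\nvoid main() { gl_FragColor = vec4(normal, 1.0); }'
    _demo = re.sub(r'\bvarying\b', 'in', _demo)
    _demo = re.sub(r'\bgl_FragColor\b', 'frag_color', _demo)
    print(_demo)
    # in vec3 normal;
    # void main() { frag_color = vec4(normal, 1.0); }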
class KhrTechniqueWebgl:
ext_meta = {
'name': 'KHR_technique_webgl',
'url': (
'https://github.com/KhronosGroup/glTF/tree/master/extensions/'
'Khronos/KHR_technique_webgl'
),
'isDraft': True,
'settings': {
'embed_shaders': bpy.props.BoolProperty(
name='Embed Shader Data',
description='Embed shader data into the glTF file instead of a separate file',
default=False
)
}
}
settings = None
def export_material(self, state, material):
shader_data = gpu.export_shader(bpy.context.scene, material)
if state['settings']['asset_profile'] == 'DESKTOP':
to_130(shader_data)
else:
to_web(shader_data)
if self.settings.embed_shaders is True:
fs_bytes = shader_data['fragment'].encode()
fs_uri = 'data:text/plain;base64,' + base64.b64encode(fs_bytes).decode('ascii')
vs_bytes = shader_data['vertex'].encode()
vs_uri = 'data:text/plain;base64,' + base64.b64encode(vs_bytes).decode('ascii')
else:
names = [
bpy.path.clean_name(name) + '.glsl'
for name in (material.name+'VS', material.name+'FS')
]
data = (shader_data['vertex'], shader_data['fragment'])
for name, data in zip(names, data):
filename = os.path.join(state['settings']['gltf_output_dir'], name)
with open(filename, 'w') as fout:
fout.write(data)
vs_uri, fs_uri = names
state['output']['shaders'].append({
'type': 35632,
'uri': fs_uri,
'name': material.name + 'FS',
})
state['output']['shaders'].append({
'type': 35633,
'uri': vs_uri,
'name': material.name + 'VS',
})
# Handle programs
state['output']['programs'].append({
'attributes': [a['varname'] for a in shader_data['attributes']],
'fragmentShader': 'shaders_{}FS'.format(material.name),
'vertexShader': 'shaders_{}VS'.format(material.name),
'name': material.name,
})
# Handle parameters/values
values = {}
parameters = {}
for attribute in shader_data['attributes']:
name = attribute['varname']
semantic = TYPE_TO_SEMANTIC[attribute['type']]
_type = DATATYPE_TO_GLTF_TYPE[attribute['datatype']]
parameters[name] = {'semantic': semantic, 'type': _type}
for uniform in shader_data['uniforms']:
valname = TYPE_TO_NAME.get(uniform['type'], uniform['varname'])
rnaname = valname
semantic = None
node = None
value = None
if uniform['varname'] == 'bl_ModelViewMatrix':
semantic = 'MODELVIEW'
elif uniform['varname'] == 'bl_ProjectionMatrix':
semantic = 'PROJECTION'
elif uniform['varname'] == 'bl_NormalMatrix':
semantic = 'MODELVIEWINVERSETRANSPOSE'
else:
if uniform['type'] in LAMP_TYPES:
node = uniform['lamp'].name
valname = node + '_' + valname
semantic = TYPE_TO_SEMANTIC.get(uniform['type'], None)
if not semantic:
lamp_obj = bpy.data.objects[node]
value = getattr(lamp_obj.data, rnaname)
elif uniform['type'] in MIST_TYPES:
valname = 'mist_' + valname
mist_settings = bpy.context.scene.world.mist_settings
if valname == 'mist_color':
value = bpy.context.scene.world.horizon_color
else:
value = getattr(mist_settings, rnaname)
if valname == 'mist_falloff':
if value == 'QUADRATIC':
value = 0.0
elif value == 'LINEAR':
value = 1.0
else:
value = 2.0
elif uniform['type'] in WORLD_TYPES:
world = bpy.context.scene.world
value = getattr(world, rnaname)
elif uniform['type'] in MATERIAL_TYPES:
converter = DATATYPE_TO_CONVERTER[uniform['datatype']]
value = converter(getattr(material, rnaname))
values[valname] = value
elif uniform['type'] == gpu.GPU_DYNAMIC_SAMPLER_2DIMAGE:
texture_slots = [
slot for slot in material.texture_slots
if slot and slot.texture.type == 'IMAGE'
]
for slot in texture_slots:
if slot.texture.image.name == uniform['image'].name:
value = 'texture_' + slot.texture.name
values[uniform['varname']] = value
else:
print('Unconverted uniform:', uniform)
parameter = {}
if semantic:
parameter['semantic'] = semantic
if node:
parameter['node'] = 'node_' + node
elif value:
parameter['value'] = DATATYPE_TO_CONVERTER[uniform['datatype']](value)
else:
parameter['value'] = None
if uniform['type'] == gpu.GPU_DYNAMIC_SAMPLER_2DIMAGE:
parameter['type'] = 35678 # SAMPLER_2D
else:
parameter['type'] = DATATYPE_TO_GLTF_TYPE[uniform['datatype']]
parameters[valname] = parameter
uniform['valname'] = valname
# Handle techniques
tech_name = 'techniques_' + material.name
state['output']['techniques'].append({
'parameters': parameters,
'program': 'programs_' + material.name,
'attributes': {a['varname']: a['varname'] for a in shader_data['attributes']},
'uniforms': {u['varname']: u['valname'] for u in shader_data['uniforms']},
'name': material.name,
})
return {'technique': tech_name, 'values': values, 'name': material.name}
def export(self, state):
state['output']['techniques'] = []
state['output']['shaders'] = []
state['output']['programs'] = []
state['output']['materials'] = [
self.export_material(state, bl_mat) for bl_mat in state['input']['materials']
]
|
Kupoman/blendergltf
|
blendergltf/extension_exporters/khr_technique_webgl.py
|
Python
|
apache-2.0
| 14,656
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "ownmusicweb.settings")
try:
from django.core.management import execute_from_command_line
except ImportError:
# The above import may fail for some other reason. Ensure that the
# issue is really that Django is missing to avoid masking other
# exceptions on Python 2.
try:
import django
except ImportError:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
)
raise
execute_from_command_line(sys.argv)
|
Lightshadow244/OwnMusicWeb
|
ownmusicweb/manage.py
|
Python
|
apache-2.0
| 809
|
# Copyright 2021 Google LLC
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# https://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Longformer modules."""
from flax import nn
import jax.numpy as jnp
from lra_benchmarks.models.layers import common_layers
from lra_benchmarks.models.longformer import longformer_attention
class LongformerBlock(nn.Module):
"""Longformer Layer."""
def apply(self,
inputs,
qkv_dim,
mlp_dim,
num_heads,
sliding_window_size=512,
global_mask=None,
causal_mask=False,
dtype=jnp.float32,
inputs_segmentation=None,
padding_mask=None,
dropout_rate=0.1,
attention_dropout_rate=0.1,
deterministic=False):
"""Applies the LongformerBlock module.
Args:
inputs: input data of size `[bs, seq_len, features]`.
qkv_dim: dimension of the query/key/value.
mlp_dim: dimension of the mlp on top of attention block.
num_heads: number of attention heads.
sliding_window_size: size of sliding window attention to use.
global_mask: boolean matrix of shape `[bs, seq_len]`, where `True`
indicates that the position is globally attended. By default, no global
attention is used.
causal_mask: If true, apply causal attention mask.
dtype: the dtype of the computation (default: float32).
inputs_segmentation: input segmentation info for packed examples.
padding_mask: bool, mask padding tokens.
dropout_rate: dropout rate
attention_dropout_rate: dropout rate for attention weights
deterministic: if true, apply dropout else don't.
Returns:
output of shape `[bs, seq_len, mlp_dim]`.
"""
assert inputs.ndim == 3
x = nn.LayerNorm(inputs)
x = longformer_attention.LongformerSelfAttention(
x,
num_heads=num_heads,
qkv_features=qkv_dim,
sliding_window_size=sliding_window_size,
global_mask=global_mask,
causal_mask=causal_mask,
dtype=dtype,
segmentation=inputs_segmentation,
padding_mask=padding_mask,
kernel_init=nn.initializers.xavier_uniform(),
bias_init=nn.initializers.normal(stddev=1e-6),
bias=False,
broadcast_dropout=False,
dropout_rate=attention_dropout_rate,
deterministic=deterministic)
x = nn.dropout(x, rate=dropout_rate, deterministic=deterministic)
x = x + inputs
y = nn.LayerNorm(x)
y = common_layers.MlpBlock(
y,
mlp_dim=mlp_dim,
dtype=dtype,
dropout_rate=dropout_rate,
deterministic=deterministic)
return x + y
class LongformerEncoder(nn.Module):
"""Longformer Encoder."""
def apply(self,
inputs,
vocab_size,
sliding_window_size=512,
global_mask=None,
causal_mask=False,
inputs_positions=None,
inputs_segmentation=None,
shared_embedding=None,
use_bfloat16=False,
emb_dim=512,
num_heads=8,
dtype=jnp.float32,
num_layers=6,
qkv_dim=512,
mlp_dim=2048,
max_len=512,
train=True,
dropout_rate=0.1,
attention_dropout_rate=0.1,
learn_pos_emb=False,
classifier=False,
classifier_pool='CLS',
num_classes=10):
"""Applies Longformer model on the inputs.
Args:
inputs: input data.
vocab_size: size of the vocabulary.
sliding_window_size: size of sliding window attention to use.
global_mask: boolean matrix of shape `[bs, seq_len]`, where `True`
indicates that the position is globally attended. By default, no global
attention is used.
causal_mask: If true, apply causal attention masking.
inputs_positions: input subsequence positions for packed examples.
inputs_segmentation: input segmentation info for packed examples.
shared_embedding: a shared embedding layer to use.
use_bfloat16: bool: whether use bfloat16.
emb_dim: dimension of embedding
num_heads: number of heads
dtype: the dtype of the computation (default: float32)
num_layers: number of layers
qkv_dim: dimension of the query/key/value
mlp_dim: dimension of the mlp on top of attention block
max_len: maximum length.
      train: whether the model is in training mode.
dropout_rate: dropout rate
attention_dropout_rate: dropout rate for attention weights
learn_pos_emb: boolean, if learn the positional embedding or use the
sinusoidal positional embedding.
classifier: boolean, for classification mode (output N-class logits)
      classifier_pool: str, supports "CLS", "MEAN", "MAX" pooling.
num_classes: int, number of classification classes.
Returns:
      output of the encoder, or classification logits if classifier is true.
"""
assert inputs.ndim == 2 # (batch, len)
# Padding Masks
src_padding_mask = (inputs > 0)[..., None]
# Input Embedding
if shared_embedding is None:
input_embed = nn.Embed.partial(
num_embeddings=vocab_size,
features=emb_dim,
embedding_init=nn.initializers.normal(stddev=1.0))
else:
input_embed = shared_embedding
x = inputs.astype('int32')
x = input_embed(x)
if classifier and classifier_pool == 'CLS':
cls = self.param('cls', (1, 1, emb_dim), nn.initializers.zeros)
cls = jnp.tile(cls, [x.shape[0], 1, 1])
x = jnp.concatenate([cls, x], axis=1)
max_len += 1
src_padding_mask = jnp.concatenate(
[src_padding_mask[:, :1], src_padding_mask], axis=1)
pe_init = nn.initializers.normal(stddev=0.02) if learn_pos_emb else None
x = common_layers.AddPositionEmbs(
x,
inputs_positions=inputs_positions,
posemb_init=pe_init,
max_len=max_len,
name='posembed_input')
x = nn.dropout(x, rate=dropout_rate, deterministic=not train)
if use_bfloat16:
x = x.astype(jnp.bfloat16)
dtype = jnp.bfloat16
else:
dtype = jnp.float32
# Input Encoder
for lyr in range(num_layers):
x = LongformerBlock(
x,
qkv_dim=qkv_dim,
mlp_dim=mlp_dim,
num_heads=num_heads,
sliding_window_size=sliding_window_size,
global_mask=global_mask,
causal_mask=causal_mask,
dtype=dtype,
inputs_segmentation=inputs_segmentation,
padding_mask=src_padding_mask,
dropout_rate=dropout_rate,
attention_dropout_rate=attention_dropout_rate,
deterministic=not train,
name=f'encoderblock_{lyr}')
encoded = nn.LayerNorm(x, dtype=dtype, name='encoder_norm')
if classifier:
encoded = common_layers.classifier_head(
encoded, num_classes, mlp_dim, pooling_mode=classifier_pool)
return encoded
class LongformerDualEncoder(nn.Module):
"""Longformer Model for Matching (dual encoding) tasks."""
def apply(self,
inputs1,
inputs2,
vocab_size=None,
inputs1_positions=None,
inputs2_positions=None,
inputs1_segmentation=None,
inputs2_segmentation=None,
use_bfloat16=False,
emb_dim=512,
num_heads=8,
num_layers=6,
qkv_dim=512,
mlp_dim=2048,
max_len=2048,
train=False,
dropout_rate=0.1,
attention_dropout_rate=0.1,
classifier=True,
classifier_pool='CLS',
num_classes=2,
interaction=None
):
"""Applies Transformer model on text similarity.
A deliberate choice to distinguish this from NLI because
we may want to do different things to the model later. Dual Encoding
mode enforces that we do not do cross attention between pairs.
Args:
inputs1: input data.
inputs2: target data.
vocab_size: size of the input vocabulary.
inputs1_positions: input subsequence positions for packed examples.
inputs2_positions: target subsequence positions for packed examples.
inputs1_segmentation: input segmentation info for packed examples.
inputs2_segmentation: target segmentation info for packed examples.
use_bfloat16: bool: whether use bfloat16.
emb_dim: dimension of embedding.
num_heads: number of heads.
num_layers: number of layers.
qkv_dim: dimension of the query/key/value.
mlp_dim: dimension of the mlp on top of attention block.
max_len: maximum length.
train: whether it is training.
dropout_rate: dropout rate.
attention_dropout_rate: dropout rate for attention weights.
classifier: boolean, to use classifier.
classifier_pool: str, supports "MEAN", "MAX" pooling.
num_classes: int, number of classification classes.
interaction: str
Returns:
output of a transformer decoder.
"""
encoder = LongformerEncoder.shared(
inputs_positions=inputs1_positions,
inputs_segmentation=inputs1_segmentation,
vocab_size=vocab_size,
use_bfloat16=use_bfloat16,
emb_dim=emb_dim,
num_heads=num_heads,
num_layers=num_layers,
qkv_dim=qkv_dim,
mlp_dim=mlp_dim,
max_len=max_len,
train=train,
dropout_rate=dropout_rate,
attention_dropout_rate=attention_dropout_rate,
name='encoder')
inputs1_encoded = encoder(inputs1)
inputs2_encoded = encoder(inputs2)
encoded = common_layers.classifier_head_dual(
inputs1_encoded,
inputs2_encoded,
num_classes,
mlp_dim,
pooling_mode=classifier_pool,
interaction=interaction)
return encoded
class LongformerDecoder(nn.Module):
"""Longformer Decoder."""
def apply(self,
inputs,
vocab_size,
sliding_window_size=512,
global_mask=None,
emb_dim=512,
num_heads=8,
dtype=jnp.float32,
num_layers=6,
qkv_dim=512,
mlp_dim=2048,
max_len=2048,
train=False,
shift=True,
dropout_rate=0.1,
attention_dropout_rate=0.1):
"""Applies Longformer model on the inputs, using causal masking.
Args:
inputs: input data
vocab_size: size of the vocabulary
sliding_window_size: size of sliding window attention to use.
global_mask: boolean matrix of shape `[bs, seq_len]`, where `True`
indicates that the position is globally attended. By default, no global
attention is used.
emb_dim: dimension of embedding
num_heads: number of heads
dtype: the dtype of the computation (default: float32)
num_layers: number of layers
qkv_dim: dimension of the query/key/value
mlp_dim: dimension of the mlp on top of attention block
max_len: maximum length.
train: bool: if model is training.
shift: bool: if we right-shift input - this is only disabled for
fast, looped single-token autoregressive decoding.
dropout_rate: dropout rate
attention_dropout_rate: dropout rate for attention weights
Returns:
output of a transformer decoder.
"""
padding_mask = jnp.where(inputs > 0, 1, 0).astype(jnp.float32)[..., None]
assert inputs.ndim == 2 # (batch, len)
x = inputs
if shift:
x = common_layers.shift_right(x)
x = x.astype('int32')
x = common_layers.Embed(
x, num_embeddings=vocab_size, features=emb_dim, name='embed')
x = common_layers.AddPositionEmbs(
x,
max_len=max_len,
posemb_init=common_layers.sinusoidal_init(max_len=max_len),
cache=None)
x = nn.dropout(x, rate=dropout_rate, deterministic=not train)
for _ in range(num_layers):
x = LongformerBlock(
x,
qkv_dim=qkv_dim,
mlp_dim=mlp_dim,
num_heads=num_heads,
sliding_window_size=sliding_window_size,
global_mask=global_mask,
causal_mask=True,
padding_mask=padding_mask,
dropout_rate=dropout_rate,
attention_dropout_rate=attention_dropout_rate,
deterministic=not train,
cache=None,
)
x = nn.LayerNorm(x)
logits = nn.Dense(
x,
vocab_size,
kernel_init=nn.initializers.xavier_uniform(),
bias_init=nn.initializers.normal(stddev=1e-6))
return logits
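# Sketch of building the `global_mask` argument used throughout this module:
# a boolean [batch, seq_len] array where True marks globally-attended
# positions. Here only position 0 (e.g. a prepended CLS token) is global.
if __name__ == "__main__":
    batch_size, seq_len = 2, 8
    demo_global_mask = jnp.concatenate(
        [jnp.ones((batch_size, 1), dtype=bool),
         jnp.zeros((batch_size, seq_len - 1), dtype=bool)], axis=1)
    print(demo_global_mask)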
|
google-research/long-range-arena
|
lra_benchmarks/models/longformer/longformer.py
|
Python
|
apache-2.0
| 13,102
|
# Copyright 2022 The Magenta Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Create a dataset of SequenceExamples from NoteSequence protos.
This script will extract melodies and chords from NoteSequence protos and save
them to TensorFlow's SequenceExample protos for input to the improv RNN models.
"""
import os
from magenta.models.improv_rnn import improv_rnn_config_flags
from magenta.models.improv_rnn import improv_rnn_pipeline
from magenta.pipelines import pipeline
import tensorflow.compat.v1 as tf
flags = tf.app.flags
FLAGS = tf.app.flags.FLAGS
flags.DEFINE_string(
'input', None,
'TFRecord to read NoteSequence protos from.')
flags.DEFINE_string(
'output_dir', None,
'Directory to write training and eval TFRecord files. The TFRecord files '
'are populated with SequenceExample protos.')
flags.DEFINE_float(
'eval_ratio', 0.1,
'Fraction of input to set aside for eval set. Partition is randomly '
'selected.')
flags.DEFINE_string(
'log', 'INFO',
    'The threshold for what messages will be logged: DEBUG, INFO, WARN, ERROR, '
'or FATAL.')
def main(unused_argv):
tf.logging.set_verbosity(FLAGS.log)
config = improv_rnn_config_flags.config_from_flags()
pipeline_instance = improv_rnn_pipeline.get_pipeline(
config, FLAGS.eval_ratio)
FLAGS.input = os.path.expanduser(FLAGS.input)
FLAGS.output_dir = os.path.expanduser(FLAGS.output_dir)
pipeline.run_pipeline_serial(
pipeline_instance,
pipeline.tf_record_iterator(FLAGS.input, pipeline_instance.input_type),
FLAGS.output_dir)
def console_entry_point():
tf.disable_v2_behavior()
tf.app.run(main)
if __name__ == '__main__':
console_entry_point()
|
magenta/magenta
|
magenta/models/improv_rnn/improv_rnn_create_dataset.py
|
Python
|
apache-2.0
| 2,205
|
from tests.approvals_config import configure_approvaltests
import pytest
# begin-snippet: conftest_pytest_session_scoped
@pytest.fixture(scope="session", autouse=True)
def set_default_reporter_for_all_tests():
configure_approvaltests()
# end-snippet
|
approvals/ApprovalTests.Python
|
tests/conftest.py
|
Python
|
apache-2.0
| 258
|
# Copyright 2016 Adler Brediks Medrado
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup, find_packages
with open("requirements.txt") as reqs:
install_requires = reqs.readlines()
setup(
name="abbr",
version="0.0.1",
url="https://github.com/adlermedrado/abbr",
author="Adler Brediks Medrado",
author_email="abbr@adlermedrado.com.br",
license="Apache-2.0",
description="A client library to abbreviate string contents",
long_description=open('README.rst').read(),
packages=find_packages(),
install_requires=install_requires,
include_package_data=True,
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
],
)
|
adlermedrado/abbr
|
setup.py
|
Python
|
apache-2.0
| 1,346
|
from bzt.modules.functional import FunctionalAggregator, FunctionalAggregatorListener, FunctionalSample
from tests import BZTestCase
from tests.mocks import MockFunctionalReader
class MockListener(FunctionalAggregatorListener):
def __init__(self):
self.results = []
def aggregated_results(self, result, cumulative_results):
self.results.append(result)
class TestFunctionalAggregator(BZTestCase):
def get_reader(self):
mock = MockFunctionalReader()
mock.data = [
FunctionalSample(test_case="test1", test_suite="Tests1", status="PASSED", start_time=1, duration=1,
error_msg=None, error_trace=None, extras=None),
FunctionalSample(test_case="test2", test_suite="Tests1", status="BROKEN", start_time=2, duration=1,
error_msg="Something broke", error_trace=None, extras=None),
FunctionalSample(test_case="test3", test_suite="Tests2", status="PASSED", start_time=2, duration=1,
error_msg=None, error_trace=None, extras=None),
FunctionalSample(test_case="test2", test_suite="Tests1", status="FAILED", start_time=3, duration=1,
error_msg="Something failed", error_trace=None, extras=None),
FunctionalSample(test_case="test1", test_suite="Tests1", status="SKIPPED", start_time=3, duration=1,
error_msg="Disabled by user", error_trace=None, extras=None),
FunctionalSample(test_case="test3", test_suite="Tests2", status="PASSED", start_time=4, duration=1,
error_msg=None, error_trace=None, extras=None),
FunctionalSample(test_case="test1", test_suite="Tests1", status="BROKEN", start_time=4, duration=1,
error_msg="Broken", error_trace=None, extras=None),
FunctionalSample(test_case="test1", test_suite="Tests1", status="PASSED", start_time=5, duration=1,
error_msg=None, error_trace=None, extras=None),
FunctionalSample(test_case="test2", test_suite="Tests1", status="PASSED", start_time=4, duration=1,
error_msg=None, error_trace=None, extras=None),
FunctionalSample(test_case="test3", test_suite="Tests2", status="FAILED", start_time=6, duration=1,
error_msg="Really failed", error_trace=None, extras=None),
FunctionalSample(test_case="test1", test_suite="Tests1", status="PASSED", start_time=6, duration=1,
error_msg=None, error_trace=None, extras=None),
]
return mock
def test_aggregation(self):
reader = self.get_reader()
obj = FunctionalAggregator()
obj.prepare()
obj.add_underling(reader)
obj.process_readers()
tree = obj.cumulative_results
self.assertEqual({"Tests2", "Tests1"}, set(tree.test_suites()))
self.assertEqual(len(tree.test_cases("Tests1")), 8)
self.assertEqual(len(tree.test_cases("Tests2")), 3)
obj.post_process()
def test_listeners(self):
listener = MockListener()
obj = FunctionalAggregator()
obj.prepare()
obj.add_underling(self.get_reader())
obj.add_listener(listener)
obj.check()
obj.post_process()
self.assertEqual(len(listener.results), 1)
|
itaymendel/taurus
|
tests/modules/test_functionalAggregator.py
|
Python
|
apache-2.0
| 3,428
|
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
import logging
import os
import sys
import threading
from contextlib import contextmanager
from dataclasses import dataclass
from setproctitle import setproctitle as set_process_title
from pants.base.build_environment import get_buildroot
from pants.base.exception_sink import ExceptionSink, SignalHandler
from pants.base.exiter import Exiter
from pants.bin.daemon_pants_runner import DaemonPantsRunner
from pants.engine.native import Native
from pants.engine.rules import UnionMembership
from pants.init.engine_initializer import EngineInitializer
from pants.init.logging import init_rust_logger, setup_logging
from pants.init.options_initializer import BuildConfigInitializer, OptionsInitializer
from pants.option.options_bootstrapper import OptionsBootstrapper
from pants.option.options_fingerprinter import OptionsFingerprinter
from pants.option.scope import GLOBAL_SCOPE
from pants.pantsd.process_manager import FingerprintedProcessManager
from pants.pantsd.service.fs_event_service import FSEventService
from pants.pantsd.service.pailgun_service import PailgunService
from pants.pantsd.service.pants_service import PantsServices
from pants.pantsd.service.scheduler_service import SchedulerService
from pants.pantsd.service.store_gc_service import StoreGCService
from pants.pantsd.watchman_launcher import WatchmanLauncher
from pants.util.contextutil import stdio_as
from pants.util.memo import memoized_property
from pants.util.strutil import ensure_text
class _LoggerStream(object):
"""A sys.std{out,err} replacement that pipes output to a logger.
N.B. `logging.Logger` expects unicode. However, most of our outstream logic, such as in
`exiter.py`, will use `sys.std{out,err}.buffer` and thus a bytes interface. So, we must provide
a `buffer` property, and change the semantics of the buffer to always convert the message to
unicode. This is an unfortunate code smell, as `logging` does not expose a bytes interface so
this is the best solution we could think of.
"""
def __init__(self, logger, log_level, handler):
"""
:param logging.Logger logger: The logger instance to emit writes to.
:param int log_level: The log level to use for the given logger.
:param Handler handler: The underlying log handler, for determining the fileno
to support faulthandler logging.
"""
self._logger = logger
self._log_level = log_level
self._handler = handler
def write(self, msg):
msg = ensure_text(msg)
for line in msg.rstrip().splitlines():
            # The log only accepts text, and will raise a decoding error when given a
            # bytes input for unicode text while the default encoding is ascii.
line = ensure_text(line)
self._logger.log(self._log_level, line.rstrip())
def flush(self):
return
def isatty(self):
return False
def fileno(self):
return self._handler.stream.fileno()
@property
def buffer(self):
return self
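# Usage sketch (hypothetical handler and log file): the stream above lets stray
# print() / stderr writes end up in the daemon log, mirroring what
# _pantsd_logging() below does with the real log handler.
#
#   handler = logging.StreamHandler(open("pantsd.log", "a"))
#   logging.getLogger().addHandler(handler)
#   sys.stdout = _LoggerStream(logging.getLogger(), logging.INFO, handler)
#   sys.stderr = _LoggerStream(logging.getLogger(), logging.WARN, handler)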
class PantsDaemonSignalHandler(SignalHandler):
def __init__(self, daemon):
super().__init__()
self._daemon = daemon
def handle_sigint(self, signum, _frame):
self._daemon.terminate(include_watchman=False)
class PantsDaemon(FingerprintedProcessManager):
"""A daemon that manages PantsService instances."""
JOIN_TIMEOUT_SECONDS = 1
LOG_NAME = "pantsd.log"
class StartupFailure(Exception):
"""Represents a failure to start pantsd."""
class RuntimeFailure(Exception):
"""Represents a pantsd failure at runtime, usually from an underlying service failure."""
@dataclass(frozen=True)
class Handle:
"""A handle to a "probably running" pantsd instance.
We attempt to verify that the pantsd instance is still running when we create a Handle, but
        after it has been created it is entirely possible that the pantsd instance perishes.
"""
pid: int
port: int
metadata_base_dir: str
class Factory:
@classmethod
def maybe_launch(cls, options_bootstrapper):
"""Creates and launches a daemon instance if one does not already exist.
:param OptionsBootstrapper options_bootstrapper: The bootstrap options.
:returns: A Handle for the running pantsd instance.
:rtype: PantsDaemon.Handle
"""
stub_pantsd = cls.create(options_bootstrapper, full_init=False)
with stub_pantsd._services.lifecycle_lock:
if stub_pantsd.needs_restart(stub_pantsd.options_fingerprint):
# Once we determine we actually need to launch, recreate with full initialization.
pantsd = cls.create(options_bootstrapper)
return pantsd.launch()
else:
# We're already launched.
return PantsDaemon.Handle(
stub_pantsd.await_pid(10),
stub_pantsd.read_named_socket("pailgun", int),
stub_pantsd._metadata_base_dir,
)
@classmethod
def restart(cls, options_bootstrapper):
"""Restarts a running daemon instance.
:param OptionsBootstrapper options_bootstrapper: The bootstrap options.
:returns: A Handle for the pantsd instance.
:rtype: PantsDaemon.Handle
"""
pantsd = cls.create(options_bootstrapper)
with pantsd._services.lifecycle_lock:
# N.B. This will call `pantsd.terminate()` before starting.
return pantsd.launch()
@classmethod
def create(cls, options_bootstrapper, full_init=True):
"""
:param OptionsBootstrapper options_bootstrapper: The bootstrap options.
:param bool full_init: Whether or not to fully initialize an engine et al for the purposes
of spawning a new daemon. `full_init=False` is intended primarily
for lightweight lifecycle checks (since there is a ~1s overhead to
initialize the engine). See the impl of `maybe_launch` for an example
of the intended usage.
"""
bootstrap_options = options_bootstrapper.bootstrap_options
bootstrap_options_values = bootstrap_options.for_global_scope()
# TODO: https://github.com/pantsbuild/pants/issues/3479
watchman = WatchmanLauncher.create(bootstrap_options_values).watchman
if full_init:
build_root = get_buildroot()
native = Native()
build_config = BuildConfigInitializer.get(options_bootstrapper)
legacy_graph_scheduler = EngineInitializer.setup_legacy_graph(
native, options_bootstrapper, build_config
)
services = cls._setup_services(
build_root,
bootstrap_options_values,
legacy_graph_scheduler,
watchman,
union_membership=UnionMembership(build_config.union_rules()),
)
else:
build_root = None
native = None
services = PantsServices()
return PantsDaemon(
native=native,
build_root=build_root,
work_dir=bootstrap_options_values.pants_workdir,
log_level=bootstrap_options_values.level.upper(),
services=services,
metadata_base_dir=bootstrap_options_values.pants_subprocessdir,
bootstrap_options=bootstrap_options,
)
@staticmethod
def _setup_services(
build_root,
bootstrap_options,
legacy_graph_scheduler,
watchman,
union_membership: UnionMembership,
):
"""Initialize pantsd services.
:returns: A PantsServices instance.
"""
should_shutdown_after_run = bootstrap_options.shutdown_pantsd_after_run
fs_event_service = FSEventService(watchman, build_root,)
pidfile_absolute = PantsDaemon.metadata_file_path(
"pantsd", "pid", bootstrap_options.pants_subprocessdir
)
if pidfile_absolute.startswith(build_root):
pidfile = os.path.relpath(pidfile_absolute, build_root)
else:
pidfile = None
logging.getLogger(__name__).warning(
"Not watching pantsd pidfile because subprocessdir is outside of buildroot. Having "
"subprocessdir be a child of buildroot (as it is by default) may help avoid stray "
"pantsd processes."
)
scheduler_service = SchedulerService(
fs_event_service=fs_event_service,
legacy_graph_scheduler=legacy_graph_scheduler,
build_root=build_root,
invalidation_globs=OptionsInitializer.compute_pantsd_invalidation_globs(
build_root, bootstrap_options
),
pantsd_pidfile=pidfile,
union_membership=union_membership,
)
pailgun_service = PailgunService(
(bootstrap_options.pantsd_pailgun_host, bootstrap_options.pantsd_pailgun_port),
DaemonPantsRunner,
scheduler_service,
should_shutdown_after_run,
)
store_gc_service = StoreGCService(legacy_graph_scheduler.scheduler)
return PantsServices(
services=(fs_event_service, scheduler_service, pailgun_service, store_gc_service),
port_map=dict(pailgun=pailgun_service.pailgun_port),
)
def __init__(
self,
native,
build_root,
work_dir,
log_level,
services,
metadata_base_dir,
bootstrap_options=None,
):
"""
:param Native native: A `Native` instance.
:param string build_root: The pants build root.
:param string work_dir: The pants work directory.
:param string log_level: The log level to use for daemon logging.
:param PantsServices services: A registry of services to use in this run.
:param string metadata_base_dir: The ProcessManager metadata base dir.
:param Options bootstrap_options: The bootstrap options, if available.
"""
super().__init__(name="pantsd", metadata_base_dir=metadata_base_dir)
self._native = native
self._build_root = build_root
self._work_dir = work_dir
self._log_level = log_level
self._services = services
self._bootstrap_options = bootstrap_options
self._log_show_rust_3rdparty = (
bootstrap_options.for_global_scope().log_show_rust_3rdparty
if bootstrap_options
else True
)
self._log_dir = os.path.join(work_dir, self.name)
self._logger = logging.getLogger(__name__)
# N.B. This Event is used as nothing more than a convenient atomic flag - nothing waits on it.
self._kill_switch = threading.Event()
@memoized_property
def watchman_launcher(self):
return WatchmanLauncher.create(self._bootstrap_options.for_global_scope())
@property
def is_killed(self):
return self._kill_switch.is_set()
@property
def options_fingerprint(self):
return OptionsFingerprinter.combined_options_fingerprint_for_scope(
GLOBAL_SCOPE, self._bootstrap_options, fingerprint_key="daemon", invert=True
)
def shutdown(self, service_thread_map):
"""Gracefully terminate all services and kill the main PantsDaemon loop."""
with self._services.lifecycle_lock:
for service, service_thread in service_thread_map.items():
self._logger.info(f"terminating pantsd service: {service}")
service.terminate()
service_thread.join(self.JOIN_TIMEOUT_SECONDS)
self._logger.info("terminating pantsd")
self._kill_switch.set()
@staticmethod
def _close_stdio():
"""Close stdio streams to avoid output in the tty that launched pantsd."""
for fd in (sys.stdin, sys.stdout, sys.stderr):
file_no = fd.fileno()
fd.flush()
fd.close()
os.close(file_no)
@contextmanager
def _pantsd_logging(self):
"""A context manager that runs with pantsd logging.
Asserts that stdio (represented by file handles 0, 1, 2) is closed to ensure that we can
safely reuse those fd numbers.
"""
# Ensure that stdio is closed so that we can safely reuse those file descriptors.
for fd in (0, 1, 2):
try:
os.fdopen(fd)
raise AssertionError(f"pantsd logging cannot initialize while stdio is open: {fd}")
except OSError:
pass
# Redirect stdio to /dev/null for the rest of the run, to reserve those file descriptors
# for further forks.
with stdio_as(stdin_fd=-1, stdout_fd=-1, stderr_fd=-1):
# Reinitialize logging for the daemon context.
init_rust_logger(self._log_level, self._log_show_rust_3rdparty)
result = setup_logging(
self._log_level,
log_dir=self._log_dir,
log_name=self.LOG_NAME,
native=self._native,
warnings_filter_regexes=self._bootstrap_options.for_global_scope(),
)
self._native.override_thread_logging_destination_to_just_pantsd()
# Do a python-level redirect of stdout/stderr, which will not disturb `0,1,2`.
# TODO: Consider giving these pipes/actual fds, in order to make them "deep" replacements
# for `1,2`, and allow them to be used via `stdio_as`.
sys.stdout = _LoggerStream(logging.getLogger(), logging.INFO, result.log_handler)
sys.stderr = _LoggerStream(logging.getLogger(), logging.WARN, result.log_handler)
self._logger.debug("logging initialized")
yield (result.log_handler.stream, result.log_handler.native_filename)
def _setup_services(self, pants_services):
for service in pants_services.services:
self._logger.info(f"setting up service {service}")
service.setup(self._services)
@staticmethod
def _make_thread(service):
name = f"{service.__class__.__name__}Thread"
def target():
Native().override_thread_logging_destination_to_just_pantsd()
service.run()
t = threading.Thread(target=target, name=name)
t.daemon = True
return t
def _run_services(self, pants_services):
"""Service runner main loop."""
if not pants_services.services:
self._logger.critical("no services to run, bailing!")
return
service_thread_map = {
service: self._make_thread(service) for service in pants_services.services
}
# Start services.
for service, service_thread in service_thread_map.items():
self._logger.info(f"starting service {service}")
try:
service_thread.start()
except (RuntimeError, FSEventService.ServiceError):
self.shutdown(service_thread_map)
raise PantsDaemon.StartupFailure(
f"service {service} failed to start, shutting down!"
)
# Once all services are started, write our pid.
self.write_pid()
self.write_metadata_by_name(
"pantsd", self.FINGERPRINT_KEY, ensure_text(self.options_fingerprint)
)
# Monitor services.
while not self.is_killed:
for service, service_thread in service_thread_map.items():
if not service_thread.is_alive():
self.shutdown(service_thread_map)
raise PantsDaemon.RuntimeFailure(
f"service failure for {service}, shutting down!"
)
else:
# Avoid excessive CPU utilization.
service_thread.join(self.JOIN_TIMEOUT_SECONDS)
def _write_named_sockets(self, socket_map):
"""Write multiple named sockets using a socket mapping."""
for socket_name, socket_info in socket_map.items():
self.write_named_socket(socket_name, socket_info)
def run_sync(self):
"""Synchronously run pantsd."""
os.environ.pop("PYTHONPATH")
# Switch log output to the daemon's log stream from here forward.
# Also, register an exiter using os._exit to ensure we only close stdio streams once.
self._close_stdio()
with self._pantsd_logging() as (log_stream, log_filename), ExceptionSink.exiter_as(
lambda _: Exiter(exiter=os._exit)
):
# We don't have any stdio streams to log to anymore, so we log to a file.
# We don't override the faulthandler destination because the stream we get will proxy things
# via the rust logging code, and faulthandler needs to be writing directly to a real file
# descriptor. When pantsd logging was originally initialised, we already set up faulthandler
# to log to the correct file descriptor, so don't override it.
#
# We can get tracebacks of the pantsd process by tailing the pantsd log and sending it
# SIGUSR2.
ExceptionSink.reset_interactive_output_stream(
log_stream, override_faulthandler_destination=False,
)
# Reset the log location and the backtrace preference from the global bootstrap options.
global_bootstrap_options = self._bootstrap_options.for_global_scope()
ExceptionSink.reset_should_print_backtrace_to_terminal(
global_bootstrap_options.print_exception_stacktrace
)
ExceptionSink.reset_log_location(global_bootstrap_options.pants_workdir)
self._native.set_panic_handler()
# Set the process name in ps output to 'pantsd' vs './pants compile src/etc:: -ldebug'.
set_process_title(f"pantsd [{self._build_root}]")
# Write service socket information to .pids.
self._write_named_sockets(self._services.port_map)
# Enter the main service runner loop.
self._setup_services(self._services)
self._run_services(self._services)
def post_fork_child(self):
"""Post-fork() child callback for ProcessManager.daemon_spawn()."""
spawn_control_env = dict(
PANTS_ENTRYPOINT=f"{__name__}:launch",
# The daemon should run under the same sys.path as us; so we ensure
# this. NB: It will scrub PYTHONPATH once started to avoid infecting
# its own unrelated subprocesses.
PYTHONPATH=os.pathsep.join(sys.path),
)
exec_env = {**os.environ, **spawn_control_env}
# Pass all of sys.argv so that we can proxy arg flags e.g. `-ldebug`.
cmd = [sys.executable] + sys.argv
spawn_control_env_vars = " ".join(f"{k}={v}" for k, v in spawn_control_env.items())
cmd_line = " ".join(cmd)
self._logger.debug(f"cmd is: {spawn_control_env_vars} {cmd_line}")
# TODO: Improve error handling on launch failures.
os.spawnve(os.P_NOWAIT, sys.executable, cmd, env=exec_env)
def needs_launch(self):
"""Determines if pantsd needs to be launched.
N.B. This should always be called under care of the `lifecycle_lock`.
:returns: True if the daemon needs launching, False otherwise.
:rtype: bool
"""
new_fingerprint = self.options_fingerprint
        self._logger.debug(
            f"pantsd: is_alive={self.is_alive()} new_fingerprint={new_fingerprint} current_fingerprint={self.fingerprint}"
        )
return self.needs_restart(new_fingerprint)
def launch(self):
"""Launches pantsd in a subprocess.
N.B. This should always be called under care of the `lifecycle_lock`.
:returns: A Handle for the pantsd instance.
:rtype: PantsDaemon.Handle
"""
self.terminate(include_watchman=False)
self.watchman_launcher.maybe_launch()
self._logger.debug("launching pantsd")
self.daemon_spawn()
# Wait up to 60 seconds for pantsd to write its pidfile.
pantsd_pid = self.await_pid(60)
listening_port = self.read_named_socket("pailgun", int)
self._logger.debug(f"pantsd is running at pid {self.pid}, pailgun port is {listening_port}")
return self.Handle(pantsd_pid, listening_port, self._metadata_base_dir)
def terminate(self, include_watchman=True):
"""Terminates pantsd and watchman.
N.B. This should always be called under care of the `lifecycle_lock`.
"""
super().terminate()
if include_watchman:
self.watchman_launcher.terminate()
def needs_restart(self, option_fingerprint):
"""Overrides ProcessManager.needs_restart, to account for the case where pantsd is running
but we want to shutdown after this run.
        :param option_fingerprint: A fingerprint of the global bootstrap options.
:return: True if the daemon needs to restart.
"""
should_shutdown_after_run = (
self._bootstrap_options.for_global_scope().shutdown_pantsd_after_run
)
return super().needs_restart(option_fingerprint) or (
self.is_alive() and should_shutdown_after_run
)
def launch():
"""An external entrypoint that spawns a new pantsd instance."""
PantsDaemon.Factory.create(OptionsBootstrapper.create()).run_sync()
|
wisechengyi/pants
|
src/python/pants/pantsd/pants_daemon.py
|
Python
|
apache-2.0
| 22,406
|
#-----------------------------------------------------------------------------------------------------------------------
#Introduction to Computer Programming - IPC
#Universidade do Estado do Amazonas - UEA
#Prof. Jucimar Jr.
#Alexandre Marques Uchôa 1715310028
#Jandinne Duarte de Oliveira 1015070265
#Uriel Brito Barros 1515120558
#Roberta de Oliveira da cruz 0825070169
#Evandro Padilha Barroso Filho 1715310009
#
##
#Write a program that asks for the radius of a circle, then calculates and displays its area.
#-----------------------------------------------------------------------------------------------------------------------
r = float(input("Enter a radius: "))
area = (3.14 * r * r)
print('Its area is', area)
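# A minimal alternative sketch (not part of the original exercise): math.pi gives a
# slightly more precise constant than the 3.14 used above.
# import math
# area = math.pi * r ** 2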
|
jucimarjr/IPC_2017-1
|
lista02/lista02_exercicio01_questao06.py
|
Python
|
apache-2.0
| 781
|
# coding: utf-8
"""
Stakeholder engagement API
This API enables Intelligent Engagement for your Business. iEngage is a platform that combines process, augmented intelligence and rewards to help you intelligently engage customers.
OpenAPI spec version: 1.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class NLC(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self, nlc_id=None, nlc_classifier_name=None, created_date=None, modified_date=None, classification=None):
"""
NLC - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'nlc_id': 'int',
'nlc_classifier_name': 'str',
'created_date': 'datetime',
'modified_date': 'datetime',
'classification': 'list[Bucket]'
}
self.attribute_map = {
'nlc_id': 'nlcId',
'nlc_classifier_name': 'nlcClassifierName',
'created_date': 'createdDate',
'modified_date': 'modifiedDate',
'classification': 'classification'
}
self._nlc_id = nlc_id
self._nlc_classifier_name = nlc_classifier_name
self._created_date = created_date
self._modified_date = modified_date
self._classification = classification
@property
def nlc_id(self):
"""
Gets the nlc_id of this NLC.
:return: The nlc_id of this NLC.
:rtype: int
"""
return self._nlc_id
@nlc_id.setter
def nlc_id(self, nlc_id):
"""
Sets the nlc_id of this NLC.
:param nlc_id: The nlc_id of this NLC.
:type: int
"""
self._nlc_id = nlc_id
@property
def nlc_classifier_name(self):
"""
Gets the nlc_classifier_name of this NLC.
:return: The nlc_classifier_name of this NLC.
:rtype: str
"""
return self._nlc_classifier_name
@nlc_classifier_name.setter
def nlc_classifier_name(self, nlc_classifier_name):
"""
Sets the nlc_classifier_name of this NLC.
:param nlc_classifier_name: The nlc_classifier_name of this NLC.
:type: str
"""
self._nlc_classifier_name = nlc_classifier_name
@property
def created_date(self):
"""
Gets the created_date of this NLC.
:return: The created_date of this NLC.
:rtype: datetime
"""
return self._created_date
@created_date.setter
def created_date(self, created_date):
"""
Sets the created_date of this NLC.
:param created_date: The created_date of this NLC.
:type: datetime
"""
self._created_date = created_date
@property
def modified_date(self):
"""
Gets the modified_date of this NLC.
:return: The modified_date of this NLC.
:rtype: datetime
"""
return self._modified_date
@modified_date.setter
def modified_date(self, modified_date):
"""
Sets the modified_date of this NLC.
:param modified_date: The modified_date of this NLC.
:type: datetime
"""
self._modified_date = modified_date
@property
def classification(self):
"""
Gets the classification of this NLC.
:return: The classification of this NLC.
:rtype: list[Bucket]
"""
return self._classification
@classification.setter
def classification(self, classification):
"""
Sets the classification of this NLC.
:param classification: The classification of this NLC.
:type: list[Bucket]
"""
self._classification = classification
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
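# A hypothetical usage sketch (added comment, not part of the generated module); the
# field values below are invented for illustration only:
# >>> nlc = NLC(nlc_id=1, nlc_classifier_name='sentiment', classification=[])
# >>> nlc.to_dict()
# {'nlc_id': 1, 'nlc_classifier_name': 'sentiment', 'created_date': None,
#  'modified_date': None, 'classification': []}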
|
iEngage/python-sdk
|
iengage_client/models/nlc.py
|
Python
|
apache-2.0
| 5,546
|
# -*- coding: utf-8 -*-
__author__ = 'tyler'
import urllib2
import scrapy
from scrapy import log
import demjson
'''class AutoSpider(scrapy.Spider):
name = "sse"
allowed_domains = ["query.sse.com.cn"]
preurl='http://data.eastmoney.com/stock';
start_urls = [
'http://query.sse.com.cn/infodisplay/showTradePublicFile.do?jsonCallBack=jQuery172023210379532913938_1430627585124&dateTx=2015-04-29&random=0.48195114223841695&_=1430627617454'
]
def parse(self, response):
jsonstr=response.body_as_unicode()
log.msg(jsonstr[len('jQuery172023210379532913938_1430627585124'):-1])
s1=demjson.decode(jsonstr[len('jQuery172023210379532913938_1430627585124('):-1])
log.msg(s1['fileContents'])
if __name__=='__main__':'''
import re
tradeDay=''
send_headers = {
'Host': 'query.sse.com.cn',
'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:37.0) Gecko/20100101 Firefox/37.0',
'Accept': '*/*',
'Accept-Language': 'zh-CN,zh;q=0.8,en-US;q=0.5,en;q=0.3',
'Accept-Encoding': 'gzip, deflate',
'Referer': 'http://www.sse.com.cn/disclosure/diclosure/public/',
'Connection': 'keep-alive'
}
url='http://query.sse.com.cn/infodisplay/showTradePublicFile.do?jsonCallBack=jQuery172023210379532913938_1430627585124&dateTx=2015-04-29&random=0.48195114223841695&_=1430627617454'
req = urllib2.Request(url,headers=send_headers)
response = urllib2.urlopen(req)
html = response.read()
jsonStr=demjson.decode(html[len('jQuery172023210379532913938_1430627585124('):-1])
lines=jsonStr['fileContents']
def loopLineFun(lines):
for line in lines:
yield line.encode('utf8')
loopline=loopLineFun(lines)
class LHBItem():
pass
dictlist = {}
r1 = re.compile(ur'\s+\(\d\)\s+(\d+)\s+([\u4e00-\u9fa5]+)\s+((-?\d+)(\.\d+)?)%\s+(\d+)\s+((-?\d+)(\.\d+)?)')
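# Added note (not in the original source): r1 appears intended to match ranking lines of
# the form "  (1) 600000 <Chinese stock name> 3.25% 1234567 8901.23", capturing the stock
# code, name, percentage change, volume and amount; the sample values are hypothetical.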
#r1 = re.compile(ur'\s+\(\d\)')
def readDep(loop,code):
state='buy'
rdep = re.compile(ur'\s+\(\d\)')
rout=re.compile(ur'^\s?$')
for tmp in loop:
print tmp
if tmp.find('买入营业部名称')>=0:  # "buy-side brokerage branch name"
state='buy'
continue
if tmp.find('卖出营业部名称')>=0:  # "sell-side brokerage branch name"
state='sell'
continue
outMatch=rout.match(tmp)
if outMatch and state=='sell':
print '跳出'  # "breaking out"
return
if rdep.match(tmp.decode('utf8')):
dep=re.split('\s+',tmp)
depName=dep[2]
tradeAmount=dep[3]
print 'depName ' + depName
r2=re.compile(ur'\s+[\u4e00-\u9fa5]+:\s(\d+)\s+[\u4e00-\u9fa5]+:\s[\u4e00-\u9fa5]+')
def readA7(loop):
for tmp in loop:
mat=r1.match(tmp.decode('utf8'))
if mat:
lbhItem =LHBItem()
lbhItem.symbol= mat.group(1)
lbhItem.stockName= mat.group(2)
lbhItem.zhengdie= mat.group(3)
lbhItem.vol=mat.group(6)
lbhItem.amount= mat.group(7)
dictlist[lbhItem.symbol]=lbhItem
continue
#dep
mat2=r2.match(tmp.decode('utf8'))
if mat2:
print '*************************'
readDep(loop,mat2.group(1))
if tmp.find('二、')>=0:  # "二、" marks the start of section two
return
for tmp in loopline:
print tmp
if tmp.find('交易日期')>=0:  # "trade date"
tradeDay=tmp[13:]
print tradeDay
if tmp.find('偏离值达到7%')>=0:  # "price deviation reached 7%"
tmp=readA7(loopline)
print tmp
break
if tmp.find('二、')>=0:  # "二、" marks the start of section two
print '-------'
for k in dictlist:
print k
|
dingmingliu/quanttrade
|
quantspider/quantspider/spiders/sse_spider.py
|
Python
|
apache-2.0
| 3,630
|
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This is a complete rewrite of a file licensed as follows:
#
# Copyright (c) 2014, Even Rouault <even dot rouault at mines-paris dot org>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
###############################################################################
"""Test Thin Plate Spline transformer in alg/gdal_tps.cpp.
Rewrite of:
https://trac.osgeo.org/gdal/browser/trunk/autotest/alg/tps.py
"""
import unittest
from osgeo import gdal
from osgeo import osr
from autotest2.gcore import gcore_util
class TransformGeolocTest(unittest.TestCase):
def testGroundControlPoints(self):
# https://trac.osgeo.org/gdal/ticket/5586
driver = gdal.GetDriverByName('MEM')
filepath = 'tps.mem'
with gcore_util.GdalUnlinkWhenDone(filepath):
datasource = driver.Create(filepath, 2, 2)
# A set of ground control points that will generate an error.
gcp_list = [
gdal.GCP(0, 0, 0, 0, 0),
gdal.GCP(0, 50, 0, 0, 50),
gdal.GCP(50, 0, 0, 50, 0),
gdal.GCP(50, 50, 0, 50, 50),
gdal.GCP(0 * 25, 0 * 25, 0, 25, 25)
]
datasource.SetGCPs(gcp_list, osr.GetUserInputAsWKT('WGS84'))
utm_wkt = osr.GetUserInputAsWKT('+proj=utm +zone=11 +datum=WGS84')
with gcore_util.ErrorHandler('CPLQuietErrorHandler'):
transformer = gdal.Transformer(
datasource, None, ['DST_SRS=' + utm_wkt, 'METHOD=GCP_TPS'])
self.assertIsNotNone(transformer)
# TODO(schwehr): The error observed is 3 (CPLE_FileIO), but
# expected 1 (CPLE_AppDefined).
self.assertNotEqual(gdal.GetLastErrorType(), gdal.CPLE_None)
err_msg = gdal.GetLastErrorMsg()
self.assertIn('problem inverting', err_msg)
self.assertIn('interpolation matrix', err_msg)
if __name__ == '__main__':
unittest.main()
|
schwehr/gdal-autotest2
|
python/alg/tps_test.py
|
Python
|
apache-2.0
| 3,423
|
import datetime
import six
try:
from django.contrib.sites.requests import RequestSite
except ImportError: # Django < 1.9
from django.contrib.sites.models import RequestSite
from django.core.exceptions import ObjectDoesNotExist
from django.core.serializers.json import DjangoJSONEncoder
from django.forms.models import model_to_dict
from django.shortcuts import render, get_object_or_404
from django.utils.timezone import now
from django.core.paginator import Paginator, EmptyPage
from django.views.decorators.cache import cache_page
from graphite.util import json, epoch, epoch_to_dt, jsonResponse, HttpError, HttpResponse
from graphite.events.models import Event
from graphite.render.attime import parseATTime
from graphite.settings import EVENTS_PER_PAGE, _PAGE_LINKS
class EventEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, datetime.datetime):
return epoch(obj)
return json.JSONEncoder.default(self, obj)
def get_page_range(paginator, page):
"""
Generate the range of page numbers to display around the current page.
"""
page_range = []
if page < 4:
if len(paginator.page_range) > _PAGE_LINKS:
page_range = [p for p in range(1, _PAGE_LINKS + 1)]
else:
page_range = paginator.page_range
else:
for p in paginator.page_range:
if p < page:
if page - p < _PAGE_LINKS // 2:
page_range.append(p)
if p >= page:
if p - page < _PAGE_LINKS // 2:
page_range.append(p)
if len(page_range) > _PAGE_LINKS and page > 5:
page_range = page_range[:-1]
return page_range
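# A hedged illustration (added comment, assuming _PAGE_LINKS == 10): for a paginator with
# 30 pages and page == 15 this yields pages 11..19 centred on the current page, while for
# page == 1 it yields the first _PAGE_LINKS pages.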
@cache_page(60 * 15)
def view_events(request, page_id=1):
if request.method == "GET":
try:
page_id = int(page_id)
except ValueError:
page_id = 1
events = fetch(request)
paginator = Paginator(events, EVENTS_PER_PAGE)
try:
events = paginator.page(page_id)
except EmptyPage:
events = paginator.page(paginator.num_pages)
pages = get_page_range(paginator, page_id)
context = {'events': events,
'site': RequestSite(request),
'pages': pages,
'protocol': 'https' if request.is_secure() else 'http'}
return render(request, 'events.html', context)
else:
return post_event(request)
@jsonResponse(encoder=DjangoJSONEncoder)
def jsonDetail(request, queryParams, event_id):
try:
e = Event.objects.get(id=event_id)
e.tags = e.tags.split()
return model_to_dict(e)
except ObjectDoesNotExist:
raise HttpError('Event matching query does not exist', status=404)
def detail(request, event_id):
if request.META.get('HTTP_ACCEPT') == 'application/json':
return jsonDetail(request, event_id)
e = get_object_or_404(Event, pk=event_id)
context = {'event': e}
return render(request, 'event.html', context)
def post_event(request):
if request.method == 'POST':
event = json.loads(request.body)
assert isinstance(event, dict)
tags = event.get('tags')
if tags is not None:
if isinstance(tags, list):
tags = ' '.join(tags)
elif not isinstance(tags, six.string_types):
return HttpResponse(
json.dumps({'error': '"tags" must be an array or space-separated string'}),
status=400)
else:
tags = None
if 'when' in event:
when = epoch_to_dt(event['when'])
else:
when = now()
Event.objects.create(
what=event.get('what'),
tags=tags,
when=when,
data=event.get('data', ''),
)
return HttpResponse(status=200)
else:
return HttpResponse(status=405)
def get_data(request):
query_params = request.GET.copy()
query_params.update(request.POST)
if 'jsonp' in query_params:
response = HttpResponse(
"%s(%s)" % (query_params.get('jsonp'),
json.dumps(fetch(request), cls=EventEncoder)),
content_type='text/javascript')
else:
response = HttpResponse(
json.dumps(fetch(request), cls=EventEncoder),
content_type='application/json')
return response
def fetch(request):
if request.GET.get('from') is not None:
time_from = parseATTime(request.GET['from'])
else:
time_from = epoch_to_dt(0)
if request.GET.get('until') is not None:
time_until = parseATTime(request.GET['until'])
else:
time_until = now()
set_operation = request.GET.get('set')
tags = request.GET.get('tags')
if tags is not None:
tags = request.GET.get('tags').split(' ')
result = []
for x in Event.find_events(time_from, time_until, tags=tags, set_operation=set_operation):
# django-tagging's with_intersection() returns matches with unknown tags
# this is a workaround to ensure we only return positive matches
if set_operation == 'intersection':
if len(set(tags) & set(x.as_dict()['tags'])) == len(tags):
result.append(x.as_dict())
else:
result.append(x.as_dict())
return result
|
drax68/graphite-web
|
webapp/graphite/events/views.py
|
Python
|
apache-2.0
| 5,333
|
import datetime
import os
import subprocess
import sys
import warnings
from typing import Optional, Tuple, Union
import click
from ruamel.yaml import YAML
from ruamel.yaml.compat import StringIO
from great_expectations import exceptions as ge_exceptions
from great_expectations.checkpoint import Checkpoint, LegacyCheckpoint
from great_expectations.cli.v012.cli_messages import SECTION_SEPARATOR
from great_expectations.cli.v012.datasource import get_batch_kwargs
from great_expectations.cli.v012.docs import build_docs
from great_expectations.cli.v012.upgrade_helpers import GE_UPGRADE_HELPER_VERSION_MAP
from great_expectations.cli.v012.util import cli_colorize_string, cli_message
from great_expectations.core.batch import Batch
from great_expectations.core.expectation_suite import ExpectationSuite
from great_expectations.core.id_dict import BatchKwargs
from great_expectations.core.usage_statistics.util import send_usage_message
from great_expectations.data_asset import DataAsset
from great_expectations.data_context.data_context import DataContext
from great_expectations.data_context.types.base import CURRENT_GE_CONFIG_VERSION
from great_expectations.data_context.types.resource_identifiers import (
ExpectationSuiteIdentifier,
RunIdentifier,
ValidationResultIdentifier,
)
from great_expectations.datasource import Datasource
from great_expectations.profile import BasicSuiteBuilderProfiler
EXIT_UPGRADE_CONTINUATION_MESSAGE = (
"\nOk, exiting now. To upgrade at a later time, use the following command: "
"<cyan>great_expectations project upgrade</cyan>\n\nTo learn more about the upgrade "
"process, visit "
"<cyan>https://docs.greatexpectations.io/en/latest/how_to_guides/migrating_versions.html"
"</cyan>.\n"
)
class MyYAML(YAML):
# copied from https://yaml.readthedocs.io/en/latest/example.html#output-of-dump-as-a-string
def dump(self, data, stream=None, **kw):
inefficient = False
if stream is None:
inefficient = True
stream = StringIO()
YAML.dump(self, data, stream, **kw)
if inefficient:
return stream.getvalue()
yaml = MyYAML() # or typ='safe'/'unsafe' etc
yaml.indent(mapping=2, sequence=4, offset=2)
yaml.default_flow_style = False
def create_expectation_suite(
context,
datasource_name=None,
batch_kwargs_generator_name=None,
generator_asset=None,
batch_kwargs=None,
expectation_suite_name=None,
additional_batch_kwargs=None,
empty_suite=False,
show_intro_message=False,
flag_build_docs=True,
open_docs=False,
profiler_configuration="demo",
data_asset_name=None,
):
"""
Create a new expectation suite.
WARNING: the flow and name of this method and its interaction with _profile_to_create_a_suite
require serious revisiting.
:return: a tuple: (success, suite name, profiling_results)
"""
if generator_asset:
warnings.warn(
"The 'generator_asset' argument will be deprecated and renamed to 'data_asset_name'. "
"Please update code accordingly.",
DeprecationWarning,
)
data_asset_name = generator_asset
if show_intro_message and not empty_suite:
cli_message(
"\n<cyan>========== Create sample Expectations ==========</cyan>\n\n"
)
data_source = select_datasource(context, datasource_name=datasource_name)
if data_source is None:
# select_datasource takes care of displaying an error message, so all that is left here is to exit.
sys.exit(1)
datasource_name = data_source.name
if expectation_suite_name in context.list_expectation_suite_names():
tell_user_suite_exists(expectation_suite_name)
sys.exit(1)
if (
batch_kwargs_generator_name is None
or data_asset_name is None
or batch_kwargs is None
):
(
datasource_name,
batch_kwargs_generator_name,
data_asset_name,
batch_kwargs,
) = get_batch_kwargs(
context,
datasource_name=datasource_name,
batch_kwargs_generator_name=batch_kwargs_generator_name,
data_asset_name=data_asset_name,
additional_batch_kwargs=additional_batch_kwargs,
)
# In this case, we have "consumed" the additional_batch_kwargs
additional_batch_kwargs = {}
if expectation_suite_name is None:
default_expectation_suite_name = _get_default_expectation_suite_name(
batch_kwargs, data_asset_name
)
while True:
expectation_suite_name = click.prompt(
"\nName the new Expectation Suite",
default=default_expectation_suite_name,
)
if expectation_suite_name in context.list_expectation_suite_names():
tell_user_suite_exists(expectation_suite_name)
else:
break
if empty_suite:
create_empty_suite(context, expectation_suite_name, batch_kwargs)
return True, expectation_suite_name, None
profiling_results = _profile_to_create_a_suite(
additional_batch_kwargs,
batch_kwargs,
batch_kwargs_generator_name,
context,
datasource_name,
expectation_suite_name,
data_asset_name,
profiler_configuration,
)
if flag_build_docs:
build_docs(context, view=False)
if open_docs:
attempt_to_open_validation_results_in_data_docs(context, profiling_results)
return True, expectation_suite_name, profiling_results
def _profile_to_create_a_suite(
additional_batch_kwargs,
batch_kwargs,
batch_kwargs_generator_name,
context,
datasource_name,
expectation_suite_name,
data_asset_name,
profiler_configuration,
):
cli_message(
"""
Great Expectations will choose a couple of columns and generate expectations about them
to demonstrate some examples of assertions you can make about your data.
Great Expectations will store these expectations in a new Expectation Suite '{:s}' here:
{:s}
""".format(
expectation_suite_name,
context.stores[
context.expectations_store_name
].store_backend.get_url_for_key(
ExpectationSuiteIdentifier(
expectation_suite_name=expectation_suite_name
).to_tuple()
),
)
)
confirm_proceed_or_exit()
# TODO this may not apply
cli_message("\nGenerating example Expectation Suite...")
run_id = datetime.datetime.now(datetime.timezone.utc).strftime("%Y%m%dT%H%M%S.%fZ")
profiling_results = context.profile_data_asset(
datasource_name,
batch_kwargs_generator_name=batch_kwargs_generator_name,
data_asset_name=data_asset_name,
batch_kwargs=batch_kwargs,
profiler=BasicSuiteBuilderProfiler,
profiler_configuration=profiler_configuration,
expectation_suite_name=expectation_suite_name,
run_id=RunIdentifier(run_name=run_id),
additional_batch_kwargs=additional_batch_kwargs,
)
if not profiling_results["success"]:
_raise_profiling_errors(profiling_results)
cli_message("\nDone generating example Expectation Suite")
return profiling_results
def _raise_profiling_errors(profiling_results):
if (
profiling_results["error"]["code"]
== DataContext.PROFILING_ERROR_CODE_SPECIFIED_DATA_ASSETS_NOT_FOUND
):
raise ge_exceptions.DataContextError(
"""Some of the data assets you specified were not found: {:s}
""".format(
",".join(profiling_results["error"]["not_found_data_assets"])
)
)
raise ge_exceptions.DataContextError(
f"Unknown profiling error code: {profiling_results['error']['code']}"
)
def attempt_to_open_validation_results_in_data_docs(context, profiling_results):
try:
# TODO this is really brittle and not covered in tests
validation_result = profiling_results["results"][0][1]
validation_result_identifier = ValidationResultIdentifier.from_object(
validation_result
)
context.open_data_docs(resource_identifier=validation_result_identifier)
except (KeyError, IndexError):
context.open_data_docs()
def _get_default_expectation_suite_name(batch_kwargs, data_asset_name):
if data_asset_name:
suite_name = f"{data_asset_name}.warning"
elif "query" in batch_kwargs:
suite_name = "query.warning"
elif "path" in batch_kwargs:
try:
# Try guessing a filename
filename = os.path.split(os.path.normpath(batch_kwargs["path"]))[1]
# Take all but the last part after the period
filename = ".".join(filename.split(".")[:-1])
suite_name = f"{str(filename)}.warning"
except (OSError, IndexError):
suite_name = "warning"
else:
suite_name = "warning"
return suite_name
def tell_user_suite_exists(suite_name: str) -> None:
cli_message(
f"""<red>An expectation suite named `{suite_name}` already exists.</red>
- If you intend to edit the suite please use `great_expectations suite edit {suite_name}`."""
)
def create_empty_suite(
context: DataContext, expectation_suite_name: str, batch_kwargs
) -> None:
cli_message(
"""
Great Expectations will create a new Expectation Suite '{:s}' and store it here:
{:s}
""".format(
expectation_suite_name,
context.stores[
context.expectations_store_name
].store_backend.get_url_for_key(
ExpectationSuiteIdentifier(
expectation_suite_name=expectation_suite_name
).to_tuple()
),
)
)
suite = context.create_expectation_suite(expectation_suite_name)
suite.add_citation(comment="New suite added via CLI", batch_kwargs=batch_kwargs)
context.save_expectation_suite(suite, expectation_suite_name)
def launch_jupyter_notebook(notebook_path: str) -> None:
jupyter_command_override = os.getenv("GE_JUPYTER_CMD", None)
if jupyter_command_override:
subprocess.call(f"{jupyter_command_override} {notebook_path}", shell=True)
else:
subprocess.call(["jupyter", "notebook", notebook_path])
def load_batch(
context: DataContext,
suite: Union[str, ExpectationSuite],
batch_kwargs: Union[dict, BatchKwargs],
) -> Union[Batch, DataAsset]:
batch: Union[Batch, DataAsset] = context.get_batch(batch_kwargs, suite)
assert isinstance(batch, DataAsset) or isinstance(
batch, Batch
), "Batch failed to load. Please check your batch_kwargs"
return batch
def load_expectation_suite(
# TODO consolidate all the myriad CLI tests into this
context: DataContext,
suite_name: str,
usage_event: str,
) -> ExpectationSuite:
"""
Load an expectation suite from a given context.
Handles a suite name with or without `.json`
:param usage_event:
"""
if suite_name.endswith(".json"):
suite_name = suite_name[:-5]
try:
suite = context.get_expectation_suite(suite_name)
return suite
except ge_exceptions.DataContextError:
exit_with_failure_message_and_stats(
context,
usage_event,
f"<red>Could not find a suite named `{suite_name}`.</red> Please check "
"the name by running `great_expectations suite list` and try again.",
)
def exit_with_failure_message_and_stats(
context: DataContext, usage_event: str, message: str
) -> None:
cli_message(message)
send_usage_message(
data_context=context,
event=usage_event,
api_version="v2",
success=False,
)
sys.exit(1)
def load_checkpoint(
context: DataContext,
checkpoint_name: str,
usage_event: str,
) -> Union[Checkpoint, LegacyCheckpoint]:
"""Load a checkpoint or raise helpful errors."""
try:
checkpoint: Union[Checkpoint, LegacyCheckpoint] = context.get_checkpoint(
name=checkpoint_name
)
return checkpoint
except (
ge_exceptions.CheckpointNotFoundError,
ge_exceptions.InvalidCheckpointConfigError,
):
exit_with_failure_message_and_stats(
context,
usage_event,
f"""\
<red>Could not find checkpoint `{checkpoint_name}`.</red> Try running:
- `<green>great_expectations checkpoint list</green>` to verify your checkpoint exists
- `<green>great_expectations checkpoint new</green>` to configure a new checkpoint""",
)
except ge_exceptions.CheckpointError as e:
exit_with_failure_message_and_stats(context, usage_event, f"<red>{e}</red>")
def select_datasource(context: DataContext, datasource_name: str = None) -> Datasource:
"""Select a datasource interactively."""
# TODO consolidate all the myriad CLI tests into this
data_source = None
if datasource_name is None:
data_sources = sorted(context.list_datasources(), key=lambda x: x["name"])
if len(data_sources) == 0:
cli_message(
"<red>No datasources found in the context. To add a datasource, run `great_expectations datasource new`</red>"
)
elif len(data_sources) == 1:
datasource_name = data_sources[0]["name"]
else:
choices = "\n".join(
[
f" {i}. {data_source['name']}"
for i, data_source in enumerate(data_sources, 1)
]
)
option_selection = click.prompt(
f"Select a datasource\n{choices}\n",
type=click.Choice(
[str(i) for i, data_source in enumerate(data_sources, 1)]
),
show_choices=False,
)
datasource_name = data_sources[int(option_selection) - 1]["name"]
if datasource_name is not None:
data_source = context.get_datasource(datasource_name)
return data_source
def load_data_context_with_error_handling(
directory: str, from_cli_upgrade_command: bool = False
) -> DataContext:
"""Return a DataContext with good error handling and exit codes."""
try:
context: DataContext = DataContext(context_root_dir=directory)
ge_config_version: int = context.get_config().config_version
if (
from_cli_upgrade_command
and int(ge_config_version) < CURRENT_GE_CONFIG_VERSION
):
directory = directory or context.root_directory
(
increment_version,
exception_occurred,
) = upgrade_project_up_to_one_version_increment(
context_root_dir=directory,
ge_config_version=ge_config_version,
continuation_message=EXIT_UPGRADE_CONTINUATION_MESSAGE,
from_cli_upgrade_command=from_cli_upgrade_command,
)
if not exception_occurred and increment_version:
context = DataContext(context_root_dir=directory)
return context
except ge_exceptions.UnsupportedConfigVersionError as err:
directory = directory or DataContext.find_context_root_dir()
ge_config_version = DataContext.get_ge_config_version(
context_root_dir=directory
)
upgrade_helper_class = (
GE_UPGRADE_HELPER_VERSION_MAP.get(int(ge_config_version))
if ge_config_version
else None
)
if upgrade_helper_class and ge_config_version < CURRENT_GE_CONFIG_VERSION:
upgrade_project(
context_root_dir=directory,
ge_config_version=ge_config_version,
from_cli_upgrade_command=from_cli_upgrade_command,
)
else:
cli_message(f"<red>{err.message}</red>")
sys.exit(1)
except (
ge_exceptions.ConfigNotFoundError,
ge_exceptions.InvalidConfigError,
) as err:
cli_message(f"<red>{err.message}</red>")
sys.exit(1)
except ge_exceptions.PluginModuleNotFoundError as err:
cli_message(err.cli.v012_colored_message)
sys.exit(1)
except ge_exceptions.PluginClassNotFoundError as err:
cli_message(err.cli.v012_colored_message)
sys.exit(1)
except ge_exceptions.InvalidConfigurationYamlError as err:
cli_message(f"<red>{str(err)}</red>")
sys.exit(1)
def upgrade_project(
context_root_dir, ge_config_version, from_cli_upgrade_command=False
):
if from_cli_upgrade_command:
message = (
f"<red>\nYour project appears to have an out-of-date config version ({ge_config_version}) - "
f"the version "
f"number must be at least {CURRENT_GE_CONFIG_VERSION}.</red>"
)
else:
message = (
f"<red>\nYour project appears to have an out-of-date config version ({ge_config_version}) - "
f"the version "
f"number must be at least {CURRENT_GE_CONFIG_VERSION}.\nIn order to proceed, "
f"your project must be upgraded.</red>"
)
cli_message(message)
upgrade_prompt = (
"\nWould you like to run the Upgrade Helper to bring your project up-to-date?"
)
confirm_proceed_or_exit(
confirm_prompt=upgrade_prompt,
continuation_message=EXIT_UPGRADE_CONTINUATION_MESSAGE,
)
cli_message(SECTION_SEPARATOR)
# use loop in case multiple upgrades need to take place
while ge_config_version < CURRENT_GE_CONFIG_VERSION:
(
increment_version,
exception_occurred,
) = upgrade_project_up_to_one_version_increment(
context_root_dir=context_root_dir,
ge_config_version=ge_config_version,
continuation_message=EXIT_UPGRADE_CONTINUATION_MESSAGE,
from_cli_upgrade_command=from_cli_upgrade_command,
)
if exception_occurred or not increment_version:
break
ge_config_version += 1
cli_message(SECTION_SEPARATOR)
upgrade_success_message = "<green>Upgrade complete. Exiting...</green>\n"
upgrade_incomplete_message = f"""\
<red>The Upgrade Helper was unable to perform a complete project upgrade. Next steps:</red>
- Please perform any manual steps outlined in the Upgrade Overview and/or Upgrade Report above
- When complete, increment the config_version key in your <cyan>great_expectations.yml</cyan> to <cyan>{
ge_config_version + 1}</cyan>\n
To learn more about the upgrade process, visit \
<cyan>https://docs.greatexpectations.io/en/latest/how_to_guides/migrating_versions.html</cyan>
"""
if ge_config_version < CURRENT_GE_CONFIG_VERSION:
cli_message(upgrade_incomplete_message)
else:
cli_message(upgrade_success_message)
sys.exit(0)
def upgrade_project_up_to_one_version_increment(
context_root_dir: str,
ge_config_version: float,
continuation_message: str,
from_cli_upgrade_command: bool = False,
) -> Tuple[bool, bool]:  # Returns (increment_version, exception_occurred)
upgrade_helper_class = GE_UPGRADE_HELPER_VERSION_MAP.get(int(ge_config_version))
if not upgrade_helper_class:
return False, False
target_ge_config_version = int(ge_config_version) + 1
# set version temporarily to CURRENT_GE_CONFIG_VERSION to get functional DataContext
DataContext.set_ge_config_version(
config_version=CURRENT_GE_CONFIG_VERSION,
context_root_dir=context_root_dir,
)
upgrade_helper = upgrade_helper_class(context_root_dir=context_root_dir)
upgrade_overview, confirmation_required = upgrade_helper.get_upgrade_overview()
if confirmation_required or from_cli_upgrade_command:
upgrade_confirmed = confirm_proceed_or_exit(
confirm_prompt=upgrade_overview,
continuation_message=continuation_message,
exit_on_no=False,
)
else:
upgrade_confirmed = True
if upgrade_confirmed:
cli_message("\nUpgrading project...")
cli_message(SECTION_SEPARATOR)
# run upgrade and get report of what was done, if version number should be incremented
(
upgrade_report,
increment_version,
exception_occurred,
) = upgrade_helper.upgrade_project()
# display report to user
cli_message(upgrade_report)
if exception_occurred:
# restore version number to current number
DataContext.set_ge_config_version(
ge_config_version, context_root_dir, validate_config_version=False
)
# display report to user
return False, True
# set config version to target version
if increment_version:
DataContext.set_ge_config_version(
target_ge_config_version,
context_root_dir,
validate_config_version=False,
)
return True, False
# restore version number to current number
DataContext.set_ge_config_version(
ge_config_version, context_root_dir, validate_config_version=False
)
return False, False
# restore version number to current number
DataContext.set_ge_config_version(
ge_config_version, context_root_dir, validate_config_version=False
)
cli_message(continuation_message)
sys.exit(0)
def confirm_proceed_or_exit(
confirm_prompt: str = "Would you like to proceed?",
continuation_message: str = "Ok, exiting now. You can always read more at https://docs.greatexpectations.io/ !",
exit_on_no: bool = True,
exit_code: int = 0,
) -> Optional[bool]:
"""
Every CLI command that starts a potentially lengthy (>1 sec) computation
or modifies some resources (e.g., edits the config file, adds objects
to the stores) must follow this pattern:
1. Explain which resources will be created/modified/deleted
2. Use this method to ask for user's confirmation
The goal of this standardization is for the users to expect consistency -
if you saw one command, you know what to expect from all others.
If the user does not confirm, the program should exit. The purpose of the exit_on_no parameter is to allow
the caller to perform cleanup actions before exiting, outside of this function.
"""
confirm_prompt_colorized = cli_colorize_string(confirm_prompt)
continuation_message_colorized = cli_colorize_string(continuation_message)
if not click.confirm(confirm_prompt_colorized, default=True):
if exit_on_no:
cli_message(continuation_message_colorized)
sys.exit(exit_code)
else:
return False
return True
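# A hypothetical usage sketch (added comment, not part of the module); the prompt text is
# invented for illustration:
# proceed = confirm_proceed_or_exit(
#     confirm_prompt="This will overwrite the existing suite. Proceed?",
#     continuation_message="Ok, nothing was changed.",
#     exit_on_no=False,
# )
# if proceed:
#     ...  # perform the overwrite only when the user confirmed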
|
great-expectations/great_expectations
|
great_expectations/cli/v012/toolkit.py
|
Python
|
apache-2.0
| 22,960
|
#! /usr/bin/python
# -*- coding: utf8 -*-
import tensorflow as tf
import os
from sys import platform as _platform
import collections
import random
import numpy as np
import warnings
from six.moves import xrange
from tensorflow.python.platform import gfile
import re
## Iteration functions
def generate_skip_gram_batch(data, batch_size, num_skips, skip_window, data_index=0):
"""Generate a training batch for the Skip-Gram model.
Parameters
----------
data : a list
To present context.
batch_size : an int
Batch size to return.
num_skips : an int
How many times to reuse an input to generate a label.
skip_window : an int
How many words to consider left and right.
data_index : an int
Index of the context location.
Without using yield, this code uses data_index instead.
Returns
--------
batch : a list
Inputs
labels : a list
Labels
data_index : an int
Index of the context location.
Examples
--------
>>> Setting num_skips=2, skip_window=1, use the right and left words.
>>> In the same way, num_skips=4, skip_window=2 means use the nearby 4 words.
>>> data = [1,2,3,4,5,6,7,8,9,10,11]
>>> batch, labels, data_index = tl.nlp.generate_skip_gram_batch(data=data, batch_size=8, num_skips=2, skip_window=1, data_index=0)
>>> print(batch)
... [2 2 3 3 4 4 5 5]
>>> print(labels)
... [[3]
... [1]
... [4]
... [2]
... [5]
... [3]
... [4]
... [6]]
References
-----------
- `TensorFlow word2vec tutorial <https://www.tensorflow.org/versions/r0.9/tutorials/word2vec/index.html#vector-representations-of-words>`_
"""
# global data_index # you can put data_index outside the function, then
# modify the global data_index in the function without returning it.
# note: without using yield, this code uses data_index instead.
assert batch_size % num_skips == 0
assert num_skips <= 2 * skip_window
batch = np.ndarray(shape=(batch_size), dtype=np.int32)
labels = np.ndarray(shape=(batch_size, 1), dtype=np.int32)
span = 2 * skip_window + 1 # [ skip_window target skip_window ]
buffer = collections.deque(maxlen=span)
for _ in range(span):
buffer.append(data[data_index])
data_index = (data_index + 1) % len(data)
for i in range(batch_size // num_skips):
target = skip_window # target label at the center of the buffer
targets_to_avoid = [ skip_window ]
for j in range(num_skips):
while target in targets_to_avoid:
target = random.randint(0, span - 1)
targets_to_avoid.append(target)
batch[i * num_skips + j] = buffer[skip_window]
labels[i * num_skips + j, 0] = buffer[target]
buffer.append(data[data_index])
data_index = (data_index + 1) % len(data)
return batch, labels, data_index
## Sampling functions
def sample(a=[], temperature=1.0):
"""Sample an index from a probability array.
Parameters
----------
a : a list
List of probabilities.
temperature : float or None
The higher the temperature, the more uniform the distribution.\n
When a = [0.1, 0.2, 0.7],\n
temperature = 0.7, the distribution will be sharpened [ 0.05048273 0.13588945 0.81362782]\n
temperature = 1.0, the distribution will stay the same [0.1 0.2 0.7]\n
temperature = 1.5, the distribution will be flattened [ 0.16008435 0.25411807 0.58579758]\n
If None, it will be ``np.argmax(a)``
Notes
------
No matter what the temperature and input list are, the sum of all probabilities will be one.
Even if the input list is [1, 100, 200], the sum of all probabilities will still be one.
For a large vocabulary_size, choose a higher temperature to avoid errors.
"""
b = np.copy(a)
try:
if temperature == 1:
return np.argmax(np.random.multinomial(1, a, 1))
if temperature is None:
return np.argmax(a)
else:
a = np.log(a) / temperature
a = np.exp(a) / np.sum(np.exp(a))
return np.argmax(np.random.multinomial(1, a, 1))
except:
# np.set_printoptions(threshold=np.nan)
# print(a)
# print(np.sum(a))
# print(np.max(a))
# print(np.min(a))
# exit()
message = "For large vocabulary_size, choice a higher temperature\
to avoid log error. Hint : use ``sample_top``. "
warnings.warn(message, Warning)
# print(a)
# print(b)
return np.argmax(np.random.multinomial(1, b, 1))
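# A minimal sketch (added comment, not part of the original API) of how temperature
# reshapes the distribution before sampling; the numbers are illustrative only:
# >>> a = np.array([0.1, 0.2, 0.7])
# >>> sample(a, temperature=0.5)   # sharper distribution, index 2 dominates
# >>> sample(a, temperature=None)  # deterministic, always np.argmax(a) == 2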
def sample_top(a=[], top_k=10):
"""Sample from ``top_k`` probabilities.
Parameters
----------
a : a list
List of probabilities.
top_k : int
Number of candidates to be considered.
"""
idx = np.argpartition(a, -top_k)[-top_k:]
probs = a[idx]
# print("new", probs)
probs = probs / np.sum(probs)
choice = np.random.choice(idx, p=probs)
return choice
## old implementation
# a = np.array(a)
# idx = np.argsort(a)[::-1]
# idx = idx[:top_k]
# # a = a[idx]
# probs = a[idx]
# print("prev", probs)
# # probs = probs / np.sum(probs)
# # choice = np.random.choice(idx, p=probs)
# # return choice
## Vector representations of words (Advanced) UNDOCUMENT
class SimpleVocabulary(object):
"""Simple vocabulary wrapper, see create_vocab().
Parameters
------------
vocab : A dictionary of word to word_id.
unk_id : Id of the special 'unknown' word.
"""
def __init__(self, vocab, unk_id):
"""Initializes the vocabulary."""
self._vocab = vocab
self._unk_id = unk_id
def word_to_id(self, word):
"""Returns the integer id of a word string."""
if word in self._vocab:
return self._vocab[word]
else:
return self._unk_id
class Vocabulary(object):
"""Create Vocabulary class from a given vocabulary and its id-word, word-id convert,
see create_vocab() and ``tutorial_tfrecord3.py``.
Parameters
-----------
vocab_file : File containing the vocabulary, where the words are the first
whitespace-separated token on each line (other tokens are ignored) and
the word ids are the corresponding line numbers.
start_word : Special word denoting sentence start.
end_word : Special word denoting sentence end.
unk_word : Special word denoting unknown words.
Properties
------------
vocab : a dictionary from word to id.
reverse_vocab : a list from id to word.
start_id : int of start id
end_id : int of end id
unk_id : int of unk id
pad_id : int of padding id
Vocab_files
-------------
>>> Looks as follows; includes `start_word` and `end_word` but no `unk_word` .
>>> a 969108
>>> <S> 586368
>>> </S> 586368
>>> . 440479
>>> on 213612
>>> of 202290
>>> the 196219
>>> in 182598
>>> with 152984
>>> and 139109
>>> is 97322
"""
def __init__(self,
vocab_file,
start_word="<S>",
end_word="</S>",
unk_word="<UNK>",
pad_word="<PAD>"):
if not tf.gfile.Exists(vocab_file):
tf.logging.fatal("Vocab file %s not found.", vocab_file)
tf.logging.info("Initializing vocabulary from file: %s", vocab_file)
with tf.gfile.GFile(vocab_file, mode="r") as f:
reverse_vocab = list(f.readlines())
reverse_vocab = [line.split()[0] for line in reverse_vocab]
assert start_word in reverse_vocab
assert end_word in reverse_vocab
if unk_word not in reverse_vocab:
reverse_vocab.append(unk_word)
vocab = dict([(x, y) for (y, x) in enumerate(reverse_vocab)])
print(" [TL] Vocabulary from %s : %s %s %s" % (vocab_file, start_word, end_word, unk_word))
print(" vocabulary with %d words (includes start_word, end_word, unk_word)" % len(vocab))
# tf.logging.info(" vocabulary with %d words" % len(vocab))
self.vocab = vocab # vocab[word] = id
self.reverse_vocab = reverse_vocab # reverse_vocab[id] = word
# Save special word ids.
self.start_id = vocab[start_word]
self.end_id = vocab[end_word]
self.unk_id = vocab[unk_word]
self.pad_id = vocab[pad_word]
print(" start_id: %d" % self.start_id)
print(" end_id: %d" % self.end_id)
print(" unk_id: %d" % self.unk_id)
print(" pad_id: %d" % self.pad_id)
def word_to_id(self, word):
"""Returns the integer word id of a word string."""
if word in self.vocab:
return self.vocab[word]
else:
return self.unk_id
def id_to_word(self, word_id):
"""Returns the word string of an integer word id."""
if word_id >= len(self.reverse_vocab):
return self.reverse_vocab[self.unk_id]
else:
return self.reverse_vocab[word_id]
def process_sentence(sentence, start_word="<S>", end_word="</S>"):
"""Converts a sentence string into a list of string words, add start_word and end_word,
see ``create_vocab()`` and ``tutorial_tfrecord3.py``.
Parameter
---------
sentence : a sentence in string.
start_word : a string or None; if None, no start word will be prepended.
end_word : a string or None; if None, no end word will be appended.
Returns
---------
A list of strings; the processed caption.
Examples
-----------
>>> c = "how are you?"
>>> c = tl.nlp.process_sentence(c)
>>> print(c)
... ['<S>', 'how', 'are', 'you', '?', '</S>']
"""
try:
import nltk
except:
raise Exception("Hint : NLTK is required.")
if start_word is not None:
process_sentence = [start_word]
else:
process_sentence = []
process_sentence.extend(nltk.tokenize.word_tokenize(sentence.lower()))
if end_word is not None:
process_sentence.append(end_word)
return process_sentence
def create_vocab(sentences, word_counts_output_file, min_word_count=1):
"""Creates the vocabulary of word to word_id, see create_vocab() and ``tutorial_tfrecord3.py``.
The vocabulary is saved to disk in a text file of word counts. The id of each
word in the file is its corresponding 0-based line number.
Parameters
------------
sentences : a list of lists of strings.
word_counts_output_file : A string
The file name.
min_word_count : an int
Minimum number of occurrences for a word.
Returns
--------
- tl.nlp.SimpleVocabulary object.
More
-----
- ``tl.nlp.build_vocab()``
Examples
--------
>>> captions = ["one two , three", "four five five"]
>>> processed_capts = []
>>> for c in captions:
>>> c = tl.nlp.process_sentence(c, start_word="<S>", end_word="</S>")
>>> processed_capts.append(c)
>>> print(processed_capts)
...[['<S>', 'one', 'two', ',', 'three', '</S>'], ['<S>', 'four', 'five', 'five', '</S>']]
>>> tl.nlp.create_vocab(processed_capts, word_counts_output_file='vocab.txt', min_word_count=1)
... [TL] Creating vocabulary.
... Total words: 8
... Words in vocabulary: 8
... Wrote vocabulary file: vocab.txt
>>> vocab = tl.nlp.Vocabulary('vocab.txt', start_word="<S>", end_word="</S>", unk_word="<UNK>")
... INFO:tensorflow:Initializing vocabulary from file: vocab.txt
... [TL] Vocabulary from vocab.txt : <S> </S> <UNK>
... vocabulary with 10 words (includes start_word, end_word, unk_word)
... start_id: 2
... end_id: 3
... unk_id: 9
... pad_id: 0
"""
from collections import Counter
print(" [TL] Creating vocabulary.")
counter = Counter()
for c in sentences:
counter.update(c)
# print('c',c)
print(" Total words: %d" % len(counter))
# Filter uncommon words and sort by descending count.
word_counts = [x for x in counter.items() if x[1] >= min_word_count]
word_counts.sort(key=lambda x: x[1], reverse=True)
word_counts = [("<PAD>", 0)] + word_counts # 1st id should be reserved for padding
# print(word_counts)
print(" Words in vocabulary: %d" % len(word_counts))
# Write out the word counts file.
with tf.gfile.FastGFile(word_counts_output_file, "w") as f:
f.write("\n".join(["%s %d" % (w, c) for w, c in word_counts]))
print(" Wrote vocabulary file: %s" % word_counts_output_file)
# Create the vocabulary dictionary.
reverse_vocab = [x[0] for x in word_counts]
unk_id = len(reverse_vocab)
vocab_dict = dict([(x, y) for (y, x) in enumerate(reverse_vocab)])
vocab = SimpleVocabulary(vocab_dict, unk_id)
return vocab
## Vector representations of words
def simple_read_words(filename="nietzsche.txt"):
"""Read context from file without any preprocessing.
Parameters
----------
filename : a string
A file path (like .txt file)
Returns
--------
The context in a string
"""
with open("nietzsche.txt", "r") as f:
words = f.read()
return words
def read_words(filename="nietzsche.txt", replace = ['\n', '<eos>']):
"""File to list format context. Note that, this script can not handle punctuations.
For customized read_words method, see ``tutorial_generate_text.py``.
Parameters
----------
filename : a string
A file path (like .txt file),
replace : a list
[original string, target string], to disable replace use ['', '']
Returns
--------
The context in a list, split by space by default, and use ``'<eos>'`` to represent ``'\n'``,
e.g. ``[... 'how', 'useful', 'it', "'s" ... ]``.
Code References
---------------
- `tensorflow.models.rnn.ptb.reader <https://github.com/tensorflow/tensorflow/tree/master/tensorflow/models/rnn/ptb>`_
"""
with tf.gfile.GFile(filename, "r") as f:
try: # python 3.4 or older
context_list = f.read().replace(*replace).split()
except: # python 3.5
f.seek(0)
replace = [x.encode('utf-8') for x in replace]
context_list = f.read().replace(*replace).split()
return context_list
def read_analogies_file(eval_file='questions-words.txt', word2id={}):
"""Reads through an analogy question file, return its id format.
Parameters
----------
eval_file : a string
The file name.
word2id : a dictionary
Mapping words to unique IDs.
Returns
--------
analogy_questions : a [n, 4] numpy array containing the analogy question's
word ids.
questions_skipped: questions skipped due to unknown words.
Examples
---------
>>> eval_file should be in this format :
>>> : capital-common-countries
>>> Athens Greece Baghdad Iraq
>>> Athens Greece Bangkok Thailand
>>> Athens Greece Beijing China
>>> Athens Greece Berlin Germany
>>> Athens Greece Bern Switzerland
>>> Athens Greece Cairo Egypt
>>> Athens Greece Canberra Australia
>>> Athens Greece Hanoi Vietnam
>>> Athens Greece Havana Cuba
...
>>> words = tl.files.load_matt_mahoney_text8_dataset()
>>> data, count, dictionary, reverse_dictionary = \
tl.nlp.build_words_dataset(words, vocabulary_size, True)
>>> analogy_questions = tl.nlp.read_analogies_file( \
eval_file='questions-words.txt', word2id=dictionary)
>>> print(analogy_questions)
... [[ 3068 1248 7161 1581]
... [ 3068 1248 28683 5642]
... [ 3068 1248 3878 486]
... ...,
... [ 1216 4309 19982 25506]
... [ 1216 4309 3194 8650]
... [ 1216 4309 140 312]]
"""
questions = []
questions_skipped = 0
with open(eval_file, "rb") as analogy_f:
for line in analogy_f:
if line.startswith(b":"): # Skip comments.
continue
words = line.strip().lower().split(b" ") # lowercase
ids = [word2id.get(w.strip()) for w in words]
if None in ids or len(ids) != 4:
questions_skipped += 1
else:
questions.append(np.array(ids))
print("Eval analogy file: ", eval_file)
print("Questions: ", len(questions))
print("Skipped: ", questions_skipped)
analogy_questions = np.array(questions, dtype=np.int32)
return analogy_questions
def build_vocab(data):
"""Build vocabulary.
Given the context in list format.
Return the vocabulary, which is a dictionary for word to id.
e.g. {'campbell': 2587, 'atlantic': 2247, 'aoun': 6746 .... }
Parameters
----------
data : a list of string
the context in list format
Returns
--------
word_to_id : a dictionary
mapping words to unique IDs. e.g. {'campbell': 2587, 'atlantic': 2247, 'aoun': 6746 .... }
Code References
---------------
- `tensorflow.models.rnn.ptb.reader <https://github.com/tensorflow/tensorflow/tree/master/tensorflow/models/rnn/ptb>`_
Examples
--------
>>> data_path = os.getcwd() + '/simple-examples/data'
>>> train_path = os.path.join(data_path, "ptb.train.txt")
>>> word_to_id = build_vocab(read_txt_words(train_path))
"""
# data = _read_words(filename)
counter = collections.Counter(data)
# print('counter', counter) # dictionary for the occurrence number of each word, e.g. 'banknote': 1, 'photography': 1, 'kia': 1
count_pairs = sorted(counter.items(), key=lambda x: (-x[1], x[0]))
# print('count_pairs',count_pairs) # convert dictionary to list of tuple, e.g. ('ssangyong', 1), ('swapo', 1), ('wachter', 1)
words, _ = list(zip(*count_pairs))
word_to_id = dict(zip(words, range(len(words))))
# print(words) # list of words
# print(word_to_id) # dictionary for word to id, e.g. 'campbell': 2587, 'atlantic': 2247, 'aoun': 6746
return word_to_id
def build_reverse_dictionary(word_to_id):
"""Given a dictionary for converting word to integer id.
Returns a reverse dictionary for converting a id to word.
Parameters
----------
word_to_id : dictionary
mapping words to unique ids
Returns
--------
reverse_dictionary : a dictionary
mapping ids to words
"""
reverse_dictionary = dict(zip(word_to_id.values(), word_to_id.keys()))
return reverse_dictionary
def build_words_dataset(words=[], vocabulary_size=50000, printable=True, unk_key = 'UNK'):
"""Build the words dictionary and replace rare words with 'UNK' token.
The most common word has the smallest integer id.
Parameters
----------
words : a list of string or byte
The context in list format. You may need to do preprocessing on the words,
such as lower case, remove marks etc.
vocabulary_size : an int
The maximum vocabulary size, limiting the vocabulary size.
Then the script replaces rare words with 'UNK' token.
printable : boolean
Whether to print the read vocabulary size of the given words.
unk_key : a string
Unknown words = unk_key
Returns
--------
data : a list of integer
The context in a list of ids
count : a list of tuple and list
count[0] is a list : the number of rare words\n
count[1:] are tuples : the number of occurrence of each word\n
e.g. [['UNK', 418391], (b'the', 1061396), (b'of', 593677), (b'and', 416629), (b'one', 411764)]
dictionary : a dictionary
word_to_id, mapping words to unique IDs.
reverse_dictionary : a dictionary
id_to_word, mapping id to unique word.
Examples
--------
>>> words = tl.files.load_matt_mahoney_text8_dataset()
>>> vocabulary_size = 50000
>>> data, count, dictionary, reverse_dictionary = tl.nlp.build_words_dataset(words, vocabulary_size)
Code References
-----------------
- `tensorflow/examples/tutorials/word2vec/word2vec_basic.py <https://github.com/tensorflow/tensorflow/blob/r0.7/tensorflow/examples/tutorials/word2vec/word2vec_basic.py>`_
"""
import collections
count = [[unk_key, -1]]
count.extend(collections.Counter(words).most_common(vocabulary_size - 1))
dictionary = dict()
for word, _ in count:
dictionary[word] = len(dictionary)
data = list()
unk_count = 0
for word in words:
if word in dictionary:
index = dictionary[word]
else:
index = 0 # dictionary['UNK']
unk_count += 1
data.append(index)
count[0][1] = unk_count
reverse_dictionary = dict(zip(dictionary.values(), dictionary.keys()))
if printable:
print('Real vocabulary size %d' % len(collections.Counter(words).keys()))
print('Limited vocabulary size {}'.format(vocabulary_size))
assert len(collections.Counter(words).keys()) >= vocabulary_size , \
"the limited vocabulary_size must be less than or equal to the read vocabulary_size"
return data, count, dictionary, reverse_dictionary
def words_to_word_ids(data=[], word_to_id={}, unk_key = 'UNK'):
"""Given a context (words) in list format and the vocabulary,
Returns a list of IDs to represent the context.
Parameters
----------
data : a list of string or byte
the context in list format
word_to_id : a dictionary
mapping words to unique IDs.
unk_key : a string
Unknown words = unk_key
Returns
--------
A list of IDs to represent the context.
Examples
--------
>>> words = tl.files.load_matt_mahoney_text8_dataset()
>>> vocabulary_size = 50000
>>> data, count, dictionary, reverse_dictionary = \
... tl.nlp.build_words_dataset(words, vocabulary_size, True)
>>> context = [b'hello', b'how', b'are', b'you']
>>> ids = tl.nlp.words_to_word_ids(words, dictionary)
>>> context = tl.nlp.word_ids_to_words(ids, reverse_dictionary)
>>> print(ids)
... [6434, 311, 26, 207]
>>> print(context)
... [b'hello', b'how', b'are', b'you']
Code References
---------------
- `tensorflow.models.rnn.ptb.reader <https://github.com/tensorflow/tensorflow/tree/master/tensorflow/models/rnn/ptb>`_
"""
# if isinstance(data[0], six.string_types):
# print(type(data[0]))
# # exit()
# print(data[0])
# print(word_to_id)
# return [word_to_id[str(word)] for word in data]
# else:
word_ids = []
for word in data:
if word_to_id.get(word) is not None:
word_ids.append(word_to_id[word])
else:
word_ids.append(word_to_id[unk_key])
return word_ids
# return [word_to_id[word] for word in data] # this one
# if isinstance(data[0], str):
# # print('is a string object')
# return [word_to_id[word] for word in data]
# else:#if isinstance(s, bytes):
# # print('is a unicode object')
# # print(data[0])
# return [word_to_id[str(word)] f
def word_ids_to_words(data, id_to_word):
"""Given a context (ids) in list format and the vocabulary,
Returns a list of words to represent the context.
Parameters
----------
data : a list of integer
the context in list format
id_to_word : a dictionary
mapping id to unique word.
Returns
--------
A list of string or byte to represent the context.
Examples
---------
>>> see words_to_word_ids
"""
return [id_to_word[i] for i in data]
def save_vocab(count=[], name='vocab.txt'):
"""Save the vocabulary to a file so the model can be reloaded.
Parameters
----------
count : a list of tuple and list
count[0] is a list : the number of rare words\n
count[1:] are tuples : the number of occurrence of each word\n
e.g. [['UNK', 418391], (b'the', 1061396), (b'of', 593677), (b'and', 416629), (b'one', 411764)]
Examples
---------
>>> words = tl.files.load_matt_mahoney_text8_dataset()
>>> vocabulary_size = 50000
>>> data, count, dictionary, reverse_dictionary = \
... tl.nlp.build_words_dataset(words, vocabulary_size, True)
>>> tl.nlp.save_vocab(count, name='vocab_text8.txt')
>>> vocab_text8.txt
... UNK 418391
... the 1061396
... of 593677
... and 416629
... one 411764
... in 372201
... a 325873
... to 316376
"""
pwd = os.getcwd()
vocabulary_size = len(count)
with open(os.path.join(pwd, name), "w") as f:
for i in xrange(vocabulary_size):
f.write("%s %d\n" % (tf.compat.as_text(count[i][0]), count[i][1]))
print("%d vocab saved to %s in %s" % (vocabulary_size, name, pwd))
## Functions for translation
def basic_tokenizer(sentence, _WORD_SPLIT=re.compile(b"([.,!?\"':;)(])")):
"""Very basic tokenizer: split the sentence into a list of tokens.
Parameters
-----------
sentence : tensorflow.python.platform.gfile.GFile Object
    _WORD_SPLIT : regular expression for word splitting.
Examples
--------
>>> see create_vocabulary
>>> from tensorflow.python.platform import gfile
>>> train_path = "wmt/giga-fren.release2"
>>> with gfile.GFile(train_path + ".en", mode="rb") as f:
>>> for line in f:
>>> tokens = tl.nlp.basic_tokenizer(line)
>>> print(tokens)
>>> exit()
... [b'Changing', b'Lives', b'|', b'Changing', b'Society', b'|', b'How',
... b'It', b'Works', b'|', b'Technology', b'Drives', b'Change', b'Home',
... b'|', b'Concepts', b'|', b'Teachers', b'|', b'Search', b'|', b'Overview',
... b'|', b'Credits', b'|', b'HHCC', b'Web', b'|', b'Reference', b'|',
... b'Feedback', b'Virtual', b'Museum', b'of', b'Canada', b'Home', b'Page']
References
----------
- Code from ``/tensorflow/models/rnn/translation/data_utils.py``
"""
words = []
sentence = tf.compat.as_bytes(sentence)
for space_separated_fragment in sentence.strip().split():
words.extend(re.split(_WORD_SPLIT, space_separated_fragment))
return [w for w in words if w]
def create_vocabulary(vocabulary_path, data_path, max_vocabulary_size,
tokenizer=None, normalize_digits=True,
_DIGIT_RE=re.compile(br"\d"),
_START_VOCAB=[b"_PAD", b"_GO", b"_EOS", b"_UNK"]):
"""Create vocabulary file (if it does not exist yet) from data file.
Data file is assumed to contain one sentence per line. Each sentence is
tokenized and digits are normalized (if normalize_digits is set).
Vocabulary contains the most-frequent tokens up to max_vocabulary_size.
    We write it to vocabulary_path in a one-token-per-line format, so that later
    the token in the first line gets id=0, the token in the second line gets id=1, and so on.
Parameters
-----------
vocabulary_path : path where the vocabulary will be created.
data_path : data file that will be used to create vocabulary.
max_vocabulary_size : limit on the size of the created vocabulary.
tokenizer : a function to use to tokenize each data sentence.
if None, basic_tokenizer will be used.
normalize_digits : Boolean
if true, all digits are replaced by 0s.
References
----------
- Code from ``/tensorflow/models/rnn/translation/data_utils.py``
"""
if not gfile.Exists(vocabulary_path):
print("Creating vocabulary %s from data %s" % (vocabulary_path, data_path))
vocab = {}
with gfile.GFile(data_path, mode="rb") as f:
counter = 0
for line in f:
counter += 1
if counter % 100000 == 0:
print(" processing line %d" % counter)
tokens = tokenizer(line) if tokenizer else basic_tokenizer(line)
for w in tokens:
word = re.sub(_DIGIT_RE, b"0", w) if normalize_digits else w
if word in vocab:
vocab[word] += 1
else:
vocab[word] = 1
vocab_list = _START_VOCAB + sorted(vocab, key=vocab.get, reverse=True)
if len(vocab_list) > max_vocabulary_size:
vocab_list = vocab_list[:max_vocabulary_size]
with gfile.GFile(vocabulary_path, mode="wb") as vocab_file:
for w in vocab_list:
vocab_file.write(w + b"\n")
else:
print("Vocabulary %s from data %s exists" % (vocabulary_path, data_path))
def initialize_vocabulary(vocabulary_path):
"""Initialize vocabulary from file, return the word_to_id (dictionary)
and id_to_word (list).
We assume the vocabulary is stored one-item-per-line, so a file:\n
dog\n
cat\n
will result in a vocabulary {"dog": 0, "cat": 1}, and this function will
also return the reversed-vocabulary ["dog", "cat"].
Parameters
-----------
vocabulary_path : path to the file containing the vocabulary.
Returns
--------
vocab : a dictionary
Word to id. A dictionary mapping string to integers.
rev_vocab : a list
Id to word. The reversed vocabulary (a list, which reverses the vocabulary mapping).
Examples
---------
>>> Assume 'test' contains
... dog
... cat
... bird
>>> vocab, rev_vocab = tl.nlp.initialize_vocabulary("test")
>>> print(vocab)
>>> {b'cat': 1, b'dog': 0, b'bird': 2}
>>> print(rev_vocab)
>>> [b'dog', b'cat', b'bird']
Raises
-------
ValueError : if the provided vocabulary_path does not exist.
"""
if gfile.Exists(vocabulary_path):
rev_vocab = []
with gfile.GFile(vocabulary_path, mode="rb") as f:
rev_vocab.extend(f.readlines())
rev_vocab = [tf.compat.as_bytes(line.strip()) for line in rev_vocab]
vocab = dict([(x, y) for (y, x) in enumerate(rev_vocab)])
return vocab, rev_vocab
else:
raise ValueError("Vocabulary file %s not found.", vocabulary_path)
def sentence_to_token_ids(sentence, vocabulary,
tokenizer=None, normalize_digits=True,
UNK_ID=3, _DIGIT_RE=re.compile(br"\d")):
"""Convert a string to list of integers representing token-ids.
For example, a sentence "I have a dog" may become tokenized into
["I", "have", "a", "dog"] and with vocabulary {"I": 1, "have": 2,
"a": 4, "dog": 7"} this function will return [1, 2, 4, 7].
Parameters
-----------
sentence : tensorflow.python.platform.gfile.GFile Object
The sentence in bytes format to convert to token-ids.\n
see basic_tokenizer(), data_to_token_ids()
vocabulary : a dictionary mapping tokens to integers.
tokenizer : a function to use to tokenize each sentence;
If None, basic_tokenizer will be used.
normalize_digits : Boolean
If true, all digits are replaced by 0s.
Returns
--------
A list of integers, the token-ids for the sentence.
"""
if tokenizer:
words = tokenizer(sentence)
else:
words = basic_tokenizer(sentence)
if not normalize_digits:
return [vocabulary.get(w, UNK_ID) for w in words]
# Normalize digits by 0 before looking words up in the vocabulary.
return [vocabulary.get(re.sub(_DIGIT_RE, b"0", w), UNK_ID) for w in words]
def data_to_token_ids(data_path, target_path, vocabulary_path,
tokenizer=None, normalize_digits=True,
UNK_ID=3, _DIGIT_RE=re.compile(br"\d")):
"""Tokenize data file and turn into token-ids using given vocabulary file.
This function loads data line-by-line from data_path, calls the above
sentence_to_token_ids, and saves the result to target_path. See comment
for sentence_to_token_ids on the details of token-ids format.
Parameters
-----------
data_path : path to the data file in one-sentence-per-line format.
target_path : path where the file with token-ids will be created.
vocabulary_path : path to the vocabulary file.
tokenizer : a function to use to tokenize each sentence;
if None, basic_tokenizer will be used.
normalize_digits : Boolean; if true, all digits are replaced by 0s.
References
----------
- Code from ``/tensorflow/models/rnn/translation/data_utils.py``
"""
if not gfile.Exists(target_path):
print("Tokenizing data in %s" % data_path)
vocab, _ = initialize_vocabulary(vocabulary_path)
with gfile.GFile(data_path, mode="rb") as data_file:
with gfile.GFile(target_path, mode="w") as tokens_file:
counter = 0
for line in data_file:
counter += 1
if counter % 100000 == 0:
print(" tokenizing line %d" % counter)
token_ids = sentence_to_token_ids(line, vocab, tokenizer,
normalize_digits, UNK_ID=UNK_ID,
_DIGIT_RE=_DIGIT_RE)
tokens_file.write(" ".join([str(tok) for tok in token_ids]) + "\n")
else:
print("Target path %s exists" % target_path)
|
arcyfelix/ML-DL-AI
|
Supervised Learning/GANs/dcgan-tensorflayer/tensorlayer/nlp.py
|
Python
|
apache-2.0
| 32,641
|
import sys
import os
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import logging.config
from flask import Flask, Blueprint
from werkzeug.contrib.fixers import ProxyFix
from ceep_api import settings
from ceep_api.api import restplus
from ceep_api.api.endpoints.adbmonitors import ns as adbmonitors_namespace
from ceep_api.api.restplus import api
from ceep_api.database import db
def configure_app(flask_app):
flask_app.config['SQLALCHEMY_DATABASE_URI'] = settings.SQLALCHEMY_DATABASE_URI
flask_app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = settings.SQLALCHEMY_TRACK_MODIFICATIONS
flask_app.config['SWAGGER_UI_DOC_EXPANSION'] = settings.RESTPLUS_SWAGGER_UI_DOC_EXPANSION
flask_app.config['RESTPLUS_VALIDATE'] = settings.RESTPLUS_VALIDATE
flask_app.config['RESTPLUS_MASK_SWAGGER'] = settings.RESTPLUS_MASK_SWAGGER
flask_app.config['ERROR_404_HELP'] = settings.RESTPLUS_ERROR_404_HELP
def initialize_app(flask_app):
log.debug('Initialize APP...')
configure_app(flask_app)
blueprint = Blueprint('api', __name__, url_prefix='/api/1.0')
api.init_app(blueprint)
api.add_namespace(adbmonitors_namespace)
flask_app.register_blueprint(blueprint)
db.init_app(flask_app)
logging.config.fileConfig('logging.conf')
log = logging.getLogger(__name__)
app = Flask(__name__)
initialize_app(app)
app.wsgi_app = ProxyFix(app.wsgi_app)
|
seraph115/ceep_api
|
ceep_api/run.py
|
Python
|
apache-2.0
| 1,398
|
#------------------------------------------------------------------------------
# Copyright 2013 Esri
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#------------------------------------------------------------------------------
# Name: TestUtilities.py
# Requirements: ArcGIS Desktop Standard
#------------------------------------------------------------------------------
import os
currentPath = os.path.dirname(__file__)
print "currentPath: " + currentPath
geodatabasePath = os.path.normpath(os.path.join(os.path.join(currentPath, r"../../../data_management/data/Geonames")))
print "geodatabasePath: " + geodatabasePath
toolboxesPath = os.path.normpath(os.path.join(currentPath, r"../../../data_management/toolboxes/"))
print "toolboxesPath: " + toolboxesPath
inputGDB = os.path.join(geodatabasePath, "Geonames.gdb")
toolbox = os.path.join(toolboxesPath, "Geonames Tools_10.3.tbx")
|
JudTown17/solutions-geoprocessing-toolbox
|
data_management/test/test_geonames_tools/TestUtilities.py
|
Python
|
apache-2.0
| 1,403
|
from __future__ import print_function
from __future__ import division
from django.core.management.base import BaseCommand
from query.models import Video, Face, LabelSet, Frame
from scannerpy import ProtobufGenerator, Config
import os
import cv2
import math
import numpy as np
import tensorflow as tf
import align.detect_face
from collections import defaultdict
from array import *
from functools import wraps
import inspect
cfg = Config()
proto = ProtobufGenerator(cfg)
def initializer(func):
"""
Automatically assigns the parameters.
>>> class process:
... @initializer
... def __init__(self, cmd, reachable=False, user='root'):
... pass
>>> p = process('halt', True)
>>> p.cmd, p.reachable, p.user
('halt', True, 'root')
"""
names, varargs, keywords, defaults = inspect.getargspec(func)
@wraps(func)
def wrapper(self, *args, **kargs):
for name, arg in list(zip(names[1:], args)) + list(kargs.items()):
setattr(self, name, arg)
for name, default in zip(reversed(names), reversed(defaults)):
if not hasattr(self, name):
setattr(self, name, default)
func(self, *args, **kargs)
return wrapper
class VideoEvalStats(object):
@initializer
def __init__(self, video_id = 0, num_frames=0, tp_frames=0, fp_frames=0, fn_frames=0, mismatched_tp_frames=0, num_detections=0, tp_detections=0, fp_detections=0, fn_detections=0, num_males=0, num_females=0, gender_matches=0, male_mismatches=0, female_mismatches=0):
pass
def compute_precision_recall(self, tp, fp, fn):
if (tp + fp) != 0:
precision = tp / (tp + fp)
else:
precision = 0.0
if (tp + fn) != 0:
recall = tp / (tp + fn)
else:
recall = 0.0
return (precision, recall)
def compute_frame_acc_stats(self):
return self.compute_precision_recall(self.tp_frames, self.fp_frames, self.fn_frames)
def compute_det_acc_stats(self):
(det_precision, det_recall) = self.compute_precision_recall(self.tp_detections, self.fp_detections, self.fn_detections)
return (det_precision, det_recall)
def compute_gender_acc_stats(self):
if self.tp_detections != 0:
gender_precision = self.gender_matches / (self.num_males + self.num_females)
else:
gender_precision = 1.0
return gender_precision
def __str__(self):
frame_stats = "Video({})[FRAME SELECTION]: num_frames({}), tp({}), fp({}), fn({})".format(self.video_id, self.num_frames, self.tp_frames, self.fp_frames, self.fn_frames)
frame_acc_stats = "Video({})[FRAME SELECTION]: Frame selection precision({}), Frame selection recall({})".format(self.video_id, *self.compute_frame_acc_stats())
det_stats = "Video({})[DETECTION]: num_detections({}), tp({}), fp({}), fn({}), mismatched_frames({})".format(self.video_id, self.num_detections, self.tp_detections, self.fp_detections, self.fn_detections, self.mismatched_tp_frames)
det_acc_stats = "Video({})[DETECTION]: Detection precision({}), Detection recall({})".format(self.video_id, *self.compute_det_acc_stats())
gender_stats = "Video({})[GENDER]: males({}), females({}), gender_matches({}), male_mismatches({}), female_mismatches({})".format(self.video_id, self.num_males, self.num_females, self.gender_matches, self.male_mismatches, self.female_mismatches)
gender_acc_stats = "Video({})[GENDER]: Gender precision({})".format(self.video_id, self.compute_gender_acc_stats())
return frame_stats + "\n" + frame_acc_stats + "\n" + det_stats + "\n" + det_acc_stats + "\n" + gender_stats + "\n" + gender_acc_stats
def __add__(self, other):
num_frames = self.num_frames + other.num_frames
# frame selection
tp_frames = self.tp_frames + other.tp_frames
fp_frames = self.fp_frames + other.fp_frames
fn_frames = self.fn_frames + other.fn_frames
# face detection
num_detections = self.num_detections + other.num_detections
mismatched_tp_frames = self.mismatched_tp_frames + other.mismatched_tp_frames
tp_detections = self.tp_detections + other.tp_detections
fp_detections = self.fp_detections + other.fp_detections
fn_detections = self.fn_detections + other.fn_detections
# gender detection
num_males = self.num_males + other.num_males
num_females = self.num_females + other.num_females
gender_matches = self.gender_matches + other.gender_matches
male_mismatches = self.male_mismatches + other.male_mismatches
female_mismatches = self.female_mismatches + other.female_mismatches
return VideoEvalStats(self.video_id, num_frames, tp_frames, fp_frames, fn_frames, mismatched_tp_frames, num_detections, tp_detections, fp_detections, fn_detections, num_males, num_females, gender_matches, male_mismatches, female_mismatches)
class VideoStats(object):
@initializer
def __init__(self, video_id = 0, num_frames=0, selected_frames=0, num_detections=0, num_males=0, num_females=0):
pass
def __str__(self):
stats = "Video({}): num_frames({}), selected_frames({}), num_detections({}), num_males({}), num_females({})".format(self.video_id, self.num_frames, self.selected_frames, self.num_detections, self.num_males, self.num_females)
return stats
def __add__(self, other):
num_frames = self.num_frames + other.num_frames
selected_frames = self.selected_frames + other.selected_frames
num_detections = self.num_detections + other.num_detections
num_males = self.num_males + other.num_males
num_females = self.num_females + other.num_females
return VideoStats(self.video_id, num_frames, selected_frames, num_detections, num_males, num_females)
class Command(BaseCommand):
help = 'Detect faces in videos'
def add_arguments(self, parser):
parser.add_argument('command')
def bbox_area(self, bbox, video):
return ((bbox.x2 - bbox.x1)*video.width) * \
((bbox.y2 - bbox.y1)*video.height)
def compute_iou(self, bbox1, bbox2, video):
int_x1=max(bbox1.x1, bbox2.x1)
int_y1=max(bbox1.y1, bbox2.y1)
int_x2=min(bbox1.x2, bbox2.x2)
int_y2=min(bbox1.y2, bbox2.y2)
int_area = 0.0
if(int_x2 > int_x1 and int_y2 > int_y1):
int_area = ((int_x2 - int_x1)*video.width) * \
((int_y2 - int_y1)*video.height)
iou = int_area/(self.bbox_area(bbox1, video)+self.bbox_area(bbox2, video)-int_area)
return iou
def remove_duplicates(self, l):
s = set()
return [x for x in l
if x not in s and not s.add(x)]
def fetch_ground_truth(self, video, label = "Talking Heads"):
g_labelset = video.handlabeled_labelset() # ground truth
#g_faces = Face.objects.filter(frame__labelset=g_labelset).prefetch_related('frame').all()
g_faces = Face.objects.filter(frame__labelset=g_labelset, frame__labels__name="Talking Heads").prefetch_related('frame').all()
ground_truth_frames = []
g_faces_dict = defaultdict(list)
for g_face in g_faces:
g_faces_dict[g_face.frame.number].append(g_face)
ground_truth_frames.append(g_face.frame.number)
ground_truth_frames = self.remove_duplicates(ground_truth_frames)
return (ground_truth_frames, g_faces_dict)
def fetch_automatic_detections(self, video, label = "Talking Heads"):
d_labelset = video.detected_labelset() # prediction
#d_faces = Face.objects.filter(frame__labelset=d_labelset).prefetch_related('frame').all()
#d_faces = Face.objects.filter(frame__labelset=d_labelset, frame__number__in=ground_truth_frames).prefetch_related('frame').all()
d_faces = Face.objects.filter(frame__labelset=d_labelset).prefetch_related('frame').all()
detected_frames = []
d_faces_dict = defaultdict(list)
# metrics for automatic detection of frames with "talking heads"
face_size_thres = 0.03
det_score_thres = 0.95
for d_face in d_faces:
if d_face.bbox.score > det_score_thres and self.bbox_area(d_face.bbox, video) > (face_size_thres * video.width * video.height):
d_faces_dict[d_face.frame.number].append(d_face)
detected_frames.append(d_face.frame.number)
detected_frames = self.remove_duplicates(detected_frames)
return (detected_frames, d_faces_dict)
def eval_detection(self, video, frame_number, d_faces, g_faces, vstats):
        if len(d_faces) == 0 and len(g_faces) == 0:
            # nothing to evaluate in this frame; return stats unchanged
            return (vstats, [])
iou_threshold = 0.5
tp_detections = 0
fp_detections = 0
fn_detections = 0
gender_matches = 0
d_dict = defaultdict(int)
g_dict = defaultdict(int)
gender_eval_list = []
for d_face in d_faces:
for g_face in g_faces:
iou = self.compute_iou(d_face.bbox, g_face.bbox, video)
if iou > iou_threshold:
if g_dict[g_face] != 0:
fp_detections += 1
else:
tp_detections += 1
#if d_face.gender == g_face.gender:
# gender_matches += 1
gender_eval_list.append((d_face.gender, g_face.gender))
g_dict[g_face] += 1
d_dict[d_face] += 1
for d_face in d_faces:
if d_dict[d_face] == 0:
fp_detections += 1
for g_face in g_faces:
if g_dict[g_face] == 0:
fn_detections += 1
# update detection stats
vstats.num_detections += len(d_faces)
vstats.tp_detections += tp_detections
vstats.fp_detections += fp_detections
vstats.fn_detections += fn_detections
if fp_detections != 0 or fn_detections != 0:
vstats.mismatched_tp_frames += 1
return (vstats, gender_eval_list)
def eval_frame_selection(self, g_frame_list, d_frame_list):
tp_frames = [x for x in g_frame_list if x in d_frame_list]
fp_frames = [x for x in d_frame_list if x not in tp_frames]
fn_frames = [x for x in g_frame_list if x not in tp_frames]
return (tp_frames, fp_frames, fn_frames)
def eval_gender(self, gender_eval_list, vstats):
num_males = 0
num_females = 0
gender_matches = 0
male_mismatches = 0
female_mismatches = 0
for (d, g) in gender_eval_list:
if d == 'M':
num_males += 1
if g != d:
male_mismatches += 1
else:
gender_matches += 1
else:
num_females += 1
if g != d:
female_mismatches += 1
else:
gender_matches += 1
#update gender stats
vstats.num_males += num_males
vstats.num_females += num_females
vstats.gender_matches += gender_matches
vstats.male_mismatches += male_mismatches
vstats.female_mismatches += female_mismatches
return vstats
def eval_video(self, video):
(ground_truth_frames, g_faces_dict) = self.fetch_ground_truth(video)
(detected_frames, d_faces_dict) = self.fetch_automatic_detections(video)
(tp_frames, fp_frames, fn_frames) = self.eval_frame_selection(ground_truth_frames, detected_frames)
vstats = VideoEvalStats(video_id=video.id, num_frames=int(video.num_frames/video.get_stride()), tp_frames = len(tp_frames), fp_frames=len(fp_frames), fn_frames=len(fn_frames))
#for frame_number in range(0, 1000, video.get_stride()):
for frame_number in tp_frames:
# evaluate detection
d_faces = d_faces_dict[frame_number]
g_faces = g_faces_dict[frame_number]
(vstats, gender_eval_list) = self.eval_detection(video, frame_number, d_faces, g_faces, vstats)
# evaluate gender
vstats = self.eval_gender(gender_eval_list, vstats)
return vstats
def eval_videos(self, start_video_id, end_video_id):
vtotal_stats = VideoEvalStats(video_id=0)
for video_id in range(start_video_id, end_video_id):
video = Video.objects.filter(id=video_id).get()
vstats = self.eval_video(video)
print(vstats)
vtotal_stats = vtotal_stats + vstats
print(vtotal_stats)
def infer_videos(self, start_video_id, end_video_id):
vtotal_stats = VideoStats(video_id=0)
for video_id in range(start_video_id, end_video_id):
video = Video.objects.filter(id=video_id).get()
(detected_frames, d_faces_dict) = self.fetch_automatic_detections(video)
vstats = VideoStats(video_id=video.id, num_frames=int(video.num_frames/video.get_stride()), selected_frames=len(detected_frames))
#for frame_number in range(0, 1000, video.get_stride()):
for frame_number in detected_frames:
# evaluate detection
d_faces = d_faces_dict[frame_number]
for d_face in d_faces:
vstats.num_detections += 1
if d_face.gender == 'M':
vstats.num_males += 1
else:
vstats.num_females += 1
print(vstats)
vtotal_stats = vtotal_stats + vstats
print(vtotal_stats)
def handle(self, *args, **options):
start_video_id = 1
end_video_id = 61
#with open(options['path']) as f:
# paths = [s.strip() for s in f.readlines()]
command = options['command']
if command == "eval":
self.eval_videos(start_video_id, end_video_id) # compare with labeled data
elif command == "infer":
self.infer_videos(start_video_id, end_video_id) # no labeled data (just infer)
else:
print("Error: eval or run")
|
MattPerron/esper
|
esper/query/management/commands/score.py
|
Python
|
apache-2.0
| 14,310
|
resource_id = "celery-1"
_install_script = """
[ { "id": "celery-1",
"key": {"name": "Celery", "version": "2.3"},
"config_port": {
"password": "engage_129",
"username": "engage_celery",
"vhost": "engage_celery_vhost"
},
"input_ports": {
"broker": {
"BROKER_HOST": "${hostname}",
"BROKER_PORT": "5672",
"broker": "rabbitmqctl"
},
"host": {
"cpu_arch": "x86_64",
"genforma_home": "${deployment_home}",
"hostname": "${hostname}",
"log_directory": "${deployment_home}/log",
"os_type": "mac-osx",
"os_user_name": "${username}",
"private_ip": null,
"sudo_password": "GenForma/${username}/sudo_password"
},
"pip": {
"pipbin": "${deployment_home}/python/bin/pip"
},
"python": {
"PYTHONPATH": "${deployment_home}/python/lib/python2.7/site-packages/",
"home": "${deployment_home}/python/bin/python",
"python_bin_dir": "${deployment_home}/python/bin",
"type": "python",
"version": "2.7"
},
"setuptools": {
"easy_install": "${deployment_home}/python/bin/easy_install"
}
},
"output_ports": {
"celery": {
"broker": "rabbitmqctl",
"password": "engage_129",
"username": "engage_celery",
"vhost": "engage_celery_vhost"
}
},
"inside": {
"id": "${hostname}",
"key": {"name": "mac-osx", "version": "10.6"},
"port_mapping": {
"host": "host"
}
},
"environment": [
{
"id": "rabbitmq-1",
"key": {"name": "rabbitmq", "version": "2.4"},
"port_mapping": {
"broker": "broker"
}
},
{
"id": "python-1",
"key": {"name": "python", "version": "2.7"},
"port_mapping": {
"python": "python"
}
},
{
"id": "__GF_inst_2",
"key": {"name": "pip", "version": "any"},
"port_mapping": {
"pip": "pip"
}
},
{
"id": "setuptools-1",
"key": {"name": "setuptools", "version": "0.6"},
"port_mapping": {
"setuptools": "setuptools"
}
}
]
}
]
"""
def get_install_script():
return _install_script
def get_password_data():
return {}
|
quaddra/engage
|
python_pkg/engage/drivers/genforma/drivertest_celery.py
|
Python
|
apache-2.0
| 2,355
|
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""BigQuery API IAM policy definitions
For all allowed roles and permissions, see:
https://cloud.google.com/bigquery/docs/access-control
"""
# BigQuery-specific IAM roles available for tables and views
BIGQUERY_DATA_EDITOR_ROLE = "roles/bigquery.dataEditor"
"""When applied to a table or view, this role provides permissions to
read and update data and metadata for the table or view."""
BIGQUERY_DATA_OWNER_ROLE = "roles/bigquery.dataOwner"
"""When applied to a table or view, this role provides permissions to
read and update data and metadata for the table or view, share the
table/view, and delete the table/view."""
BIGQUERY_DATA_VIEWER_ROLE = "roles/bigquery.dataViewer"
"""When applied to a table or view, this role provides permissions to
read data and metadata from the table or view."""
BIGQUERY_METADATA_VIEWER_ROLE = "roles/bigquery.metadataViewer"
"""When applied to a table or view, this role provides persmissions to
read metadata from the table or view."""
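
# Hedged usage sketch (not part of this module): the constants above are meant to be
# used as the "role" field of an IAM policy binding on a BigQuery table or view.
# The client calls (Client.get_iam_policy / Client.set_iam_policy) and the member
# string below are illustrative assumptions; see the access-control docs linked above.
def _example_grant_data_viewer(table_id, member="user:analyst@example.com"):
    """Hypothetical helper: grant ``member`` read access on ``table_id``."""
    from google.cloud import bigquery

    client = bigquery.Client()
    policy = client.get_iam_policy(table_id)
    # Append a binding that pairs the data viewer role with the new member.
    policy.bindings.append({"role": BIGQUERY_DATA_VIEWER_ROLE, "members": {member}})
    return client.set_iam_policy(table_id, policy)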
|
googleapis/python-bigquery
|
google/cloud/bigquery/iam.py
|
Python
|
apache-2.0
| 1,554
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Tests for consumer_tracking_pipeline_visitor."""
# pytype: skip-file
from __future__ import absolute_import
import logging
import unittest
import apache_beam as beam
from apache_beam import pvalue
from apache_beam.pipeline import Pipeline
from apache_beam.pvalue import AsList
from apache_beam.runners.direct import DirectRunner
from apache_beam.runners.direct.consumer_tracking_pipeline_visitor import ConsumerTrackingPipelineVisitor
from apache_beam.transforms import CoGroupByKey
from apache_beam.transforms import Create
from apache_beam.transforms import DoFn
from apache_beam.transforms import FlatMap
from apache_beam.transforms import Flatten
from apache_beam.transforms import ParDo
# Disable frequent lint warning due to pipe operator for chaining transforms.
# pylint: disable=expression-not-assigned
# pylint: disable=pointless-statement
class ConsumerTrackingPipelineVisitorTest(unittest.TestCase):
def setUp(self):
self.pipeline = Pipeline(DirectRunner())
self.visitor = ConsumerTrackingPipelineVisitor()
try: # Python 2
self.assertCountEqual = self.assertItemsEqual
except AttributeError: # Python 3
pass
def test_root_transforms(self):
root_read = beam.Impulse()
root_flatten = Flatten(pipeline=self.pipeline)
pbegin = pvalue.PBegin(self.pipeline)
pcoll_read = pbegin | 'read' >> root_read
pcoll_read | FlatMap(lambda x: x)
[] | 'flatten' >> root_flatten
self.pipeline.visit(self.visitor)
root_transforms = [t.transform for t in self.visitor.root_transforms]
self.assertCountEqual(root_transforms, [root_read, root_flatten])
pbegin_consumers = [
c.transform for c in self.visitor.value_to_consumers[pbegin]
]
self.assertCountEqual(pbegin_consumers, [root_read])
self.assertEqual(len(self.visitor.step_names), 3)
def test_side_inputs(self):
class SplitNumbersFn(DoFn):
def process(self, element):
if element < 0:
yield pvalue.TaggedOutput('tag_negative', element)
else:
yield element
class ProcessNumbersFn(DoFn):
def process(self, element, negatives):
yield element
def _process_numbers(pcoll, negatives):
first_output = (
pcoll
| 'process numbers step 1' >> ParDo(ProcessNumbersFn(), negatives))
second_output = (
first_output
| 'process numbers step 2' >> ParDo(ProcessNumbersFn(), negatives))
output_pc = ((first_output, second_output)
| 'flatten results' >> beam.Flatten())
return output_pc
root_read = beam.Impulse()
result = (
self.pipeline
| 'read' >> root_read
| ParDo(SplitNumbersFn()).with_outputs('tag_negative', main='positive'))
positive, negative = result
_process_numbers(positive, AsList(negative))
self.pipeline.visit(self.visitor)
root_transforms = [t.transform for t in self.visitor.root_transforms]
self.assertEqual(root_transforms, [root_read])
self.assertEqual(len(self.visitor.step_names), 5)
self.assertEqual(len(self.visitor.views), 1)
self.assertTrue(isinstance(self.visitor.views[0], pvalue.AsList))
def test_co_group_by_key(self):
emails = self.pipeline | 'email' >> Create([('joe', 'joe@example.com')])
phones = self.pipeline | 'phone' >> Create([('mary', '111-222-3333')])
{'emails': emails, 'phones': phones} | CoGroupByKey()
self.pipeline.visit(self.visitor)
root_transforms = [t.transform for t in self.visitor.root_transforms]
self.assertEqual(len(root_transforms), 2)
self.assertGreater(
len(self.visitor.step_names), 3) # 2 creates + expanded CoGBK
self.assertEqual(len(self.visitor.views), 0)
def test_visitor_not_sorted(self):
p = Pipeline()
# pylint: disable=expression-not-assigned
from apache_beam.testing.test_stream import TestStream
p | TestStream().add_elements(['']) | beam.Map(lambda _: _)
original_graph = p.to_runner_api(return_context=False)
out_of_order_graph = p.to_runner_api(return_context=False)
root_id = out_of_order_graph.root_transform_ids[0]
root = out_of_order_graph.components.transforms[root_id]
tmp = root.subtransforms[0]
root.subtransforms[0] = root.subtransforms[1]
root.subtransforms[1] = tmp
p = beam.Pipeline().from_runner_api(
out_of_order_graph, runner='BundleBasedDirectRunner', options=None)
v_out_of_order = ConsumerTrackingPipelineVisitor()
p.visit(v_out_of_order)
p = beam.Pipeline().from_runner_api(
original_graph, runner='BundleBasedDirectRunner', options=None)
v_original = ConsumerTrackingPipelineVisitor()
p.visit(v_original)
# Convert to string to assert they are equal.
out_of_order_labels = {
str(k): [str(t) for t in v_out_of_order.value_to_consumers[k]]
for k in v_out_of_order.value_to_consumers
}
original_labels = {
str(k): [str(t) for t in v_original.value_to_consumers[k]]
for k in v_original.value_to_consumers
}
self.assertDictEqual(out_of_order_labels, original_labels)
if __name__ == '__main__':
logging.getLogger().setLevel(logging.DEBUG)
unittest.main()
|
iemejia/incubator-beam
|
sdks/python/apache_beam/runners/direct/consumer_tracking_pipeline_visitor_test.py
|
Python
|
apache-2.0
| 5,990
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for BatchCreateEntities
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-dialogflow
# [START dialogflow_generated_dialogflow_v2_EntityTypes_BatchCreateEntities_async]
from google.cloud import dialogflow_v2
async def sample_batch_create_entities():
# Create a client
client = dialogflow_v2.EntityTypesAsyncClient()
# Initialize request argument(s)
entities = dialogflow_v2.Entity()
entities.value = "value_value"
entities.synonyms = ['synonyms_value_1', 'synonyms_value_2']
request = dialogflow_v2.BatchCreateEntitiesRequest(
parent="parent_value",
entities=entities,
)
# Make the request
operation = client.batch_create_entities(request=request)
print("Waiting for operation to complete...")
response = await operation.result()
# Handle the response
print(response)
# [END dialogflow_generated_dialogflow_v2_EntityTypes_BatchCreateEntities_async]
|
googleapis/python-dialogflow
|
samples/generated_samples/dialogflow_generated_dialogflow_v2_entity_types_batch_create_entities_async.py
|
Python
|
apache-2.0
| 1,788
|
from . elasticfactor import ElasticFactor
from ... environment import cfg
from elasticsearch import Elasticsearch
def run(node):
id_a, id_b = node.get('id_a', '63166071_1'), node.get('id_b', '63166071_2')
es = Elasticsearch()
data_a = es.get(index="factor_state2016", doc_type='factor_network', id=id_a)
data_b = es.get(index="factor_state2016", doc_type='factor_network', id=id_b)
constructor = ElasticFactor(cfg["cdr_elastic_search"]["hosts"] + cfg["cdr_elastic_search"]["index"])
merged = constructor.merge(data_a["_source"], data_b["_source"])
return merged
|
qadium-memex/linkalytics
|
linkalytics/factor/constructor/merge.py
|
Python
|
apache-2.0
| 603
|
import unittest
import copy
import gc
import rpy2.rinterface as rinterface
rinterface.initr()
class SexpTestCase(unittest.TestCase):
def testNew_invalid(self):
x = "a"
self.assertRaises(ValueError, rinterface.Sexp, x)
def testNew(self):
sexp = rinterface.baseenv.get("letters")
sexp_new = rinterface.Sexp(sexp)
idem = rinterface.baseenv.get("identical")
self.assertTrue(idem(sexp, sexp_new)[0])
sexp_new2 = rinterface.Sexp(sexp)
self.assertTrue(idem(sexp, sexp_new2)[0])
del(sexp)
self.assertTrue(idem(sexp_new, sexp_new2)[0])
def testTypeof_get(self):
sexp = rinterface.baseenv.get("letters")
self.assertEquals(sexp.typeof, rinterface.STRSXP)
sexp = rinterface.baseenv.get("pi")
self.assertEquals(sexp.typeof, rinterface.REALSXP)
sexp = rinterface.baseenv.get("plot")
self.assertEquals(sexp.typeof, rinterface.CLOSXP)
def testDo_slot(self):
data_func = rinterface.baseenv.get("data")
data_func(rinterface.SexpVector(["iris", ], rinterface.STRSXP))
sexp = rinterface.globalenv.get("iris")
names = sexp.do_slot("names")
iris_names = ("Sepal.Length", "Sepal.Width", "Petal.Length", "Petal.Width", "Species")
self.assertEquals(len(iris_names), len(names))
for i, n in enumerate(iris_names):
self.assertEquals(iris_names[i], names[i])
self.assertRaises(LookupError, sexp.do_slot, "foo")
def testDo_slot_assign(self):
data_func = rinterface.baseenv.get("data")
data_func(rinterface.SexpVector(["iris", ], rinterface.STRSXP))
sexp = rinterface.globalenv.get("iris")
iris_names = rinterface.StrSexpVector(['a', 'b', 'c', 'd', 'e'])
sexp.do_slot_assign("names", iris_names)
names = [x for x in sexp.do_slot("names")]
self.assertEquals(['a', 'b', 'c', 'd', 'e'], names)
def testDo_slot_assign_create(self):
#test that assigning slots is also creating the slot
x = rinterface.IntSexpVector([1,2,3])
x.do_slot_assign("foo", rinterface.StrSexpVector(["bar", ]))
slot = x.do_slot("foo")
self.assertEquals(1, len(slot))
self.assertEquals("bar", slot[0])
def testSexp_rsame_true(self):
sexp_a = rinterface.baseenv.get("letters")
sexp_b = rinterface.baseenv.get("letters")
self.assertTrue(sexp_a.rsame(sexp_b))
def testSexp_rsame_false(self):
sexp_a = rinterface.baseenv.get("letters")
sexp_b = rinterface.baseenv.get("pi")
self.assertFalse(sexp_a.rsame(sexp_b))
def testSexp_rsame_wrongType(self):
sexp_a = rinterface.baseenv.get("letters")
self.assertRaises(ValueError, sexp_a.rsame, 'foo')
def testSexp_sexp(self):
sexp = rinterface.IntSexpVector([1,2,3])
cobj = sexp.__sexp__
sexp = rinterface.IntSexpVector([4,5,6,7])
self.assertEquals(4, len(sexp))
sexp.__sexp__ = cobj
self.assertEquals(3, len(sexp))
def testSexp_sexp_wrongtypeof(self):
sexp = rinterface.IntSexpVector([1,2,3])
cobj = sexp.__sexp__
sexp = rinterface.StrSexpVector(['a', 'b'])
self.assertEquals(2, len(sexp))
self.assertRaises(ValueError, sexp.__setattr__, '__sexp__', cobj)
def testSexp_sexp_destroyCobj(self):
sexp = rinterface.IntSexpVector([1,2,3])
cobj = sexp.__sexp__
del(cobj)
gc.collect()
# no real test, just make sure that it does
# not cause a segfault
def testSexp_deepcopy(self):
sexp = rinterface.IntSexpVector([1,2,3])
self.assertEquals(0, sexp.named)
rinterface.baseenv.get("identity")(sexp)
self.assertEquals(2, sexp.named)
sexp2 = sexp.__deepcopy__()
self.assertEquals(sexp.typeof, sexp2.typeof)
self.assertEquals(list(sexp), list(sexp2))
self.assertFalse(sexp.rsame(sexp2))
self.assertEquals(0, sexp2.named)
# should be the same as above, but just in case:
sexp3 = copy.deepcopy(sexp)
self.assertEquals(sexp.typeof, sexp3.typeof)
self.assertEquals(list(sexp), list(sexp3))
self.assertFalse(sexp.rsame(sexp3))
self.assertEquals(0, sexp3.named)
def suite():
suite = unittest.TestLoader().loadTestsFromTestCase(SexpTestCase)
return suite
if __name__ == '__main__':
tr = unittest.TextTestRunner(verbosity = 2)
tr.run(suite())
|
lbouma/Cyclopath
|
pyserver/bin/rpy2/rinterface/tests/test_Sexp.py
|
Python
|
apache-2.0
| 4,549
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-08-17 00:40
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('turnos', '0004_auto_20160519_0134'),
]
operations = [
migrations.RenameField('Turno', 'asistio', 'no_asistio'),
migrations.RenameField('Turno', 'aviso', 'no_aviso')
]
|
mava-ar/sgk
|
src/turnos/migrations/0005_auto_20160816_2140.py
|
Python
|
apache-2.0
| 415
|
__author__ = 'LiGe'
#encoding:utf-8
import networkx as nx
import matplotlib.pyplot as plot
from file_to_graph import file_to_mat
def build_graph(mat):
    G = nx.DiGraph()  # create an empty directed graph
    for i in range(0, mat.shape[0]):
        G.add_node(i)  # create a node for each row of the matrix
    for i in range(0, mat.shape[0]):
        for j in range(0, mat.shape[1]):
            if mat[i, j] == 1:
                G.add_edge(i, j)  # add a directed edge i -> j
#print nx.in_degree(G,0)
#print nx.out_degree(G)
#print nx.degree(G)
print nx.clustering(G.to_undirected())
print G.in_degree(1)
#nx.convert_to_undirected(G)
#nx.convert_to_undirected()
print nx.betweenness_centrality(G)
print nx.closeness_centrality(G)
#print nx.diameter(G)
print nx.average_shortest_path_length(G)
# print nx.average_clustering(G)
sub_graph= nx.strongly_connected_component_subgraphs(G)
for line in sub_graph:
print nx.degree(line)
#pos =nx.circular_layout(G)
#plot.title('the orginal graph with pos')
#nx.draw(G,pos,with_label=True,node_size=300)
#plot.show()
nx.draw(line, with_label=True)
plot.show()
if __name__=='__main__':
file='benapi_renew/mmc.exe.txt'
mat=file_to_mat(file)
build_graph(mat)
|
yanshengli/DBN_Learning
|
基于复杂语言网络的文本二分类/select_feature.py
|
Python
|
apache-2.0
| 1,297
|
import os
import re
import cmd
import sys
import time
import util
host = sys.argv[1]
cmd.run ("virsh shutdown %s"%(host))
while util.vm_is_running(host):
time.sleep(1)
|
alobbs/qvm
|
qvm/qvm-stop.py
|
Python
|
apache-2.0
| 171
|
#!/usr/bin/env python
# Copyright (c) 2016 Lyft Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import setuptools
setuptools.setup(
setup_requires=['pbr'],
pbr=True)
|
lyft/bandit-high-entropy-string
|
setup.py
|
Python
|
apache-2.0
| 680
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Method Manager
Provide the end user interface for method (geophysical) dependent
modelling and inversion as well as data and model visualization.
"""
import numpy as np
import pygimli as pg
from pygimli.utils import prettyFloat as pf
def fit(funct, data, err=None, **kwargs):
"""Generic function fitter.
Fit data to a given function.
TODO
----
* Dictionary support for funct to submit user data..
Parameters
----------
funct: callable
        Function with the first argument as data space, e.g., x, t, f, Nr. ..
        Any following arguments are the parameters to be fit,
        except when a verbose flag is used.
data: iterable (float)
Data values
err: iterable (float) [None]
Data error values in %/100. Default is 1% if None are given.
Other Parameters
----------------
*dataSpace*: iterable
Keyword argument of the data space of len(data).
        The name needs to fit the first argument of funct.
Returns
-------
model: array
Fitted model parameter.
response: array
Model response.
Example
-------
>>> import pygimli as pg
>>>
>>> func = lambda t, a, b: a*np.exp(b*t)
>>> t = np.linspace(1, 2, 20)
>>> data = func(t, 1.1, 2.2)
>>> model, response = pg.frameworks.fit(func, data, t=t)
>>> print(pg.core.round(model, 1e-5))
2 [1.1, 2.2]
>>> _ = pg.plt.plot(t, data, 'o', label='data')
>>> _ = pg.plt.plot(t, response, label='response')
>>> _ = pg.plt.legend()
"""
mgr = ParameterInversionManager(funct, **kwargs)
model = mgr.invert(data, err, **kwargs)
return model, mgr.fw.response
# TG: harmonicFit does not really belong here as it is not a curve fit
# We should rather use a class Decomposition
# Discuss .. rename to Framework or InversionFramework since it only manages
# the union of Inversion/Modelling and RegionManager(later)
class MethodManager(object):
"""General manager to maintenance a measurement method.
Method Manager are the interface to end-user interaction and can be seen as
simple but complete application classes which manage all tasks of
geophysical data processing.
The method manager holds one instance of a forward operator and an
appropriate inversion framework to handle modelling and data inversion.
Method Manager also helps with data import and export,
handle measurement data error estimation as well as model and data
visualization.
Attributes
----------
verbose : bool
Give verbose output.
debug : bool
Give debug output.
fop : :py:mod:`pygimli.frameworks.Modelling`
Forward Operator instance .. knows the physics.
fop is initialized by
:py:mod:`pygimli.manager.MethodManager.initForwardOperator`
and calls a valid
:py:mod:`pygimli.manager.MethodManager.createForwardOperator` method
in any derived classes.
inv : :py:mod:`pygimli.frameworks.Inversion`.
Inversion framework instance .. knows the reconstruction approach.
The attribute inv is initialized by default but can be changed
overwriting
:py:mod:`pygimli.manager.MethodManager.initInversionFramework`
"""
def __init__(self, fop=None, fw=None, data=None, **kwargs):
"""Constructor."""
self._fop = fop
self._fw = fw
# we hold our own copy of the data
self._verbose = kwargs.pop('verbose', False)
self._debug = kwargs.pop('debug', False)
self.data = None
if data is not None:
if isinstance(data, str):
self.load(data)
else:
self.data = data
# The inversion framework
self._initInversionFramework(verbose=self._verbose,
debug=self._debug)
# The forward operator is stored in self._fw
self._initForwardOperator(verbose=self._verbose, **kwargs)
# maybe obsolete
self.figs = {}
self.errIsAbsolute = False
def __hash__(self):
"""Create a hash for Method Manager."""
return pg.utils.strHash(str(type(self))) ^ hash(self.fop)
@property
def verbose(self):
return self._verbose
@verbose.setter
def verbose(self, v):
self._verbose = v
self.fw.verbose = self._verbose
@property
def debug(self):
return self._debug
@debug.setter
def debug(self, v):
self._debug = v
self.fw.debug = self._debug
@property
def fw(self):
return self._fw
@property
def fop(self):
return self.fw.fop
@property
def inv(self):
return self.fw
@property
def model(self):
return self.fw.model
def reinitForwardOperator(self, **kwargs):
"""Reinitialize the forward operator.
Sometimes it can be useful to reinitialize the forward operator.
Keyword arguments will be forwarded to 'self.createForwardOperator'.
"""
self._initForwardOperator(**kwargs)
def _initForwardOperator(self, **kwargs):
"""Initialize or re-initialize the forward operator.
Called once in the constructor to force the manager to create the
        necessary forward operator member. Can be recalled if you need to
        change the manager's own forward operator object. If you want your own
instance of a valid FOP call createForwardOperator.
"""
if self._fop is not None:
fop = self._fop
else:
fop = self.createForwardOperator(**kwargs)
if fop is None:
pg.critical("It seems that createForwardOperator method "
"does not return a valid forward operator.")
if self.fw is not None:
self.fw.reset()
self.fw.setForwardOperator(fop)
else:
pg.critical("No inversion framework defined.")
def createForwardOperator(self, **kwargs):
"""Mandatory interface for derived classes.
Here you need to specify which kind of forward operator FOP
you want to use.
This is called by any initForwardOperator() call.
Parameters
----------
**kwargs
Any arguments that are necessary for your FOP creation.
Returns
-------
Modelling
Instance of any kind of :py:mod:`pygimli.framework.Modelling`.
"""
pg.critical("No forward operator defined, either give one or "
"overwrite in derived class")
def _initInversionFramework(self, **kwargs):
"""Initialize or re-initialize the inversion framework.
Called once in the constructor to force the manager to create the
necessary Framework instance.
"""
self._fw = self.createInversionFramework(**kwargs)
if self.fw is None:
pg.critical("createInversionFramework does not return "
"valid inversion framework.")
def createInversionFramework(self, **kwargs):
"""Create default Inversion framework.
Derived classes may overwrite this method.
Parameters
----------
**kwargs
Any arguments that are necessary for your creation.
Returns
-------
Inversion
Instance of any kind of :py:mod:`pygimli.framework.Inversion`.
"""
if self._fw is None:
return pg.frameworks.Inversion(**kwargs)
else:
return self._fw
def load(self, fileName):
"""API, overwrite in derived classes."""
pg.critical('API, overwrite in derived classes', fileName)
def estimateError(self, data, errLevel=0.01, absError=None):
# TODO check, rel or abs in return.
"""Estimate data error.
Create an error of estimated measurement error.
On default it returns an array of constant relative errors.
More sophisticated error estimation should be done
in specialized derived classes.
Parameters
----------
data : iterable
Data values for which the errors should be estimated.
errLevel : float (0.01)
Error level in percent/100 (i.e., 3% = 0.03).
absError : float (None)
Absolute error in the unit of the data.
Returns
-------
err : array
Returning array of size len(data)
"""
if absError is not None:
return absError + data * errLevel
return np.ones(len(data)) * errLevel
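        # Worked example (hypothetical numbers): for data = [10., 20.] with
        # errLevel=0.03 and absError=0.5 this returns 0.5 + data * 0.03 = [0.8, 1.1]
        # (absolute, in data units); without absError it is simply [0.03, 0.03]
        # (relative errors).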
def simulate(self, model, **kwargs):
# """Run a simulation aka the forward task."""
ra = self.fop.response(par=model)
noiseLevel = kwargs.pop('noiseLevel', 0.0)
if noiseLevel > 0:
err = self.estimateError(ra, errLevel=noiseLevel)
ra *= 1. + pg.randn(ra.size(), seed=kwargs.pop('seed', None)) * err
return ra, err
return ra
def setData(self, data):
"""Set a data and distribute it to the forward operator"""
self.data = data
self.applyData(data)
def applyData(self, data):
""" """
self.fop.data = data
def checkData(self, data):
"""Overwrite for special checks to return data values"""
# if self._dataToken == 'nan':
# pg.critical('self._dataToken nan, should be set in class', self)
# return data(self._dataToken)
return data
def _ensureData(self, data):
"""Check data validity"""
if data is None:
data = self.fw.dataVals
vals = self.checkData(data)
if vals is None:
pg.critical("There are no data values.")
if abs(min(vals)) < 1e-12:
print(min(vals), max(vals))
pg.critical("There are zero data values.")
return vals
def checkError(self, err, dataVals=None):
"""Return relative error. Default we assume 'err' are relative values.
Overwrite is derived class if needed. """
if isinstance(err, pg.DataContainer):
if not err.haveData('err'):
                pg.error('DataContainer has no "err" values. '
                         'Falling back to 0.01')
return err['err']
return err
def _ensureError(self, err, dataVals=None):
"""Check error validity"""
if err is None:
err = self.fw.errorVals
vals = self.checkError(err, dataVals)
if vals is None:
            pg.warn('No data error given, falling back to 1%')
vals = np.ones(len(dataVals)) * 0.01
try:
if min(vals) <= 0:
pg.critical("All error values need to be larger then 0. Either"
" give and err argument or fill dataContainer "
" with a valid 'err' ", min(vals), max(vals))
except ValueError:
pg.critical("Can't estimate data error")
return vals
def preRun(self, *args, **kwargs):
"""Called just before the inversion run starts."""
pass
def postRun(self, *args, **kwargs):
"""Called just after the inversion run."""
pass
def invert(self, data=None, err=None, **kwargs):
"""Invert the data.
Invert the data by calling self.inv.run() with mandatory data and
error values.
TODO
*need dataVals mandatory? what about already loaded data
Parameters
----------
dataVals : iterable
Data values to be inverted.
errVals : iterable | float
Error value for the given data.
            If errVals is a float we assume it is a global relative
            error and force self.estimateError to be called.
"""
if data is not None:
self.data = data
else:
data = self.data
dataVals = self._ensureData(data)
errVals = self._ensureError(err, dataVals)
self.preRun(**kwargs)
self.fw.run(dataVals, errVals, **kwargs)
self.postRun(**kwargs)
return self.fw.model
def showModel(self, model, ax=None, **kwargs):
"""Show a model.
Draw model into a given axes or show inversion result from last run.
Forwards on default to the self.fop.drawModel function
of the modelling operator.
If there is no function given, you have to override this method.
Parameters
----------
ax : mpl axes
Axes object to draw into. Create a new if its not given.
model : iterable
            Model data to be drawn.
Returns
-------
ax, cbar
"""
if ax is None:
fig, ax = pg.plt.subplots()
ax, cBar = self.fop.drawModel(ax, model, **kwargs)
return ax, cBar
def showData(self, data=None, ax=None, **kwargs):
"""Show the data.
Draw data values into a given axes or show the data values from
the last run.
        Forwards by default to the self.fop.drawData function
of the modelling operator.
        If there is no function given, you have to override this method.
Parameters
----------
ax : mpl axes
Axes object to draw into. Create a new if its not given.
data : iterable | pg.DataContainer
            Data values to be drawn.
Returns
-------
ax, cbar
"""
if ax is None:
fig, ax = pg.plt.subplots()
if data is None:
data = self.data
return self.fop.drawData(ax, data, **kwargs), None
def showResult(self, model=None, ax=None, **kwargs):
"""Show the last inversion result.
TODO
----
DRY: decide showModel or showResult
Parameters
----------
ax : mpl axes
Axes object to draw into. Create a new if its not given.
model : iterable [None]
            Model values to be drawn. Default is self.model from the last run.
Returns
-------
ax, cbar
"""
if model is None:
model = self.model
return self.showModel(model, ax=ax, **kwargs)
def showFit(self, ax=None, **kwargs):
"""Show the last inversion data and response."""
ax, cBar = self.showData(data=self.inv.dataVals,
error=self.inv.errorVals,
label='Data',
ax=ax, **kwargs)
ax, cBar = self.showData(data=self.inv.response,
label='Response',
ax=ax, **kwargs)
if not kwargs.pop('hideFittingAnnotation', False):
fittext = r"rrms: {0}, $\chi^2$: {1}".format(
pf(self.fw.inv.relrms()), pf(self.fw.inv.chi2()))
ax.text(0.99, 0.005, fittext,
transform=ax.transAxes,
horizontalalignment='right',
verticalalignment='bottom',
fontsize=8)
if not kwargs.pop('hideLegend', False):
ax.legend()
return ax, cBar
def showResultAndFit(self, **kwargs):
"""Calls showResults and showFit."""
fig = pg.plt.figure()
ax = fig.add_subplot(1, 2, 1)
self.showResult(ax=ax, model=self.model, **kwargs)
ax1 = fig.add_subplot(2, 2, 2)
ax2 = fig.add_subplot(2, 2, 4)
self.showFit(axs=[ax1, ax2], **kwargs)
fig.tight_layout()
return fig
@staticmethod
def createArgParser(dataSuffix='dat'):
"""Create default argument parser.
TODO move this to some kind of app class
Create default argument parser for the following options:
-Q, --quiet
-R, --robustData: options.robustData
-B, --blockyModel: options.blockyModel
-l, --lambda: options.lam
-i, --maxIter: options.maxIter
--depth: options.depth
"""
import argparse
parser = argparse.ArgumentParser(
description="usage: %prog [options] *." + dataSuffix)
parser.add_argument("-Q", "--quiet", dest="quiet",
action="store_true", default=False,
help="Be verbose.")
# parser.add_argument("-R", "--robustData", dest="robustData",
# action="store_true", default=False,
# help="Robust data (L1 norm) minimization.")
# parser.add_argument("-B", "--blockyModel", dest="blockyModel",
# action="store_true", default=False,
# help="Blocky model (L1 norm) regularization.")
parser.add_argument('-l', "--lambda", dest="lam", type=float,
default=100,
help="Regularization strength.")
parser.add_argument('-i', "--maxIter", dest="maxIter", type=int,
default=20,
help="Maximum iteration count.")
# parser.add_argument("--depth", dest="depth", type=float,
# default=None,
# help="Depth of inversion domain. [None=auto].")
parser.add_argument('dataFileName')
return parser
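
# Hedged sketch (not part of pygimli): as the MethodManager docstring above explains,
# a derived manager must provide createForwardOperator(). The toy forward operator
# below ('_ExampleFOP', its response() "physics", and its use of
# pg.frameworks.Modelling) is an illustrative assumption, not a real pygimli method.
class _ExampleManager(MethodManager):
    """Hypothetical minimal manager wrapping a user-defined forward operator."""

    def createForwardOperator(self, **kwargs):
        class _ExampleFOP(pg.frameworks.Modelling):
            def response(self, model):
                # toy physics: the data are just the model values scaled by 2
                return np.asarray(model) * 2.0

        return _ExampleFOP()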
class ParameterInversionManager(MethodManager):
"""Framework to invert unconstrained parameters."""
def __init__(self, funct=None, fop=None, **kwargs):
"""Constructor."""
if fop is not None:
if not isinstance(fop, pg.frameworks.ParameterModelling):
pg.critical("We need a fop if type ",
pg.frameworks.ParameterModelling)
elif funct is not None:
fop = pg.frameworks.ParameterModelling(funct)
else:
pg.critical("you should either give a valid fop or a function so "
"I can create the fop for you")
super(ParameterInversionManager, self).__init__(fop, **kwargs)
def createInversionFramework(self, **kwargs):
"""
"""
return pg.frameworks.MarquardtInversion(**kwargs)
def invert(self, data=None, err=None, **kwargs):
"""
Parameters
----------
limits: {str: [min, max]}
Set limits for parameter by parameter name.
startModel: {str: startModel}
Set the start value for parameter by parameter name.
"""
dataSpace = kwargs.pop(self.fop.dataSpaceName, None)
if dataSpace is not None:
self.fop.dataSpace = dataSpace
limits = kwargs.pop('limits', {})
for k, v in limits.items():
self.fop.setRegionProperties(k, limits=v)
startModel = kwargs.pop('startModel', {})
if isinstance(startModel, dict):
for k, v in startModel.items():
self.fop.setRegionProperties(k, startModel=v)
else:
kwargs['startModel'] = startModel
return super(ParameterInversionManager, self).invert(data=data,
err=err,
**kwargs)
class MethodManager1d(MethodManager):
"""Method Manager base class for managers on a 1d discretization."""
def __init__(self, fop=None, **kwargs):
"""Constructor."""
super(MethodManager1d, self).__init__(fop, **kwargs)
def createInversionFramework(self, **kwargs):
"""
"""
return pg.frameworks.Block1DInversion(**kwargs)
def invert(self, data=None, err=None, **kwargs):
""" """
return super(MethodManager1d, self).invert(data=data, err=err,
**kwargs)
class MeshMethodManager(MethodManager):
def __init__(self, **kwargs):
"""Constructor.
Attribute
---------
mesh: pg.Mesh
Copy of the main mesh to be distributed to inversion and the fop.
You can overwrite it with invert(mesh=mesh).
"""
super(MeshMethodManager, self).__init__(**kwargs)
self.mesh = None
@property
def paraDomain(self):
return self.fop.paraDomain
def paraModel(self, model=None):
"""Give the model parameter regarding the parameter mesh."""
if model is None:
model = self.fw.model
return self.fop.paraModel(model)
def createMesh(self, data=None, **kwargs):
"""API, implement in derived classes."""
pg.critical('no default mesh generation defined .. implement in '
'derived class')
def setMesh(self, mesh, **kwargs):
"""Set a mesh and distribute it to the forward operator"""
self.mesh = mesh
self.applyMesh(mesh, **kwargs)
def applyMesh(self, mesh, ignoreRegionManager=False, **kwargs):
""" """
if ignoreRegionManager:
mesh = self.fop.createRefinedFwdMesh(mesh, **kwargs)
self.fop.setMesh(mesh, ignoreRegionManager=ignoreRegionManager)
def invert(self, data=None, mesh=None, zWeight=1.0, startModel=None,
**kwargs):
"""Run the full inversion.
Parameters
----------
data : pg.DataContainer
mesh : pg.Mesh [None]
zWeight : float [1.0]
startModel : float | iterable [None]
If set to None fop.createDefaultStartModel(dataValues) is called.
Keyword Arguments
-----------------
forwarded to Inversion.run
Returns
-------
model : array
            Model mapped to match the paraDomain cell markers.
The calculated model is in self.fw.model.
"""
if data is None:
data = self.data
if data is None:
pg.critical('No data given for inversion')
self.applyData(data)
# no mesh given and there is no mesh known .. we create them
if mesh is None and self.mesh is None:
mesh = self.createMesh(data, **kwargs)
# a mesh was given or created so we forward it to the fop
if mesh is not None:
self.setMesh(mesh)
        # remove unused keyword argument .. needs better kwargs handling
self.fop._refineP2 = kwargs.pop('refineP2', False)
dataVals = self._ensureData(self.fop.data)
errorVals = self._ensureError(self.fop.data, dataVals)
if self.fop.mesh() is None:
pg.critical('Please provide a mesh')
        # inversion will call this itself as default behaviour
# if startModel is None:
# startModel = self.fop.createStartModel(dataVals)
# pg._g('invert-dats', dataVals)
# pg._g('invert-err', errVals)
# pg._g('invert-sm', startModel)
kwargs['startModel'] = startModel
self.fop.setRegionProperties('*', zWeight=zWeight)
        # Note: limits is not a mesh-related argument; maybe move it to the base class?
limits = kwargs.pop('limits', None)
if limits is not None:
self.fop.setRegionProperties('*', limits=limits)
self.preRun(**kwargs)
self.fw.run(dataVals, errorVals, **kwargs)
self.postRun(**kwargs)
return self.paraModel(self.fw.model)
def showFit(self, axs=None, **kwargs):
"""Show data and the inversion result model response."""
orientation = 'vertical'
if axs is None:
fig, axs = pg.plt.subplots(nrows=1, ncols=2)
orientation = 'horizontal'
self.showData(data=self.inv.dataVals,
orientation=orientation,
ax=axs[0], **kwargs)
axs[0].text(0.0, 1.03, "Data",
transform=axs[0].transAxes,
horizontalalignment='left',
verticalalignment='center')
resp = None
data = None
if 'model' in kwargs:
resp = self.fop.response(kwargs['model'])
data = self._ensureData(self.fop.data)
else:
resp = self.inv.response
data = self.fw.dataVals
self.showData(data=resp,
orientation=orientation,
ax=axs[1], **kwargs)
axs[1].text(0.0, 1.03, "Response",
transform=axs[1].transAxes,
horizontalalignment='left',
verticalalignment='center')
fittext = r"rrms: {0}%, $\chi^2$: {1}".format(
pg.pf(pg.utils.rrms(data, resp)*100),
pg.pf(self.fw.chi2History[-1]))
axs[1].text(1.0, 1.03, fittext,
transform=axs[1].transAxes,
horizontalalignment='right',
verticalalignment='center')
# if not kwargs.pop('hideFittingAnnotation', False):
# axs[0].text(0.01, 1.0025, "rrms: {0}, $\chi^2$: {1}"
# .format(pg.utils.prettyFloat(self.fw.inv.relrms()),
# pg.utils.prettyFloat(self.fw.inv.chi2())),
# transform=axs[0].transAxes,
# horizontalalignment='left',
# verticalalignment='bottom')
return axs
def coverage(self):
"""Return coverage vector considering the logarithmic transformation.
"""
covTrans = pg.core.coverageDCtrans(self.fop.jacobian(),
1.0 / self.inv.response,
1.0 / self.inv.model)
nCells = self.fop.paraDomain.cellCount()
return np.log10(covTrans[:nCells] / self.fop.paraDomain.cellSizes())
    def standardizedCoverage(self, threshold=0.01):
        """Return standardized coverage vector (0|1) using thresholding.
        """
        return 1.0*(abs(self.coverage()) > threshold)
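# A minimal workflow sketch (assumptions: `mgr` is a derived MeshMethodManager,
# e.g. an ERT-style manager that implements createMesh, and pg.show forwards a
# coverage array to the drawing routine); `data` is a placeholder.
def _exampleMeshInversion(mgr, data):
    """Sketch: run a mesh-based inversion and display coverage-weighted results."""
    model = mgr.invert(data, zWeight=0.5, limits=[1.0, 1000.0])
    coverage = mgr.standardizedCoverage()
    pg.show(mgr.paraDomain, model, coverage=coverage)
    return model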
class PetroInversionManager(MeshMethodManager):
"""Class for petrophysical inversion (s. Rücker et al. 2017)."""
def __init__(self, petro, mgr=None, **kwargs):
"""Initialize instance with manager and petrophysical relation."""
petrofop = kwargs.pop('petrofop', None)
if petrofop is None:
fop = kwargs.pop('fop', None)
if fop is None and mgr is not None:
            # Check: why can't we use mgr.fop directly here?
# fop = mgr.fop
fop = mgr.createForwardOperator()
self.checkData = mgr.checkData
self.checkError = mgr.checkError
if fop is not None:
if not isinstance(fop, pg.frameworks.PetroModelling):
petrofop = pg.frameworks.PetroModelling(fop, petro)
if petrofop is None:
print(mgr)
print(fop)
pg.critical('implement me')
super().__init__(fop=petrofop, **kwargs)
# Really necessary? A combination of petro and joint managers might do the same.
class JointPetroInversionManager(MeshMethodManager):
"""Joint inversion targeting at the same parameter through petrophysics."""
def __init__(self, petros, mgrs):
"""Initialize with lists of managers and transformations"""
self.mgrs = mgrs
self.fops = [pg.frameworks.PetroModelling(m.fop, p)
for p, m in zip(petros, mgrs)]
super().__init__(fop=pg.frameworks.JointModelling(self.fops))
# just hold a local copy
self.dataTrans = pg.trans.TransCumulative()
def checkError(self, err, data=None):
"""Collect error values."""
if len(err) != len(self.mgrs):
pg.critical("Please provide data for all managers")
vals = pg.Vector(0)
for i, mgr in enumerate(self.mgrs):
# we get the data values again or we have to split data
dataVals = mgr.checkData(self.fop._data[i])
vals = pg.cat(vals, mgr.checkError(err[i], dataVals))
return vals
def checkData(self, data):
"""Collect data values."""
if len(data) != len(self.mgrs):
pg.critical("Please provide data for all managers")
self.dataTrans.clear()
vals = pg.Vector(0)
for i, mgr in enumerate(self.mgrs):
self.dataTrans.add(mgr.inv.dataTrans, data[i].size())
vals = pg.cat(vals, mgr.checkData(data[i]))
self.inv.dataTrans = self.dataTrans
return vals
def invert(self, data, **kwargs):
"""Run inversion"""
limits = kwargs.pop('limits', [0., 1.])
self.fop.modelTrans.setLowerBound(limits[0])
self.fop.modelTrans.setUpperBound(limits[1])
kwargs['startModel'] = kwargs.pop('startModel',
(limits[1]+limits[0])/2.)
return super().invert(data, **kwargs)
| gimli-org/gimli | pygimli/frameworks/methodManager.py | Python | apache-2.0 | 29,010 |
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
# Ticloud web version 2.0
# author: WangRui
import ConfigParser
import logging
class ConfigManager(object):
_config_dict = None
@staticmethod
def create(filename):
parse_file = ParseIniFile(filename)
parse_file.init()
parse_file.getvalue()
parse_file.close()
ConfigManager._config_dict = parse_file.ini_dict
@staticmethod
def getvalue(arr, args):
try:
return ConfigManager._config_dict[arr][args]
        except (KeyError, TypeError):
            logging.error("could not read option [%s] %s from ConfigManager._config_dict", arr, args)
return None
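# A minimal usage sketch; the file name, section and option names are placeholders.
def config_manager_example():
    ConfigManager.create("ticloud.ini")
    return ConfigManager.getvalue("database", "host")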
class ParseIniFile(object):
"""
    Parse an INI configuration file.
"""
def __init__(self, filename):
self.filename = filename
self.cfg = None
self.read_handle = None
self.ini_dict = {}
def init(self):
self.cfg = ConfigParser.ConfigParser()
try:
with open(self.filename, "r") as self.read_handle:
self.cfg.readfp(self.read_handle)
except IOError:
logging.error("parse ini file error")
def close(self):
if self.read_handle is not None:
self.read_handle.close()
def getvalue(self):
if self.read_handle:
for sect in self.cfg.sections():
temp_dict = dict()
temp_dict["info"] = ''
for opt in self.cfg.options(sect):
temp_dict[opt] = self.cfg.get(sect, opt)
info = "\n" + opt + "=" + self.cfg.get(sect, opt)
temp_dict["info"] += info
self.ini_dict[sect] = temp_dict
def all_options(self, sect):
List = []
for opt in self.cfg.options(sect):
Dict = {}
Dict["opt"] = opt
Dict["value"] = self.cfg.get(sect, opt)
List.append(Dict)
return List
def get_value_now(self, sect, opt):
return self.cfg.get(sect, opt)
def write(self, data):
for k in self.ini_dict[data]:
if not cmp(k, "info"):
continue
self.cfg.set(data, k, self.ini_dict[data][k])
self.cfg.write(open(self.filename, "w"))
def delsection(self, name):
e = ''
self.cfg = ConfigParser.ConfigParser()
try:
self.cfg.read(self.filename)
self.cfg.remove_section(name)
self.cfg.write(open(self.filename, "w"))
except ConfigParser.ParsingError, e:
print e
return e
class ParseConfigFile(object):
def __init__(self, filename):
self.filename = filename
self.cfg = None
self.read_handle = None
self.ini_dict = {}
def init(self):
self.cfg = ConfigParser.ConfigParser()
try:
with open(self.filename, "r") as self.read_handle:
self.cfg.readfp(self.read_handle)
except IOError:
logging.error("parse ini file error")
def close(self):
if self.read_handle is not None:
self.read_handle.close()
def getvalue(self):
if self.read_handle:
for sect in self.cfg.sections():
temp_dict = dict()
temp_dict["info"] = ''
for opt in self.cfg.options(sect):
temp_dict[opt] = self.cfg.get(sect, opt)
info = "\n" + opt + "=" + self.cfg.get(sect, opt)
temp_dict["info"] += info
self.ini_dict[sect] = temp_dict
def write(self, data):
for k in self.ini_dict[data]:
if not cmp(k, "info"):
continue
self.cfg.set(data, k, self.ini_dict[data][k])
self.cfg.write(open(self.filename, "w"))
def delsection(self, name):
e = ''
self.cfg = ConfigParser.ConfigParser()
try:
self.cfg.read(self.filename)
self.cfg.remove_section(name)
self.cfg.write(open(self.filename, "w"))
except ConfigParser.ParsingError, e:
print e
return e
| liugangabc/ccs_web | common/configmanager.py | Python | apache-2.0 | 4,155 |
class Error(Exception):
def __init__(self, msg):
self.msg = msg
| mattaw/SoCFoundationFlow | admin/waf/waf-extensions/SFFerrors.py | Python | apache-2.0 | 77 |
###################################################################################################
#
# query_string_parser.py
# Extracts the query string from a URL and prints each parameter and value.
#
# Plugin Author: Your Name Here (ryan@obsidianforensics.com)
#
###################################################################################################
# Config
friendlyName = "Query String Parser"
description = "Extracts the query string from a URL and prints each field and value."
artifactTypes = ("url", "cache") # Artifacts that this plugin processes
remoteLookups = 0 # if this plugin will query online sources/databases
browser = "all" # browsers that the plugin applies to
version = "20170225" # version of the plugin (use the date)
parsedItems = 0 # count of items that the plugin parsed; initialized to 0
def plugin(analysis_session=None):
import urllib.parse
# Setting up our return variable
global parsedItems
parsedItems = 0
for item in analysis_session.parsed_artifacts: # For each item that Hindsight has parsed,
        if item.row_type.startswith(artifactTypes):          # if the row is of a supported type for this plugin, and
if item.interpretation is None: # if there isn't already an interpretation,
parsed_url = urllib.parse.urlparse(item.url)
query_string_dict = urllib.parse.parse_qs(parsed_url.query)
if len(query_string_dict) > 0: # Check if we have any field/value pairs.
query_string = '' # Create our return string; start it off empty.
for field, value in list(query_string_dict.items()): # Add each field/value to the return string
query_string += '{}: {} | '.format(field, value[0])
item.interpretation = query_string[:-2] + " [Query String Parser]"
parsedItems += 1 # Increment the count of parsed items
# Lastly, a count of parsed items with a description of what the plugin did
return "{} query strings parsed".format(parsedItems)
| obsidianforensics/hindsight | pyhindsight/plugins/query_string_parser.py | Python | apache-2.0 | 2,196 |
from django.db.models import Q
from links.models import Post
from comments.models import ThreadedComment as comments
from django.utils import timezone
from datetime import datetime, timedelta
from django.contrib import messages
KARMA_LOW = 100
KARMA_MEDIUM = 1000
KARMA_HIGH = 5000
INTERVAL_LOW = 3600
INTERVAL_MEDIUM = 360
INTERVAL_HIGH = 36
COMMENT_PER_INTERVAL = 20
COMMENT_MAX = 80
def allowed_to_comment(user):
karma = user.userprofile.karma
now = timezone.now()
time_threshold = now - timedelta(seconds=3600)
    comments_number = comments.objects.filter(Q(user=user) & Q(submit_date__gt=time_threshold)).count()
if karma < KARMA_HIGH:
if comments_number > COMMENT_PER_INTERVAL:
return False
else:
return True
else:
if comments_number > COMMENT_MAX:
return False
else:
return True
def allowed_to_post(request, user):
karma = user.userprofile.karma
print karma
now = timezone.now()
try:
posted = Post.objects.filter(post__submitter__exact=user).latest('submit_date')
diff = now - posted.submit_date
diff = diff.seconds
except:
diff = INTERVAL_LOW + 1
print diff
if karma < KARMA_LOW:
result = diff > INTERVAL_LOW
if not result:
messages.success(request, 'Please try in an hour!')
return result
    elif KARMA_LOW <= karma < KARMA_HIGH:
result = diff > INTERVAL_MEDIUM
if not result:
messages.success(request, 'Please try in ten minutes!')
return result
else:
result = diff > INTERVAL_HIGH
if not result:
messages.warning(request, 'Please try in 30 sec')
return result
def get_client_ip(request):
x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')
if x_forwarded_for:
ip = x_forwarded_for.split(',')[0]
else:
ip = request.META.get('REMOTE_ADDR')
return ip
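# A minimal sketch of how a view might use the helpers above; the view name and
# its behaviour are hypothetical, not part of the original app.
def example_submit_view(request):
    if not allowed_to_post(request, request.user):
        return None  # the real view would redirect back; the message is already queued
    client_ip = get_client_ip(request)  # e.g. for logging or spam checks
    # ... create and save the Post here ...
    return None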
| sheshkovsky/jaryan | links/utils.py | Python | apache-2.0 | 1,792 |
from core.serializers import ProjectSerializer
from rest_framework import generics
from core.models import Project
class ProjectList(generics.ListCreateAPIView):
queryset = Project.objects.all()
serializer_class = ProjectSerializer
class ProjectDetail(generics.RetrieveUpdateDestroyAPIView):
queryset = Project.objects.all()
serializer_class = ProjectSerializer
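# A hedged sketch (hypothetical urls.py, not part of this views module) showing how
# the generic views above are typically wired up with Django's classic url() helper.
def example_urlpatterns():
    from django.conf.urls import url
    return [
        url(r'^projects/$', ProjectList.as_view()),
        url(r'^projects/(?P<pk>[0-9]+)/$', ProjectDetail.as_view()),
    ]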
| wathsalav/xos | xos/core/views/projects.py | Python | apache-2.0 | 382 |
"""This contains the unit tests for treadmill.utils.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import io
import os
import shutil
import signal
import stat
import tempfile
import time
import unittest
# Disable W0402: string deprecated
# pylint: disable=W0402
import string
import mock
import six
if six.PY2 and os.name == 'posix':
import subprocess32 as subprocess # pylint: disable=import-error
else:
import subprocess # pylint: disable=wrong-import-order
from treadmill import exc
from treadmill import utils
from treadmill import yamlwrapper as yaml
class UtilsTest(unittest.TestCase):
"""This contains the treadmill.utils tests."""
def setUp(self):
self.root = tempfile.mkdtemp()
def tearDown(self):
if self.root and os.path.isdir(self.root):
shutil.rmtree(self.root)
@mock.patch('treadmill.subproc.get_aliases', mock.Mock(return_value={}))
def test_create_script(self):
"""this tests the create_script function.
the function creates executable scripts from templates that exist
in the template directory.
"""
script_file = os.path.join(self.root, 'script')
# Function we are testing
utils.create_script(
script_file,
's6.run',
user='testproid',
home='home',
shell='shell',
_alias={
's6_setuidgid': '/test/s6-setuidgid',
}
)
# Read the output from the mock filesystem
with io.open(script_file) as script:
data = script.read()
# Validate that data is what it should be
self.assertTrue(data.index(
'/test/s6-setuidgid testproid') > 0)
# Validate that the file is +x
self.assertEqual(utils.os.stat(script_file).st_mode, 33261)
@mock.patch('treadmill.subproc.get_aliases', mock.Mock(return_value={}))
def test_create_script_perms(self):
"""this tests the create_script function (permissions).
"""
script_file = os.path.join(self.root, 'script')
# Test non-default mode (+x)
mode = (stat.S_IRUSR |
stat.S_IRGRP |
stat.S_IROTH)
utils.create_script(
script_file,
's6.run',
mode=mode,
user='testproid',
home='home',
shell='shell',
_alias={
's6_setuidgid': '/test/s6-setuidgid',
}
)
self.assertEqual(utils.os.stat(script_file).st_mode, 33060)
def test_base_n(self):
"""Test to/from_base_n conversions."""
alphabet = (string.digits +
string.ascii_lowercase +
string.ascii_uppercase)
for base in [2, 10, 16, 36, 62]:
for num in [0, 10, 2313, 23134223879243284]:
n_num = utils.to_base_n(num, base=base, alphabet=alphabet)
_num = utils.from_base_n(n_num, base=base, alphabet=alphabet)
self.assertTrue(num == _num)
self.assertEqual(utils.to_base_n(15, base=16), 'f')
self.assertEqual(utils.to_base_n(10, base=2), '1010')
self.assertEqual(
utils.from_base_n('101', base=2),
int('101', base=2),
)
self.assertEqual(
utils.from_base_n('deadbeef', base=16),
int('deadbeef', base=16)
)
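    # A reference sketch (not the library implementation) of the base-N encoding the
    # assertions above exercise, assuming alphabet[0] encodes zero.
    def _to_base_n_reference(self, num, base, alphabet=string.digits + string.ascii_lowercase):
        digits = []
        while num:
            num, rem = divmod(num, base)
            digits.append(alphabet[rem])
        return ''.join(reversed(digits)) or alphabet[0]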
def test_ip2int(self):
"""Tests IP string to int representation conversion."""
self.assertEqual(0x40E9BB63, utils.ip2int('64.233.187.99'))
ip = utils.ip2int('192.168.100.1')
self.assertEqual('192.168.100.2', utils.int2ip(ip + 1))
self.assertEqual('192.168.100.0', utils.int2ip(ip - 1))
ip = utils.ip2int('192.168.100.255')
self.assertEqual('192.168.101.0', utils.int2ip(ip + 1))
ip = utils.ip2int('192.168.100.0')
self.assertEqual('192.168.99.255', utils.int2ip(ip - 1))
def test_to_obj(self):
"""Tests dict to namedtuple conversion."""
obj = utils.to_obj({'a': 1, 'b': 2, 'c': 3}, 'foo')
self.assertEqual(1, obj.a)
self.assertEqual(2, obj.b)
self.assertEqual(3, obj.c)
obj = utils.to_obj({'a': 1, 'b': [1, 2, 3], 'c': 3}, 'foo')
self.assertEqual(1, obj.a)
self.assertEqual([1, 2, 3], obj.b)
self.assertEqual(3, obj.c)
obj = utils.to_obj({'a': 1, 'b': {'d': 5}, 'c': 3}, 'foo')
self.assertEqual(1, obj.a)
self.assertEqual(5, obj.b.d)
self.assertEqual(3, obj.c)
obj = utils.to_obj({'a': [1, {'d': 5}, 3], 'b': 33}, 'foo')
self.assertEqual(1, obj.a[0])
self.assertEqual(5, obj.a[1].d)
self.assertEqual(3, obj.a[2])
self.assertEqual(33, obj.b)
def test_kilobytes(self):
"""Test memory/disk size string conversion."""
self.assertEqual(10, utils.kilobytes('10K'))
self.assertEqual(10, utils.kilobytes('10k'))
self.assertRaises(Exception, utils.kilobytes, '10')
self.assertEqual(10 * 1024, utils.kilobytes('10M'))
self.assertEqual(10 * 1024, utils.kilobytes('10m'))
self.assertEqual(10 * 1024 * 1024, utils.kilobytes('10G'))
self.assertEqual(10 * 1024 * 1024, utils.kilobytes('10g'))
def test_size_to_bytes(self):
"""Test conversion of units to bytes."""
self.assertEqual(10, utils.size_to_bytes(10))
self.assertEqual(-10, utils.size_to_bytes(-10))
self.assertEqual(10, utils.size_to_bytes('10'))
self.assertEqual(-10, utils.size_to_bytes('-10'))
self.assertEqual(10 * 1024, utils.size_to_bytes('10K'))
self.assertEqual(-10 * 1024, utils.size_to_bytes('-10K'))
self.assertEqual(-10 * 1024 * 1024, utils.size_to_bytes('-10M'))
def test_cpuunits(self):
"""Test conversion of cpu string to bmips."""
self.assertEqual(10, utils.cpu_units('10%'))
self.assertEqual(10, utils.cpu_units('10'))
def test_validate(self):
"""Tests dictionary validation."""
schema = [
('required', True, str),
('optional', False, str),
]
struct = {'required': 'foo'}
utils.validate(struct, schema)
self.assertNotIn('optional', struct)
struct = {'required': 'foo', 'optional': 'xxx'}
utils.validate(struct, schema)
struct = {'required': 'foo', 'optional': 1234}
self.assertRaises(Exception, utils.validate,
struct, schema)
schema = [
('required', True, list),
('optional', False, list),
]
struct = {'required': ['foo']}
utils.validate(struct, schema)
struct = {'required': 'foo'}
self.assertRaises(Exception, utils.validate,
struct, schema)
def test_to_seconds(self):
"""Tests time interval to seconds conversion."""
self.assertEqual(0, utils.to_seconds('0s'))
self.assertEqual(3, utils.to_seconds('3s'))
self.assertEqual(180, utils.to_seconds('3m'))
self.assertEqual(7200, utils.to_seconds('2h'))
self.assertEqual(259200, utils.to_seconds('3d'))
def test_find_in_path(self):
"""Tests finding program in system path."""
temp_dir = self.root
saved_path = os.environ['PATH']
# xxxx is not in path
self.assertEqual('xxxx', utils.find_in_path('xxxx'))
os.environ['PATH'] = os.environ['PATH'] + ':' + temp_dir
io.open(os.path.join(temp_dir, 'xxxx'), 'w').close()
# xxxx is in path, but not executable.
self.assertEqual('xxxx', utils.find_in_path('xxxx'))
os.chmod(os.path.join(temp_dir, 'xxxx'), int(utils.EXEC_MODE))
self.assertEqual(
os.path.join(temp_dir, 'xxxx'),
utils.find_in_path('xxxx')
)
os.environ['PATH'] = saved_path
def test_humanreadable(self):
"""Tests conversion of values into human readable format."""
self.assertEqual('1.0M', utils.bytes_to_readable(1024, 'K'))
self.assertEqual('1.0G', utils.bytes_to_readable(1024, 'M'))
self.assertEqual(
'2.5T',
utils.bytes_to_readable(1024 * 1024 * 2.5, 'M')
)
self.assertEqual('1.0K', utils.bytes_to_readable(1024, 'B'))
self.assertEqual('2,310', utils.cpu_to_readable(2310))
self.assertEqual('23.10', utils.cpu_to_cores_readable(2310))
def test_tail(self):
"""Tests utils.tail."""
filed, filepath = tempfile.mkstemp()
with os.fdopen(filed, 'w') as f:
for i in six.moves.range(0, 5):
f.write('%d\n' % i)
with io.open(filepath) as f:
lines = utils.tail_stream(f)
self.assertEqual(['0\n', '1\n', '2\n', '3\n', '4\n'], lines)
os.unlink(filepath)
filed, filepath = tempfile.mkstemp()
with os.fdopen(filed, 'w') as f:
for i in six.moves.range(0, 10000):
f.write('%d\n' % i)
with io.open(filepath) as f:
lines = utils.tail_stream(f, 5)
self.assertEqual(
['9995\n', '9996\n', '9997\n', '9998\n', '9999\n'],
lines
)
# Test utils.tail given the file name.
lines = utils.tail(filepath, 5)
self.assertEqual(
['9995\n', '9996\n', '9997\n', '9998\n', '9999\n'],
lines
)
os.unlink(filepath)
self.assertEqual([], utils.tail('/no/such/thing'))
@mock.patch('os.write', mock.Mock())
@mock.patch('os.close', mock.Mock())
def test_report_ready(self):
"""Tests reporting service readyness."""
cwd = os.getcwd()
tmpdir = self.root
os.chdir(tmpdir)
utils.report_ready()
self.assertFalse(os.write.called)
self.assertFalse(os.close.called)
with io.open('notification-fd', 'w') as f:
f.write('300')
utils.report_ready()
os.write.assert_called_with(300, mock.ANY)
os.close.assert_called_with(300)
        os.write.reset_mock()
        os.close.reset_mock()
with io.open('notification-fd', 'w') as f:
f.write('300\n')
utils.report_ready()
os.write.assert_called_with(300, mock.ANY)
os.close.assert_called_with(300)
os.chdir(cwd)
def test_signal_flag(self):
"""Tests signal flag."""
signalled = utils.make_signal_flag(signal.SIGHUP, signal.SIGTERM)
self.assertFalse(signalled)
os.kill(os.getpid(), signal.SIGHUP)
time.sleep(0.1)
self.assertTrue(signalled)
signalled.clear()
os.kill(os.getpid(), signal.SIGTERM)
time.sleep(0.1)
self.assertTrue(signalled)
def test_to_yaml(self):
"""Tests conversion of dict to yaml representation."""
obj = {
'xxx': u'abcd'
}
self.assertEqual(yaml.dump(obj), u'{xxx: abcd}\n')
@mock.patch('signal.signal', mock.Mock(spec_set=True))
@mock.patch('os.closerange', mock.Mock(spec_set=True))
@mock.patch('os.execvp', mock.Mock(spec_set=True))
def test_sane_execvp(self):
"""Tests sane execvp wrapper.
"""
# do not complain about accessing protected member _SIGNALS
# pylint: disable=W0212
utils.sane_execvp('/bin/sleep', ['sleep', '30'])
os.closerange.assert_called_with(3, subprocess.MAXFD)
signal.signal.assert_has_calls(
[
mock.call(i, signal.SIG_DFL)
for i in utils._SIGNALS
]
)
os.execvp.assert_called_with('/bin/sleep', ['sleep', '30'])
@mock.patch('treadmill.utils.sys_exit', mock.Mock())
def test_decorator_tm_exc(self):
"""Test the `exit_on_unhandled` decorator on `TreadmillError`."""
@utils.exit_on_unhandled
def test_fun():
"""raise exc.TreadmillError('test')."""
raise exc.TreadmillError('test')
test_fun()
utils.sys_exit.assert_called_with(-1)
@mock.patch('treadmill.utils.sys_exit', mock.Mock())
def test_decorator_py_exc(self):
"""Test the `exit_on_unhandled` decorator on Python `Exception`."""
@utils.exit_on_unhandled
def test_fun():
"""raise Exception('test')."""
raise Exception('test')
test_fun()
utils.sys_exit.assert_called_with(-1)
if __name__ == '__main__':
unittest.main()
| captiosus/treadmill | tests/utils_test.py | Python | apache-2.0 | 12,668 |
'''
Created on 15.02.2015
@author: diesel
'''
import datetime
from indexdata import IndexData, IndexHistory
import indexdatabase
def _selectTrue( idxData ):
return True
class FetchData():
'''
    Fetch index history data for one index from the index database.
'''
def __init__(self, indexName):
'''
Constructor
'''
self.indexName = indexName
self.startDate = datetime.datetime(1900, 1, 1)
self.endDate = datetime.datetime.today()
self.selectFunc = _selectTrue
        self.indexDB = indexdatabase.getIndexDatabase()
        self.collection = self.indexDB.getIndexCollection(self.indexName)
def _fetchData(self, select):
history = IndexHistory()
for entry in self.collection.find({'date': {'$gte': self.startDate, '$lt': self.endDate} }).sort('date'):
indexEntry = IndexData()
indexEntry.setDictionary(entry)
            if select( indexEntry ):
history.addIndexData(indexEntry)
return history
    def fetchDataByDate(self, startDate, endDate, select=_selectTrue):
        '''
        Get an index history between two dates.
        '''
self.startDate = startDate
self.endDate = endDate
return self._fetchData( select )
    def fetchDataByMonth(self, year, month, select=_selectTrue):
        '''
        Get the index history for one month.
        '''
self.startDate = datetime.datetime( year, month, 1)
if month == 12:
self.endDate = datetime.datetime( year + 1, 1, 1)
else:
self.endDate = datetime.datetime( year, month+1, 1)
return self._fetchData( select )
    def fetchMonthlyHistory(self, startDate, endDate, select=_selectTrue):
        '''
        Get a list of monthly index histories.
        '''
def _getNextMonth(year, month):
if month == 12:
year = year + 1
month = 1
else:
month += 1
return( year, month )
def _getFirstMonth(startDate):
return( startDate.year, startDate.month )
        def _isEndOfPeriod(year, month, endDate):
            # compare (year, month) tuples so a later year also ends the period
            return (year, month) >= (endDate.year, endDate.month)
# --- start of function ---
monthlyHistory = list()
currentPeriod = _getFirstMonth( startDate )
while not (_isEndOfPeriod(currentPeriod[0], currentPeriod[1], endDate)):
indexHistory = self.fetchDataByMonth(currentPeriod[0], currentPeriod[1], select)
if indexHistory.len() > 0:
monthlyHistory.append( indexHistory )
currentPeriod = _getNextMonth(currentPeriod[0], currentPeriod[1])
return monthlyHistory
def fetchSelectedHistory(self, startDate, endDate, startFunc, endFunc):
isInTransaction = False
meanHistoryList = list()
idxHistory = IndexHistory()
for idxData in self.collection.find({'date': {'$gte': self.startDate, '$lt': self.endDate} }).sort('date'):
if isInTransaction:
if endFunc.checkEndTransaction( idxData, idxHistory.len() ):
meanHistoryList.append( idxHistory )
isInTransaction = False
else:
idxHistory.addIndexData( idxData )
if not isInTransaction:
if startFunc.checkStartTransaction( idxData ):
isInTransaction = True
idxHistory = IndexHistory()
idxHistory.addIndexData( idxData )
endFunc.reset( idxData )
if isInTransaction:
meanHistoryList.append( idxHistory )
return meanHistoryList
def fetchHistoryValue(self, year, month, day):
searchDate = datetime.datetime( year, month, day )
startDate = searchDate
startDate = startDate + datetime.timedelta(-1)
hasEntry = False
idxEntry = IndexData()
'''
if self.collection.find_one({'date': {'$lt': searchDate} }) != None:
entry = None
while entry == None:
entry = self.collection.find_one({'date': {'$gte': startDate, '$lt': searchDate} })
if entry == None:
startDate = startDate + datetime.timedelta(-1)
idxEntry = IndexData()
idxEntry.setDictionary(entry)
return idxEntry
else:
return None
'''
for entry in self.collection.find({'date' : {'$lt': searchDate}}).sort('date', -1).limit(1):
idxEntry.setDictionary(entry)
hasEntry = True
if hasEntry:
return idxEntry
else:
return None
def fetchNextHistoryValue(self, year, month, day):
searchDate = datetime.datetime( year, month, day )
hasEntry = False
idxEntry = IndexData()
for entry in self.collection.find( {'date' : {'$gte' : searchDate}}).sort('date', 1).limit(1):
idxEntry.setDictionary(entry)
hasEntry = True
if hasEntry:
return idxEntry
else:
return None
def fetchLastDayOfMonth(self, year, month):
if month == 12:
month = 1
year = year + 1
else:
month = month+1
return self.fetchHistoryValue( year, month, 1)
if __name__ == '__main__':
    start = datetime.datetime(1998, 1, 2, 0, 0)
    end = datetime.datetime(1998, 2, 1, 0, 0)
    fetchData = FetchData('dax')
    fetchData.fetchDataByDate(start, end)
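    # Further sketch (assumes the 'dax' collection is populated): one month of
    # history and the last trading day before February 1998.
    january = fetchData.fetchDataByMonth(1998, 1)
    lastOfJanuary = fetchData.fetchLastDayOfMonth(1998, 1)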
| selentd/pythontools | pytools/src/IndexEval/fetchdata.py | Python | apache-2.0 | 5,671 |
import sys, argparse
class MyParser(argparse.ArgumentParser):
def error(self, message):
'''Wraps error and prints in a shorter way'''
sys.stderr.write('error: %s\n' % message)
#self.print_help()
sys.exit(2)
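# A minimal usage sketch; the '--image' option is a placeholder argument.
def build_example_parser():
    parser = MyParser(description='example command')
    parser.add_argument('--image', required=True)
    return parser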
| viliusl/dockery | objects/myparser.py | Python | apache-2.0 | 246 |
# coding=utf-8
# Copyright 2020 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from transformers import MobileBertConfig, is_torch_available
from transformers.models.auto import get_values
from transformers.testing_utils import require_sentencepiece, require_tokenizers, require_torch, slow, torch_device
from ..test_configuration_common import ConfigTester
from ..test_modeling_common import ModelTesterMixin, ids_tensor, random_attention_mask
if is_torch_available():
import torch
from transformers import (
MODEL_FOR_PRETRAINING_MAPPING,
MobileBertForMaskedLM,
MobileBertForMultipleChoice,
MobileBertForNextSentencePrediction,
MobileBertForPreTraining,
MobileBertForQuestionAnswering,
MobileBertForSequenceClassification,
MobileBertForTokenClassification,
MobileBertModel,
)
class MobileBertModelTester:
def __init__(
self,
parent,
batch_size=13,
seq_length=7,
is_training=True,
use_input_mask=True,
use_token_type_ids=True,
use_labels=True,
vocab_size=99,
hidden_size=64,
embedding_size=32,
num_hidden_layers=5,
num_attention_heads=4,
intermediate_size=37,
hidden_act="gelu",
hidden_dropout_prob=0.1,
attention_probs_dropout_prob=0.1,
max_position_embeddings=512,
type_vocab_size=16,
type_sequence_label_size=2,
initializer_range=0.02,
num_labels=3,
num_choices=4,
scope=None,
):
self.parent = parent
self.batch_size = batch_size
self.seq_length = seq_length
self.is_training = is_training
self.use_input_mask = use_input_mask
self.use_token_type_ids = use_token_type_ids
self.use_labels = use_labels
self.vocab_size = vocab_size
self.hidden_size = hidden_size
self.embedding_size = embedding_size
self.num_hidden_layers = num_hidden_layers
self.num_attention_heads = num_attention_heads
self.intermediate_size = intermediate_size
self.hidden_act = hidden_act
self.hidden_dropout_prob = hidden_dropout_prob
self.attention_probs_dropout_prob = attention_probs_dropout_prob
self.max_position_embeddings = max_position_embeddings
self.type_vocab_size = type_vocab_size
self.type_sequence_label_size = type_sequence_label_size
self.initializer_range = initializer_range
self.num_labels = num_labels
self.num_choices = num_choices
self.scope = scope
def prepare_config_and_inputs(self):
input_ids = ids_tensor([self.batch_size, self.seq_length], self.vocab_size)
input_mask = None
if self.use_input_mask:
input_mask = random_attention_mask([self.batch_size, self.seq_length])
token_type_ids = None
if self.use_token_type_ids:
token_type_ids = ids_tensor([self.batch_size, self.seq_length], self.type_vocab_size)
sequence_labels = None
token_labels = None
choice_labels = None
if self.use_labels:
sequence_labels = ids_tensor([self.batch_size], self.type_sequence_label_size)
token_labels = ids_tensor([self.batch_size, self.seq_length], self.num_labels)
choice_labels = ids_tensor([self.batch_size], self.num_choices)
config = self.get_config()
return config, input_ids, token_type_ids, input_mask, sequence_labels, token_labels, choice_labels
def get_config(self):
return MobileBertConfig(
vocab_size=self.vocab_size,
hidden_size=self.hidden_size,
num_hidden_layers=self.num_hidden_layers,
num_attention_heads=self.num_attention_heads,
intermediate_size=self.intermediate_size,
embedding_size=self.embedding_size,
hidden_act=self.hidden_act,
hidden_dropout_prob=self.hidden_dropout_prob,
attention_probs_dropout_prob=self.attention_probs_dropout_prob,
max_position_embeddings=self.max_position_embeddings,
type_vocab_size=self.type_vocab_size,
is_decoder=False,
initializer_range=self.initializer_range,
)
def create_and_check_mobilebert_model(
self, config, input_ids, token_type_ids, input_mask, sequence_labels, token_labels, choice_labels
):
model = MobileBertModel(config=config)
model.to(torch_device)
model.eval()
result = model(input_ids, attention_mask=input_mask, token_type_ids=token_type_ids)
result = model(input_ids, token_type_ids=token_type_ids)
result = model(input_ids)
self.parent.assertEqual(result.last_hidden_state.shape, (self.batch_size, self.seq_length, self.hidden_size))
self.parent.assertEqual(result.pooler_output.shape, (self.batch_size, self.hidden_size))
def create_and_check_mobilebert_for_masked_lm(
self, config, input_ids, token_type_ids, input_mask, sequence_labels, token_labels, choice_labels
):
model = MobileBertForMaskedLM(config=config)
model.to(torch_device)
model.eval()
result = model(input_ids, attention_mask=input_mask, token_type_ids=token_type_ids, labels=token_labels)
self.parent.assertEqual(result.logits.shape, (self.batch_size, self.seq_length, self.vocab_size))
def create_and_check_mobilebert_for_next_sequence_prediction(
self, config, input_ids, token_type_ids, input_mask, sequence_labels, token_labels, choice_labels
):
model = MobileBertForNextSentencePrediction(config=config)
model.to(torch_device)
model.eval()
result = model(
input_ids,
attention_mask=input_mask,
token_type_ids=token_type_ids,
labels=sequence_labels,
)
self.parent.assertEqual(result.logits.shape, (self.batch_size, 2))
def create_and_check_mobilebert_for_pretraining(
self, config, input_ids, token_type_ids, input_mask, sequence_labels, token_labels, choice_labels
):
model = MobileBertForPreTraining(config=config)
model.to(torch_device)
model.eval()
result = model(
input_ids,
attention_mask=input_mask,
token_type_ids=token_type_ids,
labels=token_labels,
next_sentence_label=sequence_labels,
)
self.parent.assertEqual(result.prediction_logits.shape, (self.batch_size, self.seq_length, self.vocab_size))
self.parent.assertEqual(result.seq_relationship_logits.shape, (self.batch_size, 2))
def create_and_check_mobilebert_for_question_answering(
self, config, input_ids, token_type_ids, input_mask, sequence_labels, token_labels, choice_labels
):
model = MobileBertForQuestionAnswering(config=config)
model.to(torch_device)
model.eval()
result = model(
input_ids,
attention_mask=input_mask,
token_type_ids=token_type_ids,
start_positions=sequence_labels,
end_positions=sequence_labels,
)
self.parent.assertEqual(result.start_logits.shape, (self.batch_size, self.seq_length))
self.parent.assertEqual(result.end_logits.shape, (self.batch_size, self.seq_length))
def create_and_check_mobilebert_for_sequence_classification(
self, config, input_ids, token_type_ids, input_mask, sequence_labels, token_labels, choice_labels
):
config.num_labels = self.num_labels
model = MobileBertForSequenceClassification(config)
model.to(torch_device)
model.eval()
result = model(input_ids, attention_mask=input_mask, token_type_ids=token_type_ids, labels=sequence_labels)
self.parent.assertEqual(result.logits.shape, (self.batch_size, self.num_labels))
def create_and_check_mobilebert_for_token_classification(
self, config, input_ids, token_type_ids, input_mask, sequence_labels, token_labels, choice_labels
):
config.num_labels = self.num_labels
model = MobileBertForTokenClassification(config=config)
model.to(torch_device)
model.eval()
result = model(input_ids, attention_mask=input_mask, token_type_ids=token_type_ids, labels=token_labels)
self.parent.assertEqual(result.logits.shape, (self.batch_size, self.seq_length, self.num_labels))
def create_and_check_mobilebert_for_multiple_choice(
self, config, input_ids, token_type_ids, input_mask, sequence_labels, token_labels, choice_labels
):
config.num_choices = self.num_choices
model = MobileBertForMultipleChoice(config=config)
model.to(torch_device)
model.eval()
multiple_choice_inputs_ids = input_ids.unsqueeze(1).expand(-1, self.num_choices, -1).contiguous()
multiple_choice_token_type_ids = token_type_ids.unsqueeze(1).expand(-1, self.num_choices, -1).contiguous()
multiple_choice_input_mask = input_mask.unsqueeze(1).expand(-1, self.num_choices, -1).contiguous()
result = model(
multiple_choice_inputs_ids,
attention_mask=multiple_choice_input_mask,
token_type_ids=multiple_choice_token_type_ids,
labels=choice_labels,
)
self.parent.assertEqual(result.logits.shape, (self.batch_size, self.num_choices))
def prepare_config_and_inputs_for_common(self):
config_and_inputs = self.prepare_config_and_inputs()
(
config,
input_ids,
token_type_ids,
input_mask,
sequence_labels,
token_labels,
choice_labels,
) = config_and_inputs
inputs_dict = {"input_ids": input_ids, "token_type_ids": token_type_ids, "attention_mask": input_mask}
return config, inputs_dict
@require_torch
class MobileBertModelTest(ModelTesterMixin, unittest.TestCase):
all_model_classes = (
(
MobileBertModel,
MobileBertForMaskedLM,
MobileBertForMultipleChoice,
MobileBertForNextSentencePrediction,
MobileBertForPreTraining,
MobileBertForQuestionAnswering,
MobileBertForSequenceClassification,
MobileBertForTokenClassification,
)
if is_torch_available()
else ()
)
fx_compatible = True
# special case for ForPreTraining model
def _prepare_for_class(self, inputs_dict, model_class, return_labels=False):
inputs_dict = super()._prepare_for_class(inputs_dict, model_class, return_labels=return_labels)
if return_labels:
if model_class in get_values(MODEL_FOR_PRETRAINING_MAPPING):
inputs_dict["labels"] = torch.zeros(
(self.model_tester.batch_size, self.model_tester.seq_length), dtype=torch.long, device=torch_device
)
inputs_dict["next_sentence_label"] = torch.zeros(
self.model_tester.batch_size, dtype=torch.long, device=torch_device
)
return inputs_dict
def setUp(self):
self.model_tester = MobileBertModelTester(self)
self.config_tester = ConfigTester(self, config_class=MobileBertConfig, hidden_size=37)
def test_config(self):
self.config_tester.run_common_tests()
def test_mobilebert_model(self):
config_and_inputs = self.model_tester.prepare_config_and_inputs()
self.model_tester.create_and_check_mobilebert_model(*config_and_inputs)
def test_for_masked_lm(self):
config_and_inputs = self.model_tester.prepare_config_and_inputs()
self.model_tester.create_and_check_mobilebert_for_masked_lm(*config_and_inputs)
def test_for_multiple_choice(self):
config_and_inputs = self.model_tester.prepare_config_and_inputs()
self.model_tester.create_and_check_mobilebert_for_multiple_choice(*config_and_inputs)
def test_for_next_sequence_prediction(self):
config_and_inputs = self.model_tester.prepare_config_and_inputs()
self.model_tester.create_and_check_mobilebert_for_next_sequence_prediction(*config_and_inputs)
def test_for_pretraining(self):
config_and_inputs = self.model_tester.prepare_config_and_inputs()
self.model_tester.create_and_check_mobilebert_for_pretraining(*config_and_inputs)
def test_for_question_answering(self):
config_and_inputs = self.model_tester.prepare_config_and_inputs()
self.model_tester.create_and_check_mobilebert_for_question_answering(*config_and_inputs)
def test_for_sequence_classification(self):
config_and_inputs = self.model_tester.prepare_config_and_inputs()
self.model_tester.create_and_check_mobilebert_for_sequence_classification(*config_and_inputs)
def test_for_token_classification(self):
config_and_inputs = self.model_tester.prepare_config_and_inputs()
self.model_tester.create_and_check_mobilebert_for_token_classification(*config_and_inputs)
def _long_tensor(tok_lst):
return torch.tensor(
tok_lst,
dtype=torch.long,
device=torch_device,
)
TOLERANCE = 1e-3
@require_torch
@require_sentencepiece
@require_tokenizers
class MobileBertModelIntegrationTests(unittest.TestCase):
@slow
def test_inference_no_head(self):
model = MobileBertModel.from_pretrained("google/mobilebert-uncased").to(torch_device)
input_ids = _long_tensor([[101, 7110, 1005, 1056, 2023, 11333, 17413, 1029, 102]])
with torch.no_grad():
output = model(input_ids)[0]
expected_shape = torch.Size((1, 9, 512))
self.assertEqual(output.shape, expected_shape)
expected_slice = torch.tensor(
[
[
[-2.4736526e07, 8.2691656e04, 1.6521838e05],
[-5.7541704e-01, 3.9056022e00, 4.4011507e00],
[2.6047359e00, 1.5677652e00, -1.7324188e-01],
]
],
device=torch_device,
)
# MobileBERT results range from 10e0 to 10e8. Even a 0.0000001% difference with a value of 10e8 results in a
# ~1 difference, it's therefore not a good idea to measure using addition.
# Here, we instead divide the expected result with the result in order to obtain ~1. We then check that the
# result is held between bounds: 1 - TOLERANCE < expected_result / result < 1 + TOLERANCE
lower_bound = torch.all((expected_slice / output[..., :3, :3]) >= 1 - TOLERANCE)
upper_bound = torch.all((expected_slice / output[..., :3, :3]) <= 1 + TOLERANCE)
self.assertTrue(lower_bound and upper_bound)
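# A hedged end-to-end sketch (not one of the test cases above): run the pretrained
# checkpoint on a single sentence; requires torch plus network access for the
# tokenizer and weights.
def _example_mobilebert_forward():
    from transformers import AutoTokenizer
    tokenizer = AutoTokenizer.from_pretrained("google/mobilebert-uncased")
    model = MobileBertModel.from_pretrained("google/mobilebert-uncased")
    inputs = tokenizer("MobileBERT is a compact BERT variant.", return_tensors="pt")
    with torch.no_grad():
        outputs = model(**inputs)
    return outputs.last_hidden_state.shape  # (1, sequence_length, 512)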
| huggingface/transformers | tests/mobilebert/test_modeling_mobilebert.py | Python | apache-2.0 | 15,383 |
#!/usr/bin/python
# -*- coding:utf-8 -*-
from setuptools import setup
setup(
name='Earo',
version='0.1.0',
url='https://github.com/Everley1993/Laky-Earo',
license='Apache',
author='Everley',
author_email='463785757@qq.com',
description='A microframework based on EDA for business logic development.',
packages=['earo'],
package_data={'earo':['static/css/*.css', 'static/fonts/*', 'static/js/*.js', 'static/*.html']},
include_package_data=True,
zip_safe=False,
platforms='any',
install_requires=[
'flask',
'enum',
'atomic',
]
)
| Everley1993/Laky-Earo | setup.py | Python | apache-2.0 | 608 |
#
# Copyright (c) 2015 Juniper Networks, Inc. All rights reserved.
#
from gevent import monkey
monkey.patch_all()
from pysandesh.sandesh_base import sandesh_global
from sandesh_common.vns.ttypes import Module
from nodemgr.common.event_manager import EventManager, EventManagerTypeInfo
class ConfigEventManager(EventManager):
def __init__(self, config, unit_names):
type_info = EventManagerTypeInfo(
module_type=Module.CONFIG_NODE_MGR,
object_table='ObjectConfigNode')
super(ConfigEventManager, self).__init__(config, type_info,
sandesh_global, unit_names)
| eonpatapon/contrail-controller | src/nodemgr/config_nodemgr/event_manager.py | Python | apache-2.0 | 616 |
# -*- encoding: utf-8 -*-
"""
h2o -- module for using H2O services.
:copyright: (c) 2016 H2O.ai
:license: Apache License Version 2.0 (see LICENSE for details)
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
import os
import warnings
import webbrowser
import types
from h2o.backend import H2OConnection
from h2o.backend import H2OConnectionConf
from h2o.backend import H2OLocalServer
from h2o.exceptions import H2OConnectionError, H2OValueError
from h2o.utils.config import H2OConfigReader
from h2o.utils.shared_utils import check_frame_id, deprecated, gen_header, py_tmp_key, quoted, urlopen
from h2o.utils.typechecks import assert_is_type, assert_satisfies, BoundInt, BoundNumeric, I, is_type, numeric, U
from .estimators.deeplearning import H2OAutoEncoderEstimator
from .estimators.deeplearning import H2ODeepLearningEstimator
from .estimators.deepwater import H2ODeepWaterEstimator
from .estimators.estimator_base import H2OEstimator
from .estimators.xgboost import H2OXGBoostEstimator
from .estimators.gbm import H2OGradientBoostingEstimator
from .estimators.glm import H2OGeneralizedLinearEstimator
from .estimators.glrm import H2OGeneralizedLowRankEstimator
from .estimators.kmeans import H2OKMeansEstimator
from .estimators.naive_bayes import H2ONaiveBayesEstimator
from .estimators.pca import H2OPrincipalComponentAnalysisEstimator
from .estimators.random_forest import H2ORandomForestEstimator
from .estimators.stackedensemble import H2OStackedEnsembleEstimator
from .estimators.word2vec import H2OWord2vecEstimator
from .estimators.isolation_forest import H2OIsolationForestEstimator
from .expr import ExprNode
from .frame import H2OFrame
from .grid.grid_search import H2OGridSearch
from .job import H2OJob
from .model.model_base import ModelBase
from .transforms.decomposition import H2OSVD
from .utils.debugging import * # NOQA
from .utils.compatibility import * # NOQA
from .utils.compatibility import PY3
logging.basicConfig()
# An IPython deprecation warning is triggered after h2o.init(). Remove this once the deprecation has been resolved
warnings.filterwarnings('ignore', category=DeprecationWarning, module='.*/IPython/.*')
h2oconn = None # type: H2OConnection
def connect(server=None, url=None, ip=None, port=None, https=None, verify_ssl_certificates=None, auth=None,
proxy=None, cookies=None, verbose=True, config=None):
"""
Connect to an existing H2O server, remote or local.
There are two ways to connect to a server: either pass a `server` parameter containing an instance of
an H2OLocalServer, or specify `ip` and `port` of the server that you want to connect to.
:param server: An H2OLocalServer instance to connect to (optional).
:param url: Full URL of the server to connect to (can be used instead of `ip` + `port` + `https`).
:param ip: The ip address (or host name) of the server where H2O is running.
:param port: Port number that H2O service is listening to.
:param https: Set to True to connect via https:// instead of http://.
:param verify_ssl_certificates: When using https, setting this to False will disable SSL certificates verification.
:param auth: Either a (username, password) pair for basic authentication, an instance of h2o.auth.SpnegoAuth
or one of the requests.auth authenticator objects.
:param proxy: Proxy server address.
:param cookies: Cookie (or list of) to add to request
:param verbose: Set to False to disable printing connection status messages.
    :param config: Connection configuration object encapsulating connection parameters.
:returns: the new :class:`H2OConnection` object.
"""
global h2oconn
if config:
if "connect_params" in config:
h2oconn = _connect_with_conf(config["connect_params"])
else:
h2oconn = _connect_with_conf(config)
else:
h2oconn = H2OConnection.open(server=server, url=url, ip=ip, port=port, https=https,
auth=auth, verify_ssl_certificates=verify_ssl_certificates,
proxy=proxy, cookies=cookies,
verbose=verbose)
if verbose:
h2oconn.cluster.show_status()
return h2oconn
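# A minimal connection sketch; the URL is a placeholder for an already running cluster.
def _example_connect():
    return connect(url="http://localhost:54321", verbose=False)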
def api(endpoint, data=None, json=None, filename=None, save_to=None):
"""
Perform a REST API request to a previously connected server.
This function is mostly for internal purposes, but may occasionally be useful for direct access to
the backend H2O server. It has same parameters as :meth:`H2OConnection.request <h2o.backend.H2OConnection.request>`.
"""
# type checks are performed in H2OConnection class
_check_connection()
return h2oconn.request(endpoint, data=data, json=json, filename=filename, save_to=save_to)
def connection():
"""Return the current :class:`H2OConnection` handler."""
return h2oconn
def version_check():
"""Used to verify that h2o-python module and the H2O server are compatible with each other."""
from .__init__ import __version__ as ver_pkg
ci = h2oconn.cluster
if not ci:
raise H2OConnectionError("Connection not initialized. Did you run h2o.connect()?")
ver_h2o = ci.version
if ver_pkg == "SUBST_PROJECT_VERSION": ver_pkg = "UNKNOWN"
if str(ver_h2o) != str(ver_pkg):
branch_name_h2o = ci.branch_name
build_number_h2o = ci.build_number
if build_number_h2o is None or build_number_h2o == "unknown":
raise H2OConnectionError(
"Version mismatch. H2O is version {0}, but the h2o-python package is version {1}. "
"Upgrade H2O and h2o-Python to latest stable version - "
"http://h2o-release.s3.amazonaws.com/h2o/latest_stable.html"
"".format(ver_h2o, ver_pkg))
elif build_number_h2o == "99999":
raise H2OConnectionError(
"Version mismatch. H2O is version {0}, but the h2o-python package is version {1}. "
"This is a developer build, please contact your developer."
"".format(ver_h2o, ver_pkg))
else:
raise H2OConnectionError(
"Version mismatch. H2O is version {0}, but the h2o-python package is version {1}. "
"Install the matching h2o-Python version from - "
"http://h2o-release.s3.amazonaws.com/h2o/{2}/{3}/index.html."
"".format(ver_h2o, ver_pkg, branch_name_h2o, build_number_h2o))
# Check age of the install
if ci.build_too_old:
print("Warning: Your H2O cluster version is too old ({})! Please download and install the latest "
"version from http://h2o.ai/download/".format(ci.build_age))
def init(url=None, ip=None, port=None, name=None, https=None, insecure=None, username=None, password=None,
cookies=None, proxy=None, start_h2o=True, nthreads=-1, ice_root=None, log_dir=None, log_level=None,
enable_assertions=True, max_mem_size=None, min_mem_size=None, strict_version_check=None, ignore_config=False,
extra_classpath=None, jvm_custom_args=None, bind_to_localhost=True, **kwargs):
"""
Attempt to connect to a local server, or if not successful start a new server and connect to it.
:param url: Full URL of the server to connect to (can be used instead of `ip` + `port` + `https`).
:param ip: The ip address (or host name) of the server where H2O is running.
:param port: Port number that H2O service is listening to.
    :param name: Cloud name. If None while connecting to an existing cluster, the cloud name is not checked.
        If set, the connection succeeds only if the target cloud name matches. If no instance is found and a local
        one is started, this value becomes its cloud name (a random name is generated when None).
:param https: Set to True to connect via https:// instead of http://.
:param insecure: When using https, setting this to True will disable SSL certificates verification.
    :param username: Username for basic authentication.
    :param password: Password for basic authentication.
:param cookies: Cookie (or list of) to add to each request.
:param proxy: Proxy server address.
:param start_h2o: If False, do not attempt to start an h2o server when connection to an existing one failed.
:param nthreads: "Number of threads" option when launching a new h2o server.
:param ice_root: Directory for temporary files for the new h2o server.
:param log_dir: Directory for H2O logs to be stored if a new instance is started. Ignored if connecting to an existing node.
:param log_level: The logger level for H2O if a new instance is started. One of TRACE,DEBUG,INFO,WARN,ERRR,FATA. Default is INFO. Ignored if connecting to an existing node.
:param enable_assertions: Enable assertions in Java for the new h2o server.
:param max_mem_size: Maximum memory to use for the new h2o server. Integer input will be evaluated as gigabytes. Other units can be specified by passing in a string (e.g. "160M" for 160 megabytes).
:param min_mem_size: Minimum memory to use for the new h2o server. Integer input will be evaluated as gigabytes. Other units can be specified by passing in a string (e.g. "160M" for 160 megabytes).
:param strict_version_check: If True, an error will be raised if the client and server versions don't match.
:param ignore_config: Indicates whether a processing of a .h2oconfig file should be conducted or not. Default value is False.
:param extra_classpath: List of paths to libraries that should be included on the Java classpath when starting H2O from Python.
:param kwargs: (all other deprecated attributes)
    :param jvm_custom_args: Custom, user-defined arguments for the JVM H2O is instantiated in. Ignored if there is an instance of H2O already running and the client connects to it.
"""
global h2oconn
assert_is_type(url, str, None)
assert_is_type(ip, str, None)
assert_is_type(port, int, str, None)
assert_is_type(name, str, None)
assert_is_type(https, bool, None)
assert_is_type(insecure, bool, None)
assert_is_type(username, str, None)
assert_is_type(password, str, None)
assert_is_type(cookies, str, [str], None)
assert_is_type(proxy, {str: str}, None)
assert_is_type(start_h2o, bool, None)
assert_is_type(nthreads, int)
assert_is_type(ice_root, str, None)
assert_is_type(log_dir, str, None)
assert_is_type(log_level, str, None)
assert_satisfies(log_level, log_level in [None, "TRACE", "DEBUG", "INFO", "WARN", "ERRR", "FATA"])
assert_is_type(enable_assertions, bool)
assert_is_type(max_mem_size, int, str, None)
assert_is_type(min_mem_size, int, str, None)
assert_is_type(strict_version_check, bool, None)
assert_is_type(extra_classpath, [str], None)
assert_is_type(jvm_custom_args, [str], None)
assert_is_type(bind_to_localhost, bool)
assert_is_type(kwargs, {"proxies": {str: str}, "max_mem_size_GB": int, "min_mem_size_GB": int,
"force_connect": bool, "as_port": bool})
def get_mem_size(mmint, mmgb):
if not mmint: # treat 0 and "" as if they were None
if mmgb is None: return None
return mmgb << 30
if is_type(mmint, int):
# If the user gives some small number just assume it's in Gigabytes...
if mmint < 1000: return mmint << 30
return mmint
if is_type(mmint, str):
last = mmint[-1].upper()
num = mmint[:-1]
if not (num.isdigit() and last in "MGT"):
raise H2OValueError("Wrong format for a *_memory_size argument: %s (should be a number followed by "
"a suffix 'M', 'G' or 'T')" % mmint)
if last == "T": return int(num) << 40
if last == "G": return int(num) << 30
if last == "M": return int(num) << 20
scheme = "https" if https else "http"
proxy = proxy[scheme] if proxy is not None and scheme in proxy else \
kwargs["proxies"][scheme] if "proxies" in kwargs and scheme in kwargs["proxies"] else None
mmax = get_mem_size(max_mem_size, kwargs.get("max_mem_size_GB"))
mmin = get_mem_size(min_mem_size, kwargs.get("min_mem_size_GB"))
auth = (username, password) if username and password else None
check_version = True
verify_ssl_certificates = True
# Apply the config file if ignore_config=False
if not ignore_config:
config = H2OConfigReader.get_config()
if url is None and ip is None and port is None and https is None and "init.url" in config:
url = config["init.url"]
if proxy is None and "init.proxy" in config:
proxy = config["init.proxy"]
if cookies is None and "init.cookies" in config:
cookies = config["init.cookies"].split(";")
if auth is None and "init.username" in config and "init.password" in config:
auth = (config["init.username"], config["init.password"])
if strict_version_check is None:
if "init.check_version" in config:
check_version = config["init.check_version"].lower() != "false"
elif os.environ.get("H2O_DISABLE_STRICT_VERSION_CHECK"):
check_version = False
else:
check_version = strict_version_check
if insecure is None:
if "init.verify_ssl_certificates" in config:
verify_ssl_certificates = config["init.verify_ssl_certificates"].lower() != "false"
else:
verify_ssl_certificates = not insecure
if not start_h2o:
print("Warning: if you don't want to start local H2O server, then use of `h2o.connect()` is preferred.")
try:
h2oconn = H2OConnection.open(url=url, ip=ip, port=port, name=name, https=https,
verify_ssl_certificates=verify_ssl_certificates,
auth=auth, proxy=proxy,cookies=cookies, verbose=True,
_msgs=("Checking whether there is an H2O instance running at {url} ",
"connected.", "not found."))
except H2OConnectionError:
# Backward compatibility: in init() port parameter really meant "baseport" when starting a local server...
if port and not str(port).endswith("+") and not kwargs.get("as_port", False):
port = str(port) + "+"
if not start_h2o: raise
if ip and not (ip == "localhost" or ip == "127.0.0.1"):
raise H2OConnectionError('Can only start H2O launcher if IP address is localhost.')
hs = H2OLocalServer.start(nthreads=nthreads, enable_assertions=enable_assertions, max_mem_size=mmax,
min_mem_size=mmin, ice_root=ice_root, log_dir=log_dir, log_level=log_level,
port=port, name=name,
extra_classpath=extra_classpath, jvm_custom_args=jvm_custom_args,
bind_to_localhost=bind_to_localhost)
h2oconn = H2OConnection.open(server=hs, https=https, verify_ssl_certificates=not insecure,
auth=auth, proxy=proxy,cookies=cookies, verbose=True)
if check_version:
version_check()
h2oconn.cluster.timezone = "UTC"
h2oconn.cluster.show_status()
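# A minimal startup sketch; the resource values below are illustrative only.
def _example_init():
    init(nthreads=2, max_mem_size="4G", strict_version_check=False)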
def lazy_import(path, pattern=None):
"""
Import a single file or collection of files.
:param path: A path to a data file (remote or local).
:param pattern: Character string containing a regular expression to match file(s) in the folder.
:returns: either a :class:`H2OFrame` with the content of the provided file, or a list of such frames if
importing multiple files.
"""
assert_is_type(path, str, [str])
assert_is_type(pattern, str, None)
paths = [path] if is_type(path, str) else path
return _import_multi(paths, pattern)
def _import_multi(paths, pattern):
assert_is_type(paths, [str])
assert_is_type(pattern, str, None)
j = api("POST /3/ImportFilesMulti", {"paths": paths, "pattern": pattern})
if j["fails"]: raise ValueError("ImportFiles of '" + ".".join(paths) + "' failed on " + str(j["fails"]))
return j["destination_frames"]
def upload_file(path, destination_frame=None, header=0, sep=None, col_names=None, col_types=None,
na_strings=None, skipped_columns=None):
"""
Upload a dataset from the provided local path to the H2O cluster.
Does a single-threaded push to H2O. Also see :meth:`import_file`.
:param path: A path specifying the location of the data to upload.
:param destination_frame: The unique hex key assigned to the imported file. If none is given, a key will
be automatically generated.
:param header: -1 means the first line is data, 0 means guess, 1 means first line is header.
:param sep: The field separator character. Values on each line of the file are separated by
this character. If not provided, the parser will automatically detect the separator.
:param col_names: A list of column names for the file.
:param col_types: A list of types or a dictionary of column names to types to specify whether columns
should be forced to a certain type upon import parsing. If a list, the types for elements that are
None will be guessed. The possible types a column may have are:
- "unknown" - this will force the column to be parsed as all NA
- "uuid" - the values in the column must be true UUID or will be parsed as NA
- "string" - force the column to be parsed as a string
- "numeric" - force the column to be parsed as numeric. H2O will handle the compression of the numeric
data in the optimal manner.
- "enum" - force the column to be parsed as a categorical column.
- "time" - force the column to be parsed as a time column. H2O will attempt to parse the following
list of date time formats: (date) "yyyy-MM-dd", "yyyy MM dd", "dd-MMM-yy", "dd MMM yy", (time)
"HH:mm:ss", "HH:mm:ss:SSS", "HH:mm:ss:SSSnnnnnn", "HH.mm.ss" "HH.mm.ss.SSS", "HH.mm.ss.SSSnnnnnn".
Times can also contain "AM" or "PM".
:param na_strings: A list of strings, or a list of lists of strings (one list per column), or a dictionary
of column names to strings which are to be interpreted as missing values.
:param skipped_columns: an integer list of column indices to skip; these columns are not parsed into the final frame from the import file.
:returns: a new :class:`H2OFrame` instance.
:examples:
>>> frame = h2o.upload_file("/path/to/local/data")
"""
coltype = U(None, "unknown", "uuid", "string", "float", "real", "double", "int", "numeric",
"categorical", "factor", "enum", "time")
natype = U(str, [str])
assert_is_type(path, str)
assert_is_type(destination_frame, str, None)
assert_is_type(header, -1, 0, 1)
assert_is_type(sep, None, I(str, lambda s: len(s) == 1))
assert_is_type(col_names, [str], None)
assert_is_type(col_types, [coltype], {str: coltype}, None)
assert_is_type(na_strings, [natype], {str: natype}, None)
assert (skipped_columns is None) or isinstance(skipped_columns, list), \
    "The skipped_columns should be a list of column indices!"
check_frame_id(destination_frame)
if path.startswith("~"):
path = os.path.expanduser(path)
return H2OFrame()._upload_parse(path, destination_frame, header, sep, col_names, col_types, na_strings, skipped_columns)
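# Illustrative sketch (not part of the original module): how upload_file's col_types and
# na_strings parameters fit together. The CSV path and the column name are hypothetical.
def _example_upload_file():
    frame = upload_file("/tmp/prostate.csv",
                        destination_frame="prostate.hex",
                        header=1,
                        col_types={"CAPSULE": "enum"},
                        na_strings=["NA", ""])
    return frame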
def import_file(path=None, destination_frame=None, parse=True, header=0, sep=None, col_names=None, col_types=None,
na_strings=None, pattern=None, skipped_columns=None):
"""
Import a dataset that is already on the cluster.
The path to the data must be a valid path for each node in the H2O cluster. If some node in the H2O cluster
cannot see the file, then an exception will be thrown by the H2O cluster. Does a parallel/distributed
multi-threaded pull of the data. The main difference between this method and :func:`upload_file` is that
the latter works with local files, whereas this method imports remote files (i.e. files local to the server).
If you are running the H2O server on your own machine, then both methods behave the same.
:param path: path(s) specifying the location of the data to import or a path to a directory of files to import
:param destination_frame: The unique hex key assigned to the imported file. If none is given, a key will be
automatically generated.
:param parse: If True, the file should be parsed after import. If False, then a list is returned containing the file path.
:param header: -1 means the first line is data, 0 means guess, 1 means first line is header.
:param sep: The field separator character. Values on each line of the file are separated by
this character. If not provided, the parser will automatically detect the separator.
:param col_names: A list of column names for the file.
:param col_types: A list of types or a dictionary of column names to types to specify whether columns
should be forced to a certain type upon import parsing. If a list, the types for elements that are
None will be guessed. The possible types a column may have are:
- "unknown" - this will force the column to be parsed as all NA
- "uuid" - the values in the column must be true UUID or will be parsed as NA
- "string" - force the column to be parsed as a string
- "numeric" - force the column to be parsed as numeric. H2O will handle the compression of the numeric
data in the optimal manner.
- "enum" - force the column to be parsed as a categorical column.
- "time" - force the column to be parsed as a time column. H2O will attempt to parse the following
list of date time formats: (date) "yyyy-MM-dd", "yyyy MM dd", "dd-MMM-yy", "dd MMM yy", (time)
"HH:mm:ss", "HH:mm:ss:SSS", "HH:mm:ss:SSSnnnnnn", "HH.mm.ss" "HH.mm.ss.SSS", "HH.mm.ss.SSSnnnnnn".
Times can also contain "AM" or "PM".
:param na_strings: A list of strings, or a list of lists of strings (one list per column), or a dictionary
of column names to strings which are to be interpreted as missing values.
:param pattern: Character string containing a regular expression to match file(s) in the folder if `path` is a
directory.
:param skipped_columns: an integer list of column indices to skip; these columns are not parsed into the final frame from the import file.
:returns: a new :class:`H2OFrame` instance.
:examples:
>>> # Single file import
>>> iris = import_file("h2o-3/smalldata/iris.csv")
>>> # Return all files in the folder iris/ matching the regex r"iris_.*\.csv"
>>> iris_pattern = h2o.import_file(path = "h2o-3/smalldata/iris",
... pattern = "iris_.*\.csv")
"""
coltype = U(None, "unknown", "uuid", "string", "float", "real", "double", "int", "numeric",
"categorical", "factor", "enum", "time")
natype = U(str, [str])
assert_is_type(path, str, [str])
assert_is_type(pattern, str, None)
assert_is_type(destination_frame, str, None)
assert_is_type(parse, bool)
assert_is_type(header, -1, 0, 1)
assert_is_type(sep, None, I(str, lambda s: len(s) == 1))
assert_is_type(col_names, [str], None)
assert_is_type(col_types, [coltype], {str: coltype}, None)
assert_is_type(na_strings, [natype], {str: natype}, None)
assert isinstance(skipped_columns, (type(None), list)), "The skipped_columns should be a list of column indices!"
check_frame_id(destination_frame)
patharr = path if isinstance(path, list) else [path]
if any(os.path.split(p)[0] == "~" for p in patharr):
raise H2OValueError("Paths relative to a current user (~) are not valid in the server environment. "
"Please use absolute paths if possible.")
if not parse:
return lazy_import(path, pattern)
else:
return H2OFrame()._import_parse(path, pattern, destination_frame, header, sep, col_names, col_types, na_strings, skipped_columns)
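# Illustrative sketch (not part of the original module): importing a whole directory of
# files matching a pattern, versus registering a file lazily without parsing. Paths are
# hypothetical.
def _example_import_file():
    # Parse every iris_*.csv found under the directory into a single frame.
    iris = import_file("/data/iris", pattern=r"iris_.*\.csv")
    # Register the file without parsing; a list of file paths is returned instead.
    raw = import_file("/data/iris/iris_train.csv", parse=False)
    return iris, raw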
def import_sql_table(connection_url, table, username, password, columns=None, optimize=True, fetch_mode=None):
"""
Import SQL table to H2OFrame in memory.
Assumes that the SQL table is not being updated and is stable.
Runs multiple SELECT SQL queries concurrently for parallel ingestion.
Be sure to start the h2o.jar in the terminal with your downloaded JDBC driver in the classpath::
java -cp <path_to_h2o_jar>:<path_to_jdbc_driver_jar> water.H2OApp
Also see :func:`import_sql_select`.
Currently supported SQL databases are MySQL, PostgreSQL, MariaDB, and Netezza. Support for Oracle 12g and Microsoft SQL
Server is forthcoming.
:param connection_url: URL of the SQL database connection as specified by the Java Database Connectivity (JDBC)
Driver. For example, "jdbc:mysql://localhost:3306/menagerie?&useSSL=false"
:param table: name of SQL table
:param columns: a list of column names to import from SQL table. Default is to import all columns.
:param username: username for SQL server
:param password: password for SQL server
:param optimize: DEPRECATED. Ignored - use fetch_mode instead. Optimize import of SQL table for faster imports.
:param fetch_mode: Set to DISTRIBUTED to enable distributed import. Set to SINGLE to force a sequential read by a single node
from the database.
:returns: an :class:`H2OFrame` containing data of the specified SQL table.
:examples:
>>> conn_url = "jdbc:mysql://172.16.2.178:3306/ingestSQL?&useSSL=false"
>>> table = "citibike20k"
>>> username = "root"
>>> password = "abc123"
>>> my_citibike_data = h2o.import_sql_table(conn_url, table, username, password)
"""
assert_is_type(connection_url, str)
assert_is_type(table, str)
assert_is_type(username, str)
assert_is_type(password, str)
assert_is_type(columns, [str], None)
assert_is_type(optimize, bool)
assert_is_type(fetch_mode, str, None)
p = {"connection_url": connection_url, "table": table, "username": username, "password": password,
"fetch_mode": fetch_mode}
if columns:
p["columns"] = ", ".join(columns)
j = H2OJob(api("POST /99/ImportSQLTable", data=p), "Import SQL Table").poll()
return get_frame(j.dest_key)
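# Illustrative sketch (not part of the original module): a typical JDBC table import.
# The connection URL, credentials, table and column names are hypothetical.
def _example_import_sql_table():
    conn_url = "jdbc:mysql://localhost:3306/menagerie?&useSSL=false"
    return import_sql_table(conn_url, table="citibike20k",
                            username="root", password="abc123",
                            columns=["bikeid", "starttime"],
                            fetch_mode="SINGLE")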
def import_sql_select(connection_url, select_query, username, password, optimize=True, fetch_mode=None):
"""
Import the SQL table that is the result of the specified SQL query to H2OFrame in memory.
Creates a temporary SQL table from the specified select_query.
Runs multiple SELECT SQL queries on the temporary table concurrently for parallel ingestion, then drops the table.
Be sure to start the h2o.jar in the terminal with your downloaded JDBC driver in the classpath::
java -cp <path_to_h2o_jar>:<path_to_jdbc_driver_jar> water.H2OApp
Also see h2o.import_sql_table. Currently supported SQL databases are MySQL, PostgreSQL, and MariaDB. Support
for Oracle 12g and Microsoft SQL Server is forthcoming.
:param connection_url: URL of the SQL database connection as specified by the Java Database Connectivity (JDBC)
Driver. For example, "jdbc:mysql://localhost:3306/menagerie?&useSSL=false"
:param select_query: SQL query starting with `SELECT` that returns rows from one or more database tables.
:param username: username for SQL server
:param password: password for SQL server
:param optimize: DEPRECATED. Ignored - use fetch_mode instead. Optimize import of SQL table for faster imports.
:param fetch_mode: Set to DISTRIBUTED to enable distributed import. Set to SINGLE to force a sequential read by a single node
from the database.
:returns: an :class:`H2OFrame` containing data of the specified SQL query.
:examples:
>>> conn_url = "jdbc:mysql://172.16.2.178:3306/ingestSQL?&useSSL=false"
>>> select_query = "SELECT bikeid from citibike20k"
>>> username = "root"
>>> password = "abc123"
>>> my_citibike_data = h2o.import_sql_select(conn_url, select_query,
... username, password, fetch_mode="SINGLE")
"""
assert_is_type(connection_url, str)
assert_is_type(select_query, str)
assert_is_type(username, str)
assert_is_type(password, str)
assert_is_type(optimize, bool)
assert_is_type(fetch_mode, str, None)
p = {"connection_url": connection_url, "select_query": select_query, "username": username, "password": password,
"fetch_mode": fetch_mode}
j = H2OJob(api("POST /99/ImportSQLTable", data=p), "Import SQL Table").poll()
return get_frame(j.dest_key)
def parse_setup(raw_frames, destination_frame=None, header=0, separator=None, column_names=None,
column_types=None, na_strings=None, skipped_columns=None):
"""
Retrieve H2O's best guess as to what the structure of the data file is.
During parse setup, the H2O cluster will make several guesses about the attributes of
the data. This method allows a user to perform corrective measures by updating the
dictionary returned by this method. This dictionary is then fed into `parse_raw` to
produce the H2OFrame instance.
:param raw_frames: a collection of imported file frames
:param destination_frame: The unique hex key assigned to the imported file. If none is given, a key will
automatically be generated.
:param header: -1 means the first line is data, 0 means guess, 1 means first line is header.
:param separator: The field separator character. Values on each line of the file are separated by
this character. If not provided, the parser will automatically detect the separator.
:param column_names: A list of column names for the file. If skipped_columns are specified, only list column names
of columns that are not skipped.
:param column_types: A list of types or a dictionary of column names to types to specify whether columns
should be forced to a certain type upon import parsing. If a list, the types for elements that are
None will be guessed. If skipped_columns are specified, only list column types of columns that are not skipped.
The possible types a column may have are:
- "unknown" - this will force the column to be parsed as all NA
- "uuid" - the values in the column must be true UUID or will be parsed as NA
- "string" - force the column to be parsed as a string
- "numeric" - force the column to be parsed as numeric. H2O will handle the compression of the numeric
data in the optimal manner.
- "enum" - force the column to be parsed as a categorical column.
- "time" - force the column to be parsed as a time column. H2O will attempt to parse the following
list of date time formats: (date) "yyyy-MM-dd", "yyyy MM dd", "dd-MMM-yy", "dd MMM yy", (time)
"HH:mm:ss", "HH:mm:ss:SSS", "HH:mm:ss:SSSnnnnnn", "HH.mm.ss" "HH.mm.ss.SSS", "HH.mm.ss.SSSnnnnnn".
Times can also contain "AM" or "PM".
:param na_strings: A list of strings, or a list of lists of strings (one list per column), or a dictionary
of column names to strings which are to be interpreted as missing values.
:param skipped_columns: an integer list of column indices to skip; these columns are not parsed into the final frame from the import file.
:returns: a dictionary containing parse parameters guessed by the H2O backend.
"""
coltype = U(None, "unknown", "uuid", "string", "float", "real", "double", "int", "numeric",
"categorical", "factor", "enum", "time")
natype = U(str, [str])
assert_is_type(raw_frames, str, [str])
assert_is_type(destination_frame, None, str)
assert_is_type(header, -1, 0, 1)
assert_is_type(separator, None, I(str, lambda s: len(s) == 1))
assert_is_type(column_names, [str], None)
assert_is_type(column_types, [coltype], {str: coltype}, None)
assert_is_type(na_strings, [natype], {str: natype}, None)
check_frame_id(destination_frame)
# The H2O backend only accepts things that are quoted
if is_type(raw_frames, str): raw_frames = [raw_frames]
# temporary dictionary just to pass the following information to the parser: header, separator
kwargs = {"check_header": header, "source_frames": [quoted(frame_id) for frame_id in raw_frames]}
if separator:
kwargs["separator"] = ord(separator)
j = api("POST /3/ParseSetup", data=kwargs)
if "warnings" in j and j["warnings"]:
for w in j["warnings"]:
warnings.warn(w)
# TODO: really should be url encoding...
if destination_frame:
j["destination_frame"] = destination_frame
parse_column_len = len(j["column_types"]) if skipped_columns is None else (len(j["column_types"])-len(skipped_columns))
tempColumnNames = j["column_names"] if j["column_names"] is not None else gen_header(j["number_columns"])
useType = [True]*len(tempColumnNames)
if skipped_columns is not None:
useType = [True]*len(tempColumnNames)
for ind in range(len(tempColumnNames)):
if ind in skipped_columns:
useType[ind]=False
if column_names is not None:
if not isinstance(column_names, list): raise ValueError("col_names should be a list")
if (skipped_columns is not None) and len(skipped_columns)>0:
if (len(column_names)) != parse_column_len:
raise ValueError(
"length of col_names should be equal to the number of columns parsed: %d vs %d"
% (len(column_names), parse_column_len))
else:
if len(column_names) != len(j["column_types"]): raise ValueError(
"length of col_names should be equal to the number of columns: %d vs %d"
% (len(column_names), len(j["column_types"])))
j["column_names"] = column_names
counter = 0
for ind in range(len(tempColumnNames)):
if useType[ind]:
tempColumnNames[ind]=column_names[counter]
counter=counter+1
if (column_types is not None): # keep the column types to include all columns
if isinstance(column_types, dict):
# overwrite dictionary to ordered list of column types. if user didn't specify column type for all names,
# use type provided by backend
if j["column_names"] is None: # no colnames discovered! (C1, C2, ...)
j["column_names"] = gen_header(j["number_columns"])
if not set(column_types.keys()).issubset(set(j["column_names"])): raise ValueError(
"names specified in col_types is not a subset of the column names")
idx = 0
column_types_list = []
for name in tempColumnNames: # column_names may have already been changed
if name in column_types:
column_types_list.append(column_types[name])
else:
column_types_list.append(j["column_types"][idx])
idx += 1
column_types = column_types_list
elif isinstance(column_types, list):
if len(column_types) != parse_column_len: raise ValueError(
"length of col_types should be equal to the number of parsed columns")
# need to expand it out to all columns, not just the parsed ones
column_types_list = j["column_types"]
counter = 0
for ind in range(len(j["column_types"])):
if useType[ind] and (column_types[counter]!=None):
column_types_list[ind]=column_types[counter]
counter=counter+1
column_types = column_types_list
else: # not dictionary or list
raise ValueError("col_types should be a list of types or a dictionary of column names to types")
j["column_types"] = column_types
if na_strings is not None:
if isinstance(na_strings, dict):
# overwrite dictionary to ordered list of lists of na_strings
if not j["column_names"]: raise ValueError("column names should be specified")
if not set(na_strings.keys()).issubset(set(j["column_names"])): raise ValueError(
"names specified in na_strings is not a subset of the column names")
j["na_strings"] = [[] for _ in range(len(j["column_names"]))]
for name, na in na_strings.items():
idx = j["column_names"].index(name)
if is_type(na, str): na = [na]
for n in na: j["na_strings"][idx].append(quoted(n))
elif is_type(na_strings, [[str]]):
if len(na_strings) != len(j["column_types"]):
raise ValueError("length of na_strings should be equal to the number of columns")
j["na_strings"] = [[quoted(na) for na in col] if col is not None else [] for col in na_strings]
elif isinstance(na_strings, list):
j["na_strings"] = [[quoted(na) for na in na_strings]] * len(j["column_types"])
else: # not a dictionary or list
raise ValueError(
"na_strings should be a list, a list of lists (one list per column), or a dictionary of column "
"names to strings which are to be interpreted as missing values")
if skipped_columns is not None:
if isinstance(skipped_columns, list):
j["skipped_columns"] = []
for colidx in skipped_columns:
if (colidx < 0): raise ValueError("skipped column index cannot be negative")
j["skipped_columns"].append(colidx)
# quote column names and column types also when not specified by user
if j["column_names"]: j["column_names"] = list(map(quoted, j["column_names"]))
j["column_types"] = list(map(quoted, j["column_types"]))
return j
def parse_raw(setup, id=None, first_line_is_header=0):
"""
Parse dataset using the parse setup structure.
:param setup: Result of ``h2o.parse_setup()``
:param id: an id for the frame.
:param first_line_is_header: -1, 0, 1 if the first line is to be used as the header
:returns: an :class:`H2OFrame` object.
"""
assert_is_type(setup, dict)
assert_is_type(id, str, None)
assert_is_type(first_line_is_header, -1, 0, 1)
check_frame_id(id)
if id:
setup["destination_frame"] = id
if first_line_is_header not in (-1, 0, 1):
    raise ValueError("first_line_is_header should be -1, 0, or 1")
setup["check_header"] = first_line_is_header
fr = H2OFrame()
fr._parse_raw(setup)
return fr
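# Illustrative sketch (not part of the original module): the lazy_import -> parse_setup ->
# parse_raw workflow, where the guessed setup is corrected before parsing. The file path
# and the column name are hypothetical.
def _example_setup_then_parse():
    raw = lazy_import("/data/airlines.csv")
    setup = parse_setup(raw, separator=",", column_types={"Origin": "enum"})
    return parse_raw(setup, id="airlines.hex", first_line_is_header=1)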
def assign(data, xid):
"""
(internal) Assign new id to the frame.
:param data: an H2OFrame whose id should be changed
:param xid: new id for the frame.
:returns: the passed frame.
"""
assert_is_type(data, H2OFrame)
assert_is_type(xid, str)
assert_satisfies(xid, xid != data.frame_id)
check_frame_id(xid)
data._ex = ExprNode("assign", xid, data)._eval_driver(False)
data._ex._cache._id = xid
data._ex._children = None
return data
def deep_copy(data, xid):
"""
Create a deep clone of the frame ``data``.
:param data: an H2OFrame to be cloned
:param xid: (internal) id to be assigned to the new frame.
:returns: new :class:`H2OFrame` which is the clone of the passed frame.
"""
assert_is_type(data, H2OFrame)
assert_is_type(xid, str)
assert_satisfies(xid, xid != data.frame_id)
check_frame_id(xid)
duplicate = data.apply(lambda x: x)
duplicate._ex = ExprNode("assign", xid, duplicate)._eval_driver(False)
duplicate._ex._cache._id = xid
duplicate._ex._children = None
return duplicate
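# Illustrative sketch (not part of the original module): the difference between assign
# (re-keys the same data) and deep_copy (creates an independent clone). Frame ids are
# hypothetical.
def _example_assign_vs_deep_copy(frame):
    renamed = assign(frame, "my_frame.hex")        # same data under a new key
    clone = deep_copy(frame, "my_frame_copy.hex")  # independent copy of the data
    return renamed, clone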
def get_model(model_id):
"""
Load a model from the server.
:param model_id: The model identification in H2O
:returns: Model object, a subclass of H2OEstimator
"""
assert_is_type(model_id, str)
model_json = api("GET /3/Models/%s" % model_id)["models"][0]
algo = model_json["algo"]
if algo == "svd": m = H2OSVD()
elif algo == "pca": m = H2OPrincipalComponentAnalysisEstimator()
elif algo == "drf": m = H2ORandomForestEstimator()
elif algo == "naivebayes": m = H2ONaiveBayesEstimator()
elif algo == "kmeans": m = H2OKMeansEstimator()
elif algo == "glrm": m = H2OGeneralizedLowRankEstimator()
elif algo == "glm": m = H2OGeneralizedLinearEstimator()
elif algo == "gbm": m = H2OGradientBoostingEstimator()
elif algo == "deepwater": m = H2ODeepWaterEstimator()
elif algo == "xgboost": m = H2OXGBoostEstimator()
elif algo == "word2vec": m = H2OWord2vecEstimator()
elif algo == "deeplearning":
if model_json["output"]["model_category"] == "AutoEncoder":
m = H2OAutoEncoderEstimator()
else:
m = H2ODeepLearningEstimator()
elif algo == "stackedensemble": m = H2OStackedEnsembleEstimator()
elif algo == "isolationforest": m = H2OIsolationForestEstimator()
else:
raise ValueError("Unknown algo type: " + algo)
m._resolve_model(model_id, model_json)
return m
def get_grid(grid_id):
"""
Return the specified grid.
:param grid_id: The grid identification in h2o
:returns: an :class:`H2OGridSearch` instance.
"""
assert_is_type(grid_id, str)
grid_json = api("GET /99/Grids/%s" % grid_id)
models = [get_model(key["name"]) for key in grid_json["model_ids"]]
# get first model returned in list of models from grid search to get model class (binomial, multinomial, etc)
first_model_json = api("GET /3/Models/%s" % grid_json["model_ids"][0]["name"])["models"][0]
gs = H2OGridSearch(None, {}, grid_id)
gs._resolve_grid(grid_id, grid_json, first_model_json)
gs.models = models
hyper_params = {param: set() for param in gs.hyper_names}
for param in gs.hyper_names:
for model in models:
if isinstance(model.full_parameters[param]["actual_value"], list):
hyper_params[param].add(model.full_parameters[param]["actual_value"][0])
else:
hyper_params[param].add(model.full_parameters[param]["actual_value"])
hyper_params = {str(param): list(vals) for param, vals in hyper_params.items()}
gs.hyper_params = hyper_params
gs.model = model.__class__()
return gs
def get_frame(frame_id, **kwargs):
"""
Obtain a handle to the frame in H2O with the frame_id key.
:param str frame_id: id of the frame to retrieve.
:returns: an :class:`H2OFrame` object
"""
assert_is_type(frame_id, str)
return H2OFrame.get_frame(frame_id, **kwargs)
def no_progress():
"""
Disable the progress bar from flushing to stdout.
The completed progress bar is printed when a job is complete so as to demarcate a log file.
"""
H2OJob.__PROGRESS_BAR__ = False
def show_progress():
"""Enable the progress bar (it is enabled by default)."""
H2OJob.__PROGRESS_BAR__ = True
def enable_expr_optimizations(flag):
"""Enable expression tree local optimizations."""
ExprNode.__ENABLE_EXPR_OPTIMIZATIONS__ = flag
def is_expr_optimizations_enabled():
return ExprNode.__ENABLE_EXPR_OPTIMIZATIONS__
def log_and_echo(message=""):
"""
Log a message on the server-side logs.
This is helpful when running several pieces of work one after the other on a single H2O
cluster and you want to make a notation in the H2O server side log where one piece of
work ends and the next piece of work begins.
Sends a message to H2O for logging. Generally used for debugging purposes.
:param message: message to write to the log.
"""
assert_is_type(message, str)
api("POST /3/LogAndEcho", data={"message": str(message)})
def remove(x):
"""
Remove object(s) from H2O.
:param x: H2OFrame, H2OEstimator, or string, or a list of those things: the object(s) or unique id(s)
pointing to the object(s) to be removed.
"""
item_type = U(str, H2OFrame, H2OEstimator)
assert_is_type(x, item_type, [item_type])
if not isinstance(x, list): x = [x]
for xi in x:
if isinstance(xi, H2OFrame):
xi_id = xi._ex._cache._id # String or None
if xi_id is None: continue  # Lazy frame, never evaluated, nothing in cluster; move on to the next object
rapids("(rm {})".format(xi_id))
xi._ex = None
elif isinstance(xi, H2OEstimator):
api("DELETE /3/DKV/%s" % xi.model_id)
xi._id = None
else:
# string may be a Frame key name part of a rapids session... need to call rm thru rapids here
try:
rapids("(rm {})".format(xi))
except:
api("DELETE /3/DKV/%s" % xi)
def remove_all():
"""Remove all objects from H2O."""
api("DELETE /3/DKV")
def rapids(expr):
"""
Execute a Rapids expression.
:param expr: The rapids expression (ascii string).
:returns: The JSON response (as a python dictionary) of the Rapids execution.
"""
assert_is_type(expr, str)
return ExprNode.rapids(expr)
def ls():
"""List keys on an H2O Cluster."""
return H2OFrame._expr(expr=ExprNode("ls")).as_data_frame(use_pandas=True)
def frame(frame_id):
"""
Retrieve metadata for an id that points to a Frame.
:param frame_id: the key of a Frame in H2O.
:returns: dict containing the frame meta-information.
"""
assert_is_type(frame_id, str)
return api("GET /3/Frames/%s" % frame_id)
def frames():
"""
Retrieve all the Frames.
:returns: Meta information on the frames
"""
return api("GET /3/Frames")
def download_pojo(model, path="", get_jar=True, jar_name=""):
"""
Download the POJO for this model to the directory specified by path; if path is "", then dump to screen.
:param model: the model whose scoring POJO should be retrieved.
:param path: an absolute path to the directory where POJO should be saved.
:param get_jar: retrieve the h2o-genmodel.jar also (will be saved to the same folder ``path``).
:param jar_name: Custom name of genmodel jar.
:returns: location of the downloaded POJO file.
"""
assert_is_type(model, ModelBase)
assert_is_type(path, str)
assert_is_type(get_jar, bool)
if not model.have_pojo:
raise H2OValueError("Export to POJO not supported")
if path == "":
java_code = api("GET /3/Models.java/%s" % model.model_id)
print(java_code)
return None
else:
filename = api("GET /3/Models.java/%s" % model.model_id, save_to=path)
if get_jar:
if jar_name == "":
api("GET /3/h2o-genmodel.jar", save_to=os.path.join(path, "h2o-genmodel.jar"))
else:
api("GET /3/h2o-genmodel.jar", save_to=os.path.join(path, jar_name))
return filename
def download_csv(data, filename):
"""
Download an H2O data set to a CSV file on the local disk.
Warning: Files located on the H2O server may be very large! Make sure you have enough
hard drive space to accommodate the entire file.
:param data: an H2OFrame object to be downloaded.
:param filename: name for the CSV file where the data should be saved to.
"""
assert_is_type(data, H2OFrame)
assert_is_type(filename, str)
url = h2oconn.make_url("DownloadDataset", 3) + "?frame_id={}&hex_string=false".format(data.frame_id)
with open(filename, "wb") as f:
f.write(urlopen()(url).read())
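# Illustrative sketch (not part of the original module): pulling scoring artifacts and
# data down to the local machine. The target paths are hypothetical.
def _example_downloads(model, frame):
    pojo_path = download_pojo(model, path="/tmp/pojo", get_jar=True)
    download_csv(frame, "/tmp/exported_frame.csv")
    return pojo_path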
def download_all_logs(dirname=".", filename=None):
"""
Download H2O log files to disk.
:param dirname: a character string indicating the directory that the log file should be saved in.
:param filename: a string indicating the name that the downloaded file should be saved as. Note that the saved format is .zip, so the file name must include the .zip extension.
:returns: path of logs written in a zip file.
:examples: The following code saves the zip file `'autoh2o_log.zip'` into a subdirectory called `your_directory_name`, located one level below the current working directory. (Please note that `your_directory_name` should be replaced with the name of a directory that you have created and that already exists.)
>>> h2o.download_all_logs(dirname='./your_directory_name/', filename = 'autoh2o_log.zip')
"""
assert_is_type(dirname, str)
assert_is_type(filename, str, None)
url = "%s/3/Logs/download" % h2oconn.base_url
opener = urlopen()
response = opener(url)
if not os.path.exists(dirname): os.mkdir(dirname)
if filename is None:
if PY3:
headers = [h[1] for h in response.headers._headers]
else:
headers = response.headers.headers
for h in headers:
if "filename=" in h:
filename = h.split("filename=")[1].strip()
break
path = os.path.join(dirname, filename)
response = opener(url).read()
print("Writing H2O logs to " + path)
with open(path, "wb") as f:
f.write(response)
return path
def save_model(model, path="", force=False):
"""
Save an H2O Model object to disk. (Note that ensemble binary models can now be saved using this method.)
:param model: The model object to save.
:param path: a path to save the model at (hdfs, s3, local)
:param force: if True overwrite destination directory in case it exists, or throw exception if set to False.
:returns: the path of the saved model
:examples:
>>> path = h2o.save_model(my_model, path=my_path)
"""
assert_is_type(model, ModelBase)
assert_is_type(path, str)
assert_is_type(force, bool)
path = os.path.join(os.getcwd() if path == "" else path, model.model_id)
return api("GET /99/Models.bin/%s" % model.model_id, data={"dir": path, "force": force})["dir"]
def load_model(path):
"""
Load a saved H2O model from disk. (Note that ensemble binary models can now be loaded using this method.)
:param path: the full path of the H2O Model to be imported.
:returns: an :class:`H2OEstimator` object
:examples:
>>> path = h2o.save_model(my_model, path=my_path)
>>> h2o.load_model(path)
"""
assert_is_type(path, str)
res = api("POST /99/Models.bin/%s" % "", data={"dir": path})
return get_model(res["models"][0]["model_id"]["name"])
def export_file(frame, path, force=False, parts=1):
"""
Export a given H2OFrame to a path on the machine this python session is currently connected to.
:param frame: the Frame to save to disk.
:param path: the path to the save point on disk.
:param force: if True, overwrite any preexisting file with the same path
:param parts: enables export to multiple 'part' files instead of just a single file.
Convenient for large datasets that take too long to store in a single file.
Use parts=-1 to instruct H2O to determine the optimal number of part files or
specify your desired maximum number of part files. Path needs to be a directory
when exporting to multiple files, and that directory must be empty.
Default is ``parts = 1``, which is to export to a single file.
"""
assert_is_type(frame, H2OFrame)
assert_is_type(path, str)
assert_is_type(force, bool)
assert_is_type(parts, int)
H2OJob(api("POST /3/Frames/%s/export" % (frame.frame_id), data={"path": path, "num_parts": parts, "force": force}),
"Export File").poll()
def cluster():
"""Return :class:`H2OCluster` object describing the backend H2O cloud."""
return h2oconn.cluster if h2oconn else None
def create_frame(frame_id=None, rows=10000, cols=10, randomize=True,
real_fraction=None, categorical_fraction=None, integer_fraction=None,
binary_fraction=None, time_fraction=None, string_fraction=None,
value=0, real_range=100, factors=100, integer_range=100,
binary_ones_fraction=0.02, missing_fraction=0.01,
has_response=False, response_factors=2, positive_response=False,
seed=None, seed_for_column_types=None):
"""
Create a new frame with random data.
Creates a data frame in H2O with real-valued, categorical, integer, and binary columns specified by the user.
:param frame_id: the destination key. If empty, this will be auto-generated.
:param rows: the number of rows of data to generate.
:param cols: the number of columns of data to generate. Excludes the response column if has_response is True.
:param randomize: If True, data values will be randomly generated. This must be True if either
categorical_fraction or integer_fraction is non-zero.
:param value: if randomize is False, then all real-valued entries will be set to this value.
:param real_range: the range of randomly generated real values.
:param real_fraction: the fraction of columns that are real-valued.
:param categorical_fraction: the fraction of total columns that are categorical.
:param factors: the number of (unique) factor levels in each categorical column.
:param integer_fraction: the fraction of total columns that are integer-valued.
:param integer_range: the range of randomly generated integer values.
:param binary_fraction: the fraction of total columns that are binary-valued.
:param binary_ones_fraction: the fraction of values in a binary column that are set to 1.
:param time_fraction: the fraction of randomly created date/time columns.
:param string_fraction: the fraction of randomly created string columns.
:param missing_fraction: the fraction of total entries in the data frame that are set to NA.
:param has_response: A logical value indicating whether an additional response column should be prepended to the
final H2O data frame. If set to True, the total number of columns will be ``cols + 1``.
:param response_factors: if has_response is True, then this variable controls the type of the "response" column:
setting response_factors to 1 will generate real-valued response, any value greater or equal than 2 will
create categorical response with that many categories.
:param positive_response: when the response variable is present and of real type, this will control whether it
contains positive values only, or both positive and negative.
:param seed: a seed used to generate random values when ``randomize`` is True.
:param seed_for_column_types: a seed used to generate random column types when ``randomize`` is True.
:returns: an :class:`H2OFrame` object
"""
t_fraction = U(None, BoundNumeric(0, 1))
assert_is_type(frame_id, str, None)
assert_is_type(rows, BoundInt(1))
assert_is_type(cols, BoundInt(1))
assert_is_type(randomize, bool)
assert_is_type(value, numeric)
assert_is_type(real_range, BoundNumeric(0))
assert_is_type(real_fraction, t_fraction)
assert_is_type(categorical_fraction, t_fraction)
assert_is_type(integer_fraction, t_fraction)
assert_is_type(binary_fraction, t_fraction)
assert_is_type(time_fraction, t_fraction)
assert_is_type(string_fraction, t_fraction)
assert_is_type(missing_fraction, t_fraction)
assert_is_type(binary_ones_fraction, t_fraction)
assert_is_type(factors, BoundInt(1))
assert_is_type(integer_range, BoundInt(1))
assert_is_type(has_response, bool)
assert_is_type(response_factors, None, BoundInt(1))
assert_is_type(positive_response, bool)
assert_is_type(seed, int, None)
assert_is_type(seed_for_column_types, int, None)
check_frame_id(frame_id)
if randomize and value:
raise H2OValueError("Cannot set data to a `value` if `randomize` is true")
if (categorical_fraction or integer_fraction) and not randomize:
raise H2OValueError("`randomize` should be True when either categorical or integer columns are used.")
# The total column fraction that the user has specified explicitly. This sum should not exceed 1. We will respect
# all explicitly set fractions, and will auto-select the remaining fractions.
frcs = [real_fraction, categorical_fraction, integer_fraction, binary_fraction, time_fraction, string_fraction]
wgts = [0.5, 0.2, 0.2, 0.1, 0.0, 0.0]
sum_explicit_fractions = sum(0 if f is None else f for f in frcs)
count_explicit_fractions = sum(0 if f is None else 1 for f in frcs)
remainder = 1 - sum_explicit_fractions
if sum_explicit_fractions >= 1 + 1e-10:
raise H2OValueError("Fractions of binary, integer, categorical, time and string columns should add up "
"to a number less than 1.")
elif sum_explicit_fractions >= 1 - 1e-10:
# The fractions already add up to almost 1. No need to do anything (the server will absorb the tiny
# remainder into the real_fraction column).
pass
else:
# sum_explicit_fractions < 1 => distribute the remainder among the columns that were not set explicitly
if count_explicit_fractions == 6:
raise H2OValueError("Fraction of binary, integer, categorical, time and string columns add up to a "
"number less than 1.")
# Each column type receives a certain part (proportional to column's "weight") of the remaining fraction.
sum_implicit_weights = sum(wgts[i] if frcs[i] is None else 0 for i in range(6))
for i, f in enumerate(frcs):
if frcs[i] is not None: continue
if sum_implicit_weights == 0:
frcs[i] = remainder
else:
frcs[i] = remainder * wgts[i] / sum_implicit_weights
remainder -= frcs[i]
sum_implicit_weights -= wgts[i]
for i, f in enumerate(frcs):
if f is None:
frcs[i] = 0
real_fraction, categorical_fraction, integer_fraction, binary_fraction, time_fraction, string_fraction = frcs
parms = {"dest": frame_id if frame_id else py_tmp_key(append=h2oconn.session_id),
"rows": rows,
"cols": cols,
"randomize": randomize,
"categorical_fraction": categorical_fraction,
"integer_fraction": integer_fraction,
"binary_fraction": binary_fraction,
"time_fraction": time_fraction,
"string_fraction": string_fraction,
# "real_fraction" is not provided, the backend computes it as 1 - sum(5 other fractions)
"value": value,
"real_range": real_range,
"factors": factors,
"integer_range": integer_range,
"binary_ones_fraction": binary_ones_fraction,
"missing_fraction": missing_fraction,
"has_response": has_response,
"response_factors": response_factors,
"positive_response": positive_response,
"seed": -1 if seed is None else seed,
"seed_for_column_types": -1 if seed_for_column_types is None else seed_for_column_types,
}
H2OJob(api("POST /3/CreateFrame", data=parms), "Create Frame").poll()
return get_frame(parms["dest"])
def interaction(data, factors, pairwise, max_factors, min_occurrence, destination_frame=None):
"""
Categorical Interaction Feature Creation in H2O.
Creates a frame in H2O with n-th order interaction features between categorical columns, as specified by
the user.
:param data: the H2OFrame that holds the target categorical columns.
:param factors: factor columns (either indices or column names).
:param pairwise: If True, create pairwise interactions between factors (otherwise create one
higher-order interaction). Only applicable if there are 3 or more factors.
:param max_factors: Max. number of factor levels in pair-wise interaction terms (if enforced, one extra
catch-all factor will be made).
:param min_occurrence: Min. occurrence threshold for factor levels in pair-wise interaction terms
:param destination_frame: a string indicating the destination key. If empty, this will be auto-generated by H2O.
:returns: :class:`H2OFrame`
"""
assert_is_type(data, H2OFrame)
assert_is_type(factors, [str, int])
assert_is_type(pairwise, bool)
assert_is_type(max_factors, int)
assert_is_type(min_occurrence, int)
assert_is_type(destination_frame, str, None)
factors = [data.names[n] if is_type(n, int) else n for n in factors]
parms = {"dest": py_tmp_key(append=h2oconn.session_id) if destination_frame is None else destination_frame,
"source_frame": data.frame_id,
"factor_columns": [quoted(f) for f in factors],
"pairwise": pairwise,
"max_factors": max_factors,
"min_occurrence": min_occurrence,
}
H2OJob(api("POST /3/Interaction", data=parms), "Interactions").poll()
return get_frame(parms["dest"])
def as_list(data, use_pandas=True, header=True):
"""
Convert an H2O data object into a python-specific object.
WARNING! This will pull all data local!
If Pandas is available (and use_pandas is True), then pandas will be used to parse the
data frame. Otherwise, a list-of-lists populated by character data will be returned (so
the types of data will all be str).
:param data: an H2O data object.
:param use_pandas: If True, try to use pandas for reading in the data.
:param header: If True, return column names as first element in list
:returns: List of lists (Rows x Columns).
"""
assert_is_type(data, H2OFrame)
assert_is_type(use_pandas, bool)
assert_is_type(header, bool)
return H2OFrame.as_data_frame(data, use_pandas=use_pandas, header=header)
def demo(funcname, interactive=True, echo=True, test=False):
"""
H2O built-in demo facility.
:param funcname: A string that identifies the h2o python function to demonstrate.
:param interactive: If True, the user will be prompted to continue the demonstration after every segment.
:param echo: If True, the python commands that are executed will be displayed.
:param test: If True, `h2o.init()` will not be called (used for pyunit testing).
:example:
>>> import h2o
>>> h2o.demo("gbm")
"""
import h2o.demos as h2odemo
assert_is_type(funcname, str)
assert_is_type(interactive, bool)
assert_is_type(echo, bool)
assert_is_type(test, bool)
demo_function = getattr(h2odemo, funcname, None)
if demo_function and type(demo_function) is type(demo):
demo_function(interactive, echo, test)
else:
print("Demo for %s is not available." % funcname)
def load_dataset(relative_path):
"""Imports a data file within the 'h2o_data' folder."""
assert_is_type(relative_path, str)
h2o_dir = os.path.split(__file__)[0]
for possible_file in [os.path.join(h2o_dir, relative_path),
os.path.join(h2o_dir, "h2o_data", relative_path),
os.path.join(h2o_dir, "h2o_data", relative_path + ".csv")]:
if os.path.exists(possible_file):
return upload_file(possible_file)
# File not found -- raise an error!
raise H2OValueError("Data file %s cannot be found" % relative_path)
def make_metrics(predicted, actual, domain=None, distribution=None):
"""
Create Model Metrics from predicted and actual values in H2O.
:param H2OFrame predicted: an H2OFrame containing predictions.
:param H2OFrame actual: an H2OFrame containing actual values.
:param domain: list of response factors for classification.
:param distribution: distribution for regression.
"""
assert_is_type(predicted, H2OFrame)
assert_is_type(actual, H2OFrame)
# assert predicted.ncol == 1, "`predicted` frame should have exactly 1 column"
assert actual.ncol == 1, "`actual` frame should have exactly 1 column"
assert_is_type(distribution, str, None)
assert_satisfies(actual.ncol, actual.ncol == 1)
if domain is None and any(actual.isfactor()):
domain = actual.levels()[0]
res = api("POST /3/ModelMetrics/predictions_frame/%s/actuals_frame/%s" % (predicted.frame_id, actual.frame_id),
data={"domain": domain, "distribution": distribution})
return res["model_metrics"]
def flow():
"""
Open H2O Flow in your browser.
"""
webbrowser.open(connection().base_url, new = 1)
def _put_key(file_path, dest_key=None, overwrite=True):
"""
Upload the given file into DKV and save it under the given key as a raw object.
:param dest_key: name of destination key in DKV
:param file_path: path to file to upload
:return: key name if object was uploaded successfully
"""
ret = api("POST /3/PutKey?destination_key={}&overwrite={}".format(dest_key if dest_key else '', overwrite),
filename=file_path)
return ret["destination_key"]
def _create_zip_file(dest_filename, *content_list):
from .utils.shared_utils import InMemoryZipArch
with InMemoryZipArch(dest_filename) as zip_arch:
for filename, file_content in content_list:
zip_arch.append(filename, file_content)
return dest_filename
def _default_source_provider(obj):
import inspect
# First try to get source code via inspect
try:
return ' '.join(inspect.getsourcelines(obj)[0])
except (OSError, TypeError):
# It seems like we are in interactive shell and
# we do not have access to class source code directly
# At this point we can:
# (1) get IPython history and find class definition, or
# (2) compose body of class from methods, since it is still possible to get
# method body
class_def = "class {}:\n".format(obj.__name__)
for name, member in inspect.getmembers(obj):
if inspect.ismethod(member):
class_def += inspect.getsource(member)
return class_def
def upload_custom_metric(func, func_file="metrics.py", func_name=None, class_name=None, source_provider=None):
"""
Upload given metrics function into H2O cluster.
The metrics can have different representation:
- method
- class: needs to inherit from water.udf.CFunc2 and implement method apply(actual, predict)
returning double
- string: the same as in class case, but the class is given as a string
:param func: metrics representation: string, class, function
:param func_file: internal name of file to save given metrics representation
:param func_name: name for h2o key under which the given metric is saved
:param class_name: name of class wrapping the metrics function
:param source_provider: a function which provides a source code for given function
:return: reference to uploaded metrics function
"""
import tempfile
import inspect
# Use default source provider
if not source_provider:
source_provider = _default_source_provider
# The template wraps given metrics representation
_CFUNC_CODE_TEMPLATE = """# Generated code
import water.udf.CMetricFunc as MetricFunc
# User given metric function as a class implementing
# 3 methods defined by interface CMetricFunc
{}
# Generated user metric which satisfies the interface
# of Java MetricFunc
class {}Wrapper({}, MetricFunc, object):
pass
"""
assert_satisfies(func, inspect.isclass(func) or isinstance(func, str),
"The argument func needs to be string or class !")
assert_satisfies(func_file, func_file is not None,
"The argument func_file is missing!")
assert_satisfies(func_file, func_file.endswith('.py'),
"The argument func_file needs to end with '.py'")
code = None
derived_func_name = None
module_name = func_file[:-3]
if isinstance(func, str):
assert_satisfies(class_name, class_name is not None,
"The argument class_name is missing! " +
"It needs to reference the class in given string!")
derived_func_name = "metrics_{}".format(class_name)
code = func  # in this branch func is already the class source code given as a string
else:
assert_satisfies(func, inspect.isclass(func), "The parameter `func` should be str or class")
for method in ['map', 'reduce', 'metric']:
assert_satisfies(func, method in func.__dict__, "The class `func` needs to define method `{}`".format(method))
assert_satisfies(class_name, class_name is None,
"If class is specified then class_name parameter needs to be None")
class_name = "{}.{}Wrapper".format(module_name, func.__name__)
derived_func_name = "metrics_{}".format(func.__name__)
code = _CFUNC_CODE_TEMPLATE.format(source_provider(func), func.__name__, func.__name__)
# If the func name is not given, use whatever we can derive from the given definition
if not func_name:
func_name = derived_func_name
# Save the generated code into a jar (zip) file
tmpdir = tempfile.mkdtemp(prefix="h2o-func")
func_arch_file = _create_zip_file("{}/func.jar".format(tmpdir), (func_file, code))
# Upload into K/V
dest_key = _put_key(func_arch_file, dest_key=func_name)
# Reference
return "python:{}={}".format(dest_key, class_name)
#-----------------------------------------------------------------------------------------------------------------------
# Private
#-----------------------------------------------------------------------------------------------------------------------
def _check_connection():
if not h2oconn or not h2oconn.cluster:
raise H2OConnectionError("Not connected to a cluster. Did you run `h2o.connect()`?")
def _connect_with_conf(conn_conf):
conf = conn_conf
if isinstance(conn_conf, dict):
conf = H2OConnectionConf(config=conn_conf)
assert_is_type(conf, H2OConnectionConf)
return connect(url=conf.url, verify_ssl_certificates=conf.verify_ssl_certificates,
               auth=conf.auth, proxy=conf.proxy, cookies=conf.cookies, verbose=conf.verbose)
#-----------------------------------------------------------------------------------------------------------------------
# ALL DEPRECATED METHODS BELOW
#-----------------------------------------------------------------------------------------------------------------------
# Deprecated since 2015-10-08
@deprecated("Deprecated, use ``h2o.import_file()``.")
def import_frame():
"""Deprecated."""
import_file()
# Deprecated since 2015-10-08
@deprecated("Deprecated (converted to a private method).")
def parse():
"""Deprecated."""
pass
# Deprecated since 2016-08-04
@deprecated("Deprecated, use ``h2o.cluster().show_status()``.")
def cluster_info():
"""Deprecated."""
_check_connection()
cluster().show_status()
# Deprecated since 2016-08-04
@deprecated("Deprecated, use ``h2o.cluster().show_status(True)``.")
def cluster_status():
"""Deprecated."""
_check_connection()
cluster().show_status(True)
# Deprecated since 2016-08-04
@deprecated("Deprecated, use ``h2o.cluster().shutdown()``.")
def shutdown(prompt=False):
"""Deprecated."""
_check_connection()
cluster().shutdown(prompt)
# Deprecated since 2016-08-04
@deprecated("Deprecated, use ``h2o.cluster().network_test()``.")
def network_test():
"""Deprecated."""
_check_connection()
cluster().network_test()
# Deprecated since 2016-08-04
@deprecated("Deprecated, use ``h2o.cluster().timezone``.")
def get_timezone():
"""Deprecated."""
_check_connection()
return cluster().timezone
# Deprecated since 2016-08-04
@deprecated("Deprecated, set ``h2o.cluster().timezone`` instead.")
def set_timezone(value):
"""Deprecated."""
_check_connection()
cluster().timezone = value
# Deprecated since 2016-08-04
@deprecated("Deprecated, use ``h2o.cluster().list_timezones()``.")
def list_timezones():
"""Deprecated."""
_check_connection()
return cluster().list_timezones()
|
h2oai/h2o-dev
|
h2o-py/h2o/h2o.py
|
Python
|
apache-2.0
| 72,871
|
import json
import re
import subprocess
from django.conf import settings
default_app_config = "peering.apps.PeeringConfig"
def call_irr_as_set_resolver(irr_as_set, address_family=6):
"""
Call a subprocess to expand the given AS-SET for an IP version.
"""
prefixes = []
if not irr_as_set:
return prefixes
# Call bgpq3 with arguments to get a JSON result
command = [
settings.BGPQ3_PATH,
"-h",
settings.BGPQ3_HOST,
"-S",
settings.BGPQ3_SOURCES,
"-{}".format(address_family),
"-A",
"-j",
"-l",
"prefix_list",
irr_as_set,
]
# Merge user settings to command line right before the name of the prefix list
if settings.BGPQ3_ARGS:
index = len(command) - 3
command[index:index] = settings.BGPQ3_ARGS[
"ipv6" if address_family == 6 else "ipv4"
]
process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = process.communicate()
if process.returncode != 0:
error_log = "bgpq3 exit code is {}".format(process.returncode)
if err and err.strip():
error_log += ", stderr: {}".format(err)
raise ValueError(error_log)
prefixes.extend([p for p in json.loads(out.decode())["prefix_list"]])
return prefixes
def parse_irr_as_set(asn, irr_as_set):
"""
Validate that an AS-SET is usable and split it into smaller parts if it is actually
composed of several AS-SETs.
"""
as_sets = []
# Can't work with empty or whitespace only AS-SET
if not irr_as_set or not irr_as_set.strip():
return ["AS{}".format(asn)]
unparsed = re.split(r"[/,&\s]", irr_as_set)
for value in unparsed:
value = value.strip()
if not value:
continue
for regexp in [
# Remove registry prefix if any
r"^(?:{}):[:\s]".format(settings.BGPQ3_SOURCES.replace(",", "|")),
# Removing "ipv4:" and "ipv6:"
r"^(?:ipv4|ipv6):",
]:
pattern = re.compile(regexp, flags=re.IGNORECASE)
value, number_of_subs_made = pattern.subn("", value)
# If some substitutions have been made, make sure to clean things up
if number_of_subs_made > 0:
value = value.strip()
as_sets.append(value)
return as_sets
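# Illustrative sketch (not part of the original module): how a composite AS-SET string is
# normalized before being handed to bgpq3. It assumes "RIPE" is listed in
# settings.BGPQ3_SOURCES and that the bgpq3 binary is available; values are hypothetical.
def _example_expand_as_sets():
    # "RIPE::AS-FOO,ipv6:AS-BAR" -> ["AS-FOO", "AS-BAR"]
    as_sets = parse_irr_as_set(64500, "RIPE::AS-FOO,ipv6:AS-BAR")
    return [call_irr_as_set_resolver(s, address_family=4) for s in as_sets]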
|
respawner/peering-manager
|
peering/__init__.py
|
Python
|
apache-2.0
| 2,416
|
class Solution(object):
def constructRectangle(self, area):
"""
:type area: int
:rtype: List[int]
"""
ans = None
W = 1
while W * W <= area:
if area % W == 0:
ans = [area // W, W]  # floor division keeps both sides integral on Python 3 as well
W += 1
return ans
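# Illustrative sketch (not part of the original file): the loop keeps the last divisor
# pair found while W * W <= area, i.e. the pair whose sides are closest to each other.
def _example_construct_rectangle():
    s = Solution()
    return s.constructRectangle(4), s.constructRectangle(37)  # ([2, 2], [37, 1])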
|
ckclark/leetcode
|
py/construct-the-rectangle.py
|
Python
|
apache-2.0
| 304
|
# coding: utf-8
"""
Wavefront REST API
<p>The Wavefront REST API enables you to interact with Wavefront servers using standard REST API tools. You can use the REST API to automate commonly executed operations such as automatically tagging sources.</p><p>When you make REST API calls outside the Wavefront REST API documentation you must add the header \"Authorization: Bearer <<API-TOKEN>>\" to your HTTP requests.</p> # noqa: E501
OpenAPI spec version: v2
Contact: chitimba@wavefront.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import wavefront_api_client
from wavefront_api_client.api.source_api import SourceApi # noqa: E501
from wavefront_api_client.rest import ApiException
class TestSourceApi(unittest.TestCase):
"""SourceApi unit test stubs"""
def setUp(self):
self.api = wavefront_api_client.api.source_api.SourceApi() # noqa: E501
def tearDown(self):
pass
def test_add_source_tag(self):
"""Test case for add_source_tag
Add a tag to a specific source # noqa: E501
"""
pass
def test_create_source(self):
"""Test case for create_source
Create metadata (description or tags) for a specific source # noqa: E501
"""
pass
def test_delete_source(self):
"""Test case for delete_source
Delete metadata (description and tags) for a specific source # noqa: E501
"""
pass
def test_get_all_source(self):
"""Test case for get_all_source
Get all sources for a customer # noqa: E501
"""
pass
def test_get_source(self):
"""Test case for get_source
Get a specific source for a customer # noqa: E501
"""
pass
def test_get_source_tags(self):
"""Test case for get_source_tags
Get all tags associated with a specific source # noqa: E501
"""
pass
def test_remove_description(self):
"""Test case for remove_description
Remove description from a specific source # noqa: E501
"""
pass
def test_remove_source_tag(self):
"""Test case for remove_source_tag
Remove a tag from a specific source # noqa: E501
"""
pass
def test_set_description(self):
"""Test case for set_description
Set description associated with a specific source # noqa: E501
"""
pass
def test_set_source_tags(self):
"""Test case for set_source_tags
Set all tags associated with a specific source # noqa: E501
"""
pass
def test_update_source(self):
"""Test case for update_source
Update metadata (description or tags) for a specific source. # noqa: E501
"""
pass
if __name__ == '__main__':
unittest.main()
|
wavefrontHQ/python-client
|
test/test_source_api.py
|
Python
|
apache-2.0
| 2,945
|
#
# Copyright (c) 2001 - 2015 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
# TODO:
# * supported arch for versions: for old versions of batch file without
# argument, giving bogus argument cannot be detected, so we have to hardcode
# this here
# * print warning when msvc version specified but not found
# * find out why warnings do not print
# * test on 64 bits XP + VS 2005 (and VS 6 if possible)
# * SDK
# * Assembly
__revision__ = "src/engine/SCons/Tool/MSCommon/vc.py rel_2.3.5:3329:275e75118ad4 2015/06/20 11:18:26 bdbaddog"
__doc__ = """Module for Visual C/C++ detection and configuration.
"""
import SCons.compat
import os
import platform
from string import digits as string_digits
import SCons.Warnings
import common
debug = common.debug
import sdk
get_installed_sdks = sdk.get_installed_sdks
class VisualCException(Exception):
pass
class UnsupportedVersion(VisualCException):
pass
class UnsupportedArch(VisualCException):
pass
class MissingConfiguration(VisualCException):
pass
class NoVersionFound(VisualCException):
pass
class BatchFileExecutionError(VisualCException):
pass
# Dict to 'canonalize' the arch
_ARCH_TO_CANONICAL = {
"amd64" : "amd64",
"emt64" : "amd64",
"i386" : "x86",
"i486" : "x86",
"i586" : "x86",
"i686" : "x86",
"ia64" : "ia64",
"itanium" : "ia64",
"x86" : "x86",
"x86_64" : "amd64",
"x86_amd64" : "x86_amd64", # Cross compile to 64 bit from 32bits
}
# Given a (host, target) tuple, return the argument for the bat file. Both host
# and targets should be canonalized.
_HOST_TARGET_ARCH_TO_BAT_ARCH = {
("x86", "x86"): "x86",
("x86", "amd64"): "x86_amd64",
("x86", "x86_amd64"): "x86_amd64",
("amd64", "x86_amd64"): "x86_amd64", # This is present in (at least) VS2012 express
("amd64", "amd64"): "amd64",
("amd64", "x86"): "x86",
("x86", "ia64"): "x86_ia64"
}
def get_host_target(env):
debug('vc.py:get_host_target()')
host_platform = env.get('HOST_ARCH')
if not host_platform:
host_platform = platform.machine()
# TODO(2.5): the native Python platform.machine() function returns
# '' on all Python versions before 2.6, after which it also uses
# PROCESSOR_ARCHITECTURE.
if not host_platform:
host_platform = os.environ.get('PROCESSOR_ARCHITECTURE', '')
# Retain user requested TARGET_ARCH
req_target_platform = env.get('TARGET_ARCH')
debug('vc.py:get_host_target() req_target_platform:%s'%req_target_platform)
if req_target_platform:
# If user requested a specific platform then only try that one.
target_platform = req_target_platform
else:
target_platform = host_platform
try:
host = _ARCH_TO_CANONICAL[host_platform.lower()]
except KeyError, e:
msg = "Unrecognized host architecture %s"
raise ValueError(msg % repr(host_platform))
try:
target = _ARCH_TO_CANONICAL[target_platform.lower()]
except KeyError, e:
all_archs = str(_ARCH_TO_CANONICAL.keys())
raise ValueError("Unrecognized target architecture %s\n\tValid architectures: %s" % (target_platform, all_archs))
return (host, target,req_target_platform)
# If you update this, update SupportedVSList in Tool/MSCommon/vs.py, and the
# MSVC_VERSION documentation in Tool/msvc.xml.
_VCVER = ["15.0", "14.0", "14.0Exp", "12.0", "12.0Exp", "11.0", "11.0Exp", "10.0", "10.0Exp", "9.0", "9.0Exp","8.0", "8.0Exp","7.1", "7.0", "6.0"]
_VCVER_TO_PRODUCT_DIR = {
'15.0' : [
r'Microsoft\VisualStudio\SxS\VS7\15.0'],
'14.0' : [
r'Microsoft\VisualStudio\14.0\Setup\VC\ProductDir'],
'12.0' : [
r'Microsoft\VisualStudio\12.0\Setup\VC\ProductDir'],
'12.0Exp' : [
r'Microsoft\VCExpress\12.0\Setup\VC\ProductDir'],
'11.0': [
r'Microsoft\VisualStudio\11.0\Setup\VC\ProductDir'],
'11.0Exp' : [
r'Microsoft\VCExpress\11.0\Setup\VC\ProductDir'],
'10.0': [
r'Microsoft\VisualStudio\10.0\Setup\VC\ProductDir'],
'10.0Exp' : [
r'Microsoft\VCExpress\10.0\Setup\VC\ProductDir'],
'9.0': [
r'Microsoft\VisualStudio\9.0\Setup\VC\ProductDir'],
'9.0Exp' : [
r'Microsoft\VCExpress\9.0\Setup\VC\ProductDir'],
'8.0': [
r'Microsoft\VisualStudio\8.0\Setup\VC\ProductDir'],
'8.0Exp': [
r'Microsoft\VCExpress\8.0\Setup\VC\ProductDir'],
'7.1': [
r'Microsoft\VisualStudio\7.1\Setup\VC\ProductDir'],
'7.0': [
r'Microsoft\VisualStudio\7.0\Setup\VC\ProductDir'],
'6.0': [
r'Microsoft\VisualStudio\6.0\Setup\Microsoft Visual C++\ProductDir']
}
def msvc_version_to_maj_min(msvc_version):
msvc_version_numeric = ''.join([x for x in msvc_version if x in string_digits + '.'])
t = msvc_version_numeric.split(".")
    if len(t) != 2:
raise ValueError("Unrecognized version %s (%s)" % (msvc_version,msvc_version_numeric))
try:
maj = int(t[0])
min = int(t[1])
return maj, min
except ValueError, e:
raise ValueError("Unrecognized version %s (%s)" % (msvc_version,msvc_version_numeric))
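# Illustrative behaviour of msvc_version_to_maj_min() (not part of the original
# module): non-numeric suffixes such as "Exp" are stripped before splitting.
#
#     >>> msvc_version_to_maj_min('9.0Exp')
#     (9, 0)
#     >>> msvc_version_to_maj_min('bogus')
#     Traceback (most recent call last):
#         ...
#     ValueError: Unrecognized version bogus ()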
def is_host_target_supported(host_target, msvc_version):
"""Return True if the given (host, target) tuple is supported given the
msvc version.
Parameters
----------
host_target: tuple
        tuple of (canonicalized) host-target, e.g. ("x86", "amd64") for cross
        compilation from 32 bit Windows to 64 bit.
msvc_version: str
msvc version (major.minor, e.g. 10.0)
Note
----
    This only checks whether a given version *may* support the given (host,
target), not that the toolchain is actually present on the machine.
"""
# We assume that any Visual Studio version supports x86 as a target
if host_target[1] != "x86":
maj, min = msvc_version_to_maj_min(msvc_version)
if maj < 8:
return False
return True
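# Illustrative checks (not part of the original module): x86 targets are always
# assumed to be supported, while 64 bit targets require VS 2005 (8.0) or later.
#
#     >>> is_host_target_supported(('x86', 'x86'), '6.0')
#     True
#     >>> is_host_target_supported(('x86', 'amd64'), '7.1')
#     False
#     >>> is_host_target_supported(('amd64', 'amd64'), '10.0')
#     True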
def find_vc_pdir(msvc_version):
"""Try to find the product directory for the given
version.
Note
----
If for some reason the requested version could not be found, an
exception which inherits from VisualCException will be raised."""
root = 'Software\\'
if common.is_win64():
root = root + 'Wow6432Node\\'
try:
hkeys = _VCVER_TO_PRODUCT_DIR[msvc_version]
except KeyError:
debug("Unknown version of MSVC: %s" % msvc_version)
raise UnsupportedVersion("Unknown version %s" % msvc_version)
for key in hkeys:
key = root + key
try:
comps = common.read_reg(key)
except WindowsError, e:
debug('find_vc_dir(): no VC registry key %s' % repr(key))
else:
debug('find_vc_dir(): found VC in registry: %s' % comps)
if msvc_version == "15.0":
comps = os.path.join(comps, "VC")
if os.path.exists(comps):
return comps
else:
debug('find_vc_dir(): reg says dir is %s, but it does not exist. (ignoring)'\
% comps)
raise MissingConfiguration("registry dir %s not found on the filesystem" % comps)
return None
def find_batch_file(env,msvc_version,host_arch,target_arch):
"""
Find the location of the batch script which should set up the compiler
for any TARGET_ARCH whose compilers were installed by Visual Studio/VCExpress
"""
pdir = find_vc_pdir(msvc_version)
if pdir is None:
raise NoVersionFound("No version of Visual Studio found")
debug('vc.py: find_batch_file() pdir:%s'%pdir)
# filter out e.g. "Exp" from the version name
msvc_ver_numeric = ''.join([x for x in msvc_version if x in string_digits + "."])
vernum = float(msvc_ver_numeric)
if 7 <= vernum < 8:
pdir = os.path.join(pdir, os.pardir, "Common7", "Tools")
batfilename = os.path.join(pdir, "vsvars32.bat")
elif vernum < 7:
pdir = os.path.join(pdir, "Bin")
batfilename = os.path.join(pdir, "vcvars32.bat")
elif vernum >= 15:
pdir = os.path.join(pdir, "Auxiliary", "Build")
batfilename = os.path.join(pdir, "vcvarsall.bat")
else: # >= 8
batfilename = os.path.join(pdir, "vcvarsall.bat")
if not os.path.exists(batfilename):
debug("Not found: %s" % batfilename)
batfilename = None
installed_sdks=get_installed_sdks()
for _sdk in installed_sdks:
sdk_bat_file = _sdk.get_sdk_vc_script(host_arch,target_arch)
if not sdk_bat_file:
debug("vc.py:find_batch_file() not found:%s"%_sdk)
else:
sdk_bat_file_path = os.path.join(pdir,sdk_bat_file)
if os.path.exists(sdk_bat_file_path):
debug('vc.py:find_batch_file() sdk_bat_file_path:%s'%sdk_bat_file_path)
return (batfilename,sdk_bat_file_path)
return (batfilename,None)
__INSTALLED_VCS_RUN = None
def cached_get_installed_vcs():
global __INSTALLED_VCS_RUN
if __INSTALLED_VCS_RUN is None:
ret = get_installed_vcs()
__INSTALLED_VCS_RUN = ret
return __INSTALLED_VCS_RUN
def get_installed_vcs():
installed_versions = []
for ver in _VCVER:
debug('trying to find VC %s' % ver)
try:
if find_vc_pdir(ver):
debug('found VC %s' % ver)
installed_versions.append(ver)
else:
debug('find_vc_pdir return None for ver %s' % ver)
except VisualCException, e:
debug('did not find VC %s: caught exception %s' % (ver, str(e)))
return installed_versions
def reset_installed_vcs():
    """Make it try again to find VC. This is just for the tests."""
    global __INSTALLED_VCS_RUN
    __INSTALLED_VCS_RUN = None
# Running these batch files isn't cheap: most of the time spent in
# msvs.generate() is due to vcvars*.bat. In a build that uses "tools='msvs'"
# in multiple environments, for example:
# env1 = Environment(tools='msvs')
# env2 = Environment(tools='msvs')
# we can greatly improve the speed of the second and subsequent Environment
# (or Clone) calls by memoizing the environment variables set by vcvars*.bat.
script_env_stdout_cache = {}
def script_env(script, args=None):
cache_key = (script, args)
stdout = script_env_stdout_cache.get(cache_key, None)
if stdout is None:
stdout = common.get_output(script, args)
script_env_stdout_cache[cache_key] = stdout
    # The batch files do not set a return code, so we look at the
    # beginning of the output for an error message instead.
olines = stdout.splitlines()
if olines[0].startswith("The specified configuration type is missing"):
raise BatchFileExecutionError("\n".join(olines[:2]))
return common.parse_output(stdout)
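# Illustrative effect of the memoization above (not part of the original
# module; the path is hypothetical): a second call with the same
# (script, args) key re-parses the cached stdout instead of running the
# batch file again.
#
#     >>> d1 = script_env(r'C:\...\vcvarsall.bat', args='amd64')  # runs the .bat
#     >>> d2 = script_env(r'C:\...\vcvarsall.bat', args='amd64')  # cache hit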
def get_default_version(env):
debug('get_default_version()')
msvc_version = env.get('MSVC_VERSION')
msvs_version = env.get('MSVS_VERSION')
debug('get_default_version(): msvc_version:%s msvs_version:%s'%(msvc_version,msvs_version))
if msvs_version and not msvc_version:
SCons.Warnings.warn(
SCons.Warnings.DeprecatedWarning,
"MSVS_VERSION is deprecated: please use MSVC_VERSION instead ")
return msvs_version
elif msvc_version and msvs_version:
if not msvc_version == msvs_version:
SCons.Warnings.warn(
SCons.Warnings.VisualVersionMismatch,
"Requested msvc version (%s) and msvs version (%s) do " \
"not match: please use MSVC_VERSION only to request a " \
"visual studio version, MSVS_VERSION is deprecated" \
% (msvc_version, msvs_version))
return msvs_version
if not msvc_version:
installed_vcs = cached_get_installed_vcs()
debug('installed_vcs:%s' % installed_vcs)
if not installed_vcs:
#msg = 'No installed VCs'
#debug('msv %s\n' % repr(msg))
#SCons.Warnings.warn(SCons.Warnings.VisualCMissingWarning, msg)
debug('msvc_setup_env: No installed VCs')
return None
msvc_version = installed_vcs[0]
debug('msvc_setup_env: using default installed MSVC version %s\n' % repr(msvc_version))
return msvc_version
def msvc_setup_env_once(env):
try:
has_run = env["MSVC_SETUP_RUN"]
except KeyError:
has_run = False
if not has_run:
msvc_setup_env(env)
env["MSVC_SETUP_RUN"] = True
def msvc_find_valid_batch_script(env,version):
debug('vc.py:msvc_find_valid_batch_script()')
# Find the host platform, target platform, and if present the requested
# target platform
(host_platform, target_platform,req_target_platform) = get_host_target(env)
try_target_archs = [target_platform]
debug("msvs_find_valid_batch_script(): req_target_platform %s target_platform:%s"%(req_target_platform,target_platform))
# VS2012 has a "cross compile" environment to build 64 bit
# with x86_amd64 as the argument to the batch setup script
if req_target_platform in ('amd64','x86_64'):
try_target_archs.append('x86_amd64')
elif not req_target_platform and target_platform in ['amd64','x86_64']:
# There may not be "native" amd64, but maybe "cross" x86_amd64 tools
try_target_archs.append('x86_amd64')
        # If the user hasn't specifically requested a TARGET_ARCH, and the
        # TARGET_ARCH is amd64, then also try 32 bit if there are no viable
        # 64 bit tools installed.
try_target_archs.append('x86')
debug("msvs_find_valid_batch_script(): host_platform: %s try_target_archs:%s"%(host_platform, try_target_archs))
d = None
for tp in try_target_archs:
# Set to current arch.
env['TARGET_ARCH']=tp
debug("vc.py:msvc_find_valid_batch_script() trying target_platform:%s"%tp)
host_target = (host_platform, tp)
if not is_host_target_supported(host_target, version):
warn_msg = "host, target = %s not supported for MSVC version %s" % \
(host_target, version)
SCons.Warnings.warn(SCons.Warnings.VisualCMissingWarning, warn_msg)
arg = _HOST_TARGET_ARCH_TO_BAT_ARCH[host_target]
# Try to locate a batch file for this host/target platform combo
try:
(vc_script,sdk_script) = find_batch_file(env,version,host_platform,tp)
debug('vc.py:msvc_find_valid_batch_script() vc_script:%s sdk_script:%s'%(vc_script,sdk_script))
except VisualCException, e:
msg = str(e)
debug('Caught exception while looking for batch file (%s)' % msg)
warn_msg = "VC version %s not installed. " + \
"C/C++ compilers are most likely not set correctly.\n" + \
" Installed versions are: %s"
warn_msg = warn_msg % (version, cached_get_installed_vcs())
SCons.Warnings.warn(SCons.Warnings.VisualCMissingWarning, warn_msg)
continue
# Try to use the located batch file for this host/target platform combo
debug('vc.py:msvc_find_valid_batch_script() use_script 2 %s, args:%s\n' % (repr(vc_script), arg))
if vc_script:
try:
d = script_env(vc_script, args=arg)
except BatchFileExecutionError, e:
debug('vc.py:msvc_find_valid_batch_script() use_script 3: failed running VC script %s: %s: Error:%s'%(repr(vc_script),arg,e))
vc_script=None
continue
if not vc_script and sdk_script:
debug('vc.py:msvc_find_valid_batch_script() use_script 4: trying sdk script: %s'%(sdk_script))
try:
d = script_env(sdk_script)
except BatchFileExecutionError,e:
debug('vc.py:msvc_find_valid_batch_script() use_script 5: failed running SDK script %s: Error:%s'%(repr(sdk_script),e))
continue
elif not vc_script and not sdk_script:
debug('vc.py:msvc_find_valid_batch_script() use_script 6: Neither VC script nor SDK script found')
continue
debug("vc.py:msvc_find_valid_batch_script() Found a working script/target: %s %s"%(repr(sdk_script),arg))
break # We've found a working target_platform, so stop looking
    # If we cannot find a viable installed compiler, reset TARGET_ARCH
    # to its initial value.
if not d:
env['TARGET_ARCH']=req_target_platform
return d
def msvc_setup_env(env):
debug('msvc_setup_env()')
version = get_default_version(env)
if version is None:
warn_msg = "No version of Visual Studio compiler found - C/C++ " \
"compilers most likely not set correctly"
# Nuitka: Useless warning for us.
# SCons.Warnings.warn(SCons.Warnings.VisualCMissingWarning, warn_msg)
return None
debug('msvc_setup_env: using specified MSVC version %s\n' % repr(version))
    # XXX: we set up both MSVC_VERSION and MSVS_VERSION for backward
    # compatibility with the msvs tool
env['MSVC_VERSION'] = version
env['MSVS_VERSION'] = version
env['MSVS'] = {}
use_script = env.get('MSVC_USE_SCRIPT', True)
if SCons.Util.is_String(use_script):
debug('vc.py:msvc_setup_env() use_script 1 %s\n' % repr(use_script))
d = script_env(use_script)
elif use_script:
d = msvc_find_valid_batch_script(env,version)
debug('vc.py:msvc_setup_env() use_script 2 %s\n' % d)
if not d:
return d
else:
debug('MSVC_USE_SCRIPT set to False')
warn_msg = "MSVC_USE_SCRIPT set to False, assuming environment " \
"set correctly."
# Nuitka: We use this on purpose.
# SCons.Warnings.warn(SCons.Warnings.VisualCMissingWarning, warn_msg)
return None
for k, v in d.items():
debug('vc.py:msvc_setup_env() env:%s -> %s'%(k,v))
env.PrependENVPath(k, v, delete_existing=True)
def msvc_exists(version=None):
vcs = cached_get_installed_vcs()
if version is None:
return len(vcs) > 0
return version in vcs
|
kayhayen/Nuitka
|
nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Tool/MSCommon/vc.py
|
Python
|
apache-2.0
| 19,499
|
from flask import render_template, flash, request, redirect, url_for
from flask_login import login_required
from kernel import agileCalendar
from kernel.DataBoard import Data
from kernel.NM_Aggregates import WorkBacklog, DevBacklog, RiskBacklog
from kconfig import coordinationBookByName
from . import coordination
__author__ = 'Manuel Escriche'
@coordination.route("/")
@coordination.route("/overview")
@login_required
def overview():
return redirect(url_for('coordination.delivery'))
@coordination.route("/success-stories")
@login_required
def success_stories():
cmp = coordinationBookByName['SuccessStories']
backlog = RiskBacklog(*Data.getGlobalComponent(cmp.key))
if backlog.source == 'store':
flash('Data from local storage obtained at {}'.format(backlog.timestamp))
sortedby = request.args.get('sortedby') if request.args.get('sortedby') else 'timeSlot'
return render_template('coordination/success_stories.html',
comp=cmp,
reporter=backlog,
sortedby=sortedby,
calendar=agileCalendar)
@coordination.route("/friendliness")
@login_required
def friendliness():
cmp = coordinationBookByName['Friendliness']
backlog = RiskBacklog(*Data.getGlobalComponent(cmp.key))
if backlog.source == 'store':
flash('Data from local storage obtained at {}'.format(backlog.timestamp))
sortedby = request.args.get('sortedby') if request.args.get('sortedby') else 'timeSlot'
return render_template('coordination/friendliness.html',
comp=cmp,
reporter=backlog,
sortedby=sortedby,
calendar=agileCalendar)
@coordination.route("/qualityassurance")
@login_required
def qualityassurance():
cmp = coordinationBookByName['QualityAssurance']
backlog = RiskBacklog(*Data.getGlobalComponent(cmp.key))
if backlog.source == 'store':
flash('Data from local storage obtained at {}'.format(backlog.timestamp))
sortedby = request.args.get('sortedby') if request.args.get('sortedby') else 'timeSlot'
return render_template('coordination/quality_assurance.html',
comp=cmp,
reporter=backlog,
sortedby=sortedby,
calendar=agileCalendar)
@coordination.route("/issues")
@login_required
def issues():
cmp = coordinationBookByName['Issues']
backlog = RiskBacklog(*Data.getGlobalComponent(cmp.key))
if backlog.source == 'store':
flash('Data from local storage obtained at {}'.format(backlog.timestamp))
sortedby = request.args.get('sortedby') if request.args.get('sortedby') else 'timeSlot'
return render_template('coordination/issues.html',
comp=cmp,
reporter=backlog,
sortedby=sortedby,
calendar=agileCalendar)
@coordination.route("/risks")
@login_required
def risks():
cmp = coordinationBookByName['Risks']
backlog = RiskBacklog(*Data.getGlobalComponent(cmp.key))
if backlog.source == 'store':
flash('Data from local storage obtained at {}'.format(backlog.timestamp))
sortedby = request.args.get('sortedby') if request.args.get('sortedby') else 'timeSlot'
return render_template('coordination/risks.html',
comp=cmp,
reporter=backlog,
sortedby=sortedby,
calendar=agileCalendar)
@coordination.route("/delivery")
@login_required
def delivery():
cmp = coordinationBookByName['Deliverables']
backlog = WorkBacklog(*Data.getGlobalComponent(cmp.key))
if backlog.source == 'store':
flash('Data from local storage obtained at {}'.format(backlog.timestamp))
sortedby = request.args.get('sortedby') if request.args.get('sortedby') else 'timeSlot'
return render_template('coordination/delivery.html',
comp=cmp,
reporter=backlog,
sortedby=sortedby,
calendar=agileCalendar)
@coordination.route("/docs")
@login_required
def docs():
cmp = coordinationBookByName['Documentation']
backlog = WorkBacklog(*Data.getGlobalComponent(cmp.key))
if backlog.source == 'store':
flash('Data from local storage obtained at {}'.format(backlog.timestamp))
sortedby = request.args.get('sortedby') if request.args.get('sortedby') else 'timeSlot'
return render_template('coordination/docs.html',
comp=cmp,
reporter=backlog,
sortedby=sortedby,
calendar=agileCalendar)
@coordination.route("/agile")
@login_required
def agile():
cmp = coordinationBookByName['Agile']
backlog = WorkBacklog(*Data.getGlobalComponent(cmp.key))
if backlog.source == 'store':
flash('Data from local storage obtained at {}'.format(backlog.timestamp))
sortedby = request.args.get('sortedby') if request.args.get('sortedby') else 'timeSlot'
return render_template('coordination/agile.html',
comp=cmp,
reporter=backlog,
sortedby=sortedby,
calendar=agileCalendar)
@coordination.route("/scrum-master")
@login_required
def scrumtools():
cmp = coordinationBookByName['SMTools']
backlog = DevBacklog(*Data.getGlobalComponent(cmp.key))
if backlog.source == 'store':
flash('Data from local storage obtained at {}'.format(backlog.timestamp))
sortedby = request.args.get('sortedby') if request.args.get('sortedby') else 'timeSlot'
return render_template('coordination/scrum_tools.html',
comp=cmp,
reporter=backlog,
sortedby=sortedby,
calendar=agileCalendar)
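# Illustrative refactoring sketch (not part of the original module): every view
# above follows the same pattern -- look up the component, build its backlog,
# flash a notice when data comes from local storage, and render a template.
# A hypothetical helper could factor that out:
#
# def _render_backlog(component_name, backlog_cls, template):
#     cmp = coordinationBookByName[component_name]
#     backlog = backlog_cls(*Data.getGlobalComponent(cmp.key))
#     if backlog.source == 'store':
#         flash('Data from local storage obtained at {}'.format(backlog.timestamp))
#     sortedby = request.args.get('sortedby') if request.args.get('sortedby') else 'timeSlot'
#     return render_template(template, comp=cmp, reporter=backlog,
#                            sortedby=sortedby, calendar=agileCalendar)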
|
flopezag/fiware-backlog
|
app/coordination/views.py
|
Python
|
apache-2.0
| 6,105
|
# Copyright 2018 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for custom landing pages."""
from __future__ import absolute_import # pylint: disable=import-only-modules
from __future__ import unicode_literals # pylint: disable=import-only-modules
from core.tests import test_utils
import feconf
class FractionLandingRedirectPageTest(test_utils.GenericTestBase):
"""Test for redirecting landing page for fractions."""
def test_old_fractions_landing_url_without_viewer_type(self):
"""Test to validate the old Fractions landing url without viewerType
redirects to the new Fractions landing url.
"""
response = self.get_html_response(
feconf.FRACTIONS_LANDING_PAGE_URL, expected_status_int=302)
self.assertEqual(
'http://localhost/math/fractions',
response.headers['location'])
def test_old_fraction_landing_url_with_viewer_type(self):
"""Test to validate the old Fractions landing url with viewerType
redirects to the new Fractions landing url.
"""
response = self.get_html_response(
'%s?viewerType=student' % feconf.FRACTIONS_LANDING_PAGE_URL,
expected_status_int=302)
self.assertEqual(
'http://localhost/math/fractions',
response.headers['location'])
class TopicLandingRedirectPageTest(test_utils.GenericTestBase):
"""Test for redirecting the old landing page URL to the new one."""
def test_old_topic_url_redirect(self):
response = self.get_html_response(
'/learn/maths/fractions', expected_status_int=302)
self.assertEqual(
'http://localhost/math/fractions', response.headers['location'])
class TopicLandingPageTest(test_utils.GenericTestBase):
"""Test for showing landing pages."""
def test_valid_subject_and_topic_loads_correctly(self):
response = self.get_html_response('/math/fractions')
response.mustcontain('<topic-landing-page></topic-landing-page>')
class StewardsLandingPageTest(test_utils.GenericTestBase):
"""Test for showing the landing page for stewards (parents, teachers,
volunteers, or NGOs).
"""
def test_nonprofits_landing_page(self):
response = self.get_html_response(
feconf.CUSTOM_NONPROFITS_LANDING_PAGE_URL)
response.mustcontain(
'<stewards-landing-page></stewards-landing-page>')
def test_parents_landing_page(self):
response = self.get_html_response(
feconf.CUSTOM_PARENTS_LANDING_PAGE_URL)
response.mustcontain(
'<stewards-landing-page></stewards-landing-page>')
def test_teachers_landing_page(self):
response = self.get_html_response(
feconf.CUSTOM_TEACHERS_LANDING_PAGE_URL)
response.mustcontain('<stewards-landing-page></stewards-landing-page>')
def test_volunteers_landing_page(self):
response = self.get_html_response(
feconf.CUSTOM_VOLUNTEERS_LANDING_PAGE_URL)
response.mustcontain('<stewards-landing-page></stewards-landing-page>')
|
prasanna08/oppia
|
core/controllers/custom_landing_pages_test.py
|
Python
|
apache-2.0
| 3,649
|
import pytest
from tests.functional.services.api.images import (
add_image,
delete_image_by_id,
get_image_id,
wait_for_image_to_analyze,
)
from tests.functional.services.utils.http_utils import get_api_conf
@pytest.fixture(scope="package")
def add_image_with_teardown_package_scope(request):
def _add_image_with_teardown(tag, api_conf=get_api_conf):
# add image
add_resp = add_image(tag, api_conf)
image_id = get_image_id(add_resp)
wait_for_image_to_analyze(image_id, api_conf)
# add teardown
request.addfinalizer(lambda: delete_image_by_id(image_id, api_conf))
return add_resp
return _add_image_with_teardown
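# Illustrative use of the fixture above (not part of the original conftest;
# the image tag is hypothetical):
#
# def test_analyzed_image(add_image_with_teardown_package_scope):
#     add_resp = add_image_with_teardown_package_scope("alpine:latest")
#     image_id = get_image_id(add_resp)
#     # ... exercise the analyzed image; deletion happens in the finalizer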
|
anchore/anchore-engine
|
tests/functional/conftest.py
|
Python
|
apache-2.0
| 696
|
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Performance analyzer constants."""
DISPLAY_COLUMNS = [
{
'name': 'type',
'title': 'Issue type',
'tooltip': 'Type of issue affecting the fuzz target.'
},
{
'name': 'percent',
'title': 'Percent runs affected',
'tooltip': 'Percentage of fuzz target runs impacted by this issue.'
},
{
'name': 'score',
'title': 'Priority score (experimental)',
        'tooltip': 'Experimental score indicating the priority of this issue.'
},
{
'name': 'examples',
'title': 'Log examples',
'tooltip': 'Sample logs showing this issue.'
},
{
'name': 'solutions',
'title': 'Recommended solutions',
'tooltip': 'Possible solutions to fix this issue.'
},
]
ISSUE_TYPE_SOLUTIONS_MAP = {
    'bad_instrumentation':
        """The fuzz target has been built incorrectly. The fuzzing engine has not
detected coverage information, so most likely coverage flags (i.e.
`-fsanitize-coverage`) have not been properly used during compilation.""",
    'coverage':
        """The fuzz target cannot find new 'interesting' inputs and hence is
unable to cover new code. There are several ways to improve code coverage:<br/>
- Add a new dictionary or update existing ones with new strings.<br/>
- Add new testcases to the corpus (these can be manually generated, used from
unit tests, valid files, traffic streams, etc depending on the target).<br/>
- Update the target function to use different combinations of flags passed to
the target.<br/>
- Check `max_len` value, may be it is not appropriate for the target (too big
for some data which cannot be too big, or too small for some data which
cannot be too small).""",
'crash':
"""The fuzz target crashes frequently. You need to fix these crashers
first so that fuzzing can be efficient and explore new code and crashes.""",
'leak':
"""The fuzz target is leaking memory often. You need to fix these leaks
first so that fuzzing can be efficient and not crash on out-of-memory. If these
leaks are false positives, you can suppress them using LeakSanitizer
suppressions.""",
'logging':
"""The fuzz target writes too many log messages (either stdout or
stderr). Excessive logging is extremely detrimental to efficient fuzzing.
Most targets support different levels of logging for a target. You need to
modify the target function or compilation flags to use the lowest level of
logging verbosity.<br/>
If target does not provide a way to control logging levels or to disable
logging in any other possible way, you can use `-close_fd_mask` option of
libFuzzer.""",
'none':
"""The fuzz target is working well. No issues were detected.""",
'oom':
"""The fuzz target hits out-of-memory errors. It may be caused by a
valid input (e.g. a large array allocation). In that case, you need to
implement a workaround to avoid generation of such testcases. Or the target
function could be leaking memory, so you need to fix those memory leak
crashes.""",
    'slow_unit':
        """The target spends several seconds on a single input. It can be a bug
in the target, so you need to profile whether this is a real bug in the target.
For some cases, lowering `max_len` option may help to avoid slow units
(e.g. regexp processing time increases exponentially with larger inputs).""",
'speed':
"""Execution speed is one of the most important factors for efficient
fuzzing. You need to optimize the target function so that the execution speed
is at least 1,000 testcases per second.""",
'startup_crash':
"""The fuzz target does not work and crashes instantly on startup.
Compile the fuzz target locally and run it as per the documentation. In most
cases, fuzz target does not work due to linking errors or due to the bug in
target itself (i.e. `LLVMFuzzerTestOneInput` function).""",
'timeout':
"""The fuzz target hits timeout error. Timeout bugs slow down fuzzing
significantly since fuzz target hangs on the processing of those inputs. You
need to debug the root cause for the hang and fix it. Possible causes are
getting stuck on an infinite loop, some complex computation, etc.""",
}
QUERY_COLUMNS = [
'actual_duration',
'average_exec_per_sec',
'bad_instrumentation',
'crash_count',
'expected_duration',
'leak_count',
'log_lines_from_engine',
'log_lines_ignored',
'log_lines_unwanted',
'new_units_added',
'new_units_generated',
'oom_count',
'slow_units_count',
'startup_crash_count',
'strategy_corpus_subset',
'strategy_random_max_len',
'strategy_value_profile',
'timeout_count',
'timestamp',
]
|
google/clusterfuzz
|
src/appengine/handlers/performance_report/constants.py
|
Python
|
apache-2.0
| 5,306
|
# Copyright (C) 2012 Nippon Telegraph and Telephone Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import struct
from . import packet_base
from . import packet_utils
ICMP_ECHO_REPLY = 0
ICMP_DEST_UNREACH = 3
ICMP_SRC_QUENCH = 4
ICMP_REDIRECT = 5
ICMP_ECHO_REQUEST = 8
ICMP_TIME_EXCEEDED = 11
ICMP_ECHO_REPLY_CODE = 0
ICMP_HOST_UNREACH_CODE = 1
ICMP_PORT_UNREACH_CODE = 3
ICMP_TTL_EXPIRED_CODE = 0
class icmp(packet_base.PacketBase):
"""ICMP (RFC 792) header encoder/decoder class.
An instance has the following attributes at least.
    Most of them are the same as the on-wire counterparts but in host byte order.
    __init__ takes the corresponding args in this order.
============== ====================
Attribute Description
============== ====================
type Type
code Code
csum CheckSum \
(0 means automatically-calculate when encoding)
data Payload. \
Either a bytearray, or \
ryu.lib.packet.icmp.echo or \
ryu.lib.packet.icmp.dest_unreach or \
ryu.lib.packet.icmp.TimeExceeded object \
NOTE for icmp.echo: \
This includes "unused" 16 bits and the following \
"Internet Header + 64 bits of Original Data Datagram" of \
the ICMP header. \
NOTE for icmp.dest_unreach and icmp.TimeExceeded: \
This includes "unused" 8 or 24 bits and the following \
"Internet Header + leading octets of original datagram" \
of the original packet.
============== ====================
"""
_PACK_STR = '!BBH'
_MIN_LEN = struct.calcsize(_PACK_STR)
_ICMP_TYPES = {}
@staticmethod
def register_icmp_type(*args):
def _register_icmp_type(cls):
for type_ in args:
icmp._ICMP_TYPES[type_] = cls
return cls
return _register_icmp_type
def __init__(self, type_, code, csum, data=None):
super(icmp, self).__init__()
self.type = type_
self.code = code
self.csum = csum
self.data = data
@classmethod
def parser(cls, buf):
(type_, code, csum) = struct.unpack_from(cls._PACK_STR, buf)
msg = cls(type_, code, csum)
offset = cls._MIN_LEN
if len(buf) > offset:
cls_ = cls._ICMP_TYPES.get(type_, None)
if cls_:
msg.data = cls_.parser(buf, offset)
else:
msg.data = buf[offset:]
return msg, None, None
def serialize(self, payload, prev):
hdr = bytearray(struct.pack(icmp._PACK_STR, self.type,
self.code, self.csum))
if self.data is not None:
if self.type in icmp._ICMP_TYPES:
hdr += self.data.serialize()
else:
hdr += self.data
if self.csum == 0:
self.csum = packet_utils.checksum(hdr)
struct.pack_into('!H', hdr, 2, self.csum)
return hdr
@icmp.register_icmp_type(ICMP_ECHO_REPLY, ICMP_ECHO_REQUEST)
class echo(object):
"""ICMP sub encoder/decoder class for Echo and Echo Reply messages.
This is used with ryu.lib.packet.icmp.icmp for
ICMP Echo and Echo Reply messages.
An instance has the following attributes at least.
    Most of them are the same as the on-wire counterparts but in host byte order.
    __init__ takes the corresponding args in this order.
============== ====================
Attribute Description
============== ====================
id Identifier
seq Sequence Number
data Internet Header + 64 bits of Original Data Datagram
============== ====================
"""
_PACK_STR = '!HH'
_MIN_LEN = struct.calcsize(_PACK_STR)
def __init__(self, id_, seq, data=None):
super(echo, self).__init__()
self.id = id_
self.seq = seq
self.data = data
@classmethod
def parser(cls, buf, offset):
(id_, seq) = struct.unpack_from(cls._PACK_STR, buf, offset)
msg = cls(id_, seq)
offset += cls._MIN_LEN
if len(buf) > offset:
msg.data = buf[offset:]
return msg
def serialize(self):
hdr = bytearray(struct.pack(echo._PACK_STR, self.id,
self.seq))
if self.data is not None:
hdr += self.data
return hdr
@icmp.register_icmp_type(ICMP_DEST_UNREACH)
class dest_unreach(object):
"""ICMP sub encoder/decoder class for Destination Unreachable Message.
This is used with ryu.lib.packet.icmp.icmp for
ICMP Destination Unreachable Message.
An instance has the following attributes at least.
    Most of them are the same as the on-wire counterparts but in host byte order.
    __init__ takes the corresponding args in this order.
[RFC1191] reserves bits for the "Next-Hop MTU" field.
[RFC4884] introduced 8-bit data length attribute.
============== ====================
Attribute Description
============== ====================
data_len data length
mtu Next-Hop MTU \
NOTE: This field is required when icmp code is 4 \
code 4 = fragmentation needed and DF set
data Internet Header + leading octets of original datagram
============== ====================
"""
_PACK_STR = '!xBH'
_MIN_LEN = struct.calcsize(_PACK_STR)
def __init__(self, data_len=0, mtu=0, data=None):
super(dest_unreach, self).__init__()
self.data_len = data_len
self.mtu = mtu
self.data = data
@classmethod
def parser(cls, buf, offset):
(data_len, mtu) = struct.unpack_from(cls._PACK_STR,
buf, offset)
msg = cls(data_len, mtu)
offset += cls._MIN_LEN
if len(buf) > offset:
msg.data = buf[offset:]
return msg
def serialize(self):
hdr = bytearray(struct.pack(dest_unreach._PACK_STR,
self.data_len, self.mtu))
if self.data is not None:
hdr += self.data
return hdr
@icmp.register_icmp_type(ICMP_TIME_EXCEEDED)
class TimeExceeded(object):
"""ICMP sub encoder/decoder class for Time Exceeded Message.
This is used with ryu.lib.packet.icmp.icmp for
ICMP Time Exceeded Message.
An instance has the following attributes at least.
    Most of them are the same as the on-wire counterparts but in host byte order.
    __init__ takes the corresponding args in this order.
[RFC4884] introduced 8-bit data length attribute.
============== ====================
Attribute Description
============== ====================
data_len data length
data Internet Header + leading octets of original datagram
============== ====================
"""
_PACK_STR = '!xBxx'
_MIN_LEN = struct.calcsize(_PACK_STR)
def __init__(self, data_len=0, data=None):
self.data_len = data_len
self.data = data
@classmethod
def parser(cls, buf, offset):
        (data_len, ) = struct.unpack_from(cls._PACK_STR, buf, offset)
msg = cls(data_len)
offset += cls._MIN_LEN
if len(buf) > offset:
msg.data = buf[offset:]
return msg
def serialize(self):
hdr = bytearray(struct.pack(TimeExceeded._PACK_STR, self.data_len))
if self.data is not None:
hdr += self.data
return hdr
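# Illustrative round trip (not part of the original module): build an echo
# request, let serialize() fill in the checksum, then parse it back.
#
#     >>> req = icmp(type_=ICMP_ECHO_REQUEST, code=0, csum=0,
#     ...            data=echo(id_=0x1234, seq=1, data=b'ping'))
#     >>> buf = req.serialize(payload=None, prev=None)  # csum computed here
#     >>> parsed, _, _ = icmp.parser(buf)
#     >>> parsed.type == ICMP_ECHO_REQUEST
#     True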
|
samrussell/ryu
|
ryu/lib/packet/icmp.py
|
Python
|
apache-2.0
| 8,213
|
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import logging
import os
import tempfile
import threading
from pyformance.reporters.reporter import Reporter
from desktop.lib.metrics import global_registry
LOG = logging.getLogger(__name__)
class FileReporter(Reporter):
def __init__(self, location, *args, **kwargs):
super(FileReporter, self).__init__(*args, **kwargs)
self.location = location
def report_now(self, registry=None, timestamp=None):
dirname = os.path.dirname(self.location)
try:
os.makedirs(dirname)
except OSError:
pass
# Write the metrics to a temporary file, then atomically
# rename the file to the real location.
f = tempfile.NamedTemporaryFile(
dir=dirname,
delete=False)
try:
json.dump(self.registry.dump_metrics(), f)
f.close()
os.rename(f.name, self.location)
except Exception:
os.remove(f.name)
raise
_reporter = None
def start_file_reporter():
from desktop.conf import METRICS
global _reporter
if _reporter is None:
location = METRICS.LOCATION.get()
interval = METRICS.COLLECTION_INTERVAL.get()
if location is not None and interval is not None:
_reporter = FileReporter(
location,
reporting_interval=interval / 1000.0,
registry=global_registry())
_reporter.start()
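# Illustrative configuration (not part of the original module; the path is
# hypothetical): with METRICS.LOCATION set to '/var/log/hue/metrics.json' and
# METRICS.COLLECTION_INTERVAL set to 30000 (milliseconds), start_file_reporter()
# dumps a JSON snapshot of the metrics registry to that file roughly every 30
# seconds, using the write-to-temp-file-then-rename step above so readers never
# see a partially written file.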
|
sanjeevtripurari/hue
|
desktop/core/src/desktop/lib/metrics/file_reporter.py
|
Python
|
apache-2.0
| 2,100
|
#!/usr/bin/env python
# LatitudePlot.py
# Created 30 July 2013
# Created by snowdonjames@googlemail.com
import os, time, math
from datetime import datetime
from time import mktime
import xml.etree.ElementTree as ET
from PIL import Image, ImageDraw
def GetKmlFiles():
"""Locates and reads local .kml files, returns a list of kml dictionary data"""
KmlData = []
for dirname, dirnames, filenames in os.walk('.'):
for filename in filenames:
sp = filename.split('.')
if sp[len(sp)-1]== "kml": #locate kml files
print "Reading kml file " + filename
KmlData.append(ReadKmlFile(dirname, filename))
print KmlData
return KmlData
def ReadKmlFile(dirname, filename):
"""Parses a single kml file, returns a dict of format {time: [lat, long]}"""
KmlData = {}
kmltime = datetime.time
latlist = []
longlist = []
timelist = []
cnt =0
f = open(filename)
line = f.readline()
while line:
if 'when' in line:
timelist.append(time.strptime(ET.fromstring(line)[0].text,"%Y-%m-%dT%H:%M:%SZ"))
if 'coordinates' in line:
latlist.append(float(ET.fromstring(line)[0].text.split(',')[0]))
longlist.append(float(ET.fromstring(line)[0].text.split(',')[1]))
cnt+=1
if cnt % 5000 ==0:
print "Parsing " + filename + ": points found: " + str(cnt)
line = f.readline()
f.close()
return [latlist, longlist, timelist]
def DrawMapData(KmlData,InputImage, OutputImage, itop, ibottom, ileft, iright,xnudge,ynudge):
"""Draws kml line data on top of the specified image"""
im = Image.open(InputImage)
draw = ImageDraw.Draw(im)
cnt =0
for KmlD in KmlData:
for d in range(len(KmlD[0])-1):
#Get points x and y coordinates and draw line
x1=(LongToX(KmlD[0][d],ileft,iright,im.size[0]))+xnudge
y1=(LatToY(KmlD[1][d],itop,ibottom,im.size[1]))+ynudge
x2=(LongToX(KmlD[0][d+1],ileft,iright,im.size[0]))+xnudge
y2=(LatToY(KmlD[1][d+1],itop,ibottom,im.size[1]))+ynudge
if(EuclidDistance(x1,y1,x2,y2) < 10000):
#setting this around 80 works okay. Attempts to remove some noise
draw.line((x1,y1, x2,y2), fill=80)
cnt+=1
if cnt % 10000 ==0:
print "Drawing point number " + str(cnt)
im.save(OutputImage)
def LongToX(InputLong, LeftLong, RightLong, ImWidth):
"""Converts a longitude value in to an x coordinate"""
    return ScalingFunc(InputLong+360, LeftLong+360, RightLong+360, ImWidth)
def LatToY(InputLat, TopLat, BottomLat, ImHeight):
"""Converts a latitude value in to a y coordinate"""
    return ScalingFunc(InputLat+360, TopLat+360, BottomLat+360, ImHeight)
def EuclidDistance(x1, y1, x2, y2):
"""Calculates the euclidean distance between two points"""
return math.sqrt((x1 - x2)**2+(y1 - y2)**2)
def ScalingFunc(inputv, minv, maxv, size):
"""Helps convert latitudes and longitudes to x and y"""
    if (float(maxv) - float(minv)) == 0:
        return 0
    return (float(inputv) - float(minv)) / (float(maxv) - float(minv)) * float(size)
def ParseImageFile():
    """Reads ImageData.csv containing:
    <File name of image to draw data on>,
    <image top latitude>,
    <image bottom latitude>,
    <image left longitude>,
    <image right longitude>,
    (optional) <x value nudge>,
    (optional) <y value nudge>"""
with open('ImageData.csv', 'r') as f:
read_data = f.read().split(',')
while 5 <= len(read_data) < 7:
read_data.append(0)
ReturnData = [0]*7
ReturnData[0]=read_data[0]
for i in range(1,7):
ReturnData[i] = float(read_data[i])
return ReturnData
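# Illustrative ImageData.csv contents (hypothetical values, not part of the
# original script): image file name, top/bottom latitude, left/right longitude,
# and the optional x/y nudges:
#
#     uk_satellite.png,61.0,49.5,-11.0,2.5,0,0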
if __name__ == "__main__":
ImageData = ParseImageFile()
DrawMapData(GetKmlFiles(),ImageData[0], "LatitudeData.png", ImageData[1], ImageData[2], ImageData[3], ImageData[4],ImageData[5],ImageData[6])
|
TheR3ason/map-your-location-history
|
LatitudePlot.py
|
Python
|
apache-2.0
| 4,022
|
from mongoengine import Document, StringField, DateTimeField, ListField, IntField, BooleanField, \
    ObjectIdField, FloatField
class Covelement(Document):
instructionsCov = IntField()
instructionsMis = IntField()
branchesCov = IntField()
branchesMis = IntField()
lineCov = IntField()
lineMis = IntField()
complexityCov = IntField()
complexityMis = IntField()
methodCov = IntField()
methodMis = IntField()
class Covproject(Covelement):
classCov = IntField()
classMis = IntField()
class Covpackage(Covelement):
classCov = IntField()
classMis = IntField()
name = StringField(required=True)
class CovClass(Covelement):
classCov = IntField()
classMis = IntField()
name = StringField(required=True)
class CovMethod(Covelement):
name = StringField(required=True)
desc = StringField(required=True)
line = IntField()
class CovSourcefile(Covelement):
classCov = IntField()
classMis = IntField()
name = StringField(required=True)
class CovLine():
number = IntField()
branchesCov = IntField()
branchesMis = IntField()
instructionsCov = IntField()
instructionsMis = IntField()
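# Note: unlike the classes above, CovLine does not inherit from Document (or
# EmbeddedDocument), so mongoengine will not treat these IntField attributes
# as mapped document fields.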
|
ftrautsch/testEvolution
|
resultprocessor/coveragemodels.py
|
Python
|
apache-2.0
| 1,211
|
#!/usr/bin/env python
"""GRR restful API rendering plugins."""
# pylint: disable=unused-import
from grr.gui.api_plugins import aff4
from grr.gui.api_plugins import artifact
from grr.gui.api_plugins import config
from grr.gui.api_plugins import docs
from grr.gui.api_plugins import hunt
from grr.gui.api_plugins import reflection
from grr.gui.api_plugins import stats
|
wandec/grr
|
gui/api_plugins/__init__.py
|
Python
|
apache-2.0
| 368
|
from sklearn import datasets
from sklearn.linear_model import LinearRegression
import matplotlib.pyplot as plt
loaded_data = datasets.load_boston()
data_X = loaded_data.data
data_y = loaded_data.target
model = LinearRegression()
model.fit(data_X, data_y)
print(model.predict(data_X[:4,:]))
print(data_y[:4])
print(model.coef_)
print(model.intercept_)
print(model.score(data_X, data_y))
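# For clarity: predict() returns the fitted line's estimates for the first four
# samples (to compare with the true targets data_y[:4]), coef_ and intercept_
# are the learned linear parameters, and score() is the R^2 of the fit on the
# training data.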
#X, y = datasets.make_regression(n_samples=100, n_features=1, n_targets=1, noise=20)
#plt.scatter(X,y)
#plt.show()
|
shunliz/test
|
python/scikit/linear.py
|
Python
|
apache-2.0
| 505
|
"""
Application level configuration and logging
"""
import os
import global_settings
import sys
from logging.config import dictConfig
from importlib import import_module
import logging
log = logging.getLogger(__name__)
class Settings(object):
"""
Configuration class for percept
"""
settings_list = None
def _initialize(self, settings_module):
"""
Initialize the settings from a given settings_module
settings_module - path to settings module
"""
#Get the global settings values and assign them as self attributes
self.settings_list = []
for setting in dir(global_settings):
#Only get upper case settings
if setting == setting.upper():
setattr(self, setting, getattr(global_settings, setting))
self.settings_list.append(setting)
#If a settings module was passed in, import it, and grab settings from it
        #Overwrite global settings with these
if settings_module is not None:
self.SETTINGS_MODULE = settings_module
#Try to import the settings module
try:
mod = import_module(self.SETTINGS_MODULE)
except ImportError:
error_message = "Could not import settings at {0}".format(self.SETTINGS_MODULE)
log.exception(error_message)
raise ImportError(error_message)
            #Grab uppercased settings and set them as self attrs
for setting in dir(mod):
if setting == setting.upper():
if setting == "INSTALLED_APPS":
self.INSTALLED_APPS += getattr(mod, setting)
else:
setattr(self, setting, getattr(mod, setting))
self.settings_list.append(setting)
#If PATH_SETTINGS is in the settings file, extend the system path to include it
if hasattr(self, "PATH_SETTINGS"):
for path in self.PATH_SETTINGS:
sys.path.extend(getattr(self,path))
self.settings_list = list(set(self.settings_list))
def _setup(self):
"""
Perform initial setup of the settings class, such as getting the settings module and setting the settings
"""
settings_module = None
#Get the settings module from the environment variables
try:
settings_module = os.environ[global_settings.MODULE_VARIABLE]
except KeyError:
error_message = "Settings not properly configured. Cannot find the environment variable {0}".format(global_settings.MODULE_VARIABLE)
log.exception(error_message)
self._initialize(settings_module)
self._configure_logging()
    def __getattr__(self, name):
        """
        Called when a setting (an attribute on this class) is not found directly;
        lazily runs setup and returns the setting if it exists.
        """
#If settings have not been setup, do so
if not self.configured:
self._setup()
#Return setting if it exists as a self attribute, None if it doesn't
if name in self.settings_list:
return getattr(self, name)
else:
return None
def _configure_logging(self):
"""
Setting up logging from logging config in settings
"""
if not self.LOGGING_CONFIG:
#Fallback to default logging in global settings if needed
dictConfig(self.DEFAULT_LOGGING)
else:
dictConfig(self.LOGGING_CONFIG)
@property
def configured(self):
return self.settings_list is not None
#Import this if trying to get settings elsewhere
settings = Settings()
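# Illustrative use (not part of the original module; the settings module path
# is hypothetical): point the environment variable named by
# global_settings.MODULE_VARIABLE at a settings module, then access attributes;
# the first lookup of a not-yet-set attribute triggers _setup() via __getattr__.
#
#     >>> os.environ[global_settings.MODULE_VARIABLE] = "myproject.settings"
#     >>> from percept.conf.base import settings
#     >>> settings.INSTALLED_APPS  # lazily initialized on first access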
|
VikParuchuri/percept
|
percept/conf/base.py
|
Python
|
apache-2.0
| 3,688
|
"""Test icatdump and icatingest.
"""
from subprocess import CalledProcessError
import pytest
import icat
import icat.config
from icat.query import Query
from conftest import DummyDatafile, gettestdata, getConfig, callscript
# Test input
ds_params = str(gettestdata("ingest-ds-params.xml"))
datafiles = str(gettestdata("ingest-datafiles.xml"))
@pytest.fixture(scope="module")
def client(setupicat):
client, conf = getConfig(confSection="acord", ids="mandatory")
client.login(conf.auth, conf.credentials)
return client
@pytest.fixture(scope="module")
def cmdargs(setupicat):
_, conf = getConfig(confSection="acord", ids="mandatory")
return conf.cmdargs + ["-f", "XML"]
@pytest.fixture(scope="function")
def dataset(client):
"""A dataset to be used in the test.
    The dataset is not created by the fixture; it is assumed that the
    test does it. The dataset will eventually be deleted after the
    test.
"""
inv = client.assertedSearch("Investigation [name='10100601-ST']")[0]
dstype = client.assertedSearch("DatasetType [name='raw']")[0]
dataset = client.new("dataset",
name="e208343", complete=False,
investigation=inv, type=dstype)
yield dataset
try:
ds = client.searchMatching(dataset)
dataset.id = ds.id
except icat.SearchResultError:
# Dataset not found, maybe the test failed, nothing to
# clean up then.
pass
else:
# If any datafile has been uploaded (i.e. the location is
# not NULL), need to delete it from IDS first. Any other
# datafile or dataset parameter will be deleted
# automatically with the dataset by cascading in the ICAT
# server.
query = Query(client, "Datafile",
conditions={"dataset.id": "= %d" % dataset.id,
"location": "IS NOT NULL"})
client.deleteData(client.search(query))
client.delete(dataset)
# Test datafiles to be created by test_ingest_datafiles:
testdatafiles = [
{
'dfname': "e208343.dat",
'size': 394,
'mtime': 1286600400,
},
{
'dfname': "e208343.nxs",
'size': 52857,
'mtime': 1286600400,
},
]
def verify_dataset_params(client, dataset, params):
query = Query(client, "DatasetParameter",
conditions={"dataset.id": "= %d" % dataset.id},
includes={"type"})
ps = client.search(query)
assert len(ps) == len(params)
values = { (p.type.name, p.numericValue, p.type.units) for p in ps }
assert values == params
def test_ingest_dataset_params(client, dataset, cmdargs):
"""Ingest a file setting some dataset parameters.
"""
dataset.create()
args = cmdargs + ["-i", ds_params]
callscript("icatingest.py", args)
verify_dataset_params(client, dataset, {
("Magnetic field", 5.3, "T"),
("Reactor power", 10.0, "MW"),
("Sample temperature", 293.15, "K")
})
def test_ingest_duplicate_throw(client, dataset, cmdargs):
"""Ingest with a collision of a duplicate object.
Same test as above, but now place a duplicate object in the way.
"""
dataset.create()
ptype = client.assertedSearch("ParameterType [name='Reactor power']")[0]
p = client.new("datasetParameter", numericValue=5.0,
dataset=dataset, type=ptype)
p.create()
args = cmdargs + ["-i", ds_params]
# FIXME: should inspect stderr and verify ICATObjectExistsError.
with pytest.raises(CalledProcessError) as err:
callscript("icatingest.py", args)
    # Verify that the params have been set. The exception should
    # have been raised while trying to ingest the second parameter.
# The first one (Magnetic field) should have been created and
# Reactor power should still have the value set above.
verify_dataset_params(client, dataset, {
("Magnetic field", 5.3, "T"),
("Reactor power", 5.0, "MW")
})
def test_ingest_duplicate_ignore(client, dataset, cmdargs):
"""Ingest with a collision of a duplicate object.
Same test as above, but now ignore the duplicate.
"""
dataset.create()
ptype = client.assertedSearch("ParameterType [name='Reactor power']")[0]
p = client.new("datasetParameter", numericValue=5.0,
dataset=dataset, type=ptype)
p.create()
args = cmdargs + ["-i", ds_params, "--duplicate", "IGNORE"]
callscript("icatingest.py", args)
verify_dataset_params(client, dataset, {
("Magnetic field", 5.3, "T"),
("Reactor power", 5.0, "MW"),
("Sample temperature", 293.15, "K")
})
def test_ingest_duplicate_check_err(client, dataset, cmdargs):
"""Ingest with a collision of a duplicate object.
Same test as above, but use CHECK which fails due to mismatch.
"""
dataset.create()
ptype = client.assertedSearch("ParameterType [name='Reactor power']")[0]
p = client.new("datasetParameter", numericValue=5.0,
dataset=dataset, type=ptype)
p.create()
args = cmdargs + ["-i", ds_params, "--duplicate", "CHECK"]
# FIXME: should inspect stderr and verify ICATObjectExistsError.
with pytest.raises(CalledProcessError) as err:
callscript("icatingest.py", args)
verify_dataset_params(client, dataset, {
("Magnetic field", 5.3, "T"),
("Reactor power", 5.0, "MW")
})
def test_ingest_duplicate_check_ok(client, dataset, cmdargs):
"""Ingest with a collision of a duplicate object.
Same test as above, but now it matches, so CHECK should return ok.
"""
dataset.create()
ptype = client.assertedSearch("ParameterType [name='Reactor power']")[0]
p = client.new("datasetParameter", numericValue=10.0,
dataset=dataset, type=ptype)
p.create()
args = cmdargs + ["-i", ds_params, "--duplicate", "CHECK"]
callscript("icatingest.py", args)
verify_dataset_params(client, dataset, {
("Magnetic field", 5.3, "T"),
("Reactor power", 10.0, "MW"),
("Sample temperature", 293.15, "K")
})
def test_ingest_duplicate_overwrite(client, dataset, cmdargs):
"""Ingest with a collision of a duplicate object.
Same test as above, but now overwrite the old value.
"""
dataset.create()
ptype = client.assertedSearch("ParameterType [name='Reactor power']")[0]
p = client.new("datasetParameter", numericValue=5.0,
dataset=dataset, type=ptype)
p.create()
args = cmdargs + ["-i", ds_params, "--duplicate", "OVERWRITE"]
callscript("icatingest.py", args)
verify_dataset_params(client, dataset, {
("Magnetic field", 5.3, "T"),
("Reactor power", 10.0, "MW"),
("Sample temperature", 293.15, "K")
})
# Minimal example, a Datafile featuring a string.
ingest_data_string = """<?xml version="1.0" encoding="utf-8"?>
<icatdata>
<data>
<datasetRef id="Dataset_001"
name="e208343"
investigation.name="10100601-ST"
investigation.visitId="1.1-N"/>
<datafile>
<name>dup_test_str.dat</name>
<dataset ref="Dataset_001"/>
</datafile>
</data>
</icatdata>
"""
# A Datafile featuring an int.
ingest_data_int = """<?xml version="1.0" encoding="utf-8"?>
<icatdata>
<data>
<datasetRef id="Dataset_001"
name="e208343"
investigation.name="10100601-ST"
investigation.visitId="1.1-N"/>
<datafile>
<fileSize>42</fileSize>
<name>dup_test_int.dat</name>
<dataset ref="Dataset_001"/>
</datafile>
</data>
</icatdata>
"""
# A Dataset featuring a boolean.
ingest_data_boolean = """<?xml version="1.0" encoding="utf-8"?>
<icatdata>
<data>
<dataset id="Dataset_001">
<complete>false</complete>
<name>e208343</name>
<investigation name="10100601-ST" visitId="1.1-N"/>
<type name="raw"/>
</dataset>
</data>
</icatdata>
"""
# A DatasetParameter featuring a float.
ingest_data_float = """<?xml version="1.0" encoding="utf-8"?>
<icatdata>
<data>
<datasetRef id="Dataset_001"
name="e208343"
investigation.name="10100601-ST"
investigation.visitId="1.1-N"/>
<datasetParameter>
<numericValue>5.3</numericValue>
<dataset ref="Dataset_001"/>
<type name="Magnetic field" units="T"/>
</datasetParameter>
</data>
</icatdata>
"""
# A Datafile featuring a date.
ingest_data_date = """<?xml version="1.0" encoding="utf-8"?>
<icatdata>
<data>
<datasetRef id="Dataset_001"
name="e208343"
investigation.name="10100601-ST"
investigation.visitId="1.1-N"/>
<datafile>
<datafileCreateTime>2008-06-18T09:31:11+02:00</datafileCreateTime>
<name>dup_test_date.dat</name>
<dataset ref="Dataset_001"/>
</datafile>
</data>
</icatdata>
"""
@pytest.mark.parametrize("inputdata", [
ingest_data_string,
ingest_data_int,
ingest_data_boolean,
ingest_data_float,
ingest_data_date,
])
def test_ingest_duplicate_check_types(tmpdirsec, dataset, cmdargs, inputdata):
"""Ingest with a collision of a duplicate object.
Similar to test_ingest_duplicate_check_ok(), but trying several
input datasets that test different data types. Issue #9.
"""
# Most input data create a datafile or a dataset parameter related
# to dataset and thus assume the dataset to already exist. Only
# ingest_data_boolean creates the dataset itself.
if inputdata is not ingest_data_boolean:
dataset.create()
# We simply ingest twice the same data, using duplicate=CHECK the
# second time. This obviously leads to matching duplicates.
inpfile = tmpdirsec / "ingest.xml"
with inpfile.open("wt") as f:
f.write(inputdata)
args = cmdargs + ["-i", str(inpfile)]
callscript("icatingest.py", args)
callscript("icatingest.py", args + ["--duplicate", "CHECK"])
def test_ingest_datafiles(tmpdirsec, client, dataset, cmdargs):
"""Ingest a dataset with some datafiles.
"""
dummyfiles = [ f['dfname'] for f in testdatafiles ]
args = cmdargs + ["-i", datafiles]
callscript("icatingest.py", args)
# Verify that the datafiles have been uploaded.
dataset = client.searchMatching(dataset)
for fname in dummyfiles:
query = Query(client, "Datafile", conditions={
"name": "= '%s'" % fname,
"dataset.id": "= %d" % dataset.id,
})
df = client.assertedSearch(query)[0]
assert df.location is None
def test_ingest_datafiles_upload(tmpdirsec, client, dataset, cmdargs):
"""Upload datafiles to IDS from icatingest.
Same as last test, but set the --upload-datafiles flag so that
icatingest will not create the datafiles as objects in the ICAT,
but upload the files to IDS instead.
"""
dummyfiles = [ DummyDatafile(tmpdirsec, f['dfname'], f['size'], f['mtime'])
for f in testdatafiles ]
args = cmdargs + ["-i", datafiles, "--upload-datafiles",
"--datafile-dir", str(tmpdirsec)]
callscript("icatingest.py", args)
# Verify that the datafiles have been uploaded.
dataset = client.searchMatching(dataset)
for f in dummyfiles:
query = Query(client, "Datafile", conditions={
"name": "= '%s'" % f.name,
"dataset.id": "= %d" % dataset.id,
})
df = client.assertedSearch(query)[0]
assert df.location is not None
assert df.fileSize == f.size
assert df.checksum == f.crc32
if f.mtime:
assert df.datafileModTime == f.mtime
|
icatproject/python-icat
|
tests/test_06_ingest.py
|
Python
|
apache-2.0
| 11,631
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for lookup ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import tempfile
import numpy as np
import six
from tensorflow.python import tf2
from tensorflow.python.client import session
from tensorflow.python.data.experimental.ops import counter
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.data.ops import readers as reader_ops
from tensorflow.python.eager import backprop
from tensorflow.python.eager import context
from tensorflow.python.eager import def_function
from tensorflow.python.eager import function
from tensorflow.python.eager import wrap_function
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors_impl
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.framework import tensor_spec
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import lookup_ops
from tensorflow.python.ops import map_fn
from tensorflow.python.ops import string_ops
from tensorflow.python.ops import variables
from tensorflow.python.ops.ragged import ragged_tensor
from tensorflow.python.platform import test
from tensorflow.python.saved_model import load as saved_model_load
from tensorflow.python.saved_model import save as saved_model_save
from tensorflow.python.training import saver
from tensorflow.python.training import server_lib
from tensorflow.python.training.tracking import graph_view
from tensorflow.python.training.tracking import tracking
from tensorflow.python.training.tracking import util as trackable
from tensorflow.python.util import compat
class BaseLookupTableTest(test.TestCase):
def getHashTable(self):
if tf2.enabled():
return lookup_ops.StaticHashTable
else:
return lookup_ops.StaticHashTableV1
def getVocabularyTable(self):
if tf2.enabled():
return lookup_ops.StaticVocabularyTable
else:
return lookup_ops.StaticVocabularyTableV1
def initialize_table(self, table):
if not tf2.enabled():
self.evaluate(table.initializer)
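# A minimal usage sketch (not a test case) of the pattern the helpers above
# wrap, assuming TF2 eager execution where a StaticHashTable is initialized
# on construction and no explicit initializer run is needed.  The key/value
# data is purely illustrative.
def _static_hash_table_usage_sketch():
  keys = constant_op.constant(["a", "b"])
  values = constant_op.constant([0, 1], dtypes.int64)
  table = lookup_ops.StaticHashTable(
      lookup_ops.KeyValueTensorInitializer(keys, values), default_value=-1)
  # Known keys map to their values, unknown keys fall back to default_value.
  return table.lookup(constant_op.constant(["a", "c"]))  # -> [0, -1]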
class StaticHashTableTest(BaseLookupTableTest):
def testStaticHashTable(self):
default_val = -1
keys = constant_op.constant(["brain", "salad", "surgery"])
values = constant_op.constant([0, 1, 2], dtypes.int64)
table = self.getHashTable()(
lookup_ops.KeyValueTensorInitializer(keys, values), default_val)
self.initialize_table(table)
self.assertAllEqual(3, self.evaluate(table.size()))
input_string = constant_op.constant(["brain", "salad", "tank"])
output = table.lookup(input_string)
self.assertAllEqual([3], output.get_shape())
result = self.evaluate(output)
self.assertAllEqual([0, 1, -1], result)
exported_keys_tensor, exported_values_tensor = table.export()
self.assertItemsEqual([b"brain", b"salad", b"surgery"],
self.evaluate(exported_keys_tensor))
self.assertItemsEqual([0, 1, 2], self.evaluate(exported_values_tensor))
def testStaticHashTableFindHighRank(self):
default_val = -1
keys = constant_op.constant(["brain", "salad", "surgery"])
values = constant_op.constant([0, 1, 2], dtypes.int64)
table = self.getHashTable()(
lookup_ops.KeyValueTensorInitializer(keys, values), default_val)
self.initialize_table(table)
self.assertAllEqual(3, self.evaluate(table.size()))
input_string = constant_op.constant([["brain", "salad"],
["tank", "tarkus"]])
output = table.lookup(input_string)
result = self.evaluate(output)
self.assertAllEqual([[0, 1], [-1, -1]], result)
def testStaticHashTableInitWithPythonArrays(self):
default_val = -1
keys = ["brain", "salad", "surgery"]
values = [0, 1, 2]
table = self.getHashTable()(
lookup_ops.KeyValueTensorInitializer(
keys, values, value_dtype=dtypes.int64), default_val)
self.initialize_table(table)
self.assertAllEqual(3, self.evaluate(table.size()))
input_string = constant_op.constant(["brain", "salad", "tank"])
output = table.lookup(input_string)
result = self.evaluate(output)
self.assertAllEqual([0, 1, -1], result)
def testStaticHashTableInitWithNumPyArrays(self):
default_val = -1
keys = np.array(["brain", "salad", "surgery"], dtype=np.str_)
values = np.array([0, 1, 2], dtype=np.int64)
table = self.getHashTable()(
lookup_ops.KeyValueTensorInitializer(keys, values), default_val)
self.initialize_table(table)
self.assertAllEqual(3, self.evaluate(table.size()))
input_string = constant_op.constant(["brain", "salad", "tank"])
output = table.lookup(input_string)
result = self.evaluate(output)
self.assertAllEqual([0, 1, -1], result)
def testMultipleStaticHashTables(self):
default_val = -1
keys = constant_op.constant(["brain", "salad", "surgery"])
values = constant_op.constant([0, 1, 2], dtypes.int64)
table1 = self.getHashTable()(
lookup_ops.KeyValueTensorInitializer(keys, values), default_val)
table2 = self.getHashTable()(
lookup_ops.KeyValueTensorInitializer(keys, values), default_val)
table3 = self.getHashTable()(
lookup_ops.KeyValueTensorInitializer(keys, values), default_val)
self.initialize_table(table1)
self.initialize_table(table2)
self.initialize_table(table3)
self.assertAllEqual(3, self.evaluate(table1.size()))
self.assertAllEqual(3, self.evaluate(table2.size()))
self.assertAllEqual(3, self.evaluate(table3.size()))
input_string = constant_op.constant(["brain", "salad", "tank"])
output1 = table1.lookup(input_string)
output2 = table2.lookup(input_string)
output3 = table3.lookup(input_string)
out1, out2, out3 = self.evaluate([output1, output2, output3])
self.assertAllEqual([0, 1, -1], out1)
self.assertAllEqual([0, 1, -1], out2)
self.assertAllEqual([0, 1, -1], out3)
def testStaticHashTableWithTensorDefault(self):
default_val = constant_op.constant(-1, dtypes.int64)
keys = constant_op.constant(["brain", "salad", "surgery"])
values = constant_op.constant([0, 1, 2], dtypes.int64)
table = self.getHashTable()(
lookup_ops.KeyValueTensorInitializer(keys, values), default_val)
self.initialize_table(table)
input_string = constant_op.constant(["brain", "salad", "tank"])
output = table.lookup(input_string)
result = self.evaluate(output)
self.assertAllEqual([0, 1, -1], result)
def testStaticHashTableWithSparseTensorInput(self):
default_val = constant_op.constant(-1, dtypes.int64)
keys = constant_op.constant(["brain", "salad", "surgery"])
values = constant_op.constant([0, 1, 2], dtypes.int64)
table = self.getHashTable()(
lookup_ops.KeyValueTensorInitializer(keys, values), default_val)
self.initialize_table(table)
sp_indices = [[0, 0], [0, 1], [1, 0]]
sp_shape = [2, 2]
input_tensor = sparse_tensor.SparseTensor(
constant_op.constant(sp_indices, dtypes.int64),
constant_op.constant(["brain", "salad", "tank"]),
constant_op.constant(sp_shape, dtypes.int64))
output = table.lookup(input_tensor)
out_indices, out_values, out_shape = self.evaluate(output)
self.assertAllEqual([0, 1, -1], out_values)
self.assertAllEqual(sp_indices, out_indices)
self.assertAllEqual(sp_shape, out_shape)
def testStaticHashTableWithRaggedTensorInput(self):
default_val = constant_op.constant(-1, dtypes.int64)
keys = constant_op.constant(["brain", "salad", "surgery"])
values = constant_op.constant([0, 1, 2], dtypes.int64)
table = self.getHashTable()(
lookup_ops.KeyValueTensorInitializer(keys, values), default_val)
self.initialize_table(table)
row_splits = [0, 2, 3]
input_tensor = ragged_tensor.RaggedTensor.from_row_splits(
constant_op.constant(["brain", "salad", "tank"]),
constant_op.constant(row_splits, dtypes.int64))
output = table.lookup(input_tensor)
out = self.evaluate(output)
self.assertAllEqual([0, 1, -1], out.values)
self.assertAllEqual(row_splits, out.row_splits)
def testSignatureMismatch(self):
default_val = -1
keys = constant_op.constant(["brain", "salad", "surgery"])
values = constant_op.constant([0, 1, 2], dtypes.int64)
table = self.getHashTable()(
lookup_ops.KeyValueTensorInitializer(keys, values), default_val)
self.initialize_table(table)
# Ref types do not produce a lookup signature mismatch.
input_string_ref = variables.Variable("brain")
self.evaluate(input_string_ref.initializer)
self.assertEqual(0, self.evaluate(table.lookup(input_string_ref)))
input_string = constant_op.constant([1, 2, 3], dtypes.int64)
with self.assertRaises(TypeError):
table.lookup(input_string)
with self.assertRaises(TypeError):
self.getHashTable()(
lookup_ops.KeyValueTensorInitializer(keys, values), "UNK")
def testDTypes(self):
default_val = -1
with self.assertRaises(TypeError):
self.getHashTable()(
lookup_ops.KeyValueTensorInitializer(["a"], [1], [dtypes.string],
dtypes.int64), default_val)
@test_util.run_v1_only("(Cached) Sessions not available in TF2.0")
def testNotInitialized(self):
with self.cached_session():
default_val = -1
table = self.getHashTable()(
lookup_ops.KeyValueTensorInitializer(["a"], [1],
value_dtype=dtypes.int64),
default_val)
input_string = constant_op.constant(["brain", "salad", "surgery"])
output = table.lookup(input_string)
with self.assertRaisesOpError("Table not initialized"):
self.evaluate(output)
@test_util.run_v1_only("(Cached) Sessions not available in TF2.0")
def testInitializeTwice(self):
with self.cached_session():
default_val = -1
keys = constant_op.constant(["brain", "salad", "surgery"])
values = constant_op.constant([0, 1, 2], dtypes.int64)
table = self.getHashTable()(
lookup_ops.KeyValueTensorInitializer(keys, values), default_val)
self.initialize_table(table)
# Make sure that initializing twice doesn't throw any errors.
self.initialize_table(table)
def testInitializationWithInvalidDimensions(self):
default_val = -1
keys = constant_op.constant(["brain", "salad", "surgery"])
values = constant_op.constant([0, 1, 2, 3, 4], dtypes.int64)
raised_error = ValueError
if context.executing_eagerly():
raised_error = errors_impl.InvalidArgumentError
with self.assertRaises(raised_error):
self.getHashTable()(
lookup_ops.KeyValueTensorInitializer(keys, values), default_val)
@test_util.run_v1_only("Sessions not available in TF2.0")
def testMultipleSessions(self):
# Start a server
server = server_lib.Server({"local0": ["localhost:0"]},
protocol="grpc",
start=True)
# Create two sessions sharing the same state
session1 = session.Session(server.target)
session2 = session.Session(server.target)
default_val = -1
keys = constant_op.constant(["brain", "salad", "surgery"])
values = constant_op.constant([0, 1, 2], dtypes.int64)
table = self.getHashTable()(
lookup_ops.KeyValueTensorInitializer(keys, values),
default_val,
name="t1")
# Init the table in the first session.
with session1:
self.initialize_table(table)
self.assertAllEqual(3, self.evaluate(table.size()))
# Init the table in the second session and verify that we do not get a
# "Table already initialized" error.
with session2:
self.evaluate(table.initializer)
self.assertAllEqual(3, self.evaluate(table.size()))
@test_util.run_v2_only
def testImportedHashTable(self):
g = ops.Graph()
with g.as_default():
t = lookup_ops.StaticHashTable(
lookup_ops.KeyValueTensorInitializer(["a"], [1]),
2)
init_op = t._init_op
op = t.lookup(ops.convert_to_tensor(["a"]))
meta_graph = saver.export_meta_graph()
def f():
saver.import_meta_graph(meta_graph)
return ops.get_default_graph().get_tensor_by_name(op.name)
wrapped = wrap_function.wrap_function(f, [])
pruned_init_fn = wrapped.prune(
(), [wrapped.graph.get_operation_by_name(init_op.name)])
self.evaluate(pruned_init_fn())
self.assertAllEqual([1], wrapped())
def testStaticHashTableInt32String(self):
default_val = "n/a"
keys = constant_op.constant([0, 1, 2], dtypes.int32)
values = constant_op.constant(["brain", "salad", "surgery"])
table = self.getHashTable()(
lookup_ops.KeyValueTensorInitializer(keys, values), default_val)
self.initialize_table(table)
input_tensor = constant_op.constant([0, 1, -1])
output = table.lookup(input_tensor)
result = self.evaluate(output)
self.assertAllEqual([b"brain", b"salad", b"n/a"], result)
def testTableUseInFunction(self):
if not context.executing_eagerly():
self.skipTest("Only Eager mode test.")
keys = constant_op.constant([0, 1, 2], dtypes.int32)
values = constant_op.constant(["brain", "salad", "surgery"])
table = self.getHashTable()(lookup_ops.KeyValueTensorInitializer(
keys, values), "n/a")
@function.defun()
def lookup_table_func(k):
return table.lookup(k)
result = lookup_table_func(constant_op.constant([0, 1, -1]))
self.assertAllEqual([b"brain", b"salad", b"n/a"], result)
result = lookup_table_func(constant_op.constant([2, -1, 1]))
self.assertAllEqual([b"surgery", b"n/a", b"salad"], result)
def testTableCreatedInFunction(self):
if not context.executing_eagerly():
self.skipTest("Only Eager mode test.")
keys = constant_op.constant([0, 1, 2], dtypes.int32)
values = constant_op.constant(["brain", "salad", "surgery"])
@function.defun()
def lookup_table_func(k):
table = self.getHashTable()(lookup_ops.KeyValueTensorInitializer(
keys, values), "n/a")
return table.lookup(k)
result = lookup_table_func(constant_op.constant([0, 1, -1]))
self.assertAllEqual([b"brain", b"salad", b"n/a"], result)
result = lookup_table_func(constant_op.constant([2, -1, 1]))
self.assertAllEqual([b"surgery", b"n/a", b"salad"], result)
def testTwoTablesInControlFlow(self):
keys = constant_op.constant([1, 2, 3], dtypes.int32)
values = constant_op.constant([5, 10, 15], dtypes.int32)
def table_func1(x):
table = self.getHashTable()(lookup_ops.KeyValueTensorInitializer(
keys, values), -1)
return table.lookup(x)
elems = np.array([2, 4, 1], dtype=np.int32)
result1 = map_fn.map_fn(table_func1, elems, dtype=dtypes.int32)
def table_func2(x):
table = self.getHashTable()(lookup_ops.KeyValueTensorInitializer(
keys, values), -1)
return table.lookup(x)
elems = np.array([2, 4, 1], dtype=np.int32)
result2 = map_fn.map_fn(table_func2, elems, dtype=dtypes.int32)
self.evaluate(lookup_ops.tables_initializer())
self.assertAllEqual([10, -1, 5], self.evaluate(result1))
self.assertAllEqual([10, -1, 5], self.evaluate(result2))
@test_util.enable_control_flow_v2
def testLookupTableInWhileV2(self):
lookup = self.getHashTable()(lookup_ops.KeyValueTensorInitializer(
constant_op.constant([2, 5], dtype=dtypes.int64),
constant_op.constant([-10.0, 1], dtype=dtypes.float32)), -1)
beta = variables.Variable(1.0, trainable=True)
@def_function.function
def get_loss(unused_beta):
return map_fn.map_fn(
lookup.lookup,
constant_op.constant([2, 3], dtype=dtypes.int64),
dtype=dtypes.float32)
with backprop.GradientTape() as tape:
loss = get_loss(beta)
self.assertIsNone(tape.gradient(loss, beta))
@test_util.enable_control_flow_v2
def testLookupTableInCondV2(self):
lookup = self.getHashTable()(lookup_ops.KeyValueTensorInitializer(
constant_op.constant([2, 5], dtype=dtypes.int64),
constant_op.constant([-10.0, 1], dtype=dtypes.float32)), -1)
beta = variables.Variable(1.0, trainable=True)
@def_function.function
def get_loss(beta):
def true_fn():
return lookup.lookup(constant_op.constant(2, dtype=dtypes.int64))
def false_fn():
return constant_op.constant(0, dtype=dtypes.float32)
return beta * control_flow_ops.cond(
constant_op.constant(True), true_fn=true_fn, false_fn=false_fn)
with backprop.GradientTape() as tape:
loss = get_loss(beta)
grad = tape.gradient(loss, beta)
self.evaluate(variables.global_variables_initializer())
self.evaluate(lookup_ops.tables_initializer())
self.assertAllEqual(grad, -10.)
def testExportShapeInference(self):
table = self.getHashTable()(lookup_ops.KeyValueTensorInitializer(
constant_op.constant([2, 5], dtype=dtypes.int64),
constant_op.constant([-10.0, 1], dtype=dtypes.float32)), -1)
actual_shapes = [t.shape for t in table.export()]
inferred_shapes = []
@def_function.function
def f():
for t in table.export():
inferred_shapes.append(t.shape)
f()
self.assertLen(actual_shapes, 2)
self.assertLen(inferred_shapes, 2)
self.assertTrue(inferred_shapes[0].is_compatible_with(actual_shapes[0]))
self.assertTrue(inferred_shapes[1].is_compatible_with(actual_shapes[1]))
class KeyValueTensorInitializerTest(BaseLookupTableTest):
def test_string(self):
init = lookup_ops.KeyValueTensorInitializer(
("brain", "salad", "surgery"), (0, 1, 2), dtypes.string, dtypes.int64)
table = self.getHashTable()(init, default_value=-1)
self.initialize_table(table)
def test_multiple_tables(self):
with ops.name_scope("table_scope"):
init1 = lookup_ops.KeyValueTensorInitializer(
("brain", "salad", "surgery"), (0, 1, 2), dtypes.string, dtypes.int64)
table1 = self.getHashTable()(init1, default_value=-1)
if not context.executing_eagerly():
self.assertEqual("hash_table", table1.name)
self.assertEqual("table_scope/hash_table",
table1.resource_handle.op.name)
init2 = lookup_ops.KeyValueTensorInitializer(
("brain", "salad", "surgery"), (0, 1, 2), dtypes.string, dtypes.int64)
table2 = self.getHashTable()(init2, default_value=-1)
if not context.executing_eagerly():
self.assertEqual("hash_table_1", table2.name)
self.assertEqual("table_scope/hash_table_1",
table2.resource_handle.op.name)
def test_int64(self):
init = lookup_ops.KeyValueTensorInitializer((42, 1, -1000), (0, 1, 2),
dtypes.int64, dtypes.int64)
table = self.getHashTable()(init, default_value=-1)
self.initialize_table(table)
def test_int32(self):
init = lookup_ops.KeyValueTensorInitializer((42, 1, -1000), (0, 1, 2),
dtypes.int32, dtypes.int64)
with self.assertRaises(errors_impl.OpError):
table = self.getHashTable()(init, default_value=-1)
self.initialize_table(table)
class DatasetInitializerTest(BaseLookupTableTest):
def _createVocabFile(self, basename, values=("brain", "salad", "surgery")):
vocabulary_file = os.path.join(self.get_temp_dir(), basename)
with open(vocabulary_file, "w") as f:
f.write("\n".join(values) + "\n")
return vocabulary_file
def test_basic(self):
keys = dataset_ops.Dataset.range(100)
values = dataset_ops.Dataset.range(100).map(
lambda x: string_ops.as_string(x * 2))
ds = dataset_ops.Dataset.zip((keys, values))
init = lookup_ops.DatasetInitializer(ds)
table = self.getHashTable()(init, default_value="")
self.initialize_table(table)
output = table.lookup(constant_op.constant([0, 2, 5], dtypes.int64))
result = self.evaluate(output)
self.assertAllEqual(["0", "4", "10"], result)
def test_basic_bad_shape(self):
keys = dataset_ops.Dataset.range(100)
values = dataset_ops.Dataset.range(100).map(
lambda x: string_ops.as_string(x * 2))
values = values.batch(4)
ds = dataset_ops.Dataset.zip((keys, values))
with self.assertRaises(ValueError):
lookup_ops.DatasetInitializer(ds)
def test_from_file(self):
vocabulary_file = self._createVocabFile("test.txt", ("one", "two", "three"))
ds = reader_ops.TextLineDataset(vocabulary_file)
ds = ds.enumerate(start=1)
init = lookup_ops.DatasetInitializer(ds)
table = self.getHashTable()(init, default_value="")
self.initialize_table(table)
output = table.lookup(constant_op.constant([2, 3, 4], dtypes.int64))
result = self.evaluate(output)
self.assertAllEqual(["two", "three", ""], result)
def test_from_multiple_files(self):
vocabulary_file1 = self._createVocabFile("test1.txt",
("one", "two", "three"))
vocabulary_file2 = self._createVocabFile("test2.txt",
("four", "five", "six"))
ds = reader_ops.TextLineDataset([vocabulary_file1, vocabulary_file2])
ds = ds.enumerate(start=1)
init = lookup_ops.DatasetInitializer(ds)
table = self.getHashTable()(init, default_value="")
self.initialize_table(table)
output = table.lookup(constant_op.constant([2, 3, 4], dtypes.int64))
result = self.evaluate(output)
self.assertAllEqual(["two", "three", "four"], result)
def test_map_variable(self):
ds = dataset_ops.Dataset.range(100)
captured_var = variables.Variable(0)
def func(_):
return captured_var.assign_add(1)
ds = ds.map(func)
ds = ds.enumerate(start=1)
init = lookup_ops.DatasetInitializer(ds)
table = self.getHashTable()(init, default_value=-1)
self.evaluate(captured_var.initializer)
self.initialize_table(table)
output = table.lookup(constant_op.constant([1, 2, 101], dtypes.int64))
result = self.evaluate(output)
self.assertAllEqual([1, 2, -1], result)
def test_compatibility(self):
with ops.Graph().as_default():
keys = dataset_ops.Dataset.range(100)
values = dataset_ops.Dataset.range(100).map(string_ops.as_string)
ds = dataset_ops.Dataset.zip((keys, values))
init = lookup_ops.DatasetInitializer(ds)
table = self.getHashTable()(init, default_value="")
output = table.lookup(constant_op.constant([0, 2, 5], dtypes.int64))
self.evaluate(lookup_ops.tables_initializer())
result = self.evaluate(output)
self.assertAllEqual(["0", "2", "5"], result)
class InitializeTableFromFileOpTest(BaseLookupTableTest):
def _createVocabFile(self, basename, values=("brain", "salad", "surgery")):
vocabulary_file = os.path.join(self.get_temp_dir(), basename)
with open(vocabulary_file, "w") as f:
f.write("\n".join(values) + "\n")
return vocabulary_file
def testInitializeStringTable(self):
vocabulary_file = self._createVocabFile("one_column_1.txt")
default_value = -1
init = lookup_ops.TextFileInitializer(
vocabulary_file, dtypes.string, lookup_ops.TextFileIndex.WHOLE_LINE,
dtypes.int64, lookup_ops.TextFileIndex.LINE_NUMBER)
self.assertIn("one_column_1.txt_-2_-1", init._shared_name)
table = self.getHashTable()(init, default_value)
self.initialize_table(table)
output = table.lookup(constant_op.constant(["brain", "salad", "tank"]))
result = self.evaluate(output)
self.assertAllEqual([0, 1, -1], result)
def testInitializeInt64Table(self):
vocabulary_file = self._createVocabFile(
"one_column_int64.txt", values=("42", "1", "-1000"))
with self.cached_session():
default_value = -1
init = lookup_ops.TextFileInitializer(
vocabulary_file, dtypes.int64, lookup_ops.TextFileIndex.WHOLE_LINE,
dtypes.int64, lookup_ops.TextFileIndex.LINE_NUMBER)
self.assertIn("one_column_int64.txt_-2_-1", init._shared_name)
table = self.getHashTable()(init, default_value)
self.initialize_table(table)
output = table.lookup(
constant_op.constant((42, 1, 11), dtype=dtypes.int64))
result = self.evaluate(output)
self.assertAllEqual([0, 1, -1], result)
def testInitializeIndexTable(self):
vocabulary_file = self._createVocabFile("one_column_2.txt")
with self.cached_session():
default_value = "UNK"
key_index = lookup_ops.TextFileIndex.LINE_NUMBER
value_index = lookup_ops.TextFileIndex.WHOLE_LINE
init = lookup_ops.TextFileInitializer(
vocabulary_file, dtypes.int64, key_index, dtypes.string, value_index)
self.assertIn("one_column_2.txt_-1_-2", init._shared_name)
table = self.getHashTable()(init, default_value)
self.initialize_table(table)
input_values = constant_op.constant([0, 1, 2, 3], dtypes.int64)
output = table.lookup(input_values)
result = self.evaluate(output)
self.assertAllEqual([b"brain", b"salad", b"surgery", b"UNK"], result)
def testMultiColumn(self):
vocabulary_file = os.path.join(self.get_temp_dir(), "three_columns.txt")
with open(vocabulary_file, "w") as f:
f.write("\n".join(["0\tbrain\t1", "1\tsalad\t5", "2\tsurgery\t6"]) + "\n")
with self.cached_session():
default_value = -1
key_index = 1
value_index = 2
init = lookup_ops.TextFileInitializer(
vocabulary_file, dtypes.string, key_index, dtypes.int64, value_index)
self.assertIn("three_columns.txt_1_2", init._shared_name)
table = self.getHashTable()(init, default_value)
self.initialize_table(table)
input_string = constant_op.constant(["brain", "salad", "surgery"])
output = table.lookup(input_string)
result = self.evaluate(output)
self.assertAllEqual([1, 5, 6], result)
def testInvalidDataTypeInMultiColumn(self):
vocabulary_file = os.path.join(self.get_temp_dir(), "three_columns.txt")
with open(vocabulary_file, "w") as f:
f.write("\n".join(["0\tbrain\t1", "1\tsalad\t5", "2\tsurgery\t6"]) + "\n")
with self.cached_session():
default_value = -1
key_index = 2
value_index = 1
init = lookup_ops.TextFileInitializer(
vocabulary_file, dtypes.string, key_index, dtypes.int64, value_index)
self.assertIn("three_columns.txt_2_1", init._shared_name)
with self.assertRaisesOpError("is not a valid"):
table = self.getHashTable()(init, default_value)
self.initialize_table(table)
def testInvalidDataType(self):
vocabulary_file = self._createVocabFile("one_column_3.txt")
with self.cached_session():
default_value = "UNK"
key_index = lookup_ops.TextFileIndex.WHOLE_LINE
value_index = lookup_ops.TextFileIndex.LINE_NUMBER
with self.assertRaises(ValueError):
init = lookup_ops.TextFileInitializer(vocabulary_file, dtypes.int64,
key_index, dtypes.string,
value_index)
self.assertIn("one_column_3.txt_-2_-1", init._shared_name)
self.getHashTable()(init, default_value)
def testInvalidIndex(self):
vocabulary_file = self._createVocabFile("one_column_4.txt")
with self.cached_session():
default_value = -1
key_index = 1 # second column of the line
value_index = lookup_ops.TextFileIndex.LINE_NUMBER
init = lookup_ops.TextFileInitializer(
vocabulary_file, dtypes.string, key_index, dtypes.int64, value_index)
self.assertIn("one_column_4.txt_1_-1", init._shared_name)
with self.assertRaisesOpError("Invalid number of columns"):
table = self.getHashTable()(init, default_value)
self.initialize_table(table)
def testInitializeSameTableWithMultipleNodes(self):
vocabulary_file = self._createVocabFile("one_column_5.txt")
with self.cached_session():
default_value = -1
init1 = lookup_ops.TextFileInitializer(
vocabulary_file, dtypes.string, lookup_ops.TextFileIndex.WHOLE_LINE,
dtypes.int64, lookup_ops.TextFileIndex.LINE_NUMBER)
self.assertIn("one_column_5.txt_-2_-1", init1._shared_name)
table1 = self.getHashTable()(init1, default_value)
init2 = lookup_ops.TextFileInitializer(
vocabulary_file, dtypes.string, lookup_ops.TextFileIndex.WHOLE_LINE,
dtypes.int64, lookup_ops.TextFileIndex.LINE_NUMBER)
self.assertIn("one_column_5.txt_-2_-1", init2._shared_name)
table2 = self.getHashTable()(init2, default_value)
init3 = lookup_ops.TextFileInitializer(
vocabulary_file, dtypes.string, lookup_ops.TextFileIndex.WHOLE_LINE,
dtypes.int64, lookup_ops.TextFileIndex.LINE_NUMBER)
self.assertIn("one_column_5.txt_-2_-1", init3._shared_name)
table3 = self.getHashTable()(init3, default_value)
self.evaluate(lookup_ops.tables_initializer())
input_string = constant_op.constant(["brain", "salad", "tank"])
output1 = table1.lookup(input_string)
output2 = table2.lookup(input_string)
output3 = table3.lookup(input_string)
out1, out2, out3 = self.evaluate([output1, output2, output3])
self.assertAllEqual([0, 1, -1], out1)
self.assertAllEqual([0, 1, -1], out2)
self.assertAllEqual([0, 1, -1], out3)
def testInitializeTableWithNoFilename(self):
with self.cached_session():
default_value = -1
with self.assertRaises(ValueError):
self.getHashTable()(lookup_ops.TextFileInitializer(
"", dtypes.string, lookup_ops.TextFileIndex.WHOLE_LINE,
dtypes.int64, lookup_ops.TextFileIndex.LINE_NUMBER), default_value)
def testInitializeWithVocabSize(self):
with self.cached_session():
default_value = -1
vocab_size = 3
vocabulary_file1 = self._createVocabFile("one_column6.txt")
init1 = lookup_ops.TextFileInitializer(
vocabulary_file1,
dtypes.string,
lookup_ops.TextFileIndex.WHOLE_LINE,
dtypes.int64,
lookup_ops.TextFileIndex.LINE_NUMBER,
vocab_size=vocab_size)
self.assertIn("one_column6.txt_3_-2_-1", init1._shared_name)
table1 = self.getHashTable()(init1, default_value)
# Initialize from file.
self.initialize_table(table1)
self.assertEqual(vocab_size, self.evaluate(table1.size()))
vocabulary_file2 = self._createVocabFile("one_column7.txt")
vocab_size = 5
init2 = lookup_ops.TextFileInitializer(
vocabulary_file2,
dtypes.string,
lookup_ops.TextFileIndex.WHOLE_LINE,
dtypes.int64,
lookup_ops.TextFileIndex.LINE_NUMBER,
vocab_size=vocab_size)
self.assertIn("one_column7.txt_5_-2_-1", init2._shared_name)
with self.assertRaisesOpError("Invalid vocab_size"):
table2 = self.getHashTable()(init2, default_value)
self.initialize_table(table2)
vocab_size = 1
vocabulary_file3 = self._createVocabFile("one_column3.txt")
init3 = lookup_ops.TextFileInitializer(
vocabulary_file3,
dtypes.string,
lookup_ops.TextFileIndex.WHOLE_LINE,
dtypes.int64,
lookup_ops.TextFileIndex.LINE_NUMBER,
vocab_size=vocab_size)
self.assertIn("one_column3.txt_1_-2_-1", init3._shared_name)
table3 = self.getHashTable()(init3, default_value)
# Smaller vocab size reads only vocab_size records.
self.initialize_table(table3)
self.assertEqual(vocab_size, self.evaluate(table3.size()))
@test_util.run_v1_only("placeholder usage")
def testFeedVocabularyName(self):
vocabulary_file = self._createVocabFile("feed_vocabulary.txt")
with self.cached_session():
default_value = -1
init = lookup_ops.TextFileInitializer(
"old_file.txt", dtypes.string, lookup_ops.TextFileIndex.WHOLE_LINE,
dtypes.int64, lookup_ops.TextFileIndex.LINE_NUMBER)
self.assertIn("old_file.txt_-2_-1", init._shared_name)
table = self.getHashTable()(init, default_value)
# Initializing with a non-existing file (old_file.txt) should fail.
# TODO(yleon): Update message, which might change per FileSystem.
with self.assertRaisesOpError("old_file.txt"):
self.evaluate(table.initializer)
# Initialize the model feeding the vocabulary file.
filenames = ops.get_collection(ops.GraphKeys.ASSET_FILEPATHS)
table.initializer.run(feed_dict={filenames[0]: vocabulary_file})
input_string = constant_op.constant(["brain", "salad", "tank"])
output = table.lookup(input_string)
result = self.evaluate(output)
self.assertAllEqual([0, 1, -1], result)
def testInvalidFilenames(self):
vocabulary_file = self._createVocabFile("filename_shape.txt")
with self.cached_session():
default_value = -1
# Invalid data type
other_type = constant_op.constant(1)
with self.assertRaises(Exception) as cm:
self.getHashTable()(lookup_ops.TextFileInitializer(
other_type, dtypes.string, lookup_ops.TextFileIndex.WHOLE_LINE,
dtypes.int64, lookup_ops.TextFileIndex.LINE_NUMBER), default_value)
self.assertIsInstance(cm.exception, (ValueError, TypeError))
# Non-scalar filename
filenames = constant_op.constant([vocabulary_file, vocabulary_file])
if not context.executing_eagerly():
with self.assertRaises(Exception) as cm:
self.getHashTable()(lookup_ops.TextFileInitializer(
filenames, dtypes.string, lookup_ops.TextFileIndex.WHOLE_LINE,
dtypes.int64, lookup_ops.TextFileIndex.LINE_NUMBER),
default_value)
self.assertIsInstance(cm.exception, (ValueError, TypeError))
else:
with self.assertRaises(errors_impl.InvalidArgumentError):
self.getHashTable()(lookup_ops.TextFileInitializer(
filenames, dtypes.string, lookup_ops.TextFileIndex.WHOLE_LINE,
dtypes.int64, lookup_ops.TextFileIndex.LINE_NUMBER),
default_value)
def testIdToStringTable(self):
vocab_file = self._createVocabFile("feat_to_id_1.txt")
with self.cached_session():
default_value = "UNK"
vocab_size = 3
init = lookup_ops.TextFileStringTableInitializer(
vocab_file, vocab_size=vocab_size)
self.assertTrue("feat_to_id_1.txt_3_-1_-2", init._shared_name)
table = self.getHashTable()(init, default_value)
self.initialize_table(table)
input_values = constant_op.constant([0, 1, 2, 3], dtypes.int64)
out = table.lookup(input_values)
self.assertAllEqual([b"brain", b"salad", b"surgery", b"UNK"],
self.evaluate(out))
self.assertEqual(vocab_size, self.evaluate(table.size()))
def testStringToIdTable(self):
vocab_file = self._createVocabFile("feat_to_id_2.txt")
with self.cached_session():
default_value = -1
vocab_size = 3
init = lookup_ops.TextFileIdTableInitializer(
vocab_file, vocab_size=vocab_size)
self.assertTrue("feat_to_id_2.txt_3_-1_-2", init._shared_name)
table = self.getHashTable()(init, default_value)
self.initialize_table(table)
input_string = constant_op.constant(["brain", "salad", "surgery", "UNK"])
out = table.lookup(input_string)
self.assertAllEqual([0, 1, 2, -1], self.evaluate(out))
self.assertEqual(vocab_size, self.evaluate(table.size()))
def testInt64ToIdTable(self):
vocab_file = self._createVocabFile(
"feat_to_id_3.txt", values=("42", "1", "-1000"))
with self.cached_session():
default_value = -1
vocab_size = 3
init = lookup_ops.TextFileIdTableInitializer(
vocab_file, vocab_size=vocab_size, key_dtype=dtypes.int64)
self.assertTrue("feat_to_id_3.txt_3_-1_-2", init._shared_name)
table = self.getHashTable()(init, default_value)
self.initialize_table(table)
out = table.lookup(
constant_op.constant((42, 1, -1000, 11), dtype=dtypes.int64))
self.assertAllEqual((0, 1, 2, -1), self.evaluate(out))
self.assertEqual(vocab_size, self.evaluate(table.size()))
class StaticVocabularyTableTest(BaseLookupTableTest):
def _createVocabFile(self, basename, values=("brain", "salad", "surgery")):
vocabulary_file = os.path.join(self.get_temp_dir(), basename)
with open(vocabulary_file, "w") as f:
f.write("\n".join(values) + "\n")
return vocabulary_file
def testStringStaticVocabularyTable(self):
vocab_file = self._createVocabFile("feat_to_id_1.txt")
vocab_size = 3
oov_buckets = 1
table = self.getVocabularyTable()(lookup_ops.TextFileIdTableInitializer(
vocab_file, vocab_size=vocab_size), oov_buckets)
self.initialize_table(table)
input_string = constant_op.constant(["brain", "salad", "surgery", "UNK"])
out = table.lookup(input_string)
self.assertAllEqual([0, 1, 2, 3], self.evaluate(out))
self.assertEqual(vocab_size + oov_buckets, self.evaluate(table.size()))
def testInt32StaticVocabularyTable(self):
vocab_file = self._createVocabFile("feat_to_id_2.txt", ("42", "1", "-1000"))
vocab_size = 3
oov_buckets = 1
table = self.getVocabularyTable()(
lookup_ops.TextFileIdTableInitializer(
vocab_file, vocab_size=vocab_size, key_dtype=dtypes.int64),
oov_buckets,
lookup_key_dtype=dtypes.int32)
self.initialize_table(table)
values = constant_op.constant((42, 1, -1000, 11), dtype=dtypes.int32)
out = table.lookup(values)
self.assertAllEqual([0, 1, 2, 3], self.evaluate(out))
self.assertEqual(vocab_size + oov_buckets, self.evaluate(table.size()))
def testInt64StaticVocabularyTable(self):
vocab_file = self._createVocabFile("feat_to_id_3.txt", ("42", "1", "-1000"))
vocab_size = 3
oov_buckets = 1
table = self.getVocabularyTable()(lookup_ops.TextFileIdTableInitializer(
vocab_file, vocab_size=vocab_size, key_dtype=dtypes.int64), oov_buckets)
self.initialize_table(table)
values = constant_op.constant((42, 1, -1000, 11), dtype=dtypes.int64)
out = table.lookup(values)
self.assertAllEqual([0, 1, 2, 3], self.evaluate(out))
self.assertEqual(vocab_size + oov_buckets, self.evaluate(table.size()))
def testStringStaticVocabularyTableNoInitializer(self):
oov_buckets = 5
# Set up a table that only uses hash buckets; for each input value it
# returns an id calculated by fingerprint("input") mod oov_buckets.
table = self.getVocabularyTable()(None, oov_buckets)
self.initialize_table(table)
values = constant_op.constant(("brain", "salad", "surgery"))
out = table.lookup(values)
self.assertAllEqual(
[
3, # fingerprint("brain") mod 5.
1, # fingerprint("salad") mod 5.
4 # fingerprint("surgery") mod 5
],
self.evaluate(out))
self.assertEqual(oov_buckets, self.evaluate(table.size()))
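# A sketch of the bucketing rule exercised above: with no initializer, every
# lookup is resolved purely by hashing, roughly
#   bucket_id = fingerprint(key) % num_oov_buckets
# (the exact fingerprint function is an implementation detail of
# StaticVocabularyTable), so the mapping is deterministic but carries no
# vocabulary semantics.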
def testStaticVocabularyTableWithMultipleInitializers(self):
vocab_file = self._createVocabFile("feat_to_id_4.txt")
vocab_size = 3
oov_buckets = 3
init = lookup_ops.TextFileIdTableInitializer(
vocab_file, vocab_size=vocab_size)
table1 = self.getVocabularyTable()(init, oov_buckets, name="table1")
table2 = self.getVocabularyTable()(init, oov_buckets, name="table2")
self.evaluate(lookup_ops.tables_initializer())
input_string = constant_op.constant(
["fruit", "brain", "salad", "surgery", "UNK"])
out1 = table1.lookup(input_string)
out2 = table2.lookup(input_string)
out1, out2 = self.evaluate([out1, out2])
self.assertAllEqual([5, 0, 1, 2, 5], out1)
self.assertAllEqual([5, 0, 1, 2, 5], out2)
self.assertEqual(vocab_size + oov_buckets, self.evaluate(table1.size()))
self.assertEqual(vocab_size + oov_buckets, self.evaluate(table2.size()))
def testStaticVocabularyTableInitializationAcrossSessions(self):
vocab_file = self._createVocabFile("feat_to_id_5.txt")
with self.cached_session():
vocab_size = 3
oov_buckets = 1
table1 = self.getVocabularyTable()(lookup_ops.TextFileIdTableInitializer(
vocab_file, vocab_size=vocab_size), oov_buckets)
self.initialize_table(table1)
input_string_1 = constant_op.constant(
["brain", "salad", "surgery", "UNK"])
out1 = table1.lookup(input_string_1)
self.assertAllEqual([0, 1, 2, 3], self.evaluate(out1))
self.assertEqual(vocab_size + oov_buckets, self.evaluate(table1.size()))
with self.cached_session():
vocab_size = 3
oov_buckets = 1
# The underlying lookup table was already initialized in the previous
# session, so there is no need to initialize table2.
table2 = self.getVocabularyTable()(lookup_ops.TextFileIdTableInitializer(
vocab_file, vocab_size=vocab_size), oov_buckets)
input_string_2 = constant_op.constant(["fruit", "salad", "UNK"])
out2 = table2.lookup(input_string_2)
self.assertAllEqual([3, 1, 3], self.evaluate(out2))
self.assertEqual(vocab_size + oov_buckets, self.evaluate(table2.size()))
def testStaticVocabularyTableAssetTracking(self):
vocab_file = self._createVocabFile("vocab.txt")
vocab_size = 3
oov_buckets = 1
table = self.getVocabularyTable()(lookup_ops.TextFileIdTableInitializer(
vocab_file, vocab_size=vocab_size), oov_buckets)
object_graph_view = graph_view.ObjectGraphView(table)
objects = object_graph_view.list_objects()
assets = list(filter(lambda obj: isinstance(obj, tracking.Asset), objects))
self.assertLen(assets, 1)
self.assertEqual(
self.evaluate(assets[0].asset_path), compat.as_bytes(vocab_file))
def testSparseTensor(self):
vocab_file = self._createVocabFile("feat_to_id_7.txt")
input_indices = [[0, 0], [0, 1], [2, 0], [2, 2], [3, 0]]
input_shape = [4, 4]
sp_features = sparse_tensor.SparseTensor(
constant_op.constant(input_indices, dtypes.int64),
constant_op.constant(["brain", "salad", "brain", "surgery", "tarkus"],
dtypes.string),
constant_op.constant(input_shape, dtypes.int64))
table = self.getVocabularyTable()(lookup_ops.TextFileIdTableInitializer(
vocab_file, vocab_size=3), 1)
self.initialize_table(table)
sp_ids = table.lookup(sp_features)
self.assertAllEqual([5], sp_ids.values._shape_as_list())
sp_ids_ind, sp_ids_val, sp_ids_shape = self.evaluate(
[sp_ids.indices, sp_ids.values, sp_ids.dense_shape])
self.assertAllEqual(input_indices, sp_ids_ind)
self.assertAllEqual([0, 1, 0, 2, 3], sp_ids_val)
self.assertAllEqual(input_shape, sp_ids_shape)
def testRaggedTensor(self):
vocab_file = self._createVocabFile("feat_to_id_7.txt")
input_row_splits = [0, 2, 4, 5]
ragged_features = ragged_tensor.RaggedTensor.from_row_splits(
constant_op.constant(["brain", "salad", "brain", "surgery", "tarkus"],
dtypes.string),
constant_op.constant(input_row_splits, dtypes.int64))
table = self.getVocabularyTable()(lookup_ops.TextFileIdTableInitializer(
vocab_file, vocab_size=3), 1)
self.initialize_table(table)
ragged_ids = table.lookup(ragged_features)
self.assertAllEqual([5], ragged_ids.values._shape_as_list())
ragged_ids_val, ragged_ids_row_splits = self.evaluate(
[ragged_ids.values, ragged_ids.row_splits])
self.assertAllEqual([0, 1, 0, 2, 3], ragged_ids_val)
self.assertAllEqual(input_row_splits, ragged_ids_row_splits)
def testInt32SparseTensor(self):
input_indices = [[0, 0], [0, 1], [2, 0], [2, 2], [3, 0]]
input_shape = [4, 4]
sp_features = sparse_tensor.SparseTensor(
constant_op.constant(input_indices, dtypes.int64),
constant_op.constant([42, 1, 42, -1000, 11], dtypes.int32),
constant_op.constant(input_shape, dtypes.int64))
table = self.getVocabularyTable()(
lookup_ops.KeyValueTensorInitializer((42, 1, -1000), (0, 1, 2),
dtypes.int64, dtypes.int64),
1,
lookup_key_dtype=dtypes.int32)
self.initialize_table(table)
sp_ids = table.lookup(sp_features)
self.assertAllEqual([5], sp_ids.values._shape_as_list())
sp_ids_ind, sp_ids_val, sp_ids_shape = self.evaluate(
[sp_ids.indices, sp_ids.values, sp_ids.dense_shape])
self.assertAllEqual(input_indices, sp_ids_ind)
self.assertAllEqual([0, 1, 0, 2, 3], sp_ids_val)
self.assertAllEqual(input_shape, sp_ids_shape)
def testInt32RaggedTensor(self):
input_row_splits = [0, 2, 4, 5]
ragged_features = ragged_tensor.RaggedTensor.from_row_splits(
constant_op.constant([42, 1, 42, -1000, 11], dtypes.int32),
constant_op.constant(input_row_splits, dtypes.int64))
table = self.getVocabularyTable()(
lookup_ops.KeyValueTensorInitializer((42, 1, -1000), (0, 1, 2),
dtypes.int64, dtypes.int64),
1,
lookup_key_dtype=dtypes.int32)
self.initialize_table(table)
ragged_ids = table.lookup(ragged_features)
self.assertAllEqual([5], ragged_ids.values._shape_as_list())
ragged_ids_val, ragged_ids_row_splits = self.evaluate(
[ragged_ids.values, ragged_ids.row_splits])
self.assertAllEqual([0, 1, 0, 2, 3], ragged_ids_val)
self.assertAllEqual(input_row_splits, ragged_ids_row_splits)
def testInt64SparseTensor(self):
input_indices = [[0, 0], [0, 1], [2, 0], [2, 2], [3, 0]]
input_shape = [4, 4]
sp_features = sparse_tensor.SparseTensor(
constant_op.constant(input_indices, dtypes.int64),
constant_op.constant([42, 1, 42, -1000, 11], dtypes.int64),
constant_op.constant(input_shape, dtypes.int64))
table = self.getVocabularyTable()(lookup_ops.KeyValueTensorInitializer(
(42, 1, -1000), (0, 1, 2), dtypes.int64, dtypes.int64), 1)
self.initialize_table(table)
sp_ids = table.lookup(sp_features)
self.assertAllEqual([5], sp_ids.values._shape_as_list())
sp_ids_ind, sp_ids_val, sp_ids_shape = self.evaluate(
[sp_ids.indices, sp_ids.values, sp_ids.dense_shape])
self.assertAllEqual(input_indices, sp_ids_ind)
self.assertAllEqual([0, 1, 0, 2, 3], sp_ids_val)
self.assertAllEqual(input_shape, sp_ids_shape)
def testInt64RaggedTensor(self):
input_row_splits = [0, 2, 4, 5]
ragged_features = ragged_tensor.RaggedTensor.from_row_splits(
constant_op.constant([42, 1, 42, -1000, 11], dtypes.int64),
constant_op.constant(input_row_splits, dtypes.int64))
table = self.getVocabularyTable()(lookup_ops.KeyValueTensorInitializer(
(42, 1, -1000), (0, 1, 2), dtypes.int64, dtypes.int64), 1)
self.initialize_table(table)
ragged_ids = table.lookup(ragged_features)
self.assertAllEqual([5], ragged_ids.values._shape_as_list())
ragged_ids_val, ragged_ids_row_splits = self.evaluate(
[ragged_ids.values, ragged_ids.row_splits])
self.assertAllEqual([0, 1, 0, 2, 3], ragged_ids_val)
self.assertAllEqual(input_row_splits, ragged_ids_row_splits)
def testStaticVocabularyTableNoInnerTable(self):
table = self.getVocabularyTable()(None, num_oov_buckets=1)
self.assertIsNone(table.resource_handle)
class DenseHashTableOpTest(test.TestCase):
def testBasic(self):
with self.cached_session():
keys = constant_op.constant([11, 12, 13, 14], dtypes.int64)
values = constant_op.constant([0, 1, 2, 3], dtypes.int64)
table = lookup_ops.DenseHashTable(
dtypes.int64,
dtypes.int64,
default_value=-1,
empty_key=0,
deleted_key=-1)
self.assertAllEqual(0, self.evaluate(table.size()))
self.evaluate(table.insert(keys, values))
self.assertAllEqual(4, self.evaluate(table.size()))
remove_string = constant_op.constant([12, 15], dtypes.int64)
self.evaluate(table.remove(remove_string))
self.assertAllEqual(3, self.evaluate(table.size()))
input_string = constant_op.constant([11, 12, 15], dtypes.int64)
output = table.lookup(input_string)
self.assertAllEqual([3], output.get_shape())
result = self.evaluate(output)
self.assertAllEqual([0, -1, -1], result)
def testBasicBool(self):
with self.cached_session():
keys = constant_op.constant([11, 12, 13, 14], dtypes.int64)
values = constant_op.constant([True, True, True, True], dtypes.bool)
table = lookup_ops.DenseHashTable(
dtypes.int64,
dtypes.bool,
default_value=False,
empty_key=0,
deleted_key=-1)
self.assertAllEqual(0, self.evaluate(table.size()))
self.evaluate(table.insert(keys, values))
self.assertAllEqual(4, self.evaluate(table.size()))
remove_string = constant_op.constant([11, 15], dtypes.int64)
self.evaluate(table.remove(remove_string))
self.assertAllEqual(3, self.evaluate(table.size()))
input_string = constant_op.constant([11, 12, 15], dtypes.int64)
output = table.lookup(input_string)
self.assertAllEqual([3], output.get_shape())
result = self.evaluate(output)
self.assertAllEqual([False, True, False], result)
def testSameEmptyAndDeletedKey(self):
with self.cached_session():
with self.assertRaisesRegex(errors_impl.InvalidArgumentError,
"Empty and deleted keys"):
table = lookup_ops.DenseHashTable(
dtypes.int64,
dtypes.int64,
default_value=-1,
empty_key=42,
deleted_key=42)
self.assertAllEqual(0, self.evaluate(table.size()))
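# Note on the constraint exercised above: DenseHashTable reserves empty_key
# and deleted_key as internal sentinels, so they must be distinct from each
# other and must not collide with real keys that are inserted or looked up;
# passing the same value for both raises InvalidArgumentError, as asserted
# in testSameEmptyAndDeletedKey.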
@test_util.run_v1_only("uses placeholders")
def testLookupUnknownShape(self):
with self.cached_session():
keys = constant_op.constant([11, 12, 13], dtypes.int64)
values = constant_op.constant([0, 1, 2], dtypes.int64)
table = lookup_ops.DenseHashTable(
dtypes.int64,
dtypes.int64,
default_value=-1,
empty_key=0,
deleted_key=-1)
self.evaluate(table.insert(keys, values))
self.assertAllEqual(3, self.evaluate(table.size()))
placeholder_keys = array_ops.placeholder(dtypes.int64)
output = table.lookup(placeholder_keys)
self.assertAllEqual(None, output.get_shape())
result = output.eval({placeholder_keys: [11, 12, 15]})
self.assertAllEqual([0, 1, -1], result)
def testMapStringToFloat(self):
with self.cached_session():
keys = constant_op.constant(["a", "b", "c", "d"], dtypes.string)
values = constant_op.constant([0.0, 1.1, 2.2, 3.3], dtypes.float32)
default_value = constant_op.constant(-1.5, dtypes.float32)
table = lookup_ops.DenseHashTable(
dtypes.string,
dtypes.float32,
default_value=default_value,
empty_key="",
deleted_key="$")
self.assertAllEqual(0, self.evaluate(table.size()))
self.evaluate(table.insert(keys, values))
self.assertAllEqual(4, self.evaluate(table.size()))
remove_string = constant_op.constant(["b", "e"])
self.evaluate(table.remove(remove_string))
self.assertAllEqual(3, self.evaluate(table.size()))
input_string = constant_op.constant(["a", "b", "d", "e"], dtypes.string)
output = table.lookup(input_string)
self.assertAllEqual([4], output.get_shape())
result = self.evaluate(output)
self.assertAllClose([0, -1.5, 3.3, -1.5], result)
def testMapInt64ToFloat(self):
for float_dtype in [dtypes.float32, dtypes.float64]:
with self.cached_session():
keys = constant_op.constant([11, 12, 13, 14], dtypes.int64)
values = constant_op.constant([0.0, 1.1, 2.2, 3.3], float_dtype)
default_value = constant_op.constant(-1.5, float_dtype)
table = lookup_ops.DenseHashTable(
dtypes.int64,
float_dtype,
default_value=default_value,
empty_key=0,
deleted_key=-1)
self.assertAllEqual(0, self.evaluate(table.size()))
self.evaluate(table.insert(keys, values))
self.assertAllEqual(4, self.evaluate(table.size()))
remove_string = constant_op.constant([12, 15], dtypes.int64)
self.evaluate(table.remove(remove_string))
self.assertAllEqual(3, self.evaluate(table.size()))
input_string = constant_op.constant([11, 12, 14, 15], dtypes.int64)
output = table.lookup(input_string)
self.assertAllEqual([4], output.get_shape())
result = self.evaluate(output)
self.assertAllClose([0, -1.5, 3.3, -1.5], result)
def testVectorValues(self):
with self.cached_session():
keys = constant_op.constant([11, 12, 13], dtypes.int64)
values = constant_op.constant([[0, 1, 2, 3], [3, 4, 5, 6], [6, 7, 8, 9]],
dtypes.int64)
default_value = constant_op.constant([-1, -2, -3, -4], dtypes.int64)
table = lookup_ops.DenseHashTable(
dtypes.int64,
dtypes.int64,
default_value=default_value,
empty_key=0,
deleted_key=-1,
initial_num_buckets=4)
self.assertAllEqual(0, self.evaluate(table.size()))
self.evaluate(table.insert(keys, values))
self.assertAllEqual(3, self.evaluate(table.size()))
self.assertAllEqual(4, len(self.evaluate(table.export()[0])))
self.evaluate(
table.insert(
constant_op.constant([14], dtypes.int64),
constant_op.constant([[2, 3, 4, 5]], dtypes.int64)))
self.assertAllEqual(4, self.evaluate(table.size()))
self.assertAllEqual(8, len(self.evaluate(table.export()[0])))
remove_string = constant_op.constant([12, 16], dtypes.int64)
self.evaluate(table.remove(remove_string))
self.assertAllEqual(3, self.evaluate(table.size()))
self.assertAllEqual(8, len(self.evaluate(table.export()[0])))
input_string = constant_op.constant([11, 12, 14, 15], dtypes.int64)
output = table.lookup(input_string)
self.assertAllEqual([4, 4],
output.shape,
msg="Saw shape: %s" % output.shape)
result = self.evaluate(output)
self.assertAllEqual(
[[0, 1, 2, 3], [-1, -2, -3, -4], [2, 3, 4, 5], [-1, -2, -3, -4]],
result)
def testVectorKeys(self):
with self.cached_session():
keys = constant_op.constant([[0, 1], [1, 2], [1, 3]], dtypes.int64)
values = constant_op.constant([10, 11, 12], dtypes.int64)
empty_key = constant_op.constant([0, 3], dtypes.int64)
deleted_key = constant_op.constant([-1, -1], dtypes.int64)
default_value = constant_op.constant(-1, dtypes.int64)
table = lookup_ops.DenseHashTable(
dtypes.int64,
dtypes.int64,
default_value=default_value,
empty_key=empty_key,
deleted_key=deleted_key,
initial_num_buckets=8)
self.assertAllEqual(0, self.evaluate(table.size()))
self.evaluate(table.insert(keys, values))
self.assertAllEqual(3, self.evaluate(table.size()))
self.evaluate(
table.insert(
constant_op.constant([[0, 0]], dtypes.int64),
constant_op.constant([13], dtypes.int64)))
self.assertAllEqual(4, self.evaluate(table.size()))
self.assertAllEqual(8, len(self.evaluate(table.export()[0])))
remove_string = constant_op.constant([[1, 2], [7, 8]], dtypes.int64)
self.evaluate(table.remove(remove_string))
self.assertAllEqual(3, self.evaluate(table.size()))
self.assertAllEqual(8, len(self.evaluate(table.export()[0])))
input_string = constant_op.constant([[0, 1], [1, 2], [1, 3], [0, 2]],
dtypes.int64)
output = table.lookup(input_string)
self.assertAllEqual([4], output.get_shape())
result = self.evaluate(output)
self.assertAllEqual([10, -1, 12, -1], result)
def testResize(self):
with self.cached_session():
keys = constant_op.constant([11, 12, 13], dtypes.int64)
values = constant_op.constant([0, 1, 2], dtypes.int64)
table = lookup_ops.DenseHashTable(
dtypes.int64,
dtypes.int64,
default_value=-1,
empty_key=0,
deleted_key=-1,
initial_num_buckets=4)
self.assertAllEqual(0, self.evaluate(table.size()))
self.evaluate(table.insert(keys, values))
self.assertAllEqual(3, self.evaluate(table.size()))
self.assertAllEqual(4, len(self.evaluate(table.export()[0])))
keys2 = constant_op.constant([12, 99], dtypes.int64)
self.evaluate(table.remove(keys2))
self.assertAllEqual(2, self.evaluate(table.size()))
self.assertAllEqual(4, len(self.evaluate(table.export()[0])))
keys3 = constant_op.constant([13, 14, 15, 16, 17], dtypes.int64)
values3 = constant_op.constant([3, 4, 5, 6, 7], dtypes.int64)
self.evaluate(table.insert(keys3, values3))
self.assertAllEqual(6, self.evaluate(table.size()))
self.assertAllEqual(16, len(self.evaluate(table.export()[0])))
keys4 = constant_op.constant([10, 11, 12, 13, 14, 15, 16, 17, 18],
dtypes.int64)
output = table.lookup(keys4)
self.assertAllEqual([-1, 0, -1, 3, 4, 5, 6, 7, -1], self.evaluate(output))
def testExport(self):
with self.cached_session():
keys = constant_op.constant([11, 12, 13, 14], dtypes.int64)
values = constant_op.constant([1, 2, 3, 4], dtypes.int64)
table = lookup_ops.DenseHashTable(
dtypes.int64,
dtypes.int64,
default_value=-1,
empty_key=100,
deleted_key=200,
initial_num_buckets=8)
self.assertAllEqual(0, self.evaluate(table.size()))
self.evaluate(table.insert(keys, values))
self.assertAllEqual(4, self.evaluate(table.size()))
keys2 = constant_op.constant([12, 15], dtypes.int64)
self.evaluate(table.remove(keys2))
self.assertAllEqual(3, self.evaluate(table.size()))
exported_keys, exported_values = table.export()
np_keys = self.evaluate(exported_keys)
np_values = self.evaluate(exported_values)
self.assertAllEqual(8, len(np_keys))
self.assertAllEqual(8, len(np_values))
# pair up keys and values, dropping the extra dimension added by dstack
pairs = np.dstack((np_keys.flatten(), np_values.flatten()))[0]
# sort by key
pairs = pairs[pairs[:, 0].argsort()]
self.assertAllEqual([[11, 1], [13, 3], [14, 4], [100, 0], [100, 0],
[100, 0], [100, 0], [200, 2]], pairs)
@test_util.run_v1_only("Saver V1 only")
def testSaveRestore(self):
save_dir = os.path.join(self.get_temp_dir(), "save_restore")
save_path = os.path.join(tempfile.mkdtemp(prefix=save_dir), "hash")
with self.session(graph=ops.Graph()) as sess:
default_value = -1
empty_key = 0
deleted_key = -1
keys = constant_op.constant([11, 12, 13, 14], dtypes.int64)
values = constant_op.constant([0, 1, 2, 3], dtypes.int64)
table = lookup_ops.DenseHashTable(
dtypes.int64,
dtypes.int64,
default_value=default_value,
empty_key=empty_key,
deleted_key=deleted_key,
name="t1",
checkpoint=True,
initial_num_buckets=32)
save = saver.Saver()
self.assertAllEqual(0, table.size())
table.insert(keys, values).run()
self.assertAllEqual(4, table.size())
self.assertAllEqual(32, len(table.export()[0].eval()))
keys2 = constant_op.constant([12, 15], dtypes.int64)
table.remove(keys2).run()
self.assertAllEqual(3, table.size())
self.assertAllEqual(32, len(table.export()[0].eval()))
val = save.save(sess, save_path)
self.assertIsInstance(val, six.string_types)
self.assertEqual(save_path, val)
with self.session(graph=ops.Graph()) as sess:
table = lookup_ops.DenseHashTable(
dtypes.int64,
dtypes.int64,
default_value=default_value,
empty_key=empty_key,
deleted_key=deleted_key,
name="t1",
checkpoint=True,
initial_num_buckets=64)
table.insert(
constant_op.constant([11, 14], dtypes.int64),
constant_op.constant([12, 24], dtypes.int64)).run()
self.assertAllEqual(2, table.size())
self.assertAllEqual(64, len(table.export()[0].eval()))
save = saver.Saver()
# Restore the saved values in the parameter nodes.
save.restore(sess, save_path)
self.assertAllEqual(3, table.size())
self.assertAllEqual(32, len(table.export()[0].eval()))
input_string = constant_op.constant([10, 11, 12, 13, 14], dtypes.int64)
output = table.lookup(input_string)
self.assertAllEqual([-1, 0, -1, 2, 3], output)
@test_util.run_v1_only("Saver V1 only")
def testSaveRestoreOnlyTable(self):
save_dir = os.path.join(self.get_temp_dir(), "save_restore")
save_path = os.path.join(tempfile.mkdtemp(prefix=save_dir), "hash")
with self.session(graph=ops.Graph()) as sess:
default_value = -1
empty_key = 0
deleted_key = -1
keys = constant_op.constant([11, 12, 13, 14], dtypes.int64)
values = constant_op.constant([0, 1, 2, 3], dtypes.int64)
table = lookup_ops.DenseHashTable(
dtypes.int64,
dtypes.int64,
default_value=default_value,
empty_key=empty_key,
deleted_key=deleted_key,
name="t1",
checkpoint=True,
initial_num_buckets=32)
save = saver.Saver([table])
self.assertAllEqual(0, table.size())
table.insert(keys, values).run()
self.assertAllEqual(4, table.size())
self.assertAllEqual(32, len(table.export()[0].eval()))
keys2 = constant_op.constant([12, 15], dtypes.int64)
table.remove(keys2).run()
self.assertAllEqual(3, table.size())
self.assertAllEqual(32, len(table.export()[0].eval()))
val = save.save(sess, save_path)
self.assertIsInstance(val, six.string_types)
self.assertEqual(save_path, val)
with self.session(graph=ops.Graph()) as sess:
table = lookup_ops.DenseHashTable(
dtypes.int64,
dtypes.int64,
default_value=default_value,
empty_key=empty_key,
deleted_key=deleted_key,
name="t1",
checkpoint=True,
initial_num_buckets=64)
table.insert(
constant_op.constant([11, 14], dtypes.int64),
constant_op.constant([12, 24], dtypes.int64)).run()
self.assertAllEqual(2, table.size())
self.assertAllEqual(64, len(table.export()[0].eval()))
save = saver.Saver([table])
# Restore the saved values in the parameter nodes.
save.restore(sess, save_path)
self.assertAllEqual(3, table.size())
self.assertAllEqual(32, len(table.export()[0].eval()))
input_string = constant_op.constant([10, 11, 12, 13, 14], dtypes.int64)
output = table.lookup(input_string)
self.assertAllEqual([-1, 0, -1, 2, 3], output)
@test_util.run_in_graph_and_eager_modes
def testObjectSaveRestore(self):
save_dir = os.path.join(self.get_temp_dir(), "save_restore")
save_prefix = os.path.join(tempfile.mkdtemp(prefix=save_dir), "hash")
default_value = -1
empty_key = 0
deleted_key = -1
keys = constant_op.constant([11, 12, 13], dtypes.int64)
values = constant_op.constant([0, 1, 2], dtypes.int64)
save_table = lookup_ops.DenseHashTable(
dtypes.int64,
dtypes.int64,
default_value=default_value,
empty_key=empty_key,
deleted_key=deleted_key,
name="t1",
checkpoint=True,
initial_num_buckets=32)
save_checkpoint = trackable.Checkpoint(table=save_table)
self.assertAllEqual(0, self.evaluate(save_table.size()))
self.evaluate(save_table.insert(keys, values))
self.assertAllEqual(3, self.evaluate(save_table.size()))
self.assertAllEqual(32, len(self.evaluate(save_table.export()[0])))
save_path = save_checkpoint.save(save_prefix)
del save_table, save_checkpoint
load_table = lookup_ops.DenseHashTable(
dtypes.int64,
dtypes.int64,
default_value=default_value,
empty_key=empty_key,
deleted_key=deleted_key,
name="t1",
checkpoint=True,
initial_num_buckets=64)
self.evaluate(
load_table.insert(
constant_op.constant([11, 14], dtypes.int64),
constant_op.constant([12, 24], dtypes.int64)))
self.assertAllEqual(2, self.evaluate(load_table.size()))
self.assertAllEqual(64, len(self.evaluate(load_table.export()[0])))
restore_checkpoint = trackable.Checkpoint(table=load_table)
# Restore the saved values in the parameter nodes.
restore_checkpoint.restore(save_path).run_restore_ops()
self.assertAllEqual(3, self.evaluate(load_table.size()))
self.assertAllEqual(32, len(self.evaluate(load_table.export()[0])))
input_string = constant_op.constant([10, 11, 12, 13, 14], dtypes.int64)
output = load_table.lookup(input_string)
self.assertAllEqual([-1, 0, 1, 2, -1], self.evaluate(output))
@test_util.run_v2_only
def testSavedModelSaveRestore(self):
save_dir = os.path.join(self.get_temp_dir(), "save_restore")
save_path = os.path.join(tempfile.mkdtemp(prefix=save_dir), "hash")
root = tracking.AutoTrackable()
default_value = -1
empty_key = 0
deleted_key = -1
keys = constant_op.constant([11, 12, 13], dtypes.int64)
values = constant_op.constant([0, 1, 2], dtypes.int64)
root.table = lookup_ops.DenseHashTable(
dtypes.int64,
dtypes.int64,
default_value=default_value,
empty_key=empty_key,
deleted_key=deleted_key,
name="t1",
checkpoint=True,
initial_num_buckets=32)
@def_function.function(
input_signature=[tensor_spec.TensorSpec((), dtypes.int64)])
def lookup(key):
return root.table.lookup(key)
root.lookup = lookup
self.assertAllEqual(0, root.table.size())
root.table.insert(keys, values)
self.assertAllEqual(3, self.evaluate(root.table.size()))
self.assertAllEqual(32, len(self.evaluate(root.table.export()[0])))
saved_model_save.save(root, save_path)
del root
loaded = saved_model_load.load(save_path)
self.assertEqual(loaded.lookup(12), 1)
self.assertEqual(loaded.lookup(10), -1)
@test_util.run_v1_only("Saver V1 only")
def testVectorSaveRestore(self):
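    # Saves and restores a table whose keys and values are length-2 vectors.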
save_dir = os.path.join(self.get_temp_dir(), "vector_save_restore")
save_path = os.path.join(tempfile.mkdtemp(prefix=save_dir), "hash")
with self.session(graph=ops.Graph()) as sess:
empty_key = constant_op.constant([11, 13], dtypes.int64)
deleted_key = constant_op.constant([-2, -3], dtypes.int64)
default_value = constant_op.constant([-1, -2], dtypes.int64)
keys = constant_op.constant([[11, 12], [11, 14], [12, 13], [13, 14]],
dtypes.int64)
values = constant_op.constant([[0, 1], [2, 3], [2, 4], [4, 5]],
dtypes.int64)
table = lookup_ops.DenseHashTable(
dtypes.int64,
dtypes.int64,
default_value=default_value,
empty_key=empty_key,
deleted_key=deleted_key,
name="t1",
checkpoint=True,
initial_num_buckets=32)
save = saver.Saver()
self.assertAllEqual(0, table.size())
table.insert(keys, values).run()
self.assertAllEqual(4, table.size())
self.assertAllEqual(32, len(table.export()[0].eval()))
keys2 = constant_op.constant([[12, 13], [16, 17]], dtypes.int64)
table.remove(keys2).run()
self.assertAllEqual(3, table.size())
self.assertAllEqual(32, len(table.export()[0].eval()))
val = save.save(sess, save_path)
self.assertIsInstance(val, six.string_types)
self.assertEqual(save_path, val)
with self.session(graph=ops.Graph()) as sess:
empty_key = constant_op.constant([11, 13], dtypes.int64)
deleted_key = constant_op.constant([-2, -3], dtypes.int64)
default_value = constant_op.constant([-1, -2], dtypes.int64)
table = lookup_ops.DenseHashTable(
dtypes.int64,
dtypes.int64,
default_value=default_value,
empty_key=empty_key,
deleted_key=deleted_key,
name="t1",
checkpoint=True,
initial_num_buckets=64)
table.insert(
constant_op.constant([[11, 12], [13, 15]], dtypes.int64),
constant_op.constant([[21, 22], [23, 24]], dtypes.int64)).run()
self.assertAllEqual(2, table.size())
self.assertAllEqual(64, len(table.export()[0].eval()))
save = saver.Saver()
# Restore the saved values in the parameter nodes.
save.restore(sess, save_path)
self.assertAllEqual(3, table.size())
self.assertAllEqual(32, len(table.export()[0].eval()))
input_string = constant_op.constant(
[[11, 12], [11, 14], [11, 15], [13, 14], [13, 15]], dtypes.int64)
output = table.lookup(input_string)
self.assertAllEqual([[0, 1], [2, 3], [-1, -2], [4, 5], [-1, -2]],
self.evaluate(output))
@test_util.run_v1_only("Saver V1 only")
def testVectorScalarSaveRestore(self):
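    # Saves and restores a table with vector keys and scalar values.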
save_dir = os.path.join(self.get_temp_dir(), "vector_scalar_save_restore")
save_path = os.path.join(tempfile.mkdtemp(prefix=save_dir), "hash")
with self.session(graph=ops.Graph()) as sess:
empty_key = constant_op.constant([11, 13], dtypes.int64)
deleted_key = constant_op.constant([-1, -1], dtypes.int64)
default_value = constant_op.constant(-1, dtypes.int64)
keys = constant_op.constant([[11, 12], [11, 14], [12, 13], [13, 14]],
dtypes.int64)
values = constant_op.constant([0, 1, 2, 3], dtypes.int64)
table = lookup_ops.DenseHashTable(
dtypes.int64,
dtypes.int64,
default_value=default_value,
empty_key=empty_key,
deleted_key=deleted_key,
name="t2",
checkpoint=True,
initial_num_buckets=32)
save = saver.Saver()
self.assertAllEqual(0, table.size())
table.insert(keys, values).run()
self.assertAllEqual(4, table.size())
self.assertAllEqual(32, len(table.export()[0].eval()))
keys2 = constant_op.constant([[12, 13], [15, 16]], dtypes.int64)
table.remove(keys2).run()
self.assertAllEqual(3, table.size())
self.assertAllEqual(32, len(table.export()[0].eval()))
val = save.save(sess, save_path)
self.assertIsInstance(val, six.string_types)
self.assertEqual(save_path, val)
with self.session(graph=ops.Graph()) as sess:
empty_key = constant_op.constant([11, 13], dtypes.int64)
deleted_key = constant_op.constant([-1, -1], dtypes.int64)
default_value = constant_op.constant(-1, dtypes.int64)
table = lookup_ops.DenseHashTable(
dtypes.int64,
dtypes.int64,
default_value=default_value,
empty_key=empty_key,
deleted_key=deleted_key,
name="t2",
checkpoint=True,
initial_num_buckets=64)
table.insert(
constant_op.constant([[11, 12], [13, 15]], dtypes.int64),
constant_op.constant([3, 4], dtypes.int64)).run()
self.assertAllEqual(2, table.size())
self.assertAllEqual(64, len(table.export()[0].eval()))
save = saver.Saver()
# Restore the saved values in the parameter nodes.
save.restore(sess, save_path)
self.assertAllEqual(3, table.size())
self.assertAllEqual(32, len(table.export()[0].eval()))
input_string = constant_op.constant(
[[11, 12], [11, 14], [11, 15], [13, 14], [13, 15]], dtypes.int64)
output = table.lookup(input_string)
self.assertAllEqual([0, 1, -1, 3, -1], output)
def testReprobe(self):
with self.cached_session():
# Insert 6 keys into a table with 8 buckets.
      # The values are chosen to make sure collisions occur when using GCC STL.
keys = constant_op.constant([11, 12, 13, 19, 20, 21], dtypes.int64)
values = constant_op.constant([51, 52, 53, 54, 55, 56], dtypes.int64)
table = lookup_ops.DenseHashTable(
dtypes.int64,
dtypes.int64,
default_value=-1,
empty_key=0,
deleted_key=-1,
initial_num_buckets=8)
self.assertAllEqual(0, self.evaluate(table.size()))
self.evaluate(table.insert(keys, values))
self.assertAllEqual(6, self.evaluate(table.size()))
input_string = constant_op.constant([10, 11, 12, 13, 14, 19, 20, 21, 22],
dtypes.int64)
output = table.lookup(input_string)
self.assertAllEqual([9], output.get_shape())
result = self.evaluate(output)
self.assertAllEqual([-1, 51, 52, 53, -1, 54, 55, 56, -1], result)
def testCustomEmptyKey(self):
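    # With a non-default empty_key (12), 0 can be inserted as a regular key.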
with self.cached_session():
keys = constant_op.constant([11, 0, 13], dtypes.int64)
values = constant_op.constant([0, 1, 2], dtypes.int64)
table = lookup_ops.DenseHashTable(
dtypes.int64,
dtypes.int64,
default_value=-1,
empty_key=12,
deleted_key=-1)
self.assertAllEqual(0, self.evaluate(table.size()))
self.evaluate(table.insert(keys, values))
self.assertAllEqual(3, self.evaluate(table.size()))
input_string = constant_op.constant([11, 0, 15], dtypes.int64)
output = table.lookup(input_string)
self.assertAllEqual([3], output.get_shape())
result = self.evaluate(output)
self.assertAllEqual([0, 1, -1], result)
def testErrors(self):
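    # Misuse of empty_key/deleted_key and invalid constructor arguments should
    # all raise InvalidArgumentError.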
with self.cached_session():
table = lookup_ops.DenseHashTable(
dtypes.int64,
dtypes.int64,
default_value=-1,
empty_key=0,
deleted_key=-1)
# Inserting the empty key returns an error
keys1 = constant_op.constant([11, 0], dtypes.int64)
values1 = constant_op.constant([0, 1], dtypes.int64)
with self.assertRaisesRegex(errors_impl.InvalidArgumentError,
"empty_key"):
self.evaluate(table.insert(keys1, values1))
# Looking up the empty key returns an error
with self.assertRaisesRegex(errors_impl.InvalidArgumentError,
"empty_key"):
self.evaluate(table.lookup(keys1))
# Inserting the deleted key returns an error
keys2 = constant_op.constant([11, -1], dtypes.int64)
values2 = constant_op.constant([0, 1], dtypes.int64)
with self.assertRaisesRegex(errors_impl.InvalidArgumentError,
"deleted_key"):
self.evaluate(table.insert(keys2, values2))
      # Looking up the deleted key returns an error
with self.assertRaisesRegex(errors_impl.InvalidArgumentError,
"deleted_key"):
self.evaluate(table.lookup(keys2))
# Arbitrary tensors of keys are not supported
keys = constant_op.constant([[11, 0], [12, 1]], dtypes.int64)
values = constant_op.constant([[11, 0], [12, 1]], dtypes.int64)
with self.assertRaisesRegex(errors_impl.InvalidArgumentError,
"Expected key shape"):
self.evaluate(table.lookup(keys))
with self.assertRaisesRegex(errors_impl.InvalidArgumentError,
"Expected key shape"):
self.evaluate(table.insert(keys, values))
with self.assertRaisesRegex(errors_impl.InvalidArgumentError,
"Number of buckets must be"):
table2 = lookup_ops.DenseHashTable(
dtypes.int64,
dtypes.int64,
default_value=-1,
empty_key=17,
deleted_key=-1,
initial_num_buckets=12)
self.assertAllEqual(0, self.evaluate(table2.size()))
with self.assertRaisesRegex(
errors_impl.InvalidArgumentError,
"Empty and deleted keys must have same shape"):
table3 = lookup_ops.DenseHashTable(
dtypes.int64,
dtypes.int64,
default_value=-1,
empty_key=42,
deleted_key=[1, 2])
self.assertAllEqual(0, self.evaluate(table3.size()))
with self.assertRaisesRegex(errors_impl.InvalidArgumentError,
"Empty and deleted keys cannot be equal"):
table4 = lookup_ops.DenseHashTable(
dtypes.int64,
dtypes.int64,
default_value=-1,
empty_key=42,
deleted_key=42)
self.assertAllEqual(0, self.evaluate(table4.size()))
with self.assertRaisesRegex(errors_impl.InvalidArgumentError,
"Empty and deleted keys cannot be equal"):
table5 = lookup_ops.DenseHashTable(
dtypes.int64,
dtypes.int64,
default_value=-1,
empty_key=[1, 2, 3],
deleted_key=[1, 2, 3])
self.assertAllEqual(0, self.evaluate(table5.size()))
@test_util.run_in_graph_and_eager_modes
def testStringToResource(self):
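    # Values can be resource handles (here, variable handles), not just plain
    # tensors.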
v = variables.Variable(1.)
v1 = variables.Variable(1.)
table = lookup_ops.DenseHashTable(
dtypes.string,
dtypes.resource,
default_value=v.handle,
empty_key="<empty>",
deleted_key="<deleted>")
self.assertEqual([], table.lookup("not_found").shape)
table.insert("v1", v1.handle)
self.assertEqual([], table.lookup("v1").shape)
def testExportShapeInference(self):
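    # Shapes of export() outputs traced inside a tf.function should be
    # compatible with the shapes computed outside it.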
default_value = -1
empty_key = 0
deleted_key = -1
table = lookup_ops.DenseHashTable(
dtypes.int64,
dtypes.int64,
default_value=default_value,
empty_key=empty_key,
deleted_key=deleted_key)
actual_shapes = [t.shape for t in table.export()]
inferred_shapes = []
@def_function.function
def f():
for t in table.export():
inferred_shapes.append(t.shape)
f()
self.assertLen(actual_shapes, 2)
self.assertLen(inferred_shapes, 2)
self.assertTrue(inferred_shapes[0].is_compatible_with(actual_shapes[0]))
self.assertTrue(inferred_shapes[1].is_compatible_with(actual_shapes[1]))
class IndexTableFromFile(test.TestCase):
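  """Tests for lookup_ops.index_table_from_file."""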
def _createVocabFile(self, basename, values=("brain", "salad", "surgery")):
vocabulary_file = os.path.join(self.get_temp_dir(), basename)
with open(vocabulary_file, "w") as f:
f.write("\n".join(values) + "\n")
return vocabulary_file
def test_string_index_table_from_file(self):
vocabulary_file = self._createVocabFile("f2i_vocab1.txt")
with self.cached_session():
table = lookup_ops.index_table_from_file(
vocabulary_file=vocabulary_file, num_oov_buckets=1)
ids = table.lookup(constant_op.constant(["salad", "surgery", "tarkus"]))
if not context.executing_eagerly():
with self.assertRaises(errors_impl.OpError):
self.evaluate(ids)
self.evaluate(lookup_ops.tables_initializer())
self.assertAllEqual((1, 2, 3), self.evaluate(ids))
def test_string_index_table_from_multicolumn_file(self):
vocabulary_file = self._createVocabFile(
"f2i_vocab1.txt", values=("brain\t300", "salad\t20", "surgery\t1"))
with self.cached_session():
table = lookup_ops.index_table_from_file(
vocabulary_file=vocabulary_file,
num_oov_buckets=1,
key_column_index=0,
value_column_index=lookup_ops.TextFileIndex.LINE_NUMBER)
ids = table.lookup(constant_op.constant(["salad", "surgery", "tarkus"]))
if not context.executing_eagerly():
with self.assertRaises(errors_impl.OpError):
self.evaluate(ids)
self.evaluate(lookup_ops.tables_initializer())
self.assertAllEqual((1, 2, 3), self.evaluate(ids))
def test_string_index_table_from_multicolumn_file_custom_delimiter(self):
vocabulary_file = self._createVocabFile(
"f2i_vocab1.txt", values=("brain 300", "salad 20", "surgery 1"))
with self.cached_session():
table = lookup_ops.index_table_from_file(
vocabulary_file=vocabulary_file,
num_oov_buckets=1,
key_column_index=0,
value_column_index=lookup_ops.TextFileIndex.LINE_NUMBER,
delimiter=" ")
ids = table.lookup(constant_op.constant(["salad", "surgery", "tarkus"]))
if not context.executing_eagerly():
with self.assertRaises(errors_impl.OpError):
self.evaluate(ids)
self.evaluate(lookup_ops.tables_initializer())
self.assertAllEqual((1, 2, 3), self.evaluate(ids))
def test_string_index_table_from_file_tensor_filename(self):
vocabulary_file = self._createVocabFile("f2i_vocab1.txt")
with self.cached_session():
vocabulary_file = constant_op.constant(vocabulary_file)
table = lookup_ops.index_table_from_file(
vocabulary_file=vocabulary_file, num_oov_buckets=1)
ids = table.lookup(constant_op.constant(["salad", "surgery", "tarkus"]))
if not context.executing_eagerly():
with self.assertRaises(errors_impl.OpError):
self.evaluate(ids)
self.evaluate(lookup_ops.tables_initializer())
self.assertAllEqual((1, 2, 3), self.evaluate(ids))
if not context.executing_eagerly():
self.assertEqual(1,
len(ops.get_collection(ops.GraphKeys.ASSET_FILEPATHS)))
@test_util.run_v1_only("placeholder usage")
def test_string_index_table_from_file_placeholder_filename(self):
vocabulary_file = self._createVocabFile("f2i_vocab1.txt")
with self.cached_session():
vocabulary_placeholder = array_ops.placeholder(dtypes.string, [])
table = lookup_ops.index_table_from_file(
vocabulary_file=vocabulary_placeholder, num_oov_buckets=1)
ids = table.lookup(constant_op.constant(["salad", "surgery", "tarkus"]))
with self.assertRaises(errors_impl.OpError):
self.evaluate(ids)
feed_dict = {vocabulary_placeholder.name: vocabulary_file}
lookup_ops.tables_initializer().run(feed_dict=feed_dict)
self.assertAllEqual((1, 2, 3), self.evaluate(ids))
self.assertEqual(0,
len(ops.get_collection(ops.GraphKeys.ASSET_FILEPATHS)))
def test_int32_index_table_from_file(self):
vocabulary_file = self._createVocabFile(
"f2i_vocab2.txt", values=("42", "1", "-1000"))
with self.cached_session():
table = lookup_ops.index_table_from_file(
vocabulary_file=vocabulary_file,
num_oov_buckets=1,
key_dtype=dtypes.int32)
ids = table.lookup(
constant_op.constant((1, -1000, 11), dtype=dtypes.int32))
if not context.executing_eagerly():
with self.assertRaises(errors_impl.OpError):
self.evaluate(ids)
self.evaluate(lookup_ops.tables_initializer())
self.assertAllEqual((1, 2, 3), self.evaluate(ids))
def test_int64_index_table_from_file(self):
vocabulary_file = self._createVocabFile(
"f2i_vocab3.txt", values=("42", "1", "-1000"))
with self.cached_session():
table = lookup_ops.index_table_from_file(
vocabulary_file=vocabulary_file,
num_oov_buckets=1,
key_dtype=dtypes.int64)
ids = table.lookup(
constant_op.constant((1, -1000, 11), dtype=dtypes.int64))
if not context.executing_eagerly():
with self.assertRaises(errors_impl.OpError):
self.evaluate(ids)
self.evaluate(lookup_ops.tables_initializer())
self.assertAllEqual((1, 2, 3), self.evaluate(ids))
def test_index_table_from_file_with_default_value(self):
default_value = -42
vocabulary_file = self._createVocabFile("f2i_vocab4.txt")
with self.cached_session():
table = lookup_ops.index_table_from_file(
vocabulary_file=vocabulary_file, default_value=default_value)
ids = table.lookup(constant_op.constant(["salad", "surgery", "tarkus"]))
if not context.executing_eagerly():
with self.assertRaises(errors_impl.OpError):
self.evaluate(ids)
self.evaluate(lookup_ops.tables_initializer())
self.assertAllEqual((1, 2, default_value), self.evaluate(ids))
def test_index_table_from_file_with_oov_buckets(self):
vocabulary_file = self._createVocabFile("f2i_vocab5.txt")
with self.cached_session():
table = lookup_ops.index_table_from_file(
vocabulary_file=vocabulary_file, num_oov_buckets=1000)
ids = table.lookup(
constant_op.constant(["salad", "surgery", "tarkus", "toccata"]))
if not context.executing_eagerly():
with self.assertRaises(errors_impl.OpError):
self.evaluate(ids)
self.evaluate(lookup_ops.tables_initializer())
self.assertAllEqual(
(
1, # From vocabulary file.
2, # From vocabulary file.
              867,  # 3 + fingerprint("tarkus") mod 1000.
              860),  # 3 + fingerprint("toccata") mod 1000.
self.evaluate(ids))
def test_index_table_from_file_fails_with_empty_vocabulary_file_name(self):
self.assertRaises(
ValueError, lookup_ops.index_table_from_file, vocabulary_file="")
def test_index_table_from_file_fails_with_empty_vocabulary(self):
self.assertRaises(
ValueError, lookup_ops.index_table_from_file, vocabulary_file=None)
def test_index_table_from_file_str_fails_with_zero_size_vocabulary(self):
vocabulary_file = self._createVocabFile("zero_vocab_str.txt")
self.assertRaisesRegex(
ValueError, "vocab_size must be greater than 0, got 0. "
"vocabulary_file: .*zero_vocab_str.txt",
lookup_ops.index_table_from_file,
vocabulary_file=vocabulary_file,
vocab_size=0)
def test_index_table_from_file_tensor_fails_with_zero_size_vocabulary(self):
vocabulary_file = constant_op.constant(
self._createVocabFile("zero_vocab_tensor.txt"))
self.assertRaisesRegex(
ValueError, "vocab_size must be greater than 0, got 0. "
"vocabulary_file: .*zero_vocab_tensor.txt",
lookup_ops.index_table_from_file,
vocabulary_file=vocabulary_file,
vocab_size=0)
def test_index_table_from_file_with_vocab_size_too_small(self):
vocabulary_file = self._createVocabFile("f2i_vocab6.txt")
with self.cached_session():
table = lookup_ops.index_table_from_file(
vocabulary_file=vocabulary_file, vocab_size=2)
ids = table.lookup(constant_op.constant(["salad", "surgery", "tarkus"]))
if not context.executing_eagerly():
with self.assertRaises(errors_impl.OpError):
self.evaluate(ids)
self.evaluate(lookup_ops.tables_initializer())
self.assertAllEqual((1, -1, -1), self.evaluate(ids))
self.assertEqual(2, self.evaluate(table.size()))
def test_index_table_from_file_with_vocab_size_too_large(self):
vocabulary_file = self._createVocabFile("f2i_vocab7.txt")
with self.cached_session():
with self.assertRaisesRegex(errors_impl.InvalidArgumentError,
"Invalid vocab_size"):
table = lookup_ops.index_table_from_file(
vocabulary_file=vocabulary_file, vocab_size=4)
self.evaluate(table.initializer)
def test_index_table_from_file_with_vocab_size(self):
vocabulary_file = self._createVocabFile("f2i_vocab8.txt")
self.assertRaises(
ValueError,
lookup_ops.index_table_from_file,
vocabulary_file=vocabulary_file,
vocab_size=0)
with self.cached_session():
table = lookup_ops.index_table_from_file(
vocabulary_file=vocabulary_file, vocab_size=3)
ids = table.lookup(constant_op.constant(["salad", "surgery", "tarkus"]))
if not context.executing_eagerly():
with self.assertRaises(errors_impl.OpError):
self.evaluate(ids)
self.evaluate(lookup_ops.tables_initializer())
self.assertAllEqual((1, 2, -1), self.evaluate(ids))
self.assertEqual(3, self.evaluate(table.size()))
def test_index_table_from_file_with_invalid_hashers(self):
vocabulary_file = self._createVocabFile("invalid_hasher.txt")
with self.cached_session():
with self.assertRaises(TypeError):
lookup_ops.index_table_from_file(
vocabulary_file=vocabulary_file,
vocab_size=3,
num_oov_buckets=1,
hasher_spec=1)
table = lookup_ops.index_table_from_file(
vocabulary_file=vocabulary_file,
vocab_size=3,
num_oov_buckets=1,
hasher_spec=lookup_ops.HasherSpec("my-awesome-hash", None))
self.assertRaises(ValueError, table.lookup,
constant_op.constant(["salad", "surgery", "tarkus"]))
def test_index_table_from_file_table_ref_with_oov_buckets(self):
vocabulary_file = self._createVocabFile("f2i_vocab9.txt")
with self.cached_session():
table = lookup_ops.index_table_from_file(
vocabulary_file=vocabulary_file, num_oov_buckets=1)
self.assertIsNotNone(table.resource_handle)
def test_index_table_from_file_table_ref_without_oov_buckets(self):
vocabulary_file = self._createVocabFile("f2i_vocab10.txt")
with self.cached_session():
table = lookup_ops.index_table_from_file(
vocabulary_file=vocabulary_file, num_oov_buckets=0)
self.assertIsNotNone(table.resource_handle)
class IndexTableFromTensor(test.TestCase):
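  """Tests for lookup_ops.index_table_from_tensor."""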
@test_util.run_in_graph_and_eager_modes
def test_index_table_from_tensor_with_tensor_init(self):
table = lookup_ops.index_table_from_tensor(
vocabulary_list=("brain", "salad", "surgery"), num_oov_buckets=1)
if not context.executing_eagerly():
with self.assertRaises(errors_impl.OpError):
self.evaluate(
table.lookup(constant_op.constant(("salad", "surgery", "tarkus"))))
else:
# Reinitializing a table in eager should work.
table = lookup_ops.index_table_from_tensor(
vocabulary_list=("brain", "salad", "surgery"), num_oov_buckets=1)
self.evaluate(lookup_ops.tables_initializer())
ids = table.lookup(constant_op.constant(("salad", "surgery", "tarkus")))
self.assertAllEqual((1, 2, 3), self.evaluate(ids))
def test_int32_index_table_from_tensor_with_tensor_init(self):
with self.cached_session():
table = lookup_ops.index_table_from_tensor(
vocabulary_list=(42, 1, -1000), num_oov_buckets=1, dtype=dtypes.int32)
ids = table.lookup(
constant_op.constant((1, -1000, 11), dtype=dtypes.int32))
if not context.executing_eagerly():
with self.assertRaises(errors_impl.FailedPreconditionError):
self.evaluate(ids)
self.evaluate(lookup_ops.tables_initializer())
self.assertAllEqual((1, 2, 3), self.evaluate(ids))
def test_int64_index_table_from_tensor_with_tensor_init(self):
with self.cached_session():
table = lookup_ops.index_table_from_tensor(
vocabulary_list=(42, 1, -1000), num_oov_buckets=1, dtype=dtypes.int64)
ids = table.lookup(
constant_op.constant((1, -1000, 11), dtype=dtypes.int64))
if not context.executing_eagerly():
with self.assertRaises(errors_impl.FailedPreconditionError):
self.evaluate(ids)
self.evaluate(lookup_ops.tables_initializer())
self.assertAllEqual((1, 2, 3), self.evaluate(ids))
def test_index_table_from_tensor_with_default_value(self):
default_value = -42
with self.cached_session():
table = lookup_ops.index_table_from_tensor(
vocabulary_list=["brain", "salad", "surgery"],
default_value=default_value)
ids = table.lookup(constant_op.constant(["salad", "surgery", "tarkus"]))
if not context.executing_eagerly():
with self.assertRaises(errors_impl.FailedPreconditionError):
self.evaluate(ids)
self.evaluate(lookup_ops.tables_initializer())
self.assertAllEqual((1, 2, default_value), self.evaluate(ids))
def test_index_table_from_tensor_missing_vocabulary_list(self):
with self.cached_session():
with self.assertRaisesRegex(ValueError,
"vocabulary_list must be specified"):
lookup_ops.index_table_from_tensor(
vocabulary_list=None, num_oov_buckets=1)
def test_index_table_from_tensor_empty_vocabulary_list(self):
with self.cached_session():
with self.assertRaisesRegex(errors_impl.OpError,
"keys and values cannot be empty"):
_ = lookup_ops.index_table_from_tensor(
vocabulary_list=np.array([], dtype=np.str_), num_oov_buckets=1)
self.evaluate(lookup_ops.tables_initializer())
def test_index_table_from_tensor_with_invalid_hashers(self):
with self.cached_session():
with self.assertRaises(TypeError):
lookup_ops.index_table_from_tensor(
vocabulary_list=["brain", "salad", "surgery"],
num_oov_buckets=1,
hasher_spec=1)
table = lookup_ops.index_table_from_tensor(
vocabulary_list=["brain", "salad", "surgery"],
num_oov_buckets=1,
hasher_spec=lookup_ops.HasherSpec("my-awesome-hash", None))
self.assertRaises(ValueError, table.lookup,
constant_op.constant(["salad", "surgery", "tarkus"]))
class IndexToStringTableFromFileTest(test.TestCase):
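  """Tests for lookup_ops.index_to_string_table_from_file."""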
def _createVocabFile(self, basename, values=("brain", "salad", "surgery")):
vocabulary_file = os.path.join(self.get_temp_dir(), basename)
with open(vocabulary_file, "w") as f:
f.write("\n".join(values) + "\n")
return vocabulary_file
def test_index_to_string_table(self):
vocabulary_path = self._createVocabFile("i2f_vocab1.txt")
    # vocabulary_file accepts either a Python string or a string tensor.
type_funcs = [str, constant_op.constant]
for type_func in type_funcs:
vocabulary_file = type_func(vocabulary_path)
with self.cached_session():
table = lookup_ops.index_to_string_table_from_file(
vocabulary_file=vocabulary_file)
features = table.lookup(
constant_op.constant([0, 1, 2, 3], dtypes.int64))
if not context.executing_eagerly():
with self.assertRaises(errors_impl.OpError):
self.evaluate(features)
self.evaluate(lookup_ops.tables_initializer())
self.assertAllEqual((b"brain", b"salad", b"surgery", b"UNK"),
self.evaluate(features))
def test_index_to_string_table_from_multicolumn_file(self):
vocabulary_file = self._createVocabFile(
"f2i_vocab1.txt", values=("brain\t300", "salad\t20", "surgery\t1"))
with self.cached_session():
table = lookup_ops.index_to_string_table_from_file(
vocabulary_file=vocabulary_file,
key_column_index=lookup_ops.TextFileIndex.LINE_NUMBER,
value_column_index=0)
features = table.lookup(constant_op.constant([0, 1, 2, 3], dtypes.int64))
if not context.executing_eagerly():
with self.assertRaises(errors_impl.OpError):
self.evaluate(features)
self.evaluate(lookup_ops.tables_initializer())
self.assertAllEqual((b"brain", b"salad", b"surgery", b"UNK"),
self.evaluate(features))
def test_index_to_string_table_from_multicolumn_file_custom_delimiter(self):
vocabulary_file = self._createVocabFile(
"f2i_vocab1.txt", values=("brain 300", "salad 20", "surgery 1"))
with self.cached_session():
table = lookup_ops.index_to_string_table_from_file(
vocabulary_file=vocabulary_file,
key_column_index=lookup_ops.TextFileIndex.LINE_NUMBER,
value_column_index=0,
delimiter=" ")
features = table.lookup(constant_op.constant([0, 1, 2, 3], dtypes.int64))
if not context.executing_eagerly():
with self.assertRaises(errors_impl.OpError):
self.evaluate(features)
self.evaluate(lookup_ops.tables_initializer())
self.assertAllEqual((b"brain", b"salad", b"surgery", b"UNK"),
self.evaluate(features))
def test_index_to_string_table_with_default_value(self):
default_value = b"NONE"
vocabulary_file = self._createVocabFile("f2i_vocab2.txt")
with self.cached_session():
table = lookup_ops.index_to_string_table_from_file(
vocabulary_file=vocabulary_file, default_value=default_value)
features = table.lookup(constant_op.constant([1, 2, 4], dtypes.int64))
if not context.executing_eagerly():
with self.assertRaises(errors_impl.OpError):
self.evaluate(features)
self.evaluate(lookup_ops.tables_initializer())
self.assertAllEqual((b"salad", b"surgery", default_value),
self.evaluate(features))
def test_index_to_string_table_with_vocab_size_too_small(self):
default_value = b"NONE"
vocabulary_file = self._createVocabFile("f2i_vocab2.txt")
with self.cached_session():
table = lookup_ops.index_to_string_table_from_file(
vocabulary_file=vocabulary_file,
vocab_size=2,
default_value=default_value)
features = table.lookup(constant_op.constant([1, 2, 4], dtypes.int64))
if not context.executing_eagerly():
with self.assertRaises(errors_impl.OpError):
self.evaluate(features)
self.evaluate(lookup_ops.tables_initializer())
self.assertAllEqual((b"salad", default_value, default_value),
self.evaluate(features))
def test_index_to_string_table_with_vocab_size_too_large(self):
vocabulary_file = self._createVocabFile("f2i_vocab6.txt")
with self.cached_session():
with self.assertRaisesRegex(errors_impl.InvalidArgumentError,
"Invalid vocab_size"):
_ = lookup_ops.index_to_string_table_from_file(
vocabulary_file=vocabulary_file, vocab_size=4)
self.evaluate(lookup_ops.tables_initializer())
def test_index_to_string_table_with_vocab_size(self):
vocabulary_file = self._createVocabFile("f2i_vocab7.txt")
with self.cached_session():
table = lookup_ops.index_to_string_table_from_file(
vocabulary_file=vocabulary_file, vocab_size=3)
features = table.lookup(constant_op.constant([1, 2, 4], dtypes.int64))
if not context.executing_eagerly():
with self.assertRaises(errors_impl.OpError):
self.evaluate(features)
self.evaluate(lookup_ops.tables_initializer())
self.assertAllEqual((b"salad", b"surgery", b"UNK"),
self.evaluate(features))
class IndexToStringTableFromTensorTest(test.TestCase):
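  """Tests for lookup_ops.index_to_string_table_from_tensor."""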
def test_index_to_string_table_from_tensor(self):
with self.cached_session():
vocabulary_list = constant_op.constant(["brain", "salad", "surgery"])
table = lookup_ops.index_to_string_table_from_tensor(
vocabulary_list=vocabulary_list)
indices = constant_op.constant([0, 1, 2, 3], dtypes.int64)
features = table.lookup(indices)
if not context.executing_eagerly():
with self.assertRaises(errors_impl.OpError):
self.evaluate(features)
self.evaluate(lookup_ops.tables_initializer())
self.assertAllEqual((b"brain", b"salad", b"surgery", b"UNK"),
self.evaluate(features))
def test_duplicate_entries(self):
with self.cached_session():
vocabulary_list = constant_op.constant(["hello", "hello"])
table = lookup_ops.index_to_string_table_from_tensor(
vocabulary_list=vocabulary_list)
indices = constant_op.constant([0, 1, 4], dtypes.int64)
features = table.lookup(indices)
self.evaluate(lookup_ops.tables_initializer())
self.assertAllEqual((b"hello", b"hello", b"UNK"), self.evaluate(features))
def test_index_to_string_with_default_value(self):
default_value = b"NONE"
with self.cached_session():
vocabulary_list = constant_op.constant(["brain", "salad", "surgery"])
table = lookup_ops.index_to_string_table_from_tensor(
vocabulary_list=vocabulary_list, default_value=default_value)
indices = constant_op.constant([1, 2, 4], dtypes.int64)
features = table.lookup(indices)
if not context.executing_eagerly():
with self.assertRaises(errors_impl.OpError):
self.evaluate(features)
self.evaluate(lookup_ops.tables_initializer())
self.assertAllEqual((b"salad", b"surgery", default_value),
self.evaluate(features))
class IdTableWithHashBucketsTest(test.TestCase):
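  """Tests for lookup_ops.IdTableWithHashBuckets."""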
def _createVocabFile(self, basename, values=("brain", "salad", "surgery")):
vocabulary_file = os.path.join(self.get_temp_dir(), basename)
with open(vocabulary_file, "w") as f:
f.write("\n".join(values) + "\n")
return vocabulary_file
def testStringIdTableWithHashBuckets(self):
vocab_file = self._createVocabFile("feat_to_id_1.txt")
default_value = -1
vocab_size = 3
oov_buckets = 1
table = lookup_ops.IdTableWithHashBuckets(
lookup_ops.StaticHashTable(
lookup_ops.TextFileIdTableInitializer(
vocab_file, vocab_size=vocab_size), default_value),
oov_buckets)
self.evaluate(table.initializer)
input_string = constant_op.constant(["brain", "salad", "surgery", "UNK"])
out = table.lookup(input_string)
self.assertAllEqual([0, 1, 2, 3], self.evaluate(out))
self.assertEqual(vocab_size + oov_buckets, self.evaluate(table.size()))
def testInt32IdTableWithHashBuckets(self):
vocab_file = self._createVocabFile("feat_to_id_2.txt", ("42", "1", "-1000"))
default_value = -1
vocab_size = 3
oov_buckets = 1
table = lookup_ops.IdTableWithHashBuckets(
lookup_ops.StaticHashTable(
lookup_ops.TextFileIdTableInitializer(
vocab_file, vocab_size=vocab_size, key_dtype=dtypes.int64),
default_value),
oov_buckets,
key_dtype=dtypes.int32)
self.evaluate(table.initializer)
values = constant_op.constant((42, 1, -1000, 11), dtype=dtypes.int32)
out = table.lookup(values)
self.assertAllEqual([0, 1, 2, 3], self.evaluate(out))
self.assertEqual(vocab_size + oov_buckets, self.evaluate(table.size()))
def testInt64IdTableWithHashBuckets(self):
vocab_file = self._createVocabFile("feat_to_id_3.txt", ("42", "1", "-1000"))
default_value = -1
vocab_size = 3
oov_buckets = 1
table = lookup_ops.IdTableWithHashBuckets(
lookup_ops.StaticHashTable(
lookup_ops.TextFileIdTableInitializer(
vocab_file, vocab_size=vocab_size, key_dtype=dtypes.int64),
default_value), oov_buckets)
self.evaluate(table.initializer)
values = constant_op.constant((42, 1, -1000, 11), dtype=dtypes.int64)
out = table.lookup(values)
self.assertAllEqual([0, 1, 2, 3], self.evaluate(out))
self.assertEqual(vocab_size + oov_buckets, self.evaluate(table.size()))
def testStringIdTableWithOnlyHashBucket(self):
oov_buckets = 5
    # Create a table that only uses hash buckets: for each input value it
    # returns an id computed as fingerprint(input) mod oov_buckets.
table = lookup_ops.IdTableWithHashBuckets(None, oov_buckets)
self.evaluate(table.initializer)
values = constant_op.constant(("brain", "salad", "surgery"))
out = table.lookup(values)
self.assertAllEqual(
[
3, # fingerprint("brain") mod 5.
1, # fingerprint("salad") mod 5.
            4  # fingerprint("surgery") mod 5.
],
self.evaluate(out))
self.assertEqual(oov_buckets, self.evaluate(table.size()))
def testInt32IdTableWithOnlyHashBucket(self):
oov_buckets = 5
    # Create a table that only uses hash buckets: for each input value it
    # returns an id computed as fingerprint(input) mod oov_buckets.
table = lookup_ops.IdTableWithHashBuckets(
None, oov_buckets, key_dtype=dtypes.int32)
self.evaluate(table.initializer)
input_string = constant_op.constant([42, 1, -1000], dtype=dtypes.int32)
out = table.lookup(input_string)
self.assertAllEqual(
[
1, # fingerprint("42") mod 5.
4, # fingerprint("1") mod 5.
            2  # fingerprint("-1000") mod 5.
],
self.evaluate(out))
self.assertEqual(oov_buckets, self.evaluate(table.size()))
def testFloat64IdTableWithOnlyHashBucket(self):
with self.assertRaisesRegex(TypeError, "Invalid key_dtype"):
lookup_ops.IdTableWithHashBuckets(
None, num_oov_buckets=5, key_dtype=dtypes.float64)
def testBoolIdTableWithOnlyHashBucket(self):
with self.assertRaisesRegex(TypeError, "Invalid key_dtype"):
lookup_ops.IdTableWithHashBuckets(
None, num_oov_buckets=5, key_dtype=dtypes.bool)
def testIdTableWithHashBucketsWithMultipleInitializers(self):
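    # Two tables share the same vocab table but use different hasher_specs, so
    # OOV inputs may land in different buckets; also checks which hash ops end
    # up in the graph.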
vocab_file = self._createVocabFile("feat_to_id_4.txt")
default_value = -1
vocab_size = 3
oov_buckets = 3
vocab_table = lookup_ops.StaticHashTable(
lookup_ops.TextFileIdTableInitializer(
vocab_file, vocab_size=vocab_size), default_value)
table1 = lookup_ops.IdTableWithHashBuckets(
vocab_table,
oov_buckets,
hasher_spec=lookup_ops.FastHashSpec,
name="table1")
table2 = lookup_ops.IdTableWithHashBuckets(
vocab_table,
oov_buckets,
hasher_spec=lookup_ops.StrongHashSpec((1, 2)),
name="table2")
self.evaluate(lookup_ops.tables_initializer())
input_string = constant_op.constant(
["fruit", "brain", "salad", "surgery", "UNK"])
out1 = table1.lookup(input_string)
out2 = table2.lookup(input_string)
out1, out2 = self.evaluate([out1, out2])
self.assertAllEqual([5, 0, 1, 2, 5], out1)
self.assertAllEqual([5, 0, 1, 2, 3], out2)
self.assertEqual(vocab_size + oov_buckets, self.evaluate(table1.size()))
self.assertEqual(vocab_size + oov_buckets, self.evaluate(table2.size()))
if not context.executing_eagerly():
test_util.assert_ops_in_graph({
"table1_Lookup/hash_bucket": "StringToHashBucketFast",
"table2_Lookup/hash_bucket": "StringToHashBucketStrong",
}, ops.get_default_graph())
def testIdTableWithHashBucketsInitializationAcrossSessions(self):
vocab_file = self._createVocabFile("feat_to_id_5.txt")
with self.cached_session():
default_value = -1
vocab_size = 3
oov_buckets = 1
table1 = lookup_ops.IdTableWithHashBuckets(
lookup_ops.StaticHashTable(
lookup_ops.TextFileIdTableInitializer(
vocab_file, vocab_size=vocab_size), default_value),
oov_buckets)
self.evaluate(table1.initializer)
input_string_1 = constant_op.constant(
["brain", "salad", "surgery", "UNK"])
out1 = table1.lookup(input_string_1)
self.assertAllEqual([0, 1, 2, 3], self.evaluate(out1))
self.assertEqual(vocab_size + oov_buckets, self.evaluate(table1.size()))
with self.cached_session():
default_value = -1
vocab_size = 3
oov_buckets = 1
# Underlying lookup table already initialized in previous session.
# No need to call self.evaluate(table2.initializer)
table2 = lookup_ops.IdTableWithHashBuckets(
lookup_ops.StaticHashTable(
lookup_ops.TextFileIdTableInitializer(
vocab_file, vocab_size=vocab_size), default_value),
oov_buckets)
input_string_2 = constant_op.constant(["fruit", "salad", "UNK"])
out2 = table2.lookup(input_string_2)
self.assertAllEqual([3, 1, 3], self.evaluate(out2))
self.assertEqual(vocab_size + oov_buckets, self.evaluate(table2.size()))
def testIdTableWithHashBucketsWithMultipleInitializersDifferentDefault(self):
vocab_file = self._createVocabFile("feat_to_id_6.txt")
default_value1 = -1
vocab_size = 3
oov_buckets = 0
table1 = lookup_ops.IdTableWithHashBuckets(
lookup_ops.StaticHashTable(
lookup_ops.TextFileIdTableInitializer(
vocab_file, vocab_size=vocab_size), default_value1),
oov_buckets)
default_value2 = -2
table2 = lookup_ops.IdTableWithHashBuckets(
lookup_ops.StaticHashTable(
lookup_ops.TextFileIdTableInitializer(
vocab_file, vocab_size=vocab_size), default_value2),
oov_buckets)
self.evaluate(lookup_ops.tables_initializer())
input_string_1 = constant_op.constant(
["brain", "salad", "surgery", "UNK"])
input_string_2 = constant_op.constant(["fruit", "salad", "UNK"])
out1 = table1.lookup(input_string_1)
out2 = table2.lookup(input_string_2)
out1, out2 = self.evaluate([out1, out2])
self.assertAllEqual([0, 1, 2, -1], out1)
self.assertAllEqual([-2, 1, -2], out2)
self.assertEqual(vocab_size + oov_buckets, self.evaluate(table1.size()))
self.assertEqual(vocab_size + oov_buckets, self.evaluate(table2.size()))
def testSparseTensor(self):
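    # Looking up a SparseTensor maps its values and preserves indices and
    # dense_shape.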
vocab_file = self._createVocabFile("feat_to_id_7.txt")
input_indices = [[0, 0], [0, 1], [2, 0], [2, 2], [3, 0]]
input_shape = [4, 4]
sp_features = sparse_tensor.SparseTensor(
constant_op.constant(input_indices, dtypes.int64),
constant_op.constant(["brain", "salad", "brain", "surgery", "tarkus"],
dtypes.string),
constant_op.constant(input_shape, dtypes.int64))
table = lookup_ops.IdTableWithHashBuckets(
lookup_ops.StaticHashTable(
lookup_ops.TextFileIdTableInitializer(vocab_file, vocab_size=3),
-1), 1)
self.evaluate(table.initializer)
sp_ids = table.lookup(sp_features)
self.assertAllEqual([5], sp_ids.values._shape_as_list())
sp_ids_ind, sp_ids_val, sp_ids_shape = self.evaluate(
[sp_ids.indices, sp_ids.values, sp_ids.dense_shape])
self.assertAllEqual(input_indices, sp_ids_ind)
self.assertAllEqual([0, 1, 0, 2, 3], sp_ids_val)
self.assertAllEqual(input_shape, sp_ids_shape)
def testRaggedTensor(self):
vocab_file = self._createVocabFile("feat_to_id_7.txt")
input_row_splits = [0, 2, 4, 5]
ragged_features = ragged_tensor.RaggedTensor.from_row_splits(
constant_op.constant(["brain", "salad", "brain", "surgery", "tarkus"],
dtypes.string),
constant_op.constant(input_row_splits, dtypes.int64))
table = lookup_ops.IdTableWithHashBuckets(
lookup_ops.StaticHashTable(
lookup_ops.TextFileIdTableInitializer(vocab_file, vocab_size=3),
-1), 1)
self.evaluate(table.initializer)
ragged_ids = table.lookup(ragged_features)
self.assertAllEqual([5], ragged_ids.values._shape_as_list())
ragged_ids_val, ragged_ids_row_splits = self.evaluate(
[ragged_ids.values, ragged_ids.row_splits])
self.assertAllEqual([0, 1, 0, 2, 3], ragged_ids_val)
self.assertAllEqual(input_row_splits, ragged_ids_row_splits)
def testInt32SparseTensor(self):
input_indices = [[0, 0], [0, 1], [2, 0], [2, 2], [3, 0]]
input_shape = [4, 4]
sp_features = sparse_tensor.SparseTensor(
constant_op.constant(input_indices, dtypes.int64),
constant_op.constant([42, 1, 42, -1000, 11], dtypes.int32),
constant_op.constant(input_shape, dtypes.int64))
table = lookup_ops.IdTableWithHashBuckets(
lookup_ops.StaticHashTable(
lookup_ops.KeyValueTensorInitializer(
(42, 1, -1000), (0, 1, 2), dtypes.int64, dtypes.int64), -1),
1,
key_dtype=dtypes.int32)
self.evaluate(table.initializer)
sp_ids = table.lookup(sp_features)
self.assertAllEqual([5], sp_ids.values._shape_as_list())
sp_ids_ind, sp_ids_val, sp_ids_shape = self.evaluate(
[sp_ids.indices, sp_ids.values, sp_ids.dense_shape])
self.assertAllEqual(input_indices, sp_ids_ind)
self.assertAllEqual([0, 1, 0, 2, 3], sp_ids_val)
self.assertAllEqual(input_shape, sp_ids_shape)
def testInt32RaggedTensor(self):
input_row_splits = [0, 2, 4, 5]
ragged_features = ragged_tensor.RaggedTensor.from_row_splits(
constant_op.constant([42, 1, 42, -1000, 11], dtypes.int32),
constant_op.constant(input_row_splits, dtypes.int32))
table = lookup_ops.IdTableWithHashBuckets(
lookup_ops.StaticHashTable(
lookup_ops.KeyValueTensorInitializer(
(42, 1, -1000), (0, 1, 2), dtypes.int64, dtypes.int64), -1),
1,
key_dtype=dtypes.int32)
self.evaluate(table.initializer)
ragged_ids = table.lookup(ragged_features)
self.assertAllEqual([5], ragged_ids.values._shape_as_list())
ragged_ids_val, ragged_ids_row_splits = self.evaluate(
[ragged_ids.values, ragged_ids.row_splits])
self.assertAllEqual([0, 1, 0, 2, 3], ragged_ids_val)
self.assertAllEqual(input_row_splits, ragged_ids_row_splits)
def testInt64SparseTensor(self):
input_indices = [[0, 0], [0, 1], [2, 0], [2, 2], [3, 0]]
input_shape = [4, 4]
sp_features = sparse_tensor.SparseTensor(
constant_op.constant(input_indices, dtypes.int64),
constant_op.constant([42, 1, 42, -1000, 11], dtypes.int64),
constant_op.constant(input_shape, dtypes.int64))
table = lookup_ops.IdTableWithHashBuckets(
lookup_ops.StaticHashTable(
lookup_ops.KeyValueTensorInitializer(
(42, 1, -1000), (0, 1, 2), dtypes.int64, dtypes.int64), -1),
1,
key_dtype=dtypes.int64)
self.evaluate(table.initializer)
sp_ids = table.lookup(sp_features)
self.assertAllEqual([5], sp_ids.values._shape_as_list())
sp_ids_ind, sp_ids_val, sp_ids_shape = self.evaluate(
[sp_ids.indices, sp_ids.values, sp_ids.dense_shape])
self.assertAllEqual(input_indices, sp_ids_ind)
self.assertAllEqual([0, 1, 0, 2, 3], sp_ids_val)
self.assertAllEqual(input_shape, sp_ids_shape)
def testInt64RaggedTensor(self):
input_row_splits = [0, 2, 4, 5]
ragged_features = ragged_tensor.RaggedTensor.from_row_splits(
constant_op.constant([42, 1, 42, -1000, 11], dtypes.int64),
constant_op.constant(input_row_splits, dtypes.int64))
table = lookup_ops.IdTableWithHashBuckets(
lookup_ops.StaticHashTable(
lookup_ops.KeyValueTensorInitializer(
(42, 1, -1000), (0, 1, 2), dtypes.int64, dtypes.int64), -1),
1,
key_dtype=dtypes.int64)
self.evaluate(table.initializer)
ragged_ids = table.lookup(ragged_features)
self.assertAllEqual([5], ragged_ids.values._shape_as_list())
ragged_ids_val, ragged_ids_row_splits = self.evaluate(
[ragged_ids.values, ragged_ids.row_splits])
self.assertAllEqual([0, 1, 0, 2, 3], ragged_ids_val)
self.assertAllEqual(input_row_splits, ragged_ids_row_splits)
def testIdTableWithHashBucketsWithInvalidHashers(self):
vocab_file = self._createVocabFile("feat_to_id_4.txt")
with self.cached_session():
default_value = -1
vocab_size = 3
oov_buckets = 1
lookup_table = lookup_ops.StaticHashTable(
lookup_ops.TextFileIdTableInitializer(
vocab_file, vocab_size=vocab_size), default_value)
with self.assertRaises(TypeError):
lookup_ops.IdTableWithHashBuckets(
lookup_table, oov_buckets, hasher_spec=1)
table = lookup_ops.IdTableWithHashBuckets(
lookup_table,
oov_buckets,
hasher_spec=lookup_ops.HasherSpec("my-awesome-hash", None))
input_string = constant_op.constant(["brain", "salad", "surgery", "UNK"])
with self.assertRaises(ValueError):
table.lookup(input_string)
with self.assertRaises(ValueError):
table = lookup_ops.IdTableWithHashBuckets(
lookup_table,
oov_buckets,
hasher_spec=lookup_ops.StrongHashSpec([]))
with self.assertRaises(ValueError):
table = lookup_ops.IdTableWithHashBuckets(
lookup_table,
oov_buckets,
hasher_spec=lookup_ops.StrongHashSpec([1, 2, 3]))
with self.assertRaises(TypeError):
table = lookup_ops.IdTableWithHashBuckets(
lookup_table,
oov_buckets,
hasher_spec=lookup_ops.StrongHashSpec([None, 2]))
def testIdTableWithHashBucketsNoInnerTable(self):
with self.cached_session():
table = lookup_ops.IdTableWithHashBuckets(None, num_oov_buckets=1)
self.assertIsNone(table.resource_handle)
class MutableHashTableOpTest(test.TestCase):
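  """Tests for lookup_ops.MutableHashTable."""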
def testMutableHashTable(self):
with self.cached_session():
default_val = -1
keys = constant_op.constant(["brain", "salad", "surgery", "tarkus"])
values = constant_op.constant([0, 1, 2, 3], dtypes.int64)
table = lookup_ops.MutableHashTable(dtypes.string, dtypes.int64,
default_val)
self.assertAllEqual(0, self.evaluate(table.size()))
self.evaluate(table.insert(keys, values))
self.assertAllEqual(4, self.evaluate(table.size()))
remove_string = constant_op.constant(["tarkus", "tank"])
self.evaluate(table.remove(remove_string))
self.assertAllEqual(3, self.evaluate(table.size()))
input_string = constant_op.constant(["brain", "salad", "tank"])
output = table.lookup(input_string)
self.assertAllEqual([3], output.get_shape())
result = self.evaluate(output)
self.assertAllEqual([0, 1, -1], result)
exported_keys, exported_values = table.export()
      # Exported data is in the order of the internal map, i.e. undefined.
sorted_keys = np.sort(self.evaluate(exported_keys))
sorted_values = np.sort(self.evaluate(exported_values))
self.assertAllEqual([b"brain", b"salad", b"surgery"], sorted_keys)
self.assertAllEqual([0, 1, 2], sorted_values)
@test_util.run_v1_only("SaverV1")
def testSaveRestore(self):
save_dir = os.path.join(self.get_temp_dir(), "save_restore")
save_path = os.path.join(tempfile.mkdtemp(prefix=save_dir), "hash")
with self.session(graph=ops.Graph()) as sess:
v0 = variables.Variable(10.0, name="v0")
v1 = variables.Variable(20.0, name="v1")
default_val = -1
keys = constant_op.constant(["b", "c", "d"], dtypes.string)
values = constant_op.constant([0, 1, 2], dtypes.int64)
table = lookup_ops.MutableHashTable(
dtypes.string, dtypes.int64, default_val, name="t1", checkpoint=True)
save = saver.Saver()
self.evaluate(variables.global_variables_initializer())
# Check that the parameter nodes have been initialized.
self.assertEqual(10.0, self.evaluate(v0))
self.assertEqual(20.0, self.evaluate(v1))
self.assertAllEqual(0, self.evaluate(table.size()))
self.evaluate(table.insert(keys, values))
self.assertAllEqual(3, self.evaluate(table.size()))
val = save.save(sess, save_path)
self.assertIsInstance(val, six.string_types)
self.assertEqual(save_path, val)
with self.session(graph=ops.Graph()) as sess:
v0 = variables.Variable(-1.0, name="v0")
v1 = variables.Variable(-1.0, name="v1")
default_val = -1
table = lookup_ops.MutableHashTable(
dtypes.string, dtypes.int64, default_val, name="t1", checkpoint=True)
self.evaluate(
table.insert(
constant_op.constant(["a", "c"], dtypes.string),
constant_op.constant([12, 24], dtypes.int64)))
self.assertAllEqual(2, self.evaluate(table.size()))
save = saver.Saver()
# Restore the saved values in the parameter nodes.
save.restore(sess, save_path)
# Check that the parameter nodes have been restored.
self.assertEqual(10.0, self.evaluate(v0))
self.assertEqual(20.0, self.evaluate(v1))
self.assertAllEqual(3, self.evaluate(table.size()))
input_string = constant_op.constant(["a", "b", "c", "d", "e"],
dtypes.string)
output = table.lookup(input_string)
self.assertAllEqual([-1, 0, 1, 2, -1], self.evaluate(output))
@test_util.run_v1_only("SaverV1")
def testSaveRestoreOnlyTable(self):
save_dir = os.path.join(self.get_temp_dir(), "save_restore")
save_path = os.path.join(tempfile.mkdtemp(prefix=save_dir), "hash")
with self.session(graph=ops.Graph()) as sess:
v0 = variables.Variable(10.0, name="v0")
v1 = variables.Variable(20.0, name="v1")
default_val = -1
keys = constant_op.constant(["b", "c", "d"], dtypes.string)
values = constant_op.constant([0, 1, 2], dtypes.int64)
table = lookup_ops.MutableHashTable(
dtypes.string, dtypes.int64, default_val, name="t1", checkpoint=True)
save = saver.Saver([table])
self.evaluate(variables.global_variables_initializer())
# Check that the parameter nodes have been initialized.
self.assertEqual(10.0, self.evaluate(v0))
self.assertEqual(20.0, self.evaluate(v1))
self.assertAllEqual(0, self.evaluate(table.size()))
self.evaluate(table.insert(keys, values))
self.assertAllEqual(3, self.evaluate(table.size()))
val = save.save(sess, save_path)
self.assertIsInstance(val, six.string_types)
self.assertEqual(save_path, val)
with self.session(graph=ops.Graph()) as sess:
default_val = -1
table = lookup_ops.MutableHashTable(
dtypes.string, dtypes.int64, default_val, name="t1", checkpoint=True)
self.evaluate(
table.insert(
constant_op.constant(["a", "c"], dtypes.string),
constant_op.constant([12, 24], dtypes.int64)))
self.assertAllEqual(2, self.evaluate(table.size()))
save = saver.Saver([table])
# Restore the saved values in the parameter nodes.
save.restore(sess, save_path)
# Check that the parameter nodes have been restored.
self.assertAllEqual(3, self.evaluate(table.size()))
input_string = constant_op.constant(["a", "b", "c", "d", "e"],
dtypes.string)
output = table.lookup(input_string)
self.assertAllEqual([-1, 0, 1, 2, -1], self.evaluate(output))
@test_util.run_in_graph_and_eager_modes
def testObjectSaveRestore(self):
save_dir = os.path.join(self.get_temp_dir(), "save_restore")
save_prefix = os.path.join(tempfile.mkdtemp(prefix=save_dir), "hash")
v0 = variables.Variable(10.0, name="v0")
v1 = variables.Variable(20.0, name="v1")
default_val = -1
keys = constant_op.constant(["b", "c", "d"], dtypes.string)
values = constant_op.constant([0, 1, 2], dtypes.int64)
table = lookup_ops.MutableHashTable(
dtypes.string, dtypes.int64, default_val, name="t1", checkpoint=True)
checkpoint = trackable.Checkpoint(table=table, v0=v0, v1=v1)
self.evaluate([v0.initializer, v1.initializer])
# Check that the parameter nodes have been initialized.
self.assertEqual(10.0, self.evaluate(v0))
self.assertEqual(20.0, self.evaluate(v1))
self.assertAllEqual(0, self.evaluate(table.size()))
self.evaluate(table.insert(keys, values))
self.assertAllEqual(3, self.evaluate(table.size()))
save_path = checkpoint.save(save_prefix)
del table, checkpoint, v0, v1
v0 = variables.Variable(-1.0, name="v0")
v1 = variables.Variable(-1.0, name="v1")
default_val = -1
table = lookup_ops.MutableHashTable(
dtypes.string, dtypes.int64, default_val, name="t1", checkpoint=True)
self.evaluate(
table.insert(
constant_op.constant(["a", "c"], dtypes.string),
constant_op.constant([12, 24], dtypes.int64)))
self.assertAllEqual(2, self.evaluate(table.size()))
checkpoint = trackable.Checkpoint(table=table, v0=v0, v1=v1)
# Restore the saved values in the parameter nodes.
checkpoint.restore(save_path).run_restore_ops()
# Check that the parameter nodes have been restored.
self.assertEqual(10.0, self.evaluate(v0))
self.assertEqual(20.0, self.evaluate(v1))
self.assertAllEqual(3, self.evaluate(table.size()))
input_string = constant_op.constant(["a", "b", "c", "d", "e"],
dtypes.string)
output = table.lookup(input_string)
self.assertAllEqual([-1, 0, 1, 2, -1], self.evaluate(output))
@test_util.run_v1_only("Multiple sessions")
def testSharing(self):
# Start a server to store the table state
server = server_lib.Server({"local0": ["localhost:0"]},
protocol="grpc",
start=True)
# Create two sessions sharing the same state
session1 = session.Session(server.target)
session2 = session.Session(server.target)
table = lookup_ops.MutableHashTable(
dtypes.int64, dtypes.string, "-", name="t1")
# Populate the table in the first session
with session1:
self.assertAllEqual(0, table.size())
keys = constant_op.constant([11, 12], dtypes.int64)
values = constant_op.constant(["a", "b"])
table.insert(keys, values).run()
self.assertAllEqual(2, table.size())
output = table.lookup(constant_op.constant([11, 12, 13], dtypes.int64))
self.assertAllEqual([b"a", b"b", b"-"], output)
# Verify that we can access the shared data from the second session
with session2:
self.assertAllEqual(2, table.size())
output = table.lookup(constant_op.constant([10, 11, 12], dtypes.int64))
self.assertAllEqual([b"-", b"a", b"b"], output)
def testMutableHashTableOfTensors(self):
with self.cached_session():
default_val = constant_op.constant([-1, -1], dtypes.int64)
keys = constant_op.constant(["brain", "salad", "surgery", "tarkus"])
values = constant_op.constant([[0, 1], [2, 3], [4, 5], [6, 7]],
dtypes.int64)
table = lookup_ops.MutableHashTable(dtypes.string, dtypes.int64,
default_val)
self.assertAllEqual(0, self.evaluate(table.size()))
self.evaluate(table.insert(keys, values))
self.assertAllEqual(4, self.evaluate(table.size()))
remove_string = constant_op.constant(["tarkus", "tank"])
self.evaluate(table.remove(remove_string))
self.assertAllEqual(3, self.evaluate(table.size()))
input_string = constant_op.constant(["brain", "salad", "tank"])
output = table.lookup(input_string)
self.assertAllEqual([3, 2], output.get_shape())
result = self.evaluate(output)
self.assertAllEqual([[0, 1], [2, 3], [-1, -1]], result)
exported_keys, exported_values = table.export()
      # Exported data is in the order of the internal map, i.e. undefined.
sorted_keys = np.sort(self.evaluate(exported_keys))
sorted_values = np.sort(self.evaluate(exported_values), axis=0)
self.assertAllEqual([b"brain", b"salad", b"surgery"], sorted_keys)
sorted_expected_values = np.sort([[4, 5], [2, 3], [0, 1]], axis=0)
self.assertAllEqual(sorted_expected_values, sorted_values)
def testMutableHashTableExportInsert(self):
with self.cached_session():
default_val = constant_op.constant([-1, -1], dtypes.int64)
keys = constant_op.constant(["brain", "salad", "surgery"])
values = constant_op.constant([[0, 1], [2, 3], [4, 5]], dtypes.int64)
table1 = lookup_ops.MutableHashTable(dtypes.string, dtypes.int64,
default_val)
self.assertAllEqual(0, self.evaluate(table1.size()))
self.evaluate(table1.insert(keys, values))
self.assertAllEqual(3, self.evaluate(table1.size()))
input_string = constant_op.constant(["brain", "salad", "tank"])
expected_output = [[0, 1], [2, 3], [-1, -1]]
output1 = table1.lookup(input_string)
self.assertAllEqual(expected_output, self.evaluate(output1))
exported_keys, exported_values = table1.export()
self.assertAllEqual(3, self.evaluate(exported_keys).size)
self.assertAllEqual(6, self.evaluate(exported_values).size)
# Populate a second table from the exported data
table2 = lookup_ops.MutableHashTable(dtypes.string, dtypes.int64,
default_val)
self.assertAllEqual(0, self.evaluate(table2.size()))
self.evaluate(table2.insert(exported_keys, exported_values))
self.assertAllEqual(3, self.evaluate(table2.size()))
# Verify lookup result is still the same
output2 = table2.lookup(input_string)
self.assertAllEqual(expected_output, self.evaluate(output2))
def testMutableHashTableOfTensorsInvalidShape(self):
with self.cached_session():
default_val = constant_op.constant([-1, -1], dtypes.int64)
keys = constant_op.constant(["brain", "salad", "surgery"])
table = lookup_ops.MutableHashTable(dtypes.string, dtypes.int64,
default_val)
# Shape [6] instead of [3, 2]
values = constant_op.constant([0, 1, 2, 3, 4, 5], dtypes.int64)
with self.assertRaisesOpError("Expected shape"):
self.evaluate(table.insert(keys, values))
      # Shape [2, 3] instead of [3, 2]
values = constant_op.constant([[0, 1, 2], [3, 4, 5]], dtypes.int64)
with self.assertRaisesOpError("Expected shape"):
self.evaluate(table.insert(keys, values))
# Shape [2, 2] instead of [3, 2]
values = constant_op.constant([[0, 1], [2, 3]], dtypes.int64)
with self.assertRaisesOpError("Expected shape"):
self.evaluate(table.insert(keys, values))
# Shape [3, 1] instead of [3, 2]
values = constant_op.constant([[0], [2], [4]], dtypes.int64)
with self.assertRaisesOpError("Expected shape"):
self.evaluate(table.insert(keys, values))
      # Valid insert.
values = constant_op.constant([[0, 1], [2, 3], [4, 5]], dtypes.int64)
self.evaluate(table.insert(keys, values))
self.assertAllEqual(3, self.evaluate(table.size()))
def testMutableHashTableInvalidDefaultValue(self):
with self.cached_session():
default_val = constant_op.constant([[-1, -1]], dtypes.int64)
with self.assertRaisesOpError("Default value must be a vector"):
table = lookup_ops.MutableHashTable(dtypes.string, dtypes.int64,
default_val)
self.assertAllEqual(0, self.evaluate(table.size()))
def testMutableHashTableDuplicateInsert(self):
with self.cached_session():
default_val = -1
keys = constant_op.constant(["brain", "salad", "surgery", "brain"])
values = constant_op.constant([0, 1, 2, 3], dtypes.int64)
table = lookup_ops.MutableHashTable(dtypes.string, dtypes.int64,
default_val)
self.assertAllEqual(0, self.evaluate(table.size()))
self.evaluate(table.insert(keys, values))
self.assertAllEqual(3, self.evaluate(table.size()))
input_string = constant_op.constant(["brain", "salad", "tank"])
output = table.lookup(input_string)
result = self.evaluate(output)
self.assertAllEqual([3, 1, -1], result)
def testMutableHashTableFindHighRank(self):
with self.cached_session():
default_val = -1
keys = constant_op.constant(["brain", "salad", "surgery"])
values = constant_op.constant([0, 1, 2], dtypes.int64)
table = lookup_ops.MutableHashTable(dtypes.string, dtypes.int64,
default_val)
self.evaluate(table.insert(keys, values))
self.assertAllEqual(3, self.evaluate(table.size()))
input_string = constant_op.constant([["brain", "salad"],
["tank", "tarkus"]])
output = table.lookup(input_string)
self.assertAllEqual([2, 2], output.get_shape())
result = self.evaluate(output)
self.assertAllEqual([[0, 1], [-1, -1]], result)
def testMutableHashTableInsertHighRank(self):
with self.cached_session():
default_val = -1
keys = constant_op.constant([["brain", "salad"], ["surgery", "tank"]])
values = constant_op.constant([[0, 1], [2, 3]], dtypes.int64)
table = lookup_ops.MutableHashTable(dtypes.string, dtypes.int64,
default_val)
self.evaluate(table.insert(keys, values))
self.assertAllEqual(4, self.evaluate(table.size()))
input_string = constant_op.constant(["brain", "salad", "tank", "tarkus"])
output = table.lookup(input_string)
result = self.evaluate(output)
self.assertAllEqual([0, 1, 3, -1], result)
def testMutableHashTableRemoveHighRank(self):
    with self.cached_session():
default_val = -1
keys = constant_op.constant([["brain", "salad"], ["surgery", "tank"]])
values = constant_op.constant([[0, 1], [2, 3]], dtypes.int64)
table = lookup_ops.MutableHashTable(dtypes.string, dtypes.int64,
default_val)
self.evaluate(table.insert(keys, values))
self.assertAllEqual(4, self.evaluate(table.size()))
remove_string = constant_op.constant(["salad", "tarkus"])
self.evaluate(table.remove(remove_string))
self.assertAllEqual(3, self.evaluate(table.size()))
input_string = constant_op.constant(["brain", "salad", "tank", "tarkus"])
output = table.lookup(input_string)
result = self.evaluate(output)
self.assertAllEqual([0, -1, 3, -1], result)
def testMutableHashTableOfTensorsFindHighRank(self):
with self.cached_session():
default_val = constant_op.constant([-1, -1, -1], dtypes.int64)
keys = constant_op.constant(["brain", "salad", "surgery"])
values = constant_op.constant([[0, 1, 2], [2, 3, 4], [4, 5, 6]],
dtypes.int64)
table = lookup_ops.MutableHashTable(dtypes.string, dtypes.int64,
default_val)
self.evaluate(table.insert(keys, values))
self.assertAllEqual(3, self.evaluate(table.size()))
input_string = constant_op.constant([["brain", "salad"],
["tank", "tarkus"]])
output = table.lookup(input_string)
self.assertAllEqual([2, 2, 3], output.get_shape())
result = self.evaluate(output)
self.assertAllEqual(
[[[0, 1, 2], [2, 3, 4]], [[-1, -1, -1], [-1, -1, -1]]], result)
def testMutableHashTableOfTensorsRemoveHighRank(self):
with self.cached_session():
default_val = constant_op.constant([-1, -1, -1], dtypes.int64)
keys = constant_op.constant(["brain", "salad", "surgery"])
values = constant_op.constant([[0, 1, 2], [2, 3, 4], [4, 5, 6]],
dtypes.int64)
table = lookup_ops.MutableHashTable(dtypes.string, dtypes.int64,
default_val)
self.evaluate(table.insert(keys, values))
self.assertAllEqual(3, self.evaluate(table.size()))
remove_string = constant_op.constant([["brain", "tank"]])
self.evaluate(table.remove(remove_string))
self.assertAllEqual(2, self.evaluate(table.size()))
input_string = constant_op.constant([["brain", "salad"],
["surgery", "tank"]])
output = table.lookup(input_string)
self.assertAllEqual([2, 2, 3], output.get_shape())
result = self.evaluate(output)
self.assertAllEqual(
[[[-1, -1, -1], [2, 3, 4]], [[4, 5, 6], [-1, -1, -1]]], result)
def testMultipleMutableHashTables(self):
with self.cached_session():
default_val = -1
keys = constant_op.constant(["brain", "salad", "surgery"])
values = constant_op.constant([0, 1, 2], dtypes.int64)
table1 = lookup_ops.MutableHashTable(dtypes.string, dtypes.int64,
default_val)
table2 = lookup_ops.MutableHashTable(dtypes.string, dtypes.int64,
default_val)
table3 = lookup_ops.MutableHashTable(dtypes.string, dtypes.int64,
default_val)
self.evaluate(table1.insert(keys, values))
self.evaluate(table2.insert(keys, values))
self.evaluate(table3.insert(keys, values))
self.assertAllEqual(3, self.evaluate(table1.size()))
self.assertAllEqual(3, self.evaluate(table2.size()))
self.assertAllEqual(3, self.evaluate(table3.size()))
input_string = constant_op.constant(["brain", "salad", "tank"])
output1 = table1.lookup(input_string)
output2 = table2.lookup(input_string)
output3 = table3.lookup(input_string)
out1, out2, out3 = self.evaluate([output1, output2, output3])
self.assertAllEqual([0, 1, -1], out1)
self.assertAllEqual([0, 1, -1], out2)
self.assertAllEqual([0, 1, -1], out3)
def testMutableHashTableWithTensorDefault(self):
with self.cached_session():
default_val = constant_op.constant(-1, dtypes.int64)
keys = constant_op.constant(["brain", "salad", "surgery"])
values = constant_op.constant([0, 1, 2], dtypes.int64)
table = lookup_ops.MutableHashTable(dtypes.string, dtypes.int64,
default_val)
self.evaluate(table.insert(keys, values))
self.assertAllEqual(3, self.evaluate(table.size()))
input_string = constant_op.constant(["brain", "salad", "tank"])
output = table.lookup(input_string)
result = self.evaluate(output)
self.assertAllEqual([0, 1, -1], result)
def testSignatureMismatch(self):
with self.cached_session():
default_val = -1
keys = constant_op.constant(["brain", "salad", "surgery"])
values = constant_op.constant([0, 1, 2], dtypes.int64)
table = lookup_ops.MutableHashTable(dtypes.string, dtypes.int64,
default_val)
# insert with keys of the wrong type
with self.assertRaises(ValueError):
self.evaluate(table.insert(constant_op.constant([4, 5, 6]), values))
# insert with values of the wrong type
with self.assertRaises(ValueError):
self.evaluate(table.insert(keys, constant_op.constant(["a", "b", "c"])))
self.assertAllEqual(0, self.evaluate(table.size()))
self.evaluate(table.insert(keys, values))
self.assertAllEqual(3, self.evaluate(table.size()))
input_string_ref = variables.Variable("brain")
input_int64_ref = variables.Variable(-1, dtype=dtypes.int64)
self.evaluate(variables.global_variables_initializer())
# Ref types do not produce an insert signature mismatch.
self.evaluate(table.insert(input_string_ref, input_int64_ref))
self.assertAllEqual(3, self.evaluate(table.size()))
# Ref types do not produce a lookup signature mismatch.
self.assertEqual(-1, self.evaluate(table.lookup(input_string_ref)))
# lookup with keys of the wrong type
input_string = constant_op.constant([1, 2, 3], dtypes.int64)
with self.assertRaises(ValueError):
self.evaluate(table.lookup(input_string))
# default value of the wrong type
with self.assertRaises(TypeError):
lookup_ops.MutableHashTable(dtypes.string, dtypes.int64, "UNK")
def testMutableHashTableStringFloat(self):
with self.cached_session():
default_val = -1.5
keys = constant_op.constant(["brain", "salad", "surgery"])
values = constant_op.constant([0, 1.1, 2.2], dtypes.float32)
table = lookup_ops.MutableHashTable(dtypes.string, dtypes.float32,
default_val)
self.assertAllEqual(0, self.evaluate(table.size()))
self.evaluate(table.insert(keys, values))
self.assertAllEqual(3, self.evaluate(table.size()))
input_string = constant_op.constant(["brain", "salad", "tank"])
output = table.lookup(input_string)
result = self.evaluate(output)
self.assertAllClose([0, 1.1, default_val], result)
def testMutableHashTableIntFloat(self):
with self.cached_session():
default_val = -1.0
keys = constant_op.constant([3, 7, 0], dtypes.int64)
values = constant_op.constant([7.5, -1.2, 9.9], dtypes.float32)
table = lookup_ops.MutableHashTable(dtypes.int64, dtypes.float32,
default_val)
self.assertAllEqual(0, self.evaluate(table.size()))
self.evaluate(table.insert(keys, values))
self.assertAllEqual(3, self.evaluate(table.size()))
input_string = constant_op.constant([7, 0, 11], dtypes.int64)
output = table.lookup(input_string)
result = self.evaluate(output)
self.assertAllClose([-1.2, 9.9, default_val], result)
def testMutableHashTableInt64String(self):
with self.cached_session():
default_val = "n/a"
keys = constant_op.constant([0, 1, 2], dtypes.int64)
values = constant_op.constant(["brain", "salad", "surgery"])
table = lookup_ops.MutableHashTable(dtypes.int64, dtypes.string,
default_val)
self.assertAllEqual(0, self.evaluate(table.size()))
self.evaluate(table.insert(keys, values))
self.assertAllEqual(3, self.evaluate(table.size()))
input_string = constant_op.constant([0, 1, 3], dtypes.int64)
output = table.lookup(input_string)
result = self.evaluate(output)
self.assertAllEqual((b"brain", b"salad", b"n/a"), result)
def testExportShapeInference(self):
default_value = -1
table = lookup_ops.MutableHashTable(
dtypes.int64,
dtypes.int64,
default_value=default_value)
actual_shapes = [t.shape for t in table.export()]
inferred_shapes = []
@def_function.function
def f():
for t in table.export():
inferred_shapes.append(t.shape)
f()
self.assertLen(actual_shapes, 2)
self.assertLen(inferred_shapes, 2)
self.assertTrue(inferred_shapes[0].is_compatible_with(actual_shapes[0]))
self.assertTrue(inferred_shapes[1].is_compatible_with(actual_shapes[1]))
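# Note (added): export() returns a (keys, values) pair of tensors whose leading
# dimension is the current table size. Eagerly that size is concrete, but inside
# a tf.function it is only known at runtime, so the test above merely checks that
# the inferred shapes stay compatible with the eagerly observed ones.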
class MutableHashTableBenchmark(test.Benchmark):
def _create_table(self):
return lookup_ops.MutableHashTable(dtypes.int64, dtypes.float32, 0.0)
def benchmark_single_repeated_scalar_insert_scalar(self):
table = self._create_table()
value = variables.Variable(1.0)
insert = table.insert(0, value)
size = table.size()
with session.Session() as sess:
sess.run(value.initializer)
self.run_op_benchmark(sess, insert, burn_iters=10, min_iters=10000)
assert sess.run(size) == 1
def benchmark_many_repeated_scalar_insert_scalar(self):
table = self._create_table()
c = dataset_ops.make_one_shot_iterator(counter.Counter()).get_next()
value = variables.Variable(1.0)
insert = table.insert(c, value)
size = table.size()
with session.Session() as sess:
sess.run(value.initializer)
self.run_op_benchmark(sess, insert, burn_iters=10, min_iters=10000)
assert sess.run(size) >= 10000
def benchmark_single_repeated_batch_32_insert_scalar(self):
table = self._create_table()
value = variables.Variable([1.0] * 32)
insert = table.insert(list(range(32)), value)
size = table.size()
with session.Session() as sess:
sess.run(value.initializer)
self.run_op_benchmark(sess, insert, burn_iters=10, min_iters=1000)
assert sess.run(size) == 32
def benchmark_many_repeated_batch_32_insert_scalar(self):
table = self._create_table()
c = dataset_ops.make_one_shot_iterator(counter.Counter()).get_next()
value = variables.Variable([1.0] * 32)
insert = table.insert(32 * c + list(range(32)), value)
size = table.size()
with session.Session() as sess:
sess.run(value.initializer)
self.run_op_benchmark(sess, insert, burn_iters=10, min_iters=1000)
assert sess.run(size) >= 1000 * 32
class DenseHashTableBenchmark(MutableHashTableBenchmark):
def _create_table(self):
return lookup_ops.DenseHashTable(
dtypes.int64,
dtypes.float32,
default_value=0.0,
empty_key=-1,
deleted_key=-2)
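# Note (added): DenseHashTable uses open addressing, so it reserves two sentinel
# keys -- empty_key marks unused buckets and deleted_key marks removed entries.
# Neither sentinel may be inserted or looked up as a real key and they must differ
# from each other; the -1/-2 pair above assumes all benchmark keys are >= 0.
# A minimal usage sketch under that assumption:
#   table = lookup_ops.DenseHashTable(dtypes.int64, dtypes.float32,
#                                     default_value=0.0, empty_key=-1, deleted_key=-2)
#   table.insert(constant_op.constant([3], dtypes.int64),
#                constant_op.constant([1.5], dtypes.float32))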
if __name__ == "__main__":
test.main()
|
karllessard/tensorflow
|
tensorflow/python/kernel_tests/lookup_ops_test.py
|
Python
|
apache-2.0
| 145,826
|
"""Support for Abode Security System cameras."""
from datetime import timedelta
import logging
import requests
from homeassistant.components.camera import Camera
from homeassistant.util import Throttle
from . import DOMAIN as ABODE_DOMAIN, AbodeDevice
DEPENDENCIES = ['abode']
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=90)
_LOGGER = logging.getLogger(__name__)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up Abode camera devices."""
import abodepy.helpers.constants as CONST
import abodepy.helpers.timeline as TIMELINE
data = hass.data[ABODE_DOMAIN]
devices = []
for device in data.abode.get_devices(generic_type=CONST.TYPE_CAMERA):
if data.is_excluded(device):
continue
devices.append(AbodeCamera(data, device, TIMELINE.CAPTURE_IMAGE))
data.devices.extend(devices)
add_entities(devices)
class AbodeCamera(AbodeDevice, Camera):
"""Representation of an Abode camera."""
def __init__(self, data, device, event):
"""Initialize the Abode device."""
AbodeDevice.__init__(self, data, device)
Camera.__init__(self)
self._event = event
self._response = None
async def async_added_to_hass(self):
"""Subscribe Abode events."""
await super().async_added_to_hass()
self.hass.async_add_job(
self._data.abode.events.add_timeline_callback,
self._event, self._capture_callback
)
def capture(self):
"""Request a new image capture."""
return self._device.capture()
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def refresh_image(self):
"""Find a new image on the timeline."""
if self._device.refresh_image():
self.get_image()
def get_image(self):
"""Attempt to download the most recent capture."""
if self._device.image_url:
try:
self._response = requests.get(
self._device.image_url, stream=True)
self._response.raise_for_status()
except requests.HTTPError as err:
_LOGGER.warning("Failed to get camera image: %s", err)
self._response = None
else:
self._response = None
def camera_image(self):
"""Get a camera image."""
self.refresh_image()
if self._response:
return self._response.content
return None
def _capture_callback(self, capture):
"""Update the image with the device then refresh device."""
self._device.update_image_location(capture)
self.get_image()
self.schedule_update_ha_state()
|
jamespcole/home-assistant
|
homeassistant/components/abode/camera.py
|
Python
|
apache-2.0
| 2,683
|
# -*- coding: utf-8 -*-
#
# Nefertari documentation build configuration file, created by
# sphinx-quickstart on Fri Mar 27 11:16:31 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shlex
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
# 'sphinxcontrib.fulltoc',
'releases'
]
releases_github_path = 'brandicted/ramses'
releases_debug = True
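# Note (added): the 'releases' extension renders the project changelog; the GitHub
# path above is presumably used to turn issue/PR numbers into links, and
# releases_debug makes it print extra information while parsing the changelog.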
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffixes as a list of strings:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Ramses'
copyright = u'2015, Brandicted'
author = u'Brandicted'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# on_rtd is whether we are building on readthedocs.org; this line was taken from docs.readthedocs.org
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# otherwise, readthedocs.org uses their theme by default, so no need to specify it
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
# html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'Ramsesdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'Ramses.tex', u'Ramses Documentation',
u'Brandicted', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'ramses', u'Ramses Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'Ramses', u'Ramses Documentation',
author, 'Ramses', 'API generator for Pyramid using RAML',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
|
ziad-saab/ramses
|
docs/source/conf.py
|
Python
|
apache-2.0
| 9,733
|
# -*- coding: utf-8 -*-
# File: model_box.py
import numpy as np
from collections import namedtuple
import tensorflow as tf
from tensorpack.tfutils.scope_utils import under_name_scope
from config import config
@under_name_scope()
def clip_boxes(boxes, window, name=None):
"""
Args:
boxes: nx4, xyxy
window: [h, w]
"""
boxes = tf.maximum(boxes, 0.0)
m = tf.tile(tf.reverse(window, [0]), [2])  # [w, h, w, h], matching the xyxy box layout; shape (4,)
boxes = tf.minimum(boxes, tf.to_float(m), name=name)
return boxes
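# Hedged usage sketch (not part of the original file): with window = [h, w] = [600, 800],
# clip_boxes keeps x coordinates in [0, 800] and y coordinates in [0, 600], e.g.
#   clipped = clip_boxes(boxes, tf.constant([600, 800]))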
@under_name_scope()
def decode_bbox_target(box_predictions, anchors):
"""
Args:
box_predictions: (..., 4), logits
anchors: (..., 4), floatbox. Must have the same shape
Returns:
box_decoded: (..., 4), float32. With the same shape.
"""
orig_shape = tf.shape(anchors)
box_pred_txtytwth = tf.reshape(box_predictions, (-1, 2, 2))
box_pred_txty, box_pred_twth = tf.split(box_pred_txtytwth, 2, axis=1)
# each is (...)x1x2
anchors_x1y1x2y2 = tf.reshape(anchors, (-1, 2, 2))
anchors_x1y1, anchors_x2y2 = tf.split(anchors_x1y1x2y2, 2, axis=1)
waha = anchors_x2y2 - anchors_x1y1
xaya = (anchors_x2y2 + anchors_x1y1) * 0.5
clip = np.log(config.PREPROC.MAX_SIZE / 16.)
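# (added note) clamping the predicted log-scale below keeps tf.exp from blowing up into
# absurdly large boxes; the MAX_SIZE / 16 bound appears to assume a feature stride of 16.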
wbhb = tf.exp(tf.minimum(box_pred_twth, clip)) * waha
xbyb = box_pred_txty * waha + xaya
x1y1 = xbyb - wbhb * 0.5
x2y2 = xbyb + wbhb * 0.5 # (...)x1x2
out = tf.concat([x1y1, x2y2], axis=-2)
return tf.reshape(out, orig_shape)
@under_name_scope()
def encode_bbox_target(boxes, anchors):
"""
Args:
boxes: (..., 4), float32
anchors: (..., 4), float32
Returns:
box_encoded: (..., 4), float32 with the same shape.
"""
anchors_x1y1x2y2 = tf.reshape(anchors, (-1, 2, 2))
anchors_x1y1, anchors_x2y2 = tf.split(anchors_x1y1x2y2, 2, axis=1)
waha = anchors_x2y2 - anchors_x1y1
xaya = (anchors_x2y2 + anchors_x1y1) * 0.5
boxes_x1y1x2y2 = tf.reshape(boxes, (-1, 2, 2))
boxes_x1y1, boxes_x2y2 = tf.split(boxes_x1y1x2y2, 2, axis=1)
wbhb = boxes_x2y2 - boxes_x1y1
xbyb = (boxes_x2y2 + boxes_x1y1) * 0.5
# Note that here not all boxes are valid. Some may be zero
txty = (xbyb - xaya) / waha
twth = tf.log(wbhb / waha) # may contain -inf for invalid boxes
encoded = tf.concat([txty, twth], axis=1) # (-1x2x2)
return tf.reshape(encoded, tf.shape(boxes))
@under_name_scope()
def crop_and_resize(image, boxes, box_ind, crop_size, pad_border=True):
"""
Aligned version of tf.image.crop_and_resize, following our definition of floating point boxes.
Args:
image: NCHW
boxes: nx4, x1y1x2y2
box_ind: (n,)
crop_size (int):
Returns:
n,C,size,size
"""
assert isinstance(crop_size, int), crop_size
boxes = tf.stop_gradient(boxes)
# TF's crop_and_resize produces zeros on border
if pad_border:
# this can be quite slow
image = tf.pad(image, [[0, 0], [0, 0], [1, 1], [1, 1]], mode='SYMMETRIC')
boxes = boxes + 1
@under_name_scope()
def transform_fpcoor_for_tf(boxes, image_shape, crop_shape):
"""
The way tf.image.crop_and_resize works (with normalized box):
Initial point (the value of output[0]): x0_box * (W_img - 1)
Spacing: w_box * (W_img - 1) / (W_crop - 1)
Use the above grid to bilinear sample.
However, what we want is (with fpcoor box):
Spacing: w_box / W_crop
Initial point: x0_box + spacing/2 - 0.5
(-0.5 because bilinear sample (in my definition) assumes floating point coordinate
(0.0, 0.0) is the same as pixel value (0, 0))
This function transforms fpcoor boxes into the format expected by tf.image.crop_and_resize
Returns:
y1x1y2x2
"""
x0, y0, x1, y1 = tf.split(boxes, 4, axis=1)
spacing_w = (x1 - x0) / tf.to_float(crop_shape[1])
spacing_h = (y1 - y0) / tf.to_float(crop_shape[0])
nx0 = (x0 + spacing_w / 2 - 0.5) / tf.to_float(image_shape[1] - 1)
ny0 = (y0 + spacing_h / 2 - 0.5) / tf.to_float(image_shape[0] - 1)
nw = spacing_w * tf.to_float(crop_shape[1] - 1) / tf.to_float(image_shape[1] - 1)
nh = spacing_h * tf.to_float(crop_shape[0] - 1) / tf.to_float(image_shape[0] - 1)
return tf.concat([ny0, nx0, ny0 + nh, nx0 + nw], axis=1)
# Expand bbox to a minimum size of 1
# boxes_x1y1, boxes_x2y2 = tf.split(boxes, 2, axis=1)
# boxes_wh = boxes_x2y2 - boxes_x1y1
# boxes_center = tf.reshape((boxes_x2y2 + boxes_x1y1) * 0.5, [-1, 2])
# boxes_newwh = tf.maximum(boxes_wh, 1.)
# boxes_x1y1new = boxes_center - boxes_newwh * 0.5
# boxes_x2y2new = boxes_center + boxes_newwh * 0.5
# boxes = tf.concat([boxes_x1y1new, boxes_x2y2new], axis=1)
image_shape = tf.shape(image)[2:]
boxes = transform_fpcoor_for_tf(boxes, image_shape, [crop_size, crop_size])
image = tf.transpose(image, [0, 2, 3, 1]) # nhwc
ret = tf.image.crop_and_resize(
image, boxes, tf.to_int32(box_ind),
crop_size=[crop_size, crop_size])
ret = tf.transpose(ret, [0, 3, 1, 2]) # ncss
return ret
@under_name_scope()
def roi_align(featuremap, boxes, resolution):
"""
Args:
featuremap: 1xCxHxW
boxes: Nx4 floatbox
resolution: output spatial resolution
Returns:
NxCx res x res
"""
# sample 4 locations per RoI bin: crop at twice the target resolution, then 2x2 average-pool,
# so each output cell averages four bilinearly sampled points
ret = crop_and_resize(
featuremap, boxes,
tf.zeros([tf.shape(boxes)[0]], dtype=tf.int32),
resolution * 2)
ret = tf.nn.avg_pool(ret, [1, 1, 2, 2], [1, 1, 2, 2], padding='SAME', data_format='NCHW')
return ret
class RPNAnchors(namedtuple('_RPNAnchors', ['boxes', 'gt_labels', 'gt_boxes'])):
"""
boxes (FS x FS x NA x 4): The anchor boxes.
gt_labels (FS x FS x NA):
gt_boxes (FS x FS x NA x 4): Groundtruth boxes corresponding to each anchor.
"""
def encoded_gt_boxes(self):
return encode_bbox_target(self.gt_boxes, self.boxes)
def decode_logits(self, logits):
return decode_bbox_target(logits, self.boxes)
@under_name_scope()
def narrow_to(self, featuremap):
"""
Slice anchors to the spatial size of this featuremap.
"""
shape2d = tf.shape(featuremap)[2:] # h,w
slice3d = tf.concat([shape2d, [-1]], axis=0)
slice4d = tf.concat([shape2d, [-1, -1]], axis=0)
boxes = tf.slice(self.boxes, [0, 0, 0, 0], slice4d)
gt_labels = tf.slice(self.gt_labels, [0, 0, 0], slice3d)
gt_boxes = tf.slice(self.gt_boxes, [0, 0, 0, 0], slice4d)
return RPNAnchors(boxes, gt_labels, gt_boxes)
if __name__ == '__main__':
"""
Demonstrate what's wrong with tf.image.crop_and_resize:
"""
import tensorflow.contrib.eager as tfe
tfe.enable_eager_execution()
# want to crop 2x2 out of a 5x5 image, and resize to 4x4
image = np.arange(25).astype('float32').reshape(5, 5)
boxes = np.asarray([[1, 1, 3, 3]], dtype='float32')
target = 4
print(crop_and_resize(
image[None, None, :, :], boxes, [0], target)[0][0])
"""
Expected values:
4.5 5 5.5 6
7 7.5 8 8.5
9.5 10 10.5 11
12 12.5 13 13.5
You cannot easily get the above results with tf.image.crop_and_resize.
Try it out yourself here:
"""
print(tf.image.crop_and_resize(
image[None, :, :, None],
np.asarray([[1, 1, 2, 2]]) / 4.0, [0], [target, target])[0][:, :, 0])
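# Added sketch (not in the original file): a quick eager-mode check that
# encode_bbox_target and decode_bbox_target invert each other for ordinary boxes
# (exact as long as the predicted log width/height stays below the clipping threshold).
anchors = np.asarray([[0., 0., 10., 10.], [5., 5., 20., 30.]], dtype='float32')
gt_boxes = np.asarray([[1., 2., 9., 8.], [6., 4., 22., 28.]], dtype='float32')
deltas = encode_bbox_target(gt_boxes, anchors)
recovered = decode_bbox_target(deltas, anchors)
print(recovered)  # expected to be numerically close to gt_boxes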
|
eyaler/tensorpack
|
examples/FasterRCNN/model_box.py
|
Python
|
apache-2.0
| 7,519
|
'''
This module defines the Course model.
'''
from openerp import api, fields, models, _
class Course(models.Model):
'''
This class defines the Course model.
'''
_name = 'openacademy.course'  # Odoo model name
name = fields.Char(string='Title', required=True)  # 'name' is a reserved field name in Odoo
description = fields.Text(string='Description')
responsible_id = fields.Many2one('res.users',
ondelete='set null',
string="Responsible", index=True)
session_ids = fields.One2many('openacademy.session', 'course_id',
string="Sessions")
_sql_constraints = [
('name_description_check',
'CHECK(name != description)',
_("The title of the course should not be the description")),
('name_unique',
'UNIQUE(name)',
_("The course title must be unique")),
]
@api.one  # api.one sends default params: cr, uid, id, context
def copy(self, default=None):
# print "estoy pasando por la funcion heredada de copy en cursos"
if default is None:
default = {}
# default['name'] = self.name + ' (copy)'
copied_count = self.search_count(
[('name', '=like', _(u"Copy of {}%").format(self.name))])
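# Illustrative naming sequence (added comment): with no prior copies the duplicate
# is named "Copy of <title>"; once copies exist the count is appended, giving
# "Copy of <title> (1)", "Copy of <title> (2)", and so on.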
if not copied_count:
new_name = _(u"Copy of {}").format(self.name)
else:
new_name = _(u"Copy of {} ({})").format(self.name, copied_count)
default['name'] = new_name
return super(Course, self).copy(default)
|
glizek/openacademy-project
|
openacademy/model/openacademy_course.py
|
Python
|
apache-2.0
| 1,587
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.utils.timezone import utc
import datetime
class Migration(migrations.Migration):
dependencies = [
('feed', '0003_auto_20141227_2343'),
]
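# Added note: the datetime defaults below are one-off values Django requested when
# adding non-nullable columns to rows that already exist; preserve_default=False
# means they are only used for this migration and are not kept on the model fields.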
operations = [
migrations.AddField(
model_name='newsarticle',
name='created',
field=models.DateTimeField(default=datetime.datetime(2014, 12, 29, 11, 11, 7, 540368, tzinfo=utc), auto_now_add=True),
preserve_default=False,
),
migrations.AddField(
model_name='newsarticle',
name='slug',
field=models.SlugField(default=datetime.datetime(2014, 12, 29, 11, 11, 29, 101175, tzinfo=utc)),
preserve_default=False,
),
migrations.AddField(
model_name='newsarticle',
name='updated',
field=models.DateTimeField(default=datetime.datetime(2014, 12, 29, 11, 11, 42, 82623, tzinfo=utc), auto_now=True),
preserve_default=False,
),
]
|
mseln/klufweb
|
klufweb/feed/migrations/0004_auto_20141229_1211.py
|
Python
|
apache-2.0
| 1,090
|